diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
new file mode 100644
index 000000000..762062604
--- /dev/null
+++ b/.github/workflows/docs.yaml
@@ -0,0 +1,108 @@
+name: Docs
+
+on:
+ push:
+ branches: [main]
+ paths:
+ - 'docs/**'
+ - 'mkdocs.yml'
+ - 'flixopt/**'
+ pull_request:
+ paths:
+ - 'docs/**'
+ - 'mkdocs.yml'
+ workflow_dispatch:
+ workflow_call:
+ inputs:
+ deploy:
+ type: boolean
+ default: false
+ version:
+ type: string
+ required: false
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+env:
+ PYTHON_VERSION: "3.11"
+ MPLBACKEND: Agg
+ PLOTLY_RENDERER: json
+
+jobs:
+ build:
+ name: Build documentation
+ runs-on: ubuntu-24.04
+ timeout-minutes: 30
+ steps:
+ - uses: actions/checkout@v5
+ with:
+ fetch-depth: 0
+
+ - uses: astral-sh/setup-uv@v6
+ with:
+ version: "0.9.10"
+ enable-cache: true
+
+ - uses: actions/setup-python@v6
+ with:
+ python-version: ${{ env.PYTHON_VERSION }}
+
+ - name: Extract changelog
+ run: |
+ cp CHANGELOG.md docs/changelog.md
+ python scripts/format_changelog.py
+
+ - name: Install dependencies
+ run: uv pip install --system ".[docs,full]"
+
+ - name: Build docs
+ run: mkdocs build --strict
+
+ - uses: actions/upload-artifact@v4
+ with:
+ name: docs
+ path: site/
+ retention-days: 7
+
+ deploy:
+ name: Deploy documentation
+ needs: build
+ if: ${{ inputs.deploy == true }}
+ runs-on: ubuntu-24.04
+ permissions:
+ contents: write
+ steps:
+ - uses: actions/checkout@v5
+ with:
+ fetch-depth: 0
+
+ - uses: astral-sh/setup-uv@v6
+ with:
+ version: "0.9.10"
+ enable-cache: true
+
+ - uses: actions/setup-python@v6
+ with:
+ python-version: ${{ env.PYTHON_VERSION }}
+
+ - name: Install mike
+ run: uv pip install --system mike
+
+ - uses: actions/download-artifact@v4
+ with:
+ name: docs
+ path: site/
+
+ - name: Configure Git
+ run: |
+ git config user.name "github-actions[bot]"
+ git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
+
+ - name: Deploy docs
+ run: |
+ VERSION="${{ inputs.version }}"
+ VERSION=${VERSION#v}
+ mike deploy --push --update-aliases --no-build "$VERSION" latest
+ mike set-default --push latest
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 4f1edd3e8..2bf49bfc1 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -188,37 +188,7 @@ jobs:
name: Deploy documentation
needs: [create-release]
if: "!contains(github.ref, 'alpha') && !contains(github.ref, 'beta') && !contains(github.ref, 'rc')"
- runs-on: ubuntu-24.04
- permissions:
- contents: write
- steps:
- - uses: actions/checkout@v5
- with:
- fetch-depth: 0
-
- - uses: astral-sh/setup-uv@v6
- with:
- version: "0.9.10"
-
- - uses: actions/setup-python@v6
- with:
- python-version: ${{ env.PYTHON_VERSION }}
-
- - name: Extract changelog
- run: |
- uv pip install --system packaging
- python scripts/extract_changelog.py
-
- - name: Install docs dependencies
- run: uv pip install --system ".[docs]"
-
- - name: Configure Git
- run: |
- git config user.name github-actions[bot]
- git config user.email 41898282+github-actions[bot]@users.noreply.github.com
-
- - name: Deploy docs
- run: |
- VERSION=${GITHUB_REF#refs/tags/v}
- mike deploy --push --update-aliases $VERSION latest
- mike set-default --push latest
+ uses: ./.github/workflows/docs.yaml
+ with:
+ deploy: true
+ version: ${{ github.ref_name }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e39033067..cfe4d0770 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -7,6 +7,7 @@ repos:
- id: check-yaml
exclude: ^mkdocs\.yml$ # Skip mkdocs.yml
- id: check-added-large-files
+ exclude: ^examples/resources/Zeitreihen2020\.csv$
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.4
@@ -14,3 +15,9 @@ repos:
- id: ruff-check
args: [ --fix ]
- id: ruff-format
+
+ - repo: https://github.com/kynan/nbstripout
+ rev: 0.8.1
+ hooks:
+ - id: nbstripout
+ files: ^docs/notebooks/.*\.ipynb$
diff --git a/docs/examples/00-Minimal Example.md b/docs/examples/00-Minimal Example.md
deleted file mode 100644
index a568cd9c9..000000000
--- a/docs/examples/00-Minimal Example.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# Minimal Example
-
-```python
-{! ../examples/00_Minmal/minimal_example.py !}
-```
diff --git a/docs/examples/01-Basic Example.md b/docs/examples/01-Basic Example.md
deleted file mode 100644
index 6c6bfbee3..000000000
--- a/docs/examples/01-Basic Example.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# Simple example
-
-```python
-{! ../examples/01_Simple/simple_example.py !}
-```
diff --git a/docs/examples/02-Complex Example.md b/docs/examples/02-Complex Example.md
deleted file mode 100644
index 48868cdb0..000000000
--- a/docs/examples/02-Complex Example.md
+++ /dev/null
@@ -1,10 +0,0 @@
-# Complex example
-This saves the results of a calculation to file and reloads them to analyze the results
-## Build the Model
-```python
-{! ../examples/02_Complex/complex_example.py !}
-```
-## Load the Results from file
-```python
-{! ../examples/02_Complex/complex_example_results.py !}
-```
diff --git a/docs/examples/03-Optimization Modes.md b/docs/examples/03-Optimization Modes.md
deleted file mode 100644
index 880366906..000000000
--- a/docs/examples/03-Optimization Modes.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# Optimization Modes Comparison
-**Note:** This example relies on time series data. You can find it in the `examples` folder of the FlixOpt repository.
-```python
-{! ../examples/03_Optimization_modes/example_optimization_modes.py !}
-```
diff --git a/docs/examples/04-Scenarios.md b/docs/examples/04-Scenarios.md
deleted file mode 100644
index b528bb6f3..000000000
--- a/docs/examples/04-Scenarios.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# Scenario example
-
-```python
-{! ../examples/04_Scenarios/scenario_example.py !}
-```
diff --git a/docs/examples/05-Two-stage-optimization.md b/docs/examples/05-Two-stage-optimization.md
deleted file mode 100644
index 5cb94e325..000000000
--- a/docs/examples/05-Two-stage-optimization.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# Two-stage optimization
-
-```python
-{! ../examples/05_Two-stage-optimization/two_stage_optimization.py !}
-```
diff --git a/docs/examples/index.md b/docs/examples/index.md
deleted file mode 100644
index b5534b8e3..000000000
--- a/docs/examples/index.md
+++ /dev/null
@@ -1,14 +0,0 @@
-# Examples
-
-Here you can find a collection of examples that demonstrate how to use FlixOpt.
-
-We work on improving this gallery. If you have something to share, please contact us!
-
-## Available Examples
-
-1. [Minimal Example](00-Minimal Example.md) - The simplest possible FlixOpt model
-2. [Simple Example](01-Basic Example.md) - A basic example with more features
-3. [Complex Example](02-Complex Example.md) - A comprehensive example with result saving and loading
-4. [Optimization Modes](03-Optimization Modes.md) - Comparison of different optimization modes
-5. [Scenarios](04-Scenarios.md) - Working with scenarios in FlixOpt
-6. [Two-stage Optimization](05-Two-stage-optimization.md) - Two-stage optimization approach
diff --git a/docs/home/citing.md b/docs/home/citing.md
index 6fd1a6020..a4f900d18 100644
--- a/docs/home/citing.md
+++ b/docs/home/citing.md
@@ -12,7 +12,7 @@ If you've published research using flixOpt, please let us know! We'd love to fea
### List of Publications
-*Coming soon: A list of academic publications that have used flixOpt*
+Coming soon: A list of academic publications that have used flixOpt.
## Contributing Back
diff --git a/docs/home/installation.md b/docs/home/installation.md
index afb24172b..d61022074 100644
--- a/docs/home/installation.md
+++ b/docs/home/installation.md
@@ -87,5 +87,5 @@ For more details on logging configuration, see the [`CONFIG.Logging`][flixopt.co
## Next Steps
- Follow the [Quick Start](quick-start.md) guide
-- Explore the [Minimal Example](../examples/00-Minimal Example.md)
+- Explore the [Quickstart notebook](../notebooks/01-quickstart.ipynb)
- Read about [Core Concepts](../user-guide/core-concepts.md)
diff --git a/docs/home/license.md b/docs/home/license.md
index d00755a0b..e0b0266a4 100644
--- a/docs/home/license.md
+++ b/docs/home/license.md
@@ -4,7 +4,7 @@ flixOpt is released under the MIT License.
## MIT License
-```
+```text
MIT License
Copyright (c) 2022 Chair of Building Energy Systems and Heat Supply - TU Dresden
diff --git a/docs/home/quick-start.md b/docs/home/quick-start.md
index b0bdef7da..7bbc88172 100644
--- a/docs/home/quick-start.md
+++ b/docs/home/quick-start.md
@@ -88,19 +88,41 @@ battery = fx.Storage(
flow_system.add_elements(solar, demand, battery, electricity_bus)
```
-### 5. Run Optimization
+### 5. Visualize and Run Optimization
```python
-# Create and run optimization
-optimization = fx.Optimization('solar_battery_optimization', flow_system)
-optimization.solve(fx.solvers.HighsSolver())
+# Optional: visualize your system structure
+flow_system.topology.plot(path='system.html')
+
+# Run optimization
+flow_system.optimize(fx.solvers.HighsSolver())
```
-### 6. Save Results
+### 6. Access and Visualize Results
```python
-# This includes the modeled FlowSystem. SO you can restore both results and inputs
-optimization.results.to_file()
+# Access raw solution data
+print(flow_system.solution)
+
+# Use statistics for aggregated data
+print(flow_system.statistics.flow_hours)
+
+# Access component-specific results
+print(flow_system.components['battery'].solution)
+
+# Visualize results
+flow_system.statistics.plot.balance('electricity')
+flow_system.statistics.plot.storage('battery')
+```
+
+### 7. Save Results (Optional)
+
+```python
+# Save the flow system (includes inputs and solution)
+flow_system.to_netcdf('results/solar_battery.nc')
+
+# Load it back later
+loaded_fs = fx.FlowSystem.from_netcdf('results/solar_battery.nc')
```
## What's Next?
@@ -108,7 +130,7 @@ optimization.results.to_file()
Now that you've created your first model, you can:
- **Learn the concepts** - Read the [Core Concepts](../user-guide/core-concepts.md) guide
-- **Explore examples** - Check out more [Examples](../examples/index.md)
+- **Explore examples** - Check out more [Examples](../notebooks/index.md)
- **Deep dive** - Study the [Mathematical Formulation](../user-guide/mathematical-notation/index.md)
- **Build complex models** - Use [Recipes](../user-guide/recipes/index.md) for common patterns
@@ -120,8 +142,10 @@ Most flixOpt projects follow this pattern:
2. **Create flow system** - Initialize with time series and effects
3. **Add buses** - Define connection points
4. **Add components** - Create generators, storage, converters, loads
-5. **Run optimization** - Solve the optimization
-6. **Save Results** - For later analysis. Or only extract needed data
+5. **Verify structure** - Use `flow_system.topology.plot()` to visualize
+6. **Run optimization** - Call `flow_system.optimize(solver)`
+7. **Analyze results** - Via `flow_system.statistics` and `.solution`
+8. **Visualize** - Use `flow_system.statistics.plot.*` methods
## Tips
diff --git a/docs/index.md b/docs/index.md
index 70fd15bf4..f8509e24c 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -14,7 +14,7 @@ title: Home
🚀 Get Started
- 💡 View Examples
+ 💡 View Examples
⭐ GitHub
@@ -30,7 +30,7 @@ title: Home
New to FlixOpt? Start here with installation and your first model
-- :bulb: **[Examples Gallery](examples/)**
+- :bulb: **[Examples Gallery](notebooks/)**
---
diff --git a/docs/notebooks/01-quickstart.ipynb b/docs/notebooks/01-quickstart.ipynb
new file mode 100644
index 000000000..ba4becd0c
--- /dev/null
+++ b/docs/notebooks/01-quickstart.ipynb
@@ -0,0 +1,279 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "0",
+ "metadata": {},
+ "source": "# Quickstart\n\nHeat a small workshop with a gas boiler - the minimal working example.\n\nThis notebook introduces the **core concepts** of flixopt:\n\n- **FlowSystem**: The container for your energy system model\n- **Bus**: Balance nodes where energy flows meet\n- **Effect**: Quantities to track and optimize (costs, emissions)\n- **Components**: Equipment like boilers, sources, and sinks\n- **Flow**: Connections between components and buses"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1",
+ "metadata": {},
+ "source": [
+ "## Setup"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import pandas as pd\n",
+ "import plotly.express as px\n",
+ "import xarray as xr\n",
+ "\n",
+ "import flixopt as fx\n",
+ "\n",
+ "fx.CONFIG.notebook()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3",
+ "metadata": {},
+ "source": [
+ "## Define the Time Horizon\n",
+ "\n",
+ "Every optimization needs a time horizon. Here we model a simple 4-hour period:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "4",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "timesteps = pd.date_range('2024-01-15 08:00', periods=4, freq='h')\n",
+ "print(f'Optimizing from {timesteps[0]} to {timesteps[-1]}')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "5",
+ "metadata": {},
+ "source": [
+ "## Define the Heat Demand\n",
+ "\n",
+ "The workshop has varying heat demand throughout the morning:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Heat demand in kW for each hour - using xarray\n",
+ "heat_demand = xr.DataArray(\n",
+ " [30, 50, 45, 25],\n",
+ " dims=['time'],\n",
+ " coords={'time': timesteps},\n",
+ " name='Heat Demand [kW]',\n",
+ ")\n",
+ "\n",
+ "# Visualize the demand with plotly\n",
+ "fig = px.bar(x=heat_demand.time.values, y=heat_demand.values, labels={'x': 'Time', 'y': 'Heat Demand [kW]'})\n",
+ "fig"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "7",
+ "metadata": {},
+ "source": [
+ "## Build the Energy System Model\n",
+ "\n",
+ "Now we create the FlowSystem and add all components:\n",
+ "\n",
+ "```\n",
+ " Gas Supply ──► [Gas Bus] ──► Boiler ──► [Heat Bus] ──► Workshop\n",
+ " € η=90% Demand\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "8",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Create the FlowSystem container\n",
+ "flow_system = fx.FlowSystem(timesteps)\n",
+ "\n",
+ "flow_system.add_elements(\n",
+ " # === Buses: Balance nodes for energy carriers ===\n",
+ " fx.Bus('Gas'), # Natural gas network connection\n",
+ " fx.Bus('Heat'), # Heat distribution within workshop\n",
+ " # === Effect: What we want to minimize ===\n",
+ " fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n",
+ " # === Gas Supply: Unlimited gas at 0.08 €/kWh ===\n",
+ " fx.Source(\n",
+ " 'GasGrid',\n",
+ " outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.08)],\n",
+ " ),\n",
+ " # === Boiler: Converts gas to heat at 90% efficiency ===\n",
+ " fx.linear_converters.Boiler(\n",
+ " 'Boiler',\n",
+ " thermal_efficiency=0.9,\n",
+ " thermal_flow=fx.Flow('Heat', bus='Heat', size=100), # 100 kW capacity\n",
+ " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " ),\n",
+ " # === Workshop: Heat demand that must be met ===\n",
+ " fx.Sink(\n",
+ " 'Workshop',\n",
+ " inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand.values)],\n",
+ " ),\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "9",
+ "metadata": {},
+ "source": [
+ "## Run the Optimization\n",
+ "\n",
+ "Now we solve the model using the HiGHS solver (open-source, included with flixopt):"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "10",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.optimize(fx.solvers.HighsSolver());"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "11",
+ "metadata": {},
+ "source": [
+ "## Analyze Results\n",
+ "\n",
+ "### Heat Balance\n",
+ "\n",
+ "The `statistics.plot.balance()` method shows how each bus is balanced:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "12",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.balance('Heat')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "13",
+ "metadata": {},
+ "source": [
+ "### Total Costs\n",
+ "\n",
+ "Access the optimized objective value:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "14",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "total_costs = flow_system.solution['costs'].item()\n",
+ "total_heat = float(heat_demand.sum())\n",
+ "gas_consumed = total_heat / 0.9 # Account for boiler efficiency\n",
+ "\n",
+ "print(f'Total heat demand: {total_heat:.1f} kWh')\n",
+ "print(f'Gas consumed: {gas_consumed:.1f} kWh')\n",
+ "print(f'Total costs: {total_costs:.2f} €')\n",
+ "print(f'Average cost: {total_costs / total_heat:.3f} €/kWh_heat')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "15",
+ "metadata": {},
+ "source": [
+ "### Flow Rates Over Time\n",
+ "\n",
+ "Visualize all flow rates using the built-in plotting accessor:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "16",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Plot all flow rates\n",
+ "flow_system.statistics.plot.flows()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "17",
+ "metadata": {},
+ "source": [
+ "### Energy Flow Sankey\n",
+ "\n",
+ "A Sankey diagram visualizes the total energy flows through the system:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "18",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.sankey.flows()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "19",
+ "metadata": {},
+ "source": [
+ "## Summary\n",
+ "\n",
+ "In this quickstart, you learned the **basic workflow**:\n",
+ "\n",
+ "1. **Create** a `FlowSystem` with timesteps\n",
+ "2. **Add** buses, effects, and components\n",
+ "3. **Optimize** with `flow_system.optimize(solver)`\n",
+ "4. **Analyze** results via `flow_system.statistics`\n",
+ "\n",
+ "### Next Steps\n",
+ "\n",
+ "- **[02-heat-system](02-heat-system.ipynb)**: Add thermal storage to shift loads\n",
+ "- **[03-investment-optimization](03-investment-optimization.ipynb)**: Optimize equipment sizing"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": "3.11"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/notebooks/02-heat-system.ipynb b/docs/notebooks/02-heat-system.ipynb
new file mode 100644
index 000000000..f1392c72f
--- /dev/null
+++ b/docs/notebooks/02-heat-system.ipynb
@@ -0,0 +1,361 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "0",
+ "metadata": {},
+ "source": "# Heat System\n\nDistrict heating with thermal storage and time-varying prices.\n\nThis notebook introduces:\n\n- **Storage**: Thermal buffer tanks with charging/discharging\n- **Time series data**: Using real demand profiles\n- **Multiple components**: Combining boiler, storage, and loads\n- **Result visualization**: Heatmaps, balance plots, and charge states"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1",
+ "metadata": {},
+ "source": [
+ "## Setup"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import plotly.express as px\n",
+ "import xarray as xr\n",
+ "\n",
+ "import flixopt as fx\n",
+ "\n",
+ "fx.CONFIG.notebook()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3",
+ "metadata": {},
+ "source": [
+ "## Define Time Horizon and Demand\n",
+ "\n",
+ "We model one week with hourly resolution. The office has typical weekday patterns:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "4",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# One week, hourly resolution\n",
+ "timesteps = pd.date_range('2024-01-15', periods=168, freq='h')\n",
+ "\n",
+ "# Create realistic office heat demand pattern\n",
+ "hours = np.arange(168)\n",
+ "hour_of_day = hours % 24\n",
+ "day_of_week = (hours // 24) % 7\n",
+ "\n",
+ "# Base demand pattern (kW)\n",
+ "base_demand = np.where(\n",
+ " (hour_of_day >= 7) & (hour_of_day <= 18), # Office hours\n",
+ " 80, # Daytime\n",
+ " 30, # Night setback\n",
+ ")\n",
+ "\n",
+ "# Reduce on weekends (days 5, 6)\n",
+ "weekend_factor = np.where(day_of_week >= 5, 0.5, 1.0)\n",
+ "heat_demand = base_demand * weekend_factor\n",
+ "\n",
+ "# Add some random variation\n",
+ "np.random.seed(42)\n",
+ "heat_demand = heat_demand + np.random.normal(0, 5, len(heat_demand))\n",
+ "heat_demand = np.clip(heat_demand, 20, 100)\n",
+ "\n",
+ "print(f'Time range: {timesteps[0]} to {timesteps[-1]}')\n",
+ "print(f'Peak demand: {heat_demand.max():.1f} kW')\n",
+ "print(f'Total demand: {heat_demand.sum():.0f} kWh')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "5",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Visualize the demand pattern with plotly\n",
+ "demand_series = xr.DataArray(heat_demand, dims=['time'], coords={'time': timesteps}, name='Heat Demand [kW]')\n",
+ "fig = px.line(\n",
+ " x=demand_series.time.values,\n",
+ " y=demand_series.values,\n",
+ " title='Office Heat Demand Profile',\n",
+ " labels={'x': 'Time', 'y': 'kW'},\n",
+ ")\n",
+ "fig"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6",
+ "metadata": {},
+ "source": [
+ "## Define Gas Prices\n",
+ "\n",
+ "Gas prices vary with time-of-use tariffs:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "7",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Time-of-use gas prices (€/kWh)\n",
+ "gas_price = np.where(\n",
+ " (hour_of_day >= 6) & (hour_of_day <= 22),\n",
+ " 0.08, # Peak: 6am-10pm\n",
+ " 0.05, # Off-peak: 10pm-6am\n",
+ ")\n",
+ "\n",
+ "fig = px.line(x=timesteps, y=gas_price, title='Gas Price [€/kWh]', labels={'x': 'Time', 'y': '€/kWh'})\n",
+ "fig"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "8",
+ "metadata": {},
+ "source": [
+ "## Build the Energy System\n",
+ "\n",
+ "The system includes:\n",
+ "- Gas boiler (150 kW thermal capacity)\n",
+ "- Thermal storage tank (500 kWh capacity)\n",
+ "- Office building heat demand\n",
+ "\n",
+ "```\n",
+ "Gas Grid ──► [Gas] ──► Boiler ──► [Heat] ◄──► Storage\n",
+ " │\n",
+ " ▼\n",
+ " Office\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "9",
+ "metadata": {},
+ "outputs": [],
+ "source": "flow_system = fx.FlowSystem(timesteps)\nflow_system.add_carriers(\n fx.Carrier('gas', '#3498db', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n)\nflow_system.add_elements(\n # === Buses ===\n fx.Bus('Gas', carrier='gas'),\n fx.Bus('Heat', carrier='heat'),\n # === Effect ===\n fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n # === Gas Supply with time-varying price ===\n fx.Source(\n 'GasGrid',\n outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=gas_price)],\n ),\n # === Gas Boiler: 150 kW, 92% efficiency ===\n fx.linear_converters.Boiler(\n 'Boiler',\n thermal_efficiency=0.92,\n thermal_flow=fx.Flow('Heat', bus='Heat', size=150),\n fuel_flow=fx.Flow('Gas', bus='Gas'),\n ),\n # === Thermal Storage: 500 kWh tank ===\n fx.Storage(\n 'ThermalStorage',\n capacity_in_flow_hours=500, # 500 kWh capacity\n initial_charge_state=250, # Start half-full\n minimal_final_charge_state=200, # End with at least 200 kWh\n eta_charge=0.98, # 98% charging efficiency\n eta_discharge=0.98, # 98% discharging efficiency\n relative_loss_per_hour=0.005, # 0.5% heat loss per hour\n charging=fx.Flow('Charge', bus='Heat', size=100), # Max 100 kW charging\n discharging=fx.Flow('Discharge', bus='Heat', size=100), # Max 100 kW discharging\n ),\n # === Office Heat Demand ===\n fx.Sink(\n 'Office',\n inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n ),\n)"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "10",
+ "metadata": {},
+ "source": [
+ "## Run Optimization"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "11",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.optimize(fx.solvers.HighsSolver(mip_gap=0.01));"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "12",
+ "metadata": {},
+ "source": [
+ "## Analyze Results\n",
+ "\n",
+ "### Heat Balance\n",
+ "\n",
+ "See how the boiler and storage work together to meet demand:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "13",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.balance('Heat')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "14",
+ "metadata": {},
+ "source": [
+ "### Storage Charge State\n",
+ "\n",
+ "Track how the storage level varies over time:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "15",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.balance('ThermalStorage')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "16",
+ "metadata": {},
+ "source": [
+ "### Heatmap Visualization\n",
+ "\n",
+ "Heatmaps show patterns across hours and days:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "17",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.heatmap('Boiler(Heat)')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "18",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.heatmap('ThermalStorage')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "19",
+ "metadata": {},
+ "source": [
+ "### Cost Analysis"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "20",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "total_costs = flow_system.solution['costs'].item()\n",
+ "total_heat = heat_demand.sum()\n",
+ "\n",
+ "print(f'Total operating costs: {total_costs:.2f} €')\n",
+ "print(f'Total heat delivered: {total_heat:.0f} kWh')\n",
+ "print(f'Average cost: {total_costs / total_heat * 100:.2f} ct/kWh')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "21",
+ "metadata": {},
+ "source": [
+ "### Flow Rates and Charge States\n",
+ "\n",
+ "Visualize all flow rates and storage charge states:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "22",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Plot all flow rates\n",
+ "flow_system.statistics.plot.flows()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "23",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Plot storage charge states\n",
+ "flow_system.statistics.plot.storage('ThermalStorage')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "24",
+ "metadata": {},
+ "source": [
+ "### Energy Flow Sankey\n",
+ "\n",
+ "A Sankey diagram visualizes the total energy flows through the system:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "25",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.sankey.flows()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "26",
+ "metadata": {},
+ "source": [
+ "## Key Insights\n",
+ "\n",
+ "The optimization reveals how storage enables **load shifting**:\n",
+ "\n",
+ "1. **Charge during off-peak**: When gas is cheap (night), the boiler runs at higher output to charge the storage\n",
+ "2. **Discharge during peak**: During expensive periods, storage supplements the boiler\n",
+ "3. **Weekend patterns**: Lower demand allows more storage cycling\n",
+ "\n",
+ "## Summary\n",
+ "\n",
+ "You learned how to:\n",
+ "\n",
+ "- Add **Storage** components with efficiency and losses\n",
+ "- Use **time-varying prices** in effects\n",
+ "- Visualize results with **heatmaps** and **balance plots**\n",
+ "- Access raw data via **statistics.flow_rates** and **statistics.charge_states**\n",
+ "\n",
+ "### Next Steps\n",
+ "\n",
+ "- **[03-investment-optimization](03-investment-optimization.ipynb)**: Optimize storage size\n",
+ "- **[04-operational-constraints](04-operational-constraints.ipynb)**: Add startup costs and minimum run times"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": "3.11"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/notebooks/03-investment-optimization.ipynb b/docs/notebooks/03-investment-optimization.ipynb
new file mode 100644
index 000000000..ff62fe037
--- /dev/null
+++ b/docs/notebooks/03-investment-optimization.ipynb
@@ -0,0 +1,404 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "0",
+ "metadata": {},
+ "source": "# Sizing\n\nSize a solar heating system - let the optimizer decide equipment sizes.\n\nThis notebook introduces:\n\n- **InvestParameters**: Define investment decisions with size bounds and costs\n- **Investment costs**: Fixed costs and size-dependent costs\n- **Optimal sizing**: Let the optimizer find the best equipment sizes\n- **Trade-off analysis**: Balance investment vs. operating costs"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1",
+ "metadata": {},
+ "source": [
+ "## Setup"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import plotly.express as px\n",
+ "import xarray as xr\n",
+ "\n",
+ "import flixopt as fx\n",
+ "\n",
+ "fx.CONFIG.notebook()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3",
+ "metadata": {},
+ "source": [
+ "## System Description\n",
+ "\n",
+ "The swimming pool heating system:\n",
+ "\n",
+ "- **Solar collectors**: Convert solar radiation to heat (size to be optimized)\n",
+ "- **Gas boiler**: Backup heating when solar is insufficient (existing, 200 kW)\n",
+ "- **Buffer tank**: Store excess solar heat (size to be optimized)\n",
+ "- **Pool**: Constant heat demand of 150 kW during operating hours\n",
+ "\n",
+ "```\n",
+ " ☀️ Solar ──► [Heat] ◄── Boiler ◄── [Gas]\n",
+ " │\n",
+ " ▼\n",
+ " Buffer Tank\n",
+ " │\n",
+ " ▼\n",
+ " Pool 🏊\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4",
+ "metadata": {},
+ "source": [
+ "## Define Time Horizon and Profiles\n",
+ "\n",
+ "We model one representative summer week:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "5",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# One week in summer, hourly\n",
+ "timesteps = pd.date_range('2024-07-15', periods=168, freq='h')\n",
+ "hours = np.arange(168)\n",
+ "hour_of_day = hours % 24\n",
+ "\n",
+ "# Solar radiation profile (kW/m² equivalent, simplified)\n",
+ "# Peak around noon, zero at night\n",
+ "solar_profile = np.maximum(0, np.sin((hour_of_day - 6) * np.pi / 12)) * 0.8\n",
+ "solar_profile = np.where((hour_of_day >= 6) & (hour_of_day <= 20), solar_profile, 0)\n",
+ "\n",
+ "# Add some cloud variation\n",
+ "np.random.seed(42)\n",
+ "cloud_factor = np.random.uniform(0.6, 1.0, len(timesteps))\n",
+ "solar_profile = solar_profile * cloud_factor\n",
+ "\n",
+ "# Pool operates 8am-10pm, constant demand when open\n",
+ "pool_demand = np.where((hour_of_day >= 8) & (hour_of_day <= 22), 150, 50) # kW\n",
+ "\n",
+ "print(f'Peak solar: {solar_profile.max():.2f} kW/kW_installed')\n",
+ "print(f'Pool demand: {pool_demand.max():.0f} kW (open), {pool_demand.min():.0f} kW (closed)')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Visualize profiles with plotly - using xarray and faceting\n",
+ "profiles = xr.Dataset(\n",
+ " {\n",
+ " 'Solar Profile [kW/kW]': xr.DataArray(solar_profile, dims=['time'], coords={'time': timesteps}),\n",
+ " 'Pool Demand [kW]': xr.DataArray(pool_demand, dims=['time'], coords={'time': timesteps}),\n",
+ " }\n",
+ ")\n",
+ "\n",
+ "# Convert to long format for faceting\n",
+ "df = profiles.to_dataframe().reset_index().melt(id_vars='time', var_name='variable', value_name='value')\n",
+ "fig = px.line(df, x='time', y='value', facet_col='variable', height=300)\n",
+ "fig.update_yaxes(matches=None, showticklabels=True)\n",
+ "fig.for_each_annotation(lambda a: a.update(text=a.text.split('=')[-1]))\n",
+ "fig"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "7",
+ "metadata": {},
+ "source": [
+ "## Define Costs\n",
+ "\n",
+ "Investment costs are **annualized** (€/year) to compare with operating costs:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "8",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Cost parameters\n",
+ "GAS_PRICE = 0.12 # €/kWh - high gas price makes solar attractive\n",
+ "\n",
 + "# Solar collectors: 400 €/kW installed, 20-year lifetime → ~20 €/kW/year annualized\n",
+ "# (simplified, real calculation would include interest rate)\n",
+ "SOLAR_COST_PER_KW = 20 # €/kW/year\n",
+ "\n",
 + "# Buffer tank: 50 €/kWh capacity, 30-year lifetime → ~1.7 €/kWh/year\n",
+ "TANK_COST_PER_KWH = 1.5 # €/kWh/year\n",
+ "\n",
+ "# Scale factor: We model 1 week, but costs are annual\n",
+ "# So we scale investment costs to weekly equivalent\n",
+ "WEEKS_PER_YEAR = 52\n",
+ "SOLAR_COST_WEEKLY = SOLAR_COST_PER_KW / WEEKS_PER_YEAR\n",
+ "TANK_COST_WEEKLY = TANK_COST_PER_KWH / WEEKS_PER_YEAR\n",
+ "\n",
+ "print(f'Solar cost: {SOLAR_COST_WEEKLY:.3f} €/kW/week')\n",
+ "print(f'Tank cost: {TANK_COST_WEEKLY:.4f} €/kWh/week')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "9",
+ "metadata": {},
+ "source": [
+ "## Build the System with Investment Options\n",
+ "\n",
+ "Use `InvestParameters` to define which sizes should be optimized:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "10",
+ "metadata": {},
+ "outputs": [],
+ "source": "flow_system = fx.FlowSystem(timesteps)\nflow_system.add_carriers(\n fx.Carrier('gas', '#3498db', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n)\nflow_system.add_elements(\n # === Buses ===\n fx.Bus('Heat', carrier='heat'),\n fx.Bus('Gas', carrier='gas'),\n # === Effects ===\n fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n # === Gas Supply ===\n fx.Source(\n 'GasGrid',\n outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=GAS_PRICE)],\n ),\n # === Gas Boiler (existing, fixed size) ===\n fx.linear_converters.Boiler(\n 'GasBoiler',\n thermal_efficiency=0.92,\n thermal_flow=fx.Flow('Heat', bus='Heat', size=200), # 200 kW existing\n fuel_flow=fx.Flow('Gas', bus='Gas'),\n ),\n # === Solar Collectors (size to be optimized) ===\n fx.Source(\n 'SolarCollectors',\n outputs=[\n fx.Flow(\n 'Heat',\n bus='Heat',\n # Investment optimization: find optimal size between 0-500 kW\n size=fx.InvestParameters(\n minimum_size=0,\n maximum_size=500,\n effects_of_investment_per_size={'costs': SOLAR_COST_WEEKLY},\n ),\n # Solar output depends on radiation profile\n fixed_relative_profile=solar_profile,\n )\n ],\n ),\n # === Buffer Tank (size to be optimized) ===\n fx.Storage(\n 'BufferTank',\n # Investment optimization: find optimal capacity between 0-2000 kWh\n capacity_in_flow_hours=fx.InvestParameters(\n minimum_size=0,\n maximum_size=2000,\n effects_of_investment_per_size={'costs': TANK_COST_WEEKLY},\n ),\n initial_charge_state=0,\n eta_charge=0.95,\n eta_discharge=0.95,\n relative_loss_per_hour=0.01, # 1% loss per hour\n charging=fx.Flow('Charge', bus='Heat', size=200),\n discharging=fx.Flow('Discharge', bus='Heat', size=200),\n ),\n # === Pool Heat Demand ===\n fx.Sink(\n 'Pool',\n inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=pool_demand)],\n ),\n)"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "11",
+ "metadata": {},
+ "source": [
+ "## Run Optimization"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "12",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.optimize(fx.solvers.HighsSolver(mip_gap=0.01));"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "13",
+ "metadata": {},
+ "source": [
+ "## Analyze Investment Decisions\n",
+ "\n",
+ "### Optimal Sizes"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "14",
+ "metadata": {},
+ "outputs": [],
+ "source": "solar_size = flow_system.statistics.sizes['SolarCollectors(Heat)'].item()\ntank_size = flow_system.statistics.sizes['BufferTank'].item()\n\nprint('=== Optimal Investment Decisions ===')\nprint(f'Solar collectors: {solar_size:.1f} kW')\nprint(f'Buffer tank: {tank_size:.1f} kWh')\nprint(f'Tank-to-solar ratio: {tank_size / solar_size:.1f} kWh/kW' if solar_size > 0 else 'N/A')"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "15",
+ "metadata": {},
+ "source": [
+ "### Visualize Sizes"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "16",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.sizes()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "17",
+ "metadata": {},
+ "source": [
+ "### Cost Breakdown"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "18",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "total_costs = flow_system.solution['costs'].item()\n",
+ "\n",
+ "# Calculate cost components\n",
+ "solar_invest = solar_size * SOLAR_COST_WEEKLY\n",
+ "tank_invest = tank_size * TANK_COST_WEEKLY\n",
+ "gas_costs = total_costs - solar_invest - tank_invest\n",
+ "\n",
+ "print('=== Weekly Cost Breakdown ===')\n",
+ "print(f'Solar investment: {solar_invest:.2f} € ({solar_invest / total_costs * 100:.1f}%)')\n",
+ "print(f'Tank investment: {tank_invest:.2f} € ({tank_invest / total_costs * 100:.1f}%)')\n",
+ "print(f'Gas operating: {gas_costs:.2f} € ({gas_costs / total_costs * 100:.1f}%)')\n",
+ "print('─────────────────────────────')\n",
+ "print(f'Total: {total_costs:.2f} €')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "19",
+ "metadata": {},
+ "source": [
+ "### System Operation"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "20",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.balance('Heat')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "21",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.heatmap('SolarCollectors(Heat)')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "22",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.balance('BufferTank')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "23",
+ "metadata": {},
+ "source": [
+ "## Compare: What if No Solar?\n",
+ "\n",
+ "Let's see how much the solar system saves:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "24",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Gas-only scenario\n",
+ "total_demand = pool_demand.sum()\n",
+ "gas_only_cost = total_demand / 0.92 * GAS_PRICE # All heat from gas boiler\n",
+ "\n",
+ "savings = gas_only_cost - total_costs\n",
+ "savings_pct = savings / gas_only_cost * 100\n",
+ "\n",
+ "print('=== Comparison with Gas-Only ===')\n",
+ "print(f'Gas-only cost: {gas_only_cost:.2f} €/week')\n",
+ "print(f'With solar: {total_costs:.2f} €/week')\n",
+ "print(f'Savings: {savings:.2f} €/week ({savings_pct:.1f}%)')\n",
+ "print(f'Annual savings: {savings * 52:.0f} €/year')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "25",
+ "metadata": {},
+ "source": [
+ "### Energy Flow Sankey\n",
+ "\n",
+ "A Sankey diagram visualizes the total energy flows through the system:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "26",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.sankey.flows()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "27",
+ "metadata": {},
+ "source": [
+ "## Key Concepts\n",
+ "\n",
+ "### InvestParameters Options\n",
+ "\n",
+ "```python\n",
+ "fx.InvestParameters(\n",
+ " minimum_size=0, # Lower bound (can be 0 for optional)\n",
+ " maximum_size=500, # Upper bound\n",
+ " fixed_size=100, # Or: fixed size (binary decision)\n",
+ " mandatory=True, # Force investment to happen\n",
+ " effects_of_investment={'costs': 1000}, # Fixed cost if invested\n",
+ " effects_of_investment_per_size={'costs': 25}, # Cost per unit size\n",
+ ")\n",
+ "```\n",
+ "\n",
+ "### Where to Use InvestParameters\n",
+ "\n",
+ "- **Flow.size**: Optimize converter/source/sink capacity\n",
+ "- **Storage.capacity_in_flow_hours**: Optimize storage capacity\n",
+ "\n",
+ "## Summary\n",
+ "\n",
+ "You learned how to:\n",
+ "\n",
+ "- Define **investment decisions** with `InvestParameters`\n",
+ "- Set **size bounds** (minimum/maximum)\n",
+ "- Add **investment costs** (per-size and fixed)\n",
+ "- Access **optimal sizes** via `statistics.sizes`\n",
+ "- Visualize sizes with `statistics.plot.sizes()`\n",
+ "\n",
+ "### Next Steps\n",
+ "\n",
+ "- **[04-operational-constraints](04-operational-constraints.ipynb)**: Add startup costs and minimum run times\n",
+ "- **[05-multi-carrier-system](05-multi-carrier-system.ipynb)**: Model combined heat and power"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": "3.11"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/notebooks/04-operational-constraints.ipynb b/docs/notebooks/04-operational-constraints.ipynb
new file mode 100644
index 000000000..9090b172b
--- /dev/null
+++ b/docs/notebooks/04-operational-constraints.ipynb
@@ -0,0 +1,443 @@
+{
+ "cells": [
+ {
+ "metadata": {},
+ "cell_type": "markdown",
+ "source": [
+ "# Constraints\n",
+ "\n",
+ "Industrial boiler with startup costs, minimum uptime, and load constraints.\n",
+ "\n",
+ "This notebook introduces:\n",
+ "\n",
+ "- **StatusParameters**: Model on/off decisions with constraints\n",
+ "- **Startup costs**: Penalties for turning equipment on\n",
+ "- **Minimum uptime/downtime**: Prevent rapid cycling\n",
+ "- **Minimum load**: Equipment can't run below a certain output"
+ ],
+ "id": "217ee38bd32426e5"
+ },
+ {
+ "metadata": {},
+ "cell_type": "markdown",
+ "source": "## Setup",
+ "id": "73f6d18d567c6329"
+ },
+ {
+ "metadata": {},
+ "cell_type": "code",
+ "outputs": [],
+ "execution_count": null,
+ "source": [
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import plotly.express as px\n",
+ "import xarray as xr\n",
+ "\n",
+ "import flixopt as fx\n",
+ "\n",
+ "fx.CONFIG.notebook()"
+ ],
+ "id": "e8a50bb05c1400f2"
+ },
+ {
+ "metadata": {},
+ "cell_type": "markdown",
+ "source": [
+ "## System Description\n",
+ "\n",
+ "The factory has:\n",
+ "\n",
+ "- **Industrial boiler**: 500 kW capacity, startup cost of 50€, minimum 4h uptime\n",
+ "- **Small backup boiler**: 100 kW, no startup constraints (always available)\n",
+ "- **Steam demand**: Varies with production schedule (high during shifts, low overnight)\n",
+ "\n",
+ "The main boiler is more efficient but has operational constraints. The backup is less efficient but flexible."
+ ],
+ "id": "54d9decc2ccf8235"
+ },
+ {
+ "metadata": {},
+ "cell_type": "markdown",
+ "source": "## Define Time Horizon and Demand",
+ "id": "65694ad43e7a1f42"
+ },
+ {
+ "metadata": {},
+ "cell_type": "code",
+ "outputs": [],
+ "execution_count": null,
+ "source": [
+ "# 3 days, hourly resolution\n",
+ "timesteps = pd.date_range('2024-03-11', periods=72, freq='h')\n",
+ "hours = np.arange(72)\n",
+ "hour_of_day = hours % 24\n",
+ "\n",
+ "# Factory operates in shifts:\n",
+ "# - Day shift (6am-2pm): 400 kW\n",
+ "# - Evening shift (2pm-10pm): 350 kW\n",
+ "# - Night (10pm-6am): 80 kW (maintenance heating only)\n",
+ "\n",
+ "steam_demand = np.select(\n",
+ " [\n",
+ " (hour_of_day >= 6) & (hour_of_day < 14), # Day shift\n",
+ " (hour_of_day >= 14) & (hour_of_day < 22), # Evening shift\n",
+ " ],\n",
+ " [400, 350],\n",
+ " default=80, # Night\n",
+ ")\n",
+ "\n",
+ "# Add some variation\n",
+ "np.random.seed(123)\n",
+ "steam_demand = steam_demand + np.random.normal(0, 20, len(steam_demand))\n",
+ "steam_demand = np.clip(steam_demand, 50, 450).astype(float)\n",
+ "\n",
+ "print(f'Peak demand: {steam_demand.max():.0f} kW')\n",
+ "print(f'Min demand: {steam_demand.min():.0f} kW')"
+ ],
+ "id": "8c606ee48c294628"
+ },
+ {
+ "metadata": {},
+ "cell_type": "code",
+ "outputs": [],
+ "execution_count": null,
+ "source": "px.line(x=timesteps, y=steam_demand, title='Factory Steam Demand', labels={'x': 'Time', 'y': 'kW'})",
+ "id": "fd4f46fa717b1572"
+ },
+ {
+ "metadata": {},
+ "cell_type": "markdown",
+ "source": "## Build System with Operational Constraints",
+ "id": "2d823131e625dcfa"
+ },
+ {
+ "metadata": {},
+ "cell_type": "code",
+ "outputs": [],
+ "execution_count": null,
+ "source": [
+ "flow_system = fx.FlowSystem(timesteps)\n",
+ "\n",
+ "# Define and register custom carriers\n",
+ "flow_system.add_carriers(\n",
+ " fx.Carrier('gas', '#3498db', 'kW'),\n",
+ " fx.Carrier('steam', '#87CEEB', 'kW_th', 'Process steam'),\n",
+ ")\n",
+ "\n",
+ "flow_system.add_elements(\n",
+ " # === Buses ===\n",
+ " fx.Bus('Gas', carrier='gas'),\n",
+ " fx.Bus('Steam', carrier='steam'),\n",
+ " # === Effect ===\n",
+ " fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n",
+ " # === Gas Supply ===\n",
+ " fx.Source(\n",
+ " 'GasGrid',\n",
+ " outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)],\n",
+ " ),\n",
+ " # === Main Industrial Boiler (with operational constraints) ===\n",
+ " fx.linear_converters.Boiler(\n",
+ " 'MainBoiler',\n",
+ " thermal_efficiency=0.94, # High efficiency\n",
+ " # StatusParameters define on/off behavior\n",
+ " status_parameters=fx.StatusParameters(\n",
+ " effects_per_startup={'costs': 50}, # 50€ startup cost\n",
+ " min_uptime=4, # Must run at least 4 hours once started\n",
+ " min_downtime=2, # Must stay off at least 2 hours\n",
+ " ),\n",
+ " thermal_flow=fx.Flow(\n",
+ " 'Steam',\n",
+ " bus='Steam',\n",
+ " size=500,\n",
+ " relative_minimum=0.3, # Minimum load: 30% = 150 kW\n",
+ " ),\n",
+ " fuel_flow=fx.Flow('Gas', bus='Gas', size=600), # Size required for status_parameters\n",
+ " ),\n",
+ " # === Backup Boiler (flexible, but less efficient) ===\n",
+ " fx.linear_converters.Boiler(\n",
+ " 'BackupBoiler',\n",
+ " thermal_efficiency=0.85, # Lower efficiency\n",
+ " # No status parameters = can turn on/off freely\n",
+ " thermal_flow=fx.Flow('Steam', bus='Steam', size=150),\n",
+ " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " ),\n",
+ " # === Factory Steam Demand ===\n",
+ " fx.Sink(\n",
+ " 'Factory',\n",
+ " inputs=[fx.Flow('Steam', bus='Steam', size=1, fixed_relative_profile=steam_demand)],\n",
+ " ),\n",
+ ")"
+ ],
+ "id": "736dfa9a935f6c7e"
+ },
+ {
+ "metadata": {},
+ "cell_type": "markdown",
+ "source": "## Run Optimization",
+ "id": "70ae8aaa82997d51"
+ },
+ {
+ "metadata": {},
+ "cell_type": "code",
+ "outputs": [],
+ "execution_count": null,
+ "source": "flow_system.optimize(fx.solvers.HighsSolver(mip_gap=0.01));",
+ "id": "76f27e3afe64f8c5"
+ },
+ {
+ "metadata": {},
+ "cell_type": "markdown",
+ "source": [
+ "## Analyze Results\n",
+ "\n",
+ "### Steam Balance\n",
+ "\n",
+ "See how the two boilers share the load:"
+ ],
+ "id": "c42e2778fd0a8ca"
+ },
+ {
+ "metadata": {},
+ "cell_type": "code",
+ "outputs": [],
+ "execution_count": null,
+ "source": "flow_system.statistics.plot.balance('Steam')",
+ "id": "9da80bc8faca05cd"
+ },
+ {
+ "metadata": {},
+ "cell_type": "markdown",
+ "source": [
+ "### Main Boiler Operation\n",
+ "\n",
+ "Notice how the main boiler:\n",
+ "- Runs continuously during production (respecting min uptime)\n",
+ "- Stays above minimum load (30%)\n",
+ "- Shuts down during low-demand periods"
+ ],
+ "id": "c885d25675d71371"
+ },
+ {
+ "metadata": {},
+ "cell_type": "code",
+ "outputs": [],
+ "execution_count": null,
+ "source": "flow_system.statistics.plot.heatmap('MainBoiler(Steam)')",
+ "id": "5a549b8b60f32745"
+ },
+ {
+ "metadata": {},
+ "cell_type": "markdown",
+ "source": [
+ "### On/Off Status\n",
+ "\n",
+ "Track the boiler's operational status:"
+ ],
+ "id": "66816d462d2f2654"
+ },
+ {
+ "metadata": {},
+ "cell_type": "code",
+ "outputs": [],
+ "execution_count": null,
+ "source": [
+ "# Merge solution DataArrays directly - xarray aligns coordinates automatically\n",
+ "status_ds = xr.Dataset(\n",
+ " {\n",
+ " 'Status': flow_system.solution['MainBoiler|status'],\n",
+ " 'Steam Production [kW]': flow_system.solution['MainBoiler(Steam)|flow_rate'],\n",
+ " }\n",
+ ")\n",
+ "\n",
+ "df = status_ds.to_dataframe().reset_index().melt(id_vars='time', var_name='variable', value_name='value')\n",
+ "fig = px.line(df, x='time', y='value', facet_col='variable', height=300, title='Main Boiler Operation')\n",
+ "fig.update_yaxes(matches=None, showticklabels=True)\n",
+ "fig.for_each_annotation(lambda a: a.update(text=a.text.split('=')[-1]))\n",
+ "fig"
+ ],
+ "id": "41801a37f07aa265"
+ },
+ {
+ "metadata": {},
+ "cell_type": "markdown",
+ "source": "### Startup Count and Costs",
+ "id": "7ca893f03606362"
+ },
+ {
+ "metadata": {},
+ "cell_type": "code",
+ "outputs": [],
+ "execution_count": null,
+ "source": [
+ "total_startups = int(flow_system.solution['MainBoiler|startup'].sum().item())\n",
+ "total_costs = flow_system.solution['costs'].item()\n",
+ "startup_costs = total_startups * 50\n",
+ "gas_costs = total_costs - startup_costs\n",
+ "\n",
+ "print('=== Cost Breakdown ===')\n",
+ "print(f'Number of startups: {total_startups}')\n",
+ "print(f'Startup costs: {startup_costs:.0f} €')\n",
+ "print(f'Gas costs: {gas_costs:.2f} €')\n",
+ "print(f'Total costs: {total_costs:.2f} €')"
+ ],
+ "id": "a95273c9775e1fd9"
+ },
+ {
+ "metadata": {},
+ "cell_type": "markdown",
+ "source": [
+ "### Duration Curves\n",
+ "\n",
+ "See how often each boiler operates at different load levels:"
+ ],
+ "id": "e29cf8ae428387bd"
+ },
+ {
+ "metadata": {},
+ "cell_type": "code",
+ "outputs": [],
+ "execution_count": null,
+ "source": "flow_system.statistics.plot.duration_curve('MainBoiler(Steam)')",
+ "id": "14e906ea8912de10"
+ },
+ {
+ "metadata": {},
+ "cell_type": "code",
+ "outputs": [],
+ "execution_count": null,
+ "source": "flow_system.statistics.plot.duration_curve('BackupBoiler(Steam)')",
+ "id": "15d6068612a73f84"
+ },
+ {
+ "metadata": {},
+ "cell_type": "markdown",
+ "source": [
+ "## Compare: Without Operational Constraints\n",
+ "\n",
+ "What if the main boiler had no startup costs or minimum uptime?"
+ ],
+ "id": "8354cd68733d5086"
+ },
+ {
+ "metadata": {},
+ "cell_type": "code",
+ "outputs": [],
+ "execution_count": null,
+ "source": [
+ "# Build unconstrained system\n",
+ "fs_unconstrained = fx.FlowSystem(timesteps)\n",
+ "fs_unconstrained.add_carriers(\n",
+ " fx.Carrier('gas', '#3498db', 'kW'),\n",
+ " fx.Carrier('steam', '#87CEEB', 'kW_th', 'Process steam'),\n",
+ ")\n",
+ "\n",
+ "fs_unconstrained.add_elements(\n",
+ " fx.Bus('Gas', carrier='gas'),\n",
+ " fx.Bus('Steam', carrier='steam'),\n",
+ " fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n",
+ " fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
+ " # Main boiler WITHOUT status parameters\n",
+ " fx.linear_converters.Boiler(\n",
+ " 'MainBoiler',\n",
+ " thermal_efficiency=0.94,\n",
+ " thermal_flow=fx.Flow('Steam', bus='Steam', size=500),\n",
+ " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " ),\n",
+ " fx.linear_converters.Boiler(\n",
+ " 'BackupBoiler',\n",
+ " thermal_efficiency=0.85,\n",
+ " thermal_flow=fx.Flow('Steam', bus='Steam', size=150),\n",
+ " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " ),\n",
+ " fx.Sink('Factory', inputs=[fx.Flow('Steam', bus='Steam', size=1, fixed_relative_profile=steam_demand)]),\n",
+ ")\n",
+ "\n",
+ "fs_unconstrained.optimize(fx.solvers.HighsSolver())\n",
+ "unconstrained_costs = fs_unconstrained.solution['costs'].item()\n",
+ "\n",
+ "print('=== Comparison ===')\n",
+ "print(f'With constraints: {total_costs:.2f} €')\n",
+ "print(f'Without constraints: {unconstrained_costs:.2f} €')\n",
+ "print(\n",
+ " f'Constraint cost: {total_costs - unconstrained_costs:.2f} € ({(total_costs - unconstrained_costs) / unconstrained_costs * 100:.1f}%)'\n",
+ ")"
+ ],
+ "id": "8769dbda34dd4ccf"
+ },
+ {
+ "metadata": {},
+ "cell_type": "markdown",
+ "source": [
+ "### Energy Flow Sankey\n",
+ "\n",
+ "A Sankey diagram visualizes the total energy flows through the system:"
+ ],
+ "id": "64ddc254af867367"
+ },
+ {
+ "metadata": {},
+ "cell_type": "code",
+ "outputs": [],
+ "execution_count": null,
+ "source": "flow_system.statistics.plot.sankey.flows()",
+ "id": "f2742f4b0a7c5323"
+ },
+ {
+ "metadata": {},
+ "cell_type": "markdown",
+ "source": [
+ "## Key Concepts\n",
+ "\n",
+ "### StatusParameters Options\n",
+ "\n",
+ "```python\n",
+ "fx.StatusParameters(\n",
+ " # Startup/shutdown costs\n",
+ " effects_per_startup={'costs': 50}, # Cost per startup event\n",
+ " effects_per_shutdown={'costs': 10}, # Cost per shutdown event\n",
+ " \n",
+ " # Time constraints\n",
+ " min_uptime=4, # Minimum hours running once started\n",
+ " min_downtime=2, # Minimum hours off once stopped\n",
+ " \n",
+ " # Startup limits\n",
+ " max_startups=10, # Maximum startups per period\n",
+ ")\n",
+ "```\n",
+ "\n",
+ "### Minimum Load\n",
+ "\n",
+ "Set via `Flow.relative_minimum`:\n",
+ "```python\n",
+ "fx.Flow('Steam', bus='Steam', size=500, relative_minimum=0.3) # Min 30% load\n",
+ "```\n",
+ "\n",
+ "### When Status is Active\n",
+ "\n",
+ "- When `StatusParameters` is set, a binary on/off variable is created\n",
+ "- Flow is zero when status=0, within bounds when status=1\n",
+ "- Without `StatusParameters`, flow can vary continuously from 0 to max\n",
+ "\n",
+ "## Summary\n",
+ "\n",
+ "You learned how to:\n",
+ "\n",
+ "- Add **startup costs** with `effects_per_startup`\n",
+ "- Set **minimum run times** with `min_uptime` and `min_downtime`\n",
+ "- Define **minimum load** with `relative_minimum`\n",
+ "- Access **status variables** from the solution\n",
+ "- Use **duration curves** to analyze operation patterns\n",
+ "\n",
+ "### Next Steps\n",
+ "\n",
+ "- **[05-multi-carrier-system](05-multi-carrier-system.ipynb)**: Model CHP with electricity and heat\n",
+ "- **[06a-time-varying-parameters](06a-time-varying-parameters.ipynb)**: Variable efficiency based on external conditions"
+ ],
+ "id": "2f9951587227304f"
+ }
+ ],
+ "metadata": {},
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/notebooks/05-multi-carrier-system.ipynb b/docs/notebooks/05-multi-carrier-system.ipynb
new file mode 100644
index 000000000..76de7e69a
--- /dev/null
+++ b/docs/notebooks/05-multi-carrier-system.ipynb
@@ -0,0 +1,370 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "0",
+ "metadata": {},
+ "source": "# Multi-Carrier\n\nHospital with CHP producing both electricity and heat.\n\nThis notebook introduces:\n\n- **Multiple energy carriers**: Electricity, heat, and gas in one system\n- **CHP (Cogeneration)**: Equipment producing multiple outputs\n- **Electricity market**: Buying and selling to the grid\n- **Carrier colors**: Visual distinction between energy types"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1",
+ "metadata": {},
+ "source": [
+ "## Setup"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import plotly.express as px\n",
+ "import xarray as xr\n",
+ "\n",
+ "import flixopt as fx\n",
+ "\n",
+ "fx.CONFIG.notebook()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3",
+ "metadata": {},
+ "source": [
+ "## System Description\n",
+ "\n",
+ "The hospital energy system:\n",
+ "\n",
+ "```\n",
+ " Grid Buy ──►\n",
+ " [Electricity] ──► Hospital Elec. Load\n",
+ " Grid Sell ◄── ▲\n",
+ " │\n",
+ " Gas Grid ──► [Gas] ──► CHP ──────┘\n",
+ " │ │\n",
+ " │ ▼\n",
+ " │ [Heat] ──► Hospital Heat Load\n",
+ " │ ▲\n",
+ " └──► Boiler\n",
+ "```\n",
+ "\n",
+ "**Equipment:**\n",
+ "- **CHP**: 200 kW electrical, ~250 kW thermal (η_el=40%, η_th=50%)\n",
+ "- **Gas Boiler**: 400 kW thermal backup\n",
+ "- **Grid**: Buy electricity at variable prices, sell at lower prices"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4",
+ "metadata": {},
+ "source": [
+ "## Define Time Horizon and Demand Profiles"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "5",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# One week, hourly\n",
+ "timesteps = pd.date_range('2024-02-05', periods=168, freq='h')\n",
+ "hours = np.arange(168)\n",
+ "hour_of_day = hours % 24\n",
+ "\n",
+ "# Hospital electricity demand (kW)\n",
+ "# Base load + daily pattern (higher during day for equipment, lighting)\n",
+ "elec_base = 150 # 24/7 critical systems\n",
+ "elec_daily = 100 * np.sin((hour_of_day - 6) * np.pi / 12) # Peak at noon\n",
+ "elec_daily = np.maximum(0, elec_daily)\n",
+ "electricity_demand = elec_base + elec_daily\n",
+ "\n",
+ "# Hospital heat demand (kW)\n",
+ "# Higher in morning, drops during day, increases for hot water in evening\n",
+ "heat_pattern = np.select(\n",
+ " [\n",
+ " (hour_of_day >= 5) & (hour_of_day < 9), # Morning warmup\n",
+ " (hour_of_day >= 9) & (hour_of_day < 17), # Daytime\n",
+ " (hour_of_day >= 17) & (hour_of_day < 22), # Evening\n",
+ " ],\n",
+ " [350, 250, 300],\n",
+ " default=200, # Night\n",
+ ")\n",
+ "heat_demand = heat_pattern.astype(float)\n",
+ "\n",
+ "# Add random variation\n",
+ "np.random.seed(456)\n",
+ "electricity_demand += np.random.normal(0, 15, len(timesteps))\n",
+ "heat_demand += np.random.normal(0, 20, len(timesteps))\n",
+ "electricity_demand = np.clip(electricity_demand, 100, 300)\n",
+ "heat_demand = np.clip(heat_demand, 150, 400)\n",
+ "\n",
+ "print(f'Electricity: {electricity_demand.min():.0f} - {electricity_demand.max():.0f} kW')\n",
+ "print(f'Heat: {heat_demand.min():.0f} - {heat_demand.max():.0f} kW')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Electricity prices (€/kWh)\n",
+ "# Time-of-use: expensive during day, cheaper at night\n",
+ "elec_buy_price = np.where(\n",
+ " (hour_of_day >= 7) & (hour_of_day <= 21),\n",
+ " 0.35, # Peak - high electricity prices make CHP attractive\n",
+ " 0.20, # Off-peak\n",
+ ")\n",
+ "\n",
+ "# Feed-in tariff (sell price) - allows selling excess CHP electricity\n",
+ "elec_sell_price = 0.12 # Fixed feed-in rate\n",
+ "\n",
+ "# Gas price - relatively low, favoring gas-based generation\n",
+ "gas_price = 0.05 # €/kWh"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "7",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Visualize demands and prices with plotly - using xarray and faceting\n",
+ "profiles = xr.Dataset(\n",
+ " {\n",
+ " 'Electricity Demand [kW]': xr.DataArray(electricity_demand, dims=['time'], coords={'time': timesteps}),\n",
+ " 'Heat Demand [kW]': xr.DataArray(heat_demand, dims=['time'], coords={'time': timesteps}),\n",
+ " 'Elec. Buy Price [€/kWh]': xr.DataArray(elec_buy_price, dims=['time'], coords={'time': timesteps}),\n",
+ " }\n",
+ ")\n",
+ "\n",
+ "df = profiles.to_dataframe().reset_index().melt(id_vars='time', var_name='variable', value_name='value')\n",
+ "fig = px.line(df, x='time', y='value', facet_col='variable', height=300)\n",
+ "fig.update_yaxes(matches=None, showticklabels=True)\n",
+ "fig.for_each_annotation(lambda a: a.update(text=a.text.split('=')[-1]))\n",
+ "fig"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "8",
+ "metadata": {},
+ "source": [
+ "## Build the Multi-Carrier System"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "9",
+ "metadata": {},
+ "outputs": [],
 + "source": "flow_system = fx.FlowSystem(timesteps)\nflow_system.add_carriers(\n    fx.Carrier('gas', '#3498db', 'kW'),\n    fx.Carrier('electricity', '#f1c40f', 'kW'),\n    fx.Carrier('heat', '#e74c3c', 'kW'),\n)\nflow_system.add_elements(\n    # === Buses with carriers for visual distinction ===\n    fx.Bus('Electricity', carrier='electricity'),\n    fx.Bus('Heat', carrier='heat'),\n    fx.Bus('Gas', carrier='gas'),\n    # === Effects ===\n    fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n    fx.Effect('CO2', 'kg', 'CO2 Emissions'),  # Track emissions too\n    # === Gas Supply ===\n    fx.Source(\n        'GasGrid',\n        outputs=[\n            fx.Flow(\n                'Gas',\n                bus='Gas',\n                size=1000,\n                effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2},  # Gas: 0.2 kg CO2/kWh\n            )\n        ],\n    ),\n    # === Electricity Grid (buy) ===\n    fx.Source(\n        'GridBuy',\n        outputs=[\n            fx.Flow(\n                'Electricity',\n                bus='Electricity',\n                size=500,\n                effects_per_flow_hour={'costs': elec_buy_price, 'CO2': 0.4},  # Grid: 0.4 kg CO2/kWh\n            )\n        ],\n    ),\n    # === Electricity Grid (sell) - negative cost = revenue ===\n    fx.Sink(\n        'GridSell',\n        inputs=[\n            fx.Flow(\n                'Electricity',\n                bus='Electricity',\n                size=200,\n                effects_per_flow_hour={'costs': -elec_sell_price},  # Negative = income\n            )\n        ],\n    ),\n    # === CHP Unit (Combined Heat and Power) ===\n    fx.linear_converters.CHP(\n        'CHP',\n        electrical_efficiency=0.40,  # 40% to electricity\n        thermal_efficiency=0.50,  # 50% to heat (total: 90%)\n        status_parameters=fx.StatusParameters(\n            effects_per_startup={'costs': 30},\n            min_uptime=3,\n        ),\n        electrical_flow=fx.Flow('P_el', bus='Electricity', size=200),\n        thermal_flow=fx.Flow('Q_th', bus='Heat', size=250),\n        fuel_flow=fx.Flow(\n            'Q_fuel',\n            bus='Gas',\n            size=500,\n            relative_minimum=0.4,  # Min 40% load\n        ),\n    ),\n    # === Gas Boiler (heat only) ===\n    fx.linear_converters.Boiler(\n        'Boiler',\n        thermal_efficiency=0.92,\n        thermal_flow=fx.Flow('Q_th', bus='Heat', size=400),\n        fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n    ),\n    # === Hospital Loads ===\n    fx.Sink(\n        'HospitalElec',\n        inputs=[fx.Flow('Load', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)],\n    ),\n    fx.Sink(\n        'HospitalHeat',\n        inputs=[fx.Flow('Load', bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n    ),\n)"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "10",
+ "metadata": {},
+ "source": [
+ "## Run Optimization"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "11",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.optimize(fx.solvers.HighsSolver(mip_gap=0.01));"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "12",
+ "metadata": {},
+ "source": [
+ "## Analyze Results\n",
+ "\n",
+ "### Electricity Balance"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "13",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.balance('Electricity')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "14",
+ "metadata": {},
+ "source": [
+ "### Heat Balance"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "15",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.balance('Heat')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "16",
+ "metadata": {},
+ "source": [
+ "### Gas Balance"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "17",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.balance('Gas')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "18",
+ "metadata": {},
+ "source": [
+ "### CHP Operation Pattern"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "19",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.heatmap('CHP(P_el)')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "20",
+ "metadata": {},
+ "source": [
+ "### Cost and Emissions Summary"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "21",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "total_costs = flow_system.solution['costs'].item()\n",
+ "total_co2 = flow_system.solution['CO2'].item()\n",
+ "\n",
+ "# Energy flows\n",
+ "flow_rates = flow_system.statistics.flow_rates\n",
+ "grid_buy = flow_rates['GridBuy(Electricity)'].sum().item()\n",
+ "grid_sell = flow_rates['GridSell(Electricity)'].sum().item()\n",
+ "chp_elec = flow_rates['CHP(P_el)'].sum().item()\n",
+ "chp_heat = flow_rates['CHP(Q_th)'].sum().item()\n",
+ "boiler_heat = flow_rates['Boiler(Q_th)'].sum().item()\n",
+ "\n",
+ "total_elec = electricity_demand.sum()\n",
+ "total_heat = heat_demand.sum()\n",
+ "\n",
+ "print('=== Energy Summary ===')\n",
+ "print(f'Total electricity demand: {total_elec:.0f} kWh')\n",
+ "print(f' - From CHP: {chp_elec:.0f} kWh ({chp_elec / total_elec * 100:.1f}%)')\n",
+ "print(f' - From Grid: {grid_buy:.0f} kWh ({grid_buy / total_elec * 100:.1f}%)')\n",
+ "print(f' - Sold to Grid: {grid_sell:.0f} kWh')\n",
+ "print()\n",
+ "print(f'Total heat demand: {total_heat:.0f} kWh')\n",
+ "print(f' - From CHP: {chp_heat:.0f} kWh ({chp_heat / total_heat * 100:.1f}%)')\n",
+ "print(f' - From Boiler: {boiler_heat:.0f} kWh ({boiler_heat / total_heat * 100:.1f}%)')\n",
+ "print()\n",
+ "print('=== Costs & Emissions ===')\n",
+ "print(f'Total costs: {total_costs:.2f} €')\n",
+ "print(f'Total CO2: {total_co2:.0f} kg')\n",
+ "print(f'Specific costs: {total_costs / (total_elec + total_heat) * 100:.2f} ct/kWh')\n",
+ "print(f'Specific CO2: {total_co2 / (total_elec + total_heat) * 1000:.1f} g/kWh')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "22",
+ "metadata": {},
+ "source": [
+ "### Compare: What if No CHP?\n",
+ "\n",
+ "How much does the CHP save compared to buying all electricity?"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "23",
+ "metadata": {},
+ "outputs": [],
+ "source": "# Build system without CHP\nfs_no_chp = fx.FlowSystem(timesteps)\nfs_no_chp.add_carriers(\n fx.Carrier('gas', '#3498db', 'kW'),\n fx.Carrier('electricity', '#f1c40f', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n)\nfs_no_chp.add_elements(\n fx.Bus('Electricity', carrier='electricity'),\n fx.Bus('Heat', carrier='heat'),\n fx.Bus('Gas', carrier='gas'),\n fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n fx.Effect('CO2', 'kg', 'CO2 Emissions'),\n fx.Source(\n 'GasGrid',\n outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2})],\n ),\n fx.Source(\n 'GridBuy',\n outputs=[\n fx.Flow(\n 'Electricity', bus='Electricity', size=500, effects_per_flow_hour={'costs': elec_buy_price, 'CO2': 0.4}\n )\n ],\n ),\n # Only boiler for heat\n fx.linear_converters.Boiler(\n 'Boiler',\n thermal_efficiency=0.92,\n thermal_flow=fx.Flow('Q_th', bus='Heat', size=500),\n fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n ),\n fx.Sink(\n 'HospitalElec', inputs=[fx.Flow('Load', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)]\n ),\n fx.Sink('HospitalHeat', inputs=[fx.Flow('Load', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),\n)\n\nfs_no_chp.optimize(fx.solvers.HighsSolver())\n\nno_chp_costs = fs_no_chp.solution['costs'].item()\nno_chp_co2 = fs_no_chp.solution['CO2'].item()\n\nprint('=== CHP Benefit Analysis ===')\nprint(f'Without CHP: {no_chp_costs:.2f} € / {no_chp_co2:.0f} kg CO2')\nprint(f'With CHP: {total_costs:.2f} € / {total_co2:.0f} kg CO2')\nprint(f'Cost savings: {no_chp_costs - total_costs:.2f} € ({(no_chp_costs - total_costs) / no_chp_costs * 100:.1f}%)')\nprint(f'CO2 reduction: {no_chp_co2 - total_co2:.0f} kg ({(no_chp_co2 - total_co2) / no_chp_co2 * 100:.1f}%)')"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "24",
+ "metadata": {},
+ "source": [
+ "### Energy Flow Sankey\n",
+ "\n",
+ "A Sankey diagram visualizes the total energy flows through the multi-carrier system:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "25",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.sankey.flows()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "26",
+ "metadata": {},
+ "source": "## Key Concepts\n\n### Multi-Carrier Systems\n\n- Multiple buses for different energy carriers (electricity, heat, gas)\n- Components can connect to multiple buses (CHP produces both electricity and heat)\n- Carriers enable automatic coloring in visualizations\n\n### CHP Modeling\n\n```python\nfx.linear_converters.CHP(\n 'CHP',\n electrical_efficiency=0.40, # Fuel → Electricity\n thermal_efficiency=0.50, # Fuel → Heat\n # Total efficiency = 0.40 + 0.50 = 0.90 (90%)\n electrical_flow=fx.Flow('P_el', bus='Electricity', size=200),\n thermal_flow=fx.Flow('Q_th', bus='Heat', size=250),\n fuel_flow=fx.Flow('Q_fuel', bus='Gas', size=500),\n)\n```\n\n### Electricity Markets\n\n- **Buy**: Source with positive cost\n- **Sell**: Sink with negative cost (= revenue)\n- Different prices for buy vs. sell (spread)\n\n### Tracking Multiple Effects\n\n```python\nfx.Effect('costs', '€', 'Total Costs', is_objective=True) # Minimize this\nfx.Effect('CO2', 'kg', 'CO2 Emissions') # Just track, don't optimize\n```\n\n## Summary\n\nYou learned how to:\n\n- Model **multiple energy carriers** (electricity, heat, gas)\n- Use **CHP** for combined heat and power production\n- Model **electricity markets** with buy/sell prices\n- Track **multiple effects** (costs and emissions)\n- Analyze **multi-carrier balances**\n\n### Next Steps\n\n- **[06a-time-varying-parameters](06a-time-varying-parameters.ipynb)**: Variable efficiency based on conditions\n- **[07-scenarios-and-periods](07-scenarios-and-periods.ipynb)**: Plan under uncertainty"
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": "3.11"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/notebooks/06a-time-varying-parameters.ipynb b/docs/notebooks/06a-time-varying-parameters.ipynb
new file mode 100644
index 000000000..9856aa095
--- /dev/null
+++ b/docs/notebooks/06a-time-varying-parameters.ipynb
@@ -0,0 +1,339 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "0",
+ "metadata": {},
+ "source": [
+ "# Time-Varying Parameters\n",
+ "\n",
+ "Model equipment with efficiency that changes based on external conditions.\n",
+ "\n",
+ "This notebook covers:\n",
+ "\n",
+ "- **Time-varying conversion factors**: Efficiency depends on external conditions\n",
+ "- **Temperature-dependent COP**: Heat pump performance varies with weather\n",
+ "- **Practical application**: Using arrays in conversion factor definitions"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1",
+ "metadata": {},
+ "source": [
+ "## Setup"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "2",
+ "metadata": {},
+ "source": [
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import plotly.express as px\n",
+ "import xarray as xr\n",
+ "\n",
+ "import flixopt as fx\n",
+ "\n",
+ "fx.CONFIG.notebook()"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3",
+ "metadata": {},
+ "source": [
+ "## The Problem: Variable Heat Pump Efficiency\n",
+ "\n",
+ "A heat pump's COP (Coefficient of Performance) depends on the temperature difference between source and sink:\n",
+ "\n",
+ "- **Mild weather** (10°C outside): COP ≈ 4.5 (1 kWh electricity → 4.5 kWh heat)\n",
+ "- **Cold weather** (-5°C outside): COP ≈ 2.5 (1 kWh electricity → 2.5 kWh heat)\n",
+ "\n",
+ "This time-varying relationship can be modeled directly using arrays in the conversion factors.\n",
+ "\n",
+ "### When to Use This Approach\n",
+ "\n",
+ "Use time-varying conversion factors when:\n",
+ "- Efficiency depends on **external conditions** (temperature, solar irradiance, humidity)\n",
+ "- The relationship is **independent of the load level**\n",
+ "- You have **measured or forecast data** for the efficiency profile"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4",
+ "metadata": {},
+ "source": [
+ "## Define Time Series Data"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "5",
+ "metadata": {},
+ "source": [
+ "# One winter week\n",
+ "timesteps = pd.date_range('2024-01-22', periods=168, freq='h')\n",
+ "hours = np.arange(168)\n",
+ "hour_of_day = hours % 24\n",
+ "\n",
+ "# Outdoor temperature: daily cycle with cold nights\n",
+ "temp_base = 2 # Average temp in °C\n",
+ "temp_amplitude = 5 # Daily variation\n",
+ "outdoor_temp = temp_base + temp_amplitude * np.sin((hour_of_day - 6) * np.pi / 12)\n",
+ "\n",
+ "# Add day-to-day variation for realism\n",
+ "np.random.seed(789)\n",
+ "daily_offset = np.repeat(np.random.uniform(-3, 3, 7), 24)\n",
+ "outdoor_temp = outdoor_temp + daily_offset"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "code",
+ "id": "6",
+ "metadata": {},
+ "source": [
+ "# Heat demand: inversely related to outdoor temp (higher demand when colder)\n",
+ "heat_demand = 200 - 8 * outdoor_temp\n",
+ "heat_demand = np.clip(heat_demand, 100, 300)"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "code",
+ "id": "7",
+ "metadata": {},
+ "source": [
+ "# Visualize input profiles\n",
+ "profiles = xr.Dataset(\n",
+ " {\n",
+ " 'Outdoor Temp [°C]': xr.DataArray(outdoor_temp, dims=['time'], coords={'time': timesteps}),\n",
+ " 'Heat Demand [kW]': xr.DataArray(heat_demand, dims=['time'], coords={'time': timesteps}),\n",
+ " }\n",
+ ")\n",
+ "\n",
+ "df = profiles.to_dataframe().reset_index().melt(id_vars='time', var_name='variable', value_name='value')\n",
+ "fig = px.line(df, x='time', y='value', facet_col='variable', height=300)\n",
+ "fig.update_yaxes(matches=None, showticklabels=True)\n",
+ "fig.for_each_annotation(lambda a: a.update(text=a.text.split('=')[-1]))\n",
+ "fig"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "8",
+ "metadata": {},
+ "source": [
+ "## Calculate Time-Varying COP\n",
+ "\n",
+ "The COP depends on outdoor temperature. We use a simplified Carnot-based formula:\n",
+ "\n",
+ "$$\\text{COP}_{\\text{real}} \\approx 0.45 \\times \\text{COP}_{\\text{Carnot}} = 0.45 \\times \\frac{T_{\\text{supply}}}{T_{\\text{supply}} - T_{\\text{source}}}$$\n",
+ "\n",
+ "where temperatures are in Kelvin."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "9",
+ "metadata": {},
+ "source": [
+ "# COP calculation\n",
+ "T_supply = 45 + 273.15 # Supply temperature 45°C in Kelvin\n",
+ "T_source = outdoor_temp + 273.15 # Outdoor temp in Kelvin\n",
+ "\n",
+ "carnot_cop = T_supply / (T_supply - T_source)\n",
+ "real_cop = 0.45 * carnot_cop\n",
+ "real_cop = np.clip(real_cop, 2.0, 5.0) # Physical limits"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "code",
+ "id": "10",
+ "metadata": {},
+ "source": [
+ "# Visualize COP vs temperature relationship\n",
+ "px.scatter(\n",
+ " x=outdoor_temp,\n",
+ " y=real_cop,\n",
+ " title='Heat Pump COP vs Outdoor Temperature',\n",
+ " labels={'x': 'Outdoor Temperature [°C]', 'y': 'COP'},\n",
+ " opacity=0.5,\n",
+ ")"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "11",
+ "metadata": {},
+ "source": [
+ "## Build the Model\n",
+ "\n",
+ "The key is passing the COP array directly to `conversion_factors`. The equation becomes:\n",
+ "\n",
+ "$$\\text{Elec} \\times \\text{COP}(t) = \\text{Heat} \\times 1$$\n",
+ "\n",
+ "where `COP(t)` varies at each timestep."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "12",
+ "metadata": {},
+ "source": "flow_system = fx.FlowSystem(timesteps)\nflow_system.add_carriers(\n fx.Carrier('electricity', '#f1c40f', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n)\nflow_system.add_elements(\n # Buses\n fx.Bus('Electricity', carrier='electricity'),\n fx.Bus('Heat', carrier='heat'),\n # Effect for cost tracking\n fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n # Grid electricity source\n fx.Source('Grid', outputs=[fx.Flow('Elec', bus='Electricity', size=500, effects_per_flow_hour=0.30)]),\n # Heat pump with TIME-VARYING COP\n fx.LinearConverter(\n 'HeatPump',\n inputs=[fx.Flow('Elec', bus='Electricity', size=150)],\n outputs=[fx.Flow('Heat', bus='Heat', size=500)],\n conversion_factors=[{'Elec': real_cop, 'Heat': 1}], # <-- Array for time-varying COP\n ),\n # Heat demand\n fx.Sink('Building', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),\n)\n\nflow_system.optimize(fx.solvers.HighsSolver())",
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "13",
+ "metadata": {},
+ "source": [
+ "## Analyze Results"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "14",
+ "metadata": {},
+ "source": [
+ "flow_system.statistics.plot.balance('Heat')"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "code",
+ "id": "15",
+ "metadata": {},
+ "source": [
+ "flow_system.statistics.plot.balance('Electricity')"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "code",
+ "id": "16",
+ "metadata": {},
+ "source": [
+ "# Compare electricity consumption vs heat output using xarray for alignment\n",
+ "# Create dataset with solution and input data - xarray auto-aligns by time coordinate\n",
+ "comparison = xr.Dataset(\n",
+ " {\n",
+ " 'elec_consumption': flow_system.solution['HeatPump(Elec)|flow_rate'],\n",
+ " 'heat_output': flow_system.solution['HeatPump(Heat)|flow_rate'],\n",
+ " 'outdoor_temp': xr.DataArray(outdoor_temp, dims=['time'], coords={'time': timesteps}),\n",
+ " }\n",
+ ")\n",
+ "\n",
+ "# Calculate effective COP at each timestep\n",
+ "comparison['effective_cop'] = xr.where(\n",
+ " comparison['elec_consumption'] > 0.1, comparison['heat_output'] / comparison['elec_consumption'], np.nan\n",
+ ")\n",
+ "\n",
+ "px.scatter(\n",
+ " x=comparison['outdoor_temp'].values,\n",
+ " y=comparison['effective_cop'].values,\n",
+ " title='Actual Operating COP vs Outdoor Temperature',\n",
+ " labels={'x': 'Outdoor Temperature [°C]', 'y': 'Operating COP'},\n",
+ ")"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "17",
+ "metadata": {},
+ "source": [
+ "## Key Concepts\n",
+ "\n",
+ "### Conversion Factor Syntax\n",
+ "\n",
+ "The `conversion_factors` parameter accepts a list of dictionaries where values can be:\n",
+ "- **Scalars**: Constant efficiency (e.g., `{'Fuel': 1, 'Heat': 0.9}`)\n",
+ "- **Arrays**: Time-varying efficiency (e.g., `{'Elec': cop_array, 'Heat': 1}`)\n",
+ "- **TimeSeriesData**: For more complex data with metadata\n",
+ "\n",
+ "```python\n",
+ "fx.LinearConverter(\n",
+ " 'HeatPump',\n",
+ " inputs=[fx.Flow('Elec', bus='Electricity', size=150)],\n",
+ " outputs=[fx.Flow('Heat', bus='Heat', size=500)],\n",
+ " conversion_factors=[{'Elec': cop_array, 'Heat': 1}], # Time-varying\n",
+ ")\n",
+ "```\n",
+ "\n",
+ "### Physical Interpretation\n",
+ "\n",
+ "The conversion equation at each timestep:\n",
+ "$$\\text{Input}_1 \\times \\text{factor}_1(t) + \\text{Input}_2 \\times \\text{factor}_2(t) + ... = 0$$\n",
+ "\n",
+ "For a heat pump: `Elec * COP(t) - Heat * 1 = 0` → `Heat = Elec * COP(t)`\n",
+ "\n",
+ "### Common Use Cases\n",
+ "\n",
+ "| Equipment | Varying Parameter | External Driver |\n",
+ "|-----------|-------------------|------------------|\n",
+ "| Heat pump | COP | Outdoor temperature |\n",
+ "| Solar PV | Capacity factor | Solar irradiance |\n",
+ "| Cooling tower | Efficiency | Wet bulb temperature |\n",
+ "| Gas turbine | Heat rate | Ambient temperature |"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "18",
+ "metadata": {},
+ "source": [
+ "## Summary\n",
+ "\n",
+ "You learned how to:\n",
+ "\n",
+ "- Model **time-varying efficiency** using arrays in conversion factors\n",
+ "- Calculate **temperature-dependent COP** for heat pumps\n",
+ "- Analyze the **resulting operation** with varying efficiency\n",
+ "\n",
+ "### When to Use This vs Other Approaches\n",
+ "\n",
+ "| Approach | Use When | Example |\n",
+ "|----------|----------|--------|\n",
+ "| **Time-varying factors** (this notebook) | Efficiency varies with external conditions | Heat pump COP vs temperature |\n",
+ "| **PiecewiseConversion** | Efficiency varies with load level | Gas engine efficiency curve |\n",
+ "| **PiecewiseEffects** | Costs vary non-linearly with size | Economies of scale |\n",
+ "\n",
+ "### Next Steps\n",
+ "\n",
+ "- **[06b-piecewise-conversion](06b-piecewise-conversion.ipynb)**: Load-dependent efficiency curves\n",
+ "- **[06c-piecewise-effects](06c-piecewise-effects.ipynb)**: Non-linear cost functions"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": "3.10.0"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/notebooks/06b-piecewise-conversion.ipynb b/docs/notebooks/06b-piecewise-conversion.ipynb
new file mode 100644
index 000000000..6493a843c
--- /dev/null
+++ b/docs/notebooks/06b-piecewise-conversion.ipynb
@@ -0,0 +1,4371 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "0",
+ "metadata": {},
+ "source": [
+ "# Piecewise Conversion\n",
+ "\n",
+ "Model equipment with **load-dependent efficiency** using piecewise linear approximation.\n",
+ "\n",
+ "**User Story:** A gas engine's efficiency varies with load - lower at part-load, optimal at mid-load. We want to capture this non-linear behavior in our optimization."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "1",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T13:46:20.634505Z",
+ "start_time": "2025-12-13T13:46:16.763911Z"
+ }
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "flixopt.config.CONFIG"
+ ]
+ },
+ "execution_count": 1,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "\n",
+ "import flixopt as fx\n",
+ "\n",
+ "fx.CONFIG.notebook()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "2",
+ "metadata": {},
+ "source": [
+ "## The Problem\n",
+ "\n",
+ "Real equipment efficiency varies with operating point:\n",
+ "\n",
+ "| Load Level | Electrical Efficiency | Reason |\n",
+ "|------------|----------------------|--------|\n",
+ "| 25-50% (part load) | 32-38% | Throttling losses |\n",
+ "| 50-75% (mid load) | 38-42% | Near design point |\n",
+ "| 75-100% (full load) | 42-40% | Thermal limits |\n",
+ "\n",
+ "A constant efficiency assumption misses this behavior."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3",
+ "metadata": {},
+ "source": [
+ "## Define the Efficiency Curve\n",
+ "\n",
+ "Each `Piece` defines corresponding fuel input and electricity output ranges:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "4",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T13:46:20.692018Z",
+ "start_time": "2025-12-13T13:46:20.688366Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "piecewise_efficiency = fx.PiecewiseConversion(\n",
+ " {\n",
+ " 'Fuel': fx.Piecewise(\n",
+ " [\n",
+ " fx.Piece(start=78, end=132), # Part load\n",
+ " fx.Piece(start=132, end=179), # Mid load\n",
+ " fx.Piece(start=179, end=250), # Full load\n",
+ " ]\n",
+ " ),\n",
+ " 'Elec': fx.Piecewise(\n",
+ " [\n",
+ " fx.Piece(start=25, end=50), # 32% -> 38% efficiency\n",
+ " fx.Piece(start=50, end=75), # 38% -> 42% efficiency\n",
+ " fx.Piece(start=75, end=100), # 42% -> 40% efficiency\n",
+ " ]\n",
+ " ),\n",
+ " }\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "5",
+ "metadata": {},
+ "source": [
+ "## Build and Solve"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "6",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T13:46:21.272711Z",
+ "start_time": "2025-12-13T13:46:20.704350Z"
+ }
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Running HiGHS 1.12.0 (git hash: 755a8e0): Copyright (c) 2025 HiGHS under MIT licence terms\n",
+ "MIP linopy-problem-0haogvbp has 298 rows; 394 cols; 1070 nonzeros; 72 integer variables (72 binary)\n",
+ "Coefficient ranges:\n",
+ " Matrix [5e-02, 2e+02]\n",
+ " Cost [1e+00, 1e+00]\n",
+ " Bound [1e+00, 3e+02]\n",
+ " RHS [1e+00, 1e+00]\n",
+ "Presolving model\n",
+ "168 rows, 240 cols, 672 nonzeros 0s\n",
+ "119 rows, 214 cols, 428 nonzeros 0s\n",
+ "97 rows, 60 cols, 115 nonzeros 0s\n",
+ "6 rows, 10 cols, 20 nonzeros 0s\n",
+ "Presolve reductions: rows 6(-292); columns 10(-384); nonzeros 20(-1050) \n",
+ "\n",
+ "Solving MIP model with:\n",
+ " 6 rows\n",
+ " 10 cols (2 binary, 0 integer, 0 implied int., 8 continuous, 0 domain fixed)\n",
+ " 20 nonzeros\n",
+ "\n",
+ "Src: B => Branching; C => Central rounding; F => Feasibility pump; H => Heuristic;\n",
+ " I => Shifting; J => Feasibility jump; L => Sub-MIP; P => Empty MIP; R => Randomized rounding;\n",
+ " S => Solve LP; T => Evaluate node; U => Unbounded; X => User solution; Y => HiGHS solution;\n",
+ " Z => ZI Round; l => Trivial lower; p => Trivial point; u => Trivial upper; z => Trivial zero\n",
+ "\n",
+ " Nodes | B&B Tree | Objective Bounds | Dynamic Constraints | Work \n",
+ "Src Proc. InQueue | Leaves Expl. | BestBound BestSol Gap | Cuts InLp Confl. | LpIters Time\n",
+ "\n",
+ " J 0 0 0 100.00% -inf 182.9596783 Large 0 0 0 0 0.0s\n",
+ " 1 0 1 100.00% 182.9596783 182.9596783 0.00% 0 0 0 0 0.0s\n",
+ "\n",
+ "Solving report\n",
+ " Model linopy-problem-0haogvbp\n",
+ " Status Optimal\n",
+ " Primal bound 182.959678343\n",
+ " Dual bound 182.959678343\n",
+ " Gap 0% (tolerance: 1%)\n",
+ " P-D integral 0\n",
+ " Solution status feasible\n",
+ " 182.959678343 (objective)\n",
+ " 0 (bound viol.)\n",
+ " 0 (int. viol.)\n",
+ " 0 (row viol.)\n",
+ " Timing 0.01\n",
+ " Max sub-MIP depth 0\n",
+ " Nodes 1\n",
+ " Repair LPs 0\n",
+ " LP iterations 0\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "FlowSystem\n",
+ "==========\n",
+ "Timesteps: 24 (Hour) [2024-01-22 to 2024-01-22]\n",
+ "Periods: None\n",
+ "Scenarios: None\n",
+ "Status: ✓\n",
+ "\n",
+ "Components (3 items)\n",
+ "--------------------\n",
+ " * GasEngine\n",
+ " * GasGrid\n",
+ " * Load\n",
+ "\n",
+ "Buses (2 items)\n",
+ "---------------\n",
+ " * Electricity\n",
+ " * Gas\n",
+ "\n",
+ "Effects (2 items)\n",
+ "-----------------\n",
+ " * costs\n",
+ " * Penalty\n",
+ "\n",
+ "Flows (4 items)\n",
+ "---------------\n",
+ " * GasEngine(Elec)\n",
+ " * GasEngine(Fuel)\n",
+ " * GasGrid(Gas)\n",
+ " * Load(Elec)"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "timesteps = pd.date_range('2024-01-22', periods=24, freq='h')\n",
+ "\n",
+ "# Demand varies through the day (30-90 kW, within piecewise range 25-100)\n",
+ "elec_demand = 60 + 30 * np.sin(np.arange(24) * np.pi / 12)\n",
+ "\n",
+ "fs = fx.FlowSystem(timesteps)\n",
+ "fs.add_elements(\n",
+ " fx.Bus('Gas'),\n",
+ " fx.Bus('Electricity'),\n",
+ " fx.Effect('costs', '€', is_standard=True, is_objective=True),\n",
+ " fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=300, effects_per_flow_hour=0.05)]),\n",
+ " fx.LinearConverter(\n",
+ " 'GasEngine',\n",
+ " inputs=[fx.Flow('Fuel', bus='Gas')],\n",
+ " outputs=[fx.Flow('Elec', bus='Electricity')],\n",
+ " piecewise_conversion=piecewise_efficiency,\n",
+ " ),\n",
+ " fx.Sink('Load', inputs=[fx.Flow('Elec', bus='Electricity', size=1, fixed_relative_profile=elec_demand)]),\n",
+ ")\n",
+ "\n",
+ "fs.optimize(fx.solvers.HighsSolver())"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "7",
+ "metadata": {},
+ "source": [
+ "## Visualize the Efficiency Curve"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "8",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T13:46:21.384359Z",
+ "start_time": "2025-12-13T13:46:21.288290Z"
+ }
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ " \n",
+ " \n",
+ " "
+ ]
+ },
+ "jetTransient": {
+ "display_id": null
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ ""
+ ]
+ },
+ "jetTransient": {
+ "display_id": null
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "fs.components['GasEngine'].piecewise_conversion.plot(x_flow='Fuel')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "9",
+ "metadata": {},
+ "source": [
+ "## Results"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "id": "10",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T13:46:22.068940Z",
+ "start_time": "2025-12-13T13:46:21.920317Z"
+ }
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ ""
+ ],
+ "text/plain": [
+ "PlotResult(data= Size: 600B\n",
+ "Dimensions: (time: 25)\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 200B 2024-01-22 ... 2024-01-23\n",
+ "Data variables:\n",
+ " GasEngine(Elec) (time) float64 200B -60.0 -67.76 -75.0 ... -45.0 -52.24 nan\n",
+ " Load(Elec) (time) float64 200B 60.0 67.76 75.0 ... 45.0 52.24 nan, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=GasEngine(Elec)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'GasEngine(Elec)',\n",
+ " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'GasEngine(Elec)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-22T00:00:00.000000000', '2024-01-22T01:00:00.000000000',\n",
+ " '2024-01-22T02:00:00.000000000', '2024-01-22T03:00:00.000000000',\n",
+ " '2024-01-22T04:00:00.000000000', '2024-01-22T05:00:00.000000000',\n",
+ " '2024-01-22T06:00:00.000000000', '2024-01-22T07:00:00.000000000',\n",
+ " '2024-01-22T08:00:00.000000000', '2024-01-22T09:00:00.000000000',\n",
+ " '2024-01-22T10:00:00.000000000', '2024-01-22T11:00:00.000000000',\n",
+ " '2024-01-22T12:00:00.000000000', '2024-01-22T13:00:00.000000000',\n",
+ " '2024-01-22T14:00:00.000000000', '2024-01-22T15:00:00.000000000',\n",
+ " '2024-01-22T16:00:00.000000000', '2024-01-22T17:00:00.000000000',\n",
+ " '2024-01-22T18:00:00.000000000', '2024-01-22T19:00:00.000000000',\n",
+ " '2024-01-22T20:00:00.000000000', '2024-01-22T21:00:00.000000000',\n",
+ " '2024-01-22T22:00:00.000000000', '2024-01-22T23:00:00.000000000',\n",
+ " '2024-01-23T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAATsDFOq+87vBQwAAAAAAAwF' ... '///39GwHOKoYYiHkrAAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=Load(Elec)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Load(Elec)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Load(Elec)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-22T00:00:00.000000000', '2024-01-22T01:00:00.000000000',\n",
+ " '2024-01-22T02:00:00.000000000', '2024-01-22T03:00:00.000000000',\n",
+ " '2024-01-22T04:00:00.000000000', '2024-01-22T05:00:00.000000000',\n",
+ " '2024-01-22T06:00:00.000000000', '2024-01-22T07:00:00.000000000',\n",
+ " '2024-01-22T08:00:00.000000000', '2024-01-22T09:00:00.000000000',\n",
+ " '2024-01-22T10:00:00.000000000', '2024-01-22T11:00:00.000000000',\n",
+ " '2024-01-22T12:00:00.000000000', '2024-01-22T13:00:00.000000000',\n",
+ " '2024-01-22T14:00:00.000000000', '2024-01-22T15:00:00.000000000',\n",
+ " '2024-01-22T16:00:00.000000000', '2024-01-22T17:00:00.000000000',\n",
+ " '2024-01-22T18:00:00.000000000', '2024-01-22T19:00:00.000000000',\n",
+ " '2024-01-22T20:00:00.000000000', '2024-01-22T21:00:00.000000000',\n",
+ " '2024-01-22T22:00:00.000000000', '2024-01-22T23:00:00.000000000',\n",
+ " '2024-01-23T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAATkDFOq+87vBQQAAAAAAAwF' ... '///39GQHOKoYYiHkpAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'bargap': 0,\n",
+ " 'bargroupgap': 0,\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Electricity (flow_rate)'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ]
+ },
+ "execution_count": 5,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "fs.statistics.plot.balance('Electricity')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "id": "11",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T13:46:22.102836Z",
+ "start_time": "2025-12-13T13:46:22.085158Z"
+ }
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Efficiency range: 33.8% - 41.9%\n",
+ "Total cost: 182.96 €\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Verify efficiency varies with load\n",
+ "fuel = fs.solution['GasEngine(Fuel)|flow_rate']\n",
+ "elec = fs.solution['GasEngine(Elec)|flow_rate']\n",
+ "efficiency = elec / fuel\n",
+ "\n",
+ "print(f'Efficiency range: {float(efficiency.min()):.1%} - {float(efficiency.max()):.1%}')\n",
+ "print(f'Total cost: {fs.solution[\"costs\"].item():.2f} €')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "12",
+ "metadata": {},
+ "source": [
+ "## Key Points\n",
+ "\n",
+ "**Syntax:**\n",
+ "```python\n",
+ "fx.PiecewiseConversion({\n",
+ " 'Input': fx.Piecewise([fx.Piece(start=a, end=b), ...]),\n",
+ " 'Output': fx.Piecewise([fx.Piece(start=x, end=y), ...]),\n",
+ "})\n",
+ "```\n",
+ "\n",
+ "**Rules:**\n",
+ "- All flows must have the **same number of segments**\n",
+ "- Segments typically **connect** (end of N = start of N+1)\n",
+ "- Efficiency = output / input at each point\n",
+ "\n",
+ "**Time-varying:** Pass arrays instead of scalars to model changing limits (e.g., temperature derating).\n",
+ "\n",
+ "**Next:** See [06c-piecewise-effects](06c-piecewise-effects.ipynb) for non-linear investment costs."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": "3.12.7"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/notebooks/06c-piecewise-effects.ipynb b/docs/notebooks/06c-piecewise-effects.ipynb
new file mode 100644
index 000000000..8f44b9cf2
--- /dev/null
+++ b/docs/notebooks/06c-piecewise-effects.ipynb
@@ -0,0 +1,4544 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "0",
+ "metadata": {},
+ "source": [
+ "# Piecewise Effects\n",
+ "\n",
+ "Model **non-linear investment costs** with economies of scale and discrete size tiers.\n",
+ "\n",
+ "This notebook demonstrates:\n",
+ "- **PiecewiseEffects**: Non-linear cost functions for investments\n",
+ "- **Gaps between pieces**: Representing discrete size tiers (unavailable sizes)\n",
+ "- How the optimizer selects from available size options"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "1",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T09:37:05.842524Z",
+ "start_time": "2025-12-13T09:37:01.302972Z"
+ }
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "flixopt.config.CONFIG"
+ ]
+ },
+ "execution_count": 1,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "\n",
+ "import flixopt as fx\n",
+ "\n",
+ "fx.CONFIG.notebook()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "2",
+ "metadata": {},
+ "source": [
+ "## The Problem: Discrete Size Tiers\n",
+ "\n",
+ "Real equipment often comes in **discrete sizes** with gaps between options:\n",
+ "\n",
+ "| Tier | Size Range | Cost per kWh | Notes |\n",
+ "|------|------------|--------------|-------|\n",
+ "| Small | 50-100 kWh | 0.20 €/kWh | Residential units |\n",
+ "| *Gap* | 100-200 kWh | *unavailable* | No products in this range |\n",
+ "| Medium | 200-400 kWh | 0.12 €/kWh | Commercial units |\n",
+ "| *Gap* | 400-500 kWh | *unavailable* | No products in this range |\n",
+ "| Large | 500-800 kWh | 0.06 €/kWh | Industrial units |\n",
+ "\n",
+ "The gaps represent size ranges where no products are available from manufacturers."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3",
+ "metadata": {},
+ "source": [
+ "## Define the Cost Curve with Gaps\n",
+ "\n",
+ "Each piece defines a size tier. Gaps between pieces are **forbidden** zones."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "4",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T09:37:05.891430Z",
+ "start_time": "2025-12-13T09:37:05.883541Z"
+ }
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Available size tiers:\n",
+ " Small: 50-100 kWh at 0.20 €/kWh\n",
+ " Medium: 200-400 kWh at 0.12 €/kWh\n",
+ " Large: 500-800 kWh at 0.06 €/kWh\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Piecewise costs with gaps between tiers\n",
+ "# Cost values are CUMULATIVE at each breakpoint\n",
+ "piecewise_costs = fx.PiecewiseEffects(\n",
+ " piecewise_origin=fx.Piecewise(\n",
+ " [\n",
+ " fx.Piece(start=50, end=100), # Small tier: 50-100 kWh\n",
+ " fx.Piece(start=200, end=400), # Medium tier: 200-400 kWh (gap: 100-200)\n",
+ " fx.Piece(start=500, end=800), # Large tier: 500-800 kWh (gap: 400-500)\n",
+ " ]\n",
+ " ),\n",
+ " piecewise_shares={\n",
+ " 'costs': fx.Piecewise(\n",
+ " [\n",
+ " fx.Piece(start=10, end=20), # 50kWh=10€, 100kWh=20€ → 0.20 €/kWh\n",
+ " fx.Piece(start=24, end=48), # 200kWh=24€, 400kWh=48€ → 0.12 €/kWh\n",
+ " fx.Piece(start=30, end=48), # 500kWh=30€, 800kWh=48€ → 0.06 €/kWh\n",
+ " ]\n",
+ " )\n",
+ " },\n",
+ ")\n",
+ "\n",
+ "print('Available size tiers:')\n",
+ "print(' Small: 50-100 kWh at 0.20 €/kWh')\n",
+ "print(' Medium: 200-400 kWh at 0.12 €/kWh')\n",
+ "print(' Large: 500-800 kWh at 0.06 €/kWh')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "8",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T09:37:05.919885Z",
+ "start_time": "2025-12-13T09:37:05.915254Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "timesteps = pd.date_range('2024-01-01', periods=24, freq='h')\n",
+ "\n",
+ "# Electricity price: cheap at night, expensive during day\n",
+ "elec_price = np.array(\n",
+ " [\n",
+ " 0.05,\n",
+ " 0.05,\n",
+ " 0.05,\n",
+ " 0.05,\n",
+ " 0.05,\n",
+ " 0.05, # 00-06: night (cheap)\n",
+ " 0.15,\n",
+ " 0.20,\n",
+ " 0.25,\n",
+ " 0.25,\n",
+ " 0.20,\n",
+ " 0.15, # 06-12: morning\n",
+ " 0.15,\n",
+ " 0.20,\n",
+ " 0.25,\n",
+ " 0.30,\n",
+ " 0.30,\n",
+ " 0.25, # 12-18: afternoon (expensive)\n",
+ " 0.20,\n",
+ " 0.15,\n",
+ " 0.10,\n",
+ " 0.08,\n",
+ " 0.06,\n",
+ " 0.05, # 18-24: evening\n",
+ " ]\n",
+ ")\n",
+ "\n",
+ "demand = np.full(24, 100) # 100 kW constant demand"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "120b3beb025756ef",
+ "metadata": {},
+ "source": [
+ "## Simple Arbitrage Scenario\n",
+ "\n",
+ "A battery arbitrages between cheap night and expensive day electricity."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "9",
+ "metadata": {},
+ "source": [
+ "## Build and Solve the Model"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "10",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T09:37:07.048599Z",
+ "start_time": "2025-12-13T09:37:05.935256Z"
+ }
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Running HiGHS 1.12.0 (git hash: 755a8e0): Copyright (c) 2025 HiGHS under MIT licence terms\n",
+ "MIP linopy-problem-wrb0ote0 has 437 rows; 294 cols; 1098 nonzeros; 54 integer variables (54 binary)\n",
+ "Coefficient ranges:\n",
+ " Matrix [1e-05, 8e+02]\n",
+ " Cost [1e+00, 1e+00]\n",
+ " Bound [1e+00, 8e+02]\n",
+ " RHS [1e+00, 1e+00]\n",
+ "Presolving model\n",
+ "253 rows, 159 cols, 588 nonzeros 0s\n",
+ "152 rows, 107 cols, 504 nonzeros 0s\n",
+ "151 rows, 107 cols, 500 nonzeros 0s\n",
+ "Presolve reductions: rows 151(-286); columns 107(-187); nonzeros 500(-598) \n",
+ "\n",
+ "Solving MIP model with:\n",
+ " 151 rows\n",
+ " 107 cols (52 binary, 0 integer, 0 implied int., 55 continuous, 0 domain fixed)\n",
+ " 500 nonzeros\n",
+ "\n",
+ "Src: B => Branching; C => Central rounding; F => Feasibility pump; H => Heuristic;\n",
+ " I => Shifting; J => Feasibility jump; L => Sub-MIP; P => Empty MIP; R => Randomized rounding;\n",
+ " S => Solve LP; T => Evaluate node; U => Unbounded; X => User solution; Y => HiGHS solution;\n",
+ " Z => ZI Round; l => Trivial lower; p => Trivial point; u => Trivial upper; z => Trivial zero\n",
+ "\n",
+ " Nodes | B&B Tree | Objective Bounds | Dynamic Constraints | Work \n",
+ "Src Proc. InQueue | Leaves Expl. | BestBound BestSol Gap | Cuts InLp Confl. | LpIters Time\n",
+ "\n",
+ " J 0 0 0 0.00% -inf 359 Large 0 0 0 0 0.0s\n",
+ " 0 0 0 0.00% 248.9944598 359 30.64% 0 0 0 62 0.0s\n",
+ " L 0 0 0 0.00% 248.9944598 248.9944598 0.00% 32 11 0 73 0.0s\n",
+ " 1 0 1 100.00% 248.9944598 248.9944598 0.00% 32 11 0 82 0.0s\n",
+ "\n",
+ "Solving report\n",
+ " Model linopy-problem-wrb0ote0\n",
+ " Status Optimal\n",
+ " Primal bound 248.994459834\n",
+ " Dual bound 248.994459834\n",
+ " Gap 0% (tolerance: 1%)\n",
+ " P-D integral 0.00660979209716\n",
+ " Solution status feasible\n",
+ " 248.994459834 (objective)\n",
+ " 0 (bound viol.)\n",
+ " 6.43929354283e-15 (int. viol.)\n",
+ " 0 (row viol.)\n",
+ " Timing 0.03\n",
+ " Max sub-MIP depth 1\n",
+ " Nodes 1\n",
+ " Repair LPs 0\n",
+ " LP iterations 82\n",
+ " 0 (strong br.)\n",
+ " 11 (separation)\n",
+ " 9 (heuristics)\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "FlowSystem\n",
+ "==========\n",
+ "Timesteps: 24 (Hour) [2024-01-01 to 2024-01-01]\n",
+ "Periods: None\n",
+ "Scenarios: None\n",
+ "Status: ✓\n",
+ "\n",
+ "Components (3 items)\n",
+ "--------------------\n",
+ " * Battery\n",
+ " * Demand\n",
+ " * Grid\n",
+ "\n",
+ "Buses (1 item)\n",
+ "--------------\n",
+ " * Elec\n",
+ "\n",
+ "Effects (2 items)\n",
+ "-----------------\n",
+ " * costs\n",
+ " * Penalty\n",
+ "\n",
+ "Flows (4 items)\n",
+ "---------------\n",
+ " * Battery(charge)\n",
+ " * Battery(discharge)\n",
+ " * Demand(Elec)\n",
+ " * Grid(Elec)"
+ ]
+ },
+ "execution_count": 4,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "fs = fx.FlowSystem(timesteps)\n",
+ "\n",
+ "fs.add_elements(\n",
+ " fx.Bus('Elec'),\n",
+ " fx.Effect('costs', '€', is_standard=True, is_objective=True),\n",
+ " # Grid with time-varying price\n",
+ " fx.Source('Grid', outputs=[fx.Flow('Elec', bus='Elec', size=500, effects_per_flow_hour=elec_price)]),\n",
+ " # Battery with PIECEWISE investment cost (discrete tiers)\n",
+ " fx.Storage(\n",
+ " 'Battery',\n",
+ " charging=fx.Flow('charge', bus='Elec', size=fx.InvestParameters(maximum_size=400)),\n",
+ " discharging=fx.Flow('discharge', bus='Elec', size=fx.InvestParameters(maximum_size=400)),\n",
+ " capacity_in_flow_hours=fx.InvestParameters(\n",
+ " piecewise_effects_of_investment=piecewise_costs,\n",
+ " minimum_size=0,\n",
+ " maximum_size=800,\n",
+ " ),\n",
+ " eta_charge=0.95,\n",
+ " eta_discharge=0.95,\n",
+ " initial_charge_state=0,\n",
+ " ),\n",
+ " fx.Sink('Demand', inputs=[fx.Flow('Elec', bus='Elec', size=1, fixed_relative_profile=demand)]),\n",
+ ")\n",
+ "\n",
+ "fs.optimize(fx.solvers.HighsSolver())"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "be5dc58de4a3c809",
+ "metadata": {},
+ "source": [
+ "## Visualize the Cost Curve\n",
+ "\n",
+ "The\n",
+ "plot\n",
+ "shows\n",
+ "the\n",
+ "three\n",
+ "discrete\n",
+ "tiers\n",
+ "with gaps between them."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "id": "c734d019ece6c6fe",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T09:37:07.301104Z",
+ "start_time": "2025-12-13T09:37:07.136275Z"
+ }
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ " \n",
+ " \n",
+ " "
+ ]
+ },
+ "jetTransient": {
+ "display_id": null
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ ""
+ ]
+ },
+ "jetTransient": {
+ "display_id": null
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "piecewise_costs.plot(title='Battery Investment Cost (Discrete Tiers)')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "39b4ec726d6d43c1",
+ "metadata": {},
+ "source": "## Results: Which Tier Was Selected?"
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "id": "12",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T09:37:08.189381Z",
+ "start_time": "2025-12-13T09:37:08.142348Z"
+ }
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Selected tier: Large (500-800 kWh)\n",
+ "Battery size: 800 kWh\n",
+ "Total cost: 249.0 €\n"
+ ]
+ }
+ ],
+ "source": [
+ "battery_size = fs.solution['Battery|size'].item()\n",
+ "total_cost = fs.solution['costs'].item()\n",
+ "\n",
+ "# Determine which tier was selected\n",
+ "if battery_size < 1:\n",
+ " tier = 'None'\n",
+ "elif battery_size <= 100:\n",
+ " tier = 'Small (50-100 kWh)'\n",
+ "elif battery_size <= 400:\n",
+ " tier = 'Medium (200-400 kWh)'\n",
+ "else:\n",
+ " tier = 'Large (500-800 kWh)'\n",
+ "\n",
+ "print(f'Selected tier: {tier}')\n",
+ "print(f'Battery size: {battery_size:.0f} kWh')\n",
+ "print(f'Total cost: {total_cost:.1f} €')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "13",
+ "metadata": {},
+ "source": [
+ "## Storage Operation"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "id": "14",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T09:37:08.407306Z",
+ "start_time": "2025-12-13T09:37:08.263634Z"
+ }
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ ""
+ ],
+ "text/plain": [
+ "PlotResult(data= Size: 1kB\n",
+ "Dimensions: (time: 25)\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 200B 2024-01-01 ... 2024-01-02\n",
+ "Data variables:\n",
+ " Grid(Elec) (time) float64 200B -100.0 -100.0 -100.0 ... -100.0 nan\n",
+ " Battery(discharge) (time) float64 200B -0.0 -7.267e-14 ... 1.243e-13 nan\n",
+ " Battery(charge) (time) float64 200B 0.0 4.425e-14 ... -1.385e-13 nan\n",
+ " Demand(Elec) (time) float64 200B 100.0 100.0 100.0 ... 100.0 nan, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=Grid(Elec)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'Grid(Elec)',\n",
+ " 'marker': {'color': '#636EFA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Grid(Elec)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAWcD+//////9YwPD//////1' ... '////9YwP///////1jAAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=Battery(discharge)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'Battery(discharge)',\n",
+ " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Battery(discharge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAIDs2puCeHQ0vfWsvI9KlT' ... 'zLt3xBPcy3fMu3fEE9AAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=Battery(charge)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'Battery(charge)',\n",
+ " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Battery(charge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAADZtTcF8egoPT0r76NS5V' ... 'zLt3xDvcy3fMu3fEO9AAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=Demand(Elec)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'Demand(Elec)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Demand(Elec)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAWUAAAAAAAABZQAAAAAAAAF' ... 'AAAABZQAAAAAAAAFlAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'bargap': 0,\n",
+ " 'bargroupgap': 0,\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Elec (flow_rate)'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ]
+ },
+ "execution_count": 7,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "fs.statistics.plot.balance('Elec')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "15",
+ "metadata": {},
+ "source": [
+ "## Best Practice: PiecewiseEffects with Gaps\n",
+ "\n",
+ "```python\n",
+ "fx.PiecewiseEffects(\n",
+ " piecewise_origin=fx.Piecewise([\n",
+ " fx.Piece(start=50, end=100), # Tier 1\n",
+ " fx.Piece(start=200, end=400), # Tier 2 (gap: 100-200 forbidden)\n",
+ " ]),\n",
+ " piecewise_shares={\n",
+ " 'costs': fx.Piecewise([\n",
+ " fx.Piece(start=10, end=20), # Cumulative cost at tier 1 boundaries\n",
+ " fx.Piece(start=24, end=48), # Cumulative cost at tier 2 boundaries\n",
+ " ])\n",
+ " },\n",
+ ")\n",
+ "```\n",
+ "\n",
+ "**Key points:**\n",
+ "- Gaps between pieces = forbidden size ranges\n",
+ "- Cost values are **cumulative** at each boundary\n",
+ "- Use when equipment comes in discrete tiers"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "16",
+ "metadata": {},
+ "source": "## Previous: Piecewise Conversion\n\nSee **[06b-piecewise-conversion](06b-piecewise-conversion.ipynb)** for modeling minimum load constraints with `PiecewiseConversion` + `StatusParameters`."
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": "3.12.7"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/notebooks/07-scenarios-and-periods.ipynb b/docs/notebooks/07-scenarios-and-periods.ipynb
new file mode 100644
index 000000000..1b3b761d8
--- /dev/null
+++ b/docs/notebooks/07-scenarios-and-periods.ipynb
@@ -0,0 +1,542 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "0",
+ "metadata": {},
+ "source": "# Scenarios and Periods\n\nMulti-year planning with uncertain demand scenarios.\n\nThis notebook introduces:\n\n- **Periods**: Multiple planning years with different conditions\n- **Scenarios**: Uncertain futures (mild vs. harsh winter)\n- **Scenario weights**: Probability-weighted optimization\n- **Multi-dimensional data**: Parameters that vary by time, period, and scenario"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1",
+ "metadata": {},
+ "source": [
+ "## Setup"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import plotly.express as px\n",
+ "\n",
+ "import flixopt as fx\n",
+ "\n",
+ "fx.CONFIG.notebook()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3",
+ "metadata": {},
+ "source": [
+ "## The Planning Problem\n",
+ "\n",
+ "We're designing a heating system with:\n",
+ "\n",
+ "- **3 periods** (years): 2024, 2025, 2026 - gas prices expected to rise\n",
+ "- **2 scenarios**: \"Mild Winter\" (60% probability) and \"Harsh Winter\" (40% probability)\n",
+ "- **Investment decision**: Size of CHP unit (made once, works across all futures)\n",
+ "\n",
+ "The optimizer finds the investment that minimizes **expected cost** across all scenarios."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4",
+ "metadata": {},
+ "source": [
+ "## Define Dimensions"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "5",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Time horizon: one representative winter week\n",
+ "timesteps = pd.date_range('2024-01-15', periods=168, freq='h') # 7 days\n",
+ "\n",
+ "# Planning periods (years)\n",
+ "periods = pd.Index([2024, 2025, 2026], name='period')\n",
+ "\n",
+ "# Scenarios with probabilities\n",
+ "scenarios = pd.Index(['Mild Winter', 'Harsh Winter'], name='scenario')\n",
+ "scenario_weights = np.array([0.6, 0.4]) # 60% mild, 40% harsh\n",
+ "\n",
+ "print(f'Time dimension: {len(timesteps)} hours')\n",
+ "print(f'Periods: {list(periods)}')\n",
+ "print(f'Scenarios: {list(scenarios)}')\n",
+ "print(f'Scenario weights: {dict(zip(scenarios, scenario_weights, strict=False))}')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6",
+ "metadata": {},
+ "source": [
+ "## Create Scenario-Dependent Demand Profiles\n",
+ "\n",
+ "Heat demand differs significantly between mild and harsh winters:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "7",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "hours = np.arange(168)\n",
+ "hour_of_day = hours % 24\n",
+ "\n",
+ "# Base daily pattern (kW): higher in morning/evening\n",
+ "daily_pattern = np.select(\n",
+ " [\n",
+ " (hour_of_day >= 6) & (hour_of_day < 9), # Morning peak\n",
+ " (hour_of_day >= 9) & (hour_of_day < 17), # Daytime\n",
+ " (hour_of_day >= 17) & (hour_of_day < 22), # Evening peak\n",
+ " ],\n",
+ " [180, 120, 160],\n",
+ " default=100, # Night\n",
+ ").astype(float)\n",
+ "\n",
+ "# Add random variation\n",
+ "np.random.seed(42)\n",
+ "noise = np.random.normal(0, 10, len(timesteps))\n",
+ "\n",
+ "# Mild winter: lower demand\n",
+ "mild_demand = daily_pattern * 0.8 + noise\n",
+ "mild_demand = np.clip(mild_demand, 60, 200)\n",
+ "\n",
+ "# Harsh winter: higher demand\n",
+ "harsh_demand = daily_pattern * 1.3 + noise * 1.5\n",
+ "harsh_demand = np.clip(harsh_demand, 100, 280)\n",
+ "\n",
+ "# Create DataFrame with scenario columns (flixopt uses column names to match scenarios)\n",
+ "heat_demand = pd.DataFrame(\n",
+ " {\n",
+ " 'Mild Winter': mild_demand,\n",
+ " 'Harsh Winter': harsh_demand,\n",
+ " },\n",
+ " index=timesteps,\n",
+ ")\n",
+ "\n",
+ "print(f'Mild winter demand: {mild_demand.min():.0f} - {mild_demand.max():.0f} kW')\n",
+ "print(f'Harsh winter demand: {harsh_demand.min():.0f} - {harsh_demand.max():.0f} kW')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "8",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Visualize demand scenarios with plotly\n",
+ "fig = px.line(\n",
+ " heat_demand.iloc[:48],\n",
+ " title='Heat Demand by Scenario (First 2 Days)',\n",
+ " labels={'index': 'Time', 'value': 'kW', 'variable': 'Scenario'},\n",
+ ")\n",
+ "fig.update_traces(mode='lines')\n",
+ "fig"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "9",
+ "metadata": {},
+ "source": [
+ "## Create Period-Dependent Prices\n",
+ "\n",
+ "Energy prices change across planning years:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "10",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Gas prices by period (€/kWh) - expected to rise\n",
+ "gas_prices = np.array([0.06, 0.08, 0.10]) # 2024, 2025, 2026\n",
+ "\n",
+ "# Electricity sell prices by period (€/kWh) - CHP revenue\n",
+ "elec_prices = np.array([0.28, 0.34, 0.43]) # Rising with gas\n",
+ "\n",
+ "print('Gas prices by period:')\n",
+ "for period, price in zip(periods, gas_prices, strict=False):\n",
+ " print(f' {period}: {price:.2f} €/kWh')\n",
+ "\n",
+ "print('\\nElectricity sell prices by period:')\n",
+ "for period, price in zip(periods, elec_prices, strict=False):\n",
+ " print(f' {period}: {price:.2f} €/kWh')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "11",
+ "metadata": {},
+ "source": [
+ "## Build the Flow System\n",
+ "\n",
+ "Initialize with all dimensions:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "12",
+ "metadata": {},
+ "outputs": [],
+ "source": "flow_system = fx.FlowSystem(\n timesteps=timesteps,\n periods=periods,\n scenarios=scenarios,\n scenario_weights=scenario_weights,\n)\nflow_system.add_carriers(\n fx.Carrier('gas', '#3498db', 'kW'),\n fx.Carrier('electricity', '#f1c40f', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n)\n\nprint(flow_system)"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "13",
+ "metadata": {},
+ "source": [
+ "## Add Components"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "14",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.add_elements(\n",
+ " # === Buses ===\n",
+ " fx.Bus('Electricity', carrier='electricity'),\n",
+ " fx.Bus('Heat', carrier='heat'),\n",
+ " fx.Bus('Gas', carrier='gas'),\n",
+ " # === Effects ===\n",
+ " fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n",
+ " # === Gas Supply (price varies by period) ===\n",
+ " fx.Source(\n",
+ " 'GasGrid',\n",
+ " outputs=[\n",
+ " fx.Flow(\n",
+ " 'Gas',\n",
+ " bus='Gas',\n",
+ " size=1000,\n",
+ " effects_per_flow_hour=gas_prices, # Array = varies by period\n",
+ " )\n",
+ " ],\n",
+ " ),\n",
+ " # === CHP Unit (investment decision) ===\n",
+ " fx.linear_converters.CHP(\n",
+ " 'CHP',\n",
+ " electrical_efficiency=0.35,\n",
+ " thermal_efficiency=0.50,\n",
+ " electrical_flow=fx.Flow(\n",
+ " 'P_el',\n",
+ " bus='Electricity',\n",
+ " # Investment optimization: find optimal CHP size\n",
+ " size=fx.InvestParameters(\n",
+ " minimum_size=0,\n",
+ " maximum_size=100,\n",
+ " effects_of_investment_per_size={'costs': 50}, # 50 €/kW annualized\n",
+ " ),\n",
+ " relative_minimum=0.3,\n",
+ " ),\n",
+ " thermal_flow=fx.Flow('Q_th', bus='Heat'),\n",
+ " fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n",
+ " ),\n",
+ " # === Gas Boiler (existing backup) ===\n",
+ " fx.linear_converters.Boiler(\n",
+ " 'Boiler',\n",
+ " thermal_efficiency=0.90,\n",
+ " thermal_flow=fx.Flow('Q_th', bus='Heat', size=500),\n",
+ " fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n",
+ " ),\n",
+ " # === Electricity Sales (revenue varies by period) ===\n",
+ " fx.Sink(\n",
+ " 'ElecSales',\n",
+ " inputs=[\n",
+ " fx.Flow(\n",
+ " 'P_el',\n",
+ " bus='Electricity',\n",
+ " size=100,\n",
+ " effects_per_flow_hour=-elec_prices, # Negative = revenue\n",
+ " )\n",
+ " ],\n",
+ " ),\n",
+ " # === Heat Demand (varies by scenario) ===\n",
+ " fx.Sink(\n",
+ " 'HeatDemand',\n",
+ " inputs=[\n",
+ " fx.Flow(\n",
+ " 'Q_th',\n",
+ " bus='Heat',\n",
+ " size=1,\n",
+ " fixed_relative_profile=heat_demand, # DataFrame with scenario columns\n",
+ " )\n",
+ " ],\n",
+ " ),\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "15",
+ "metadata": {},
+ "source": [
+ "## Run Optimization"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "16",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.optimize(fx.solvers.HighsSolver(mip_gap=0.01));"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "17",
+ "metadata": {},
+ "source": [
+ "## Analyze Results\n",
+ "\n",
+ "### Optimal Investment Decision"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "18",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "chp_size = flow_system.statistics.sizes['CHP(P_el)']\n",
+ "total_cost = flow_system.solution['costs']\n",
+ "\n",
+ "print('=== Investment Decision ===')\n",
+ "print(f'Optimal CHP size: {chp_size.round(1).to_pandas()} kW electrical')\n",
+ "print(f'Thermal capacity: {(chp_size * 0.50 / 0.35).round(1).to_pandas()} kW')\n",
+ "print(f'\\nExpected total cost: {total_cost.round(2).to_pandas()} €')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "19",
+ "metadata": {},
+ "source": [
+ "### Heat Balance by Scenario\n",
+ "\n",
+ "See how the system operates differently in each scenario:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "20",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.balance('Heat')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "21",
+ "metadata": {},
+ "source": [
+ "### CHP Operation Patterns"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "22",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.heatmap('CHP(Q_th)')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "23",
+ "metadata": {},
+ "source": [
+ "### Multi-Dimensional Data Access\n",
+ "\n",
+ "Results include all dimensions (time, period, scenario):"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "24",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# View dimensions\n",
+ "flow_rates = flow_system.statistics.flow_rates\n",
+ "print('Flow rates dimensions:', dict(flow_rates.sizes))\n",
+ "\n",
+ "# Plot flow rates\n",
+ "flow_system.statistics.plot.flows()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "25",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# CHP operation in harsh winter vs mild winter\n",
+ "chp_heat = flow_rates['CHP(Q_th)']\n",
+ "\n",
+ "print('CHP Heat Output Statistics:')\n",
+ "for scenario in scenarios:\n",
+ " scenario_data = chp_heat.sel(scenario=scenario)\n",
+ " print(f'\\n{scenario}:')\n",
+ " for period in periods:\n",
+ " period_data = scenario_data.sel(period=period)\n",
+ " print(f' {period}: avg={period_data.mean().item():.1f} kW, max={period_data.max().item():.1f} kW')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "26",
+ "metadata": {},
+ "source": [
+ "## Sensitivity: What if Only Mild Winter?\n",
+ "\n",
+ "Compare optimal CHP size if we only planned for mild winters:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "27",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Select only the mild winter scenario\n",
+ "fs_mild = flow_system.transform.sel(scenario='Mild Winter')\n",
+ "fs_mild.optimize(fx.solvers.HighsSolver(mip_gap=0.01))\n",
+ "\n",
+ "chp_size_mild = fs_mild.statistics.sizes['CHP(P_el)']\n",
+ "\n",
+ "print('=== Comparison ===')\n",
+ "print(f'CHP size (both scenarios): {chp_size.max(\"scenario\").round(2).values} kW')\n",
+ "print(f'CHP size (mild only): {chp_size_mild.round(2).values} kW')\n",
+ "print(f'\\nPlanning for uncertainty adds {(chp_size - chp_size_mild).round(2).values} kW capacity')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "28",
+ "metadata": {},
+ "source": [
+ "### Energy Flow Sankey\n",
+ "\n",
+ "A Sankey diagram visualizes the total energy flows through the system:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "29",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flow_system.statistics.plot.sankey.flows()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "30",
+ "metadata": {},
+ "source": [
+ "## Key Concepts\n",
+ "\n",
+ "### Multi-Dimensional FlowSystem\n",
+ "\n",
+ "```python\n",
+ "flow_system = fx.FlowSystem(\n",
+ " timesteps=timesteps, # Time dimension\n",
+ " periods=periods, # Planning periods (years)\n",
+ " scenarios=scenarios, # Uncertain futures\n",
+ " scenario_weights=weights, # Probabilities\n",
+ ")\n",
+ "```\n",
+ "\n",
+ "### Dimension-Varying Parameters\n",
+ "\n",
+ "| Data Shape | Meaning |\n",
+ "|------------|----------|\n",
+ "| Scalar | Same for all time/period/scenario |\n",
+ "| Array (n_periods,) | Varies by period |\n",
+ "| Array (n_scenarios,) | Varies by scenario |\n",
+ "| DataFrame with columns | Columns match scenario names |\n",
+ "| Full array (time, period, scenario) | Full specification |\n",
+ "\n",
+ "### Scenario Optimization\n",
+ "\n",
+ "The optimizer minimizes **expected cost**:\n",
+ "$$\\min \\sum_s w_s \\cdot \\text{Cost}_s$$\n",
+ "\n",
+ "where $w_s$ is the scenario weight (probability).\n",
+ "\n",
+ "### Selection Methods\n",
+ "\n",
+ "```python\n",
+ "# Select specific scenario\n",
+ "fs_mild = flow_system.transform.sel(scenario='Mild Winter')\n",
+ "\n",
+ "# Select specific period\n",
+ "fs_2025 = flow_system.transform.sel(period=2025)\n",
+ "\n",
+ "# Select time range\n",
+ "fs_day1 = flow_system.transform.sel(time=slice('2024-01-15', '2024-01-16'))\n",
+ "```\n",
+ "\n",
+ "## Summary\n",
+ "\n",
+ "You learned how to:\n",
+ "\n",
+ "- Define **multiple periods** for multi-year planning\n",
+ "- Create **scenarios** for uncertain futures\n",
+ "- Use **scenario weights** for probability-weighted optimization\n",
+ "- Pass **dimension-varying parameters** (arrays and DataFrames)\n",
+ "- **Select** specific scenarios or periods for analysis\n",
+ "\n",
+ "### Next Steps\n",
+ "\n",
+ "- **[08-large-scale-optimization](08-large-scale-optimization.ipynb)**: Speed up large problems with resampling and clustering"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": "3.11.0"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/notebooks/08-large-scale-optimization.ipynb b/docs/notebooks/08-large-scale-optimization.ipynb
new file mode 100644
index 000000000..61cae35c5
--- /dev/null
+++ b/docs/notebooks/08-large-scale-optimization.ipynb
@@ -0,0 +1,440 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "0",
+ "metadata": {},
+ "source": "# Large-Scale\n\nSpeed up large problems with resampling and two-stage optimization.\n\nThis notebook introduces:\n\n- **Resampling**: Reduce time resolution (e.g., hourly → 4-hourly)\n- **Clustering**: Identify typical periods (e.g., 8 representative days)\n- **Two-stage optimization**: Size with reduced data, dispatch at full resolution\n- **Speed vs. accuracy trade-offs**: When to use each technique"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1",
+ "metadata": {},
+ "source": [
+ "## Setup"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import timeit\n",
+ "\n",
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import plotly.express as px\n",
+ "import xarray as xr\n",
+ "\n",
+ "import flixopt as fx\n",
+ "\n",
+ "fx.CONFIG.notebook()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3",
+ "metadata": {},
+ "source": [
+ "## Create a Realistic Annual Dataset\n",
+ "\n",
+ "We simulate one month of hourly data (720 timesteps) to demonstrate the techniques:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "4",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# One month at hourly resolution\n",
+ "timesteps = pd.date_range('2024-01-01', periods=720, freq='h') # 30 days\n",
+ "hours = np.arange(len(timesteps))\n",
+ "hour_of_day = hours % 24\n",
+ "day_of_month = hours // 24\n",
+ "\n",
+ "print(f'Timesteps: {len(timesteps)} hours ({len(timesteps) / 24:.0f} days)')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "5",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "np.random.seed(42)\n",
+ "\n",
+ "# Heat demand: daily pattern with weekly variation\n",
+ "daily_pattern = np.select(\n",
+ " [\n",
+ " (hour_of_day >= 6) & (hour_of_day < 9),\n",
+ " (hour_of_day >= 9) & (hour_of_day < 17),\n",
+ " (hour_of_day >= 17) & (hour_of_day < 22),\n",
+ " ],\n",
+ " [200, 150, 180],\n",
+ " default=100,\n",
+ ").astype(float)\n",
+ "\n",
+ "# Add temperature effect (colder mid-month)\n",
+ "temp_effect = 1 + 0.3 * np.sin(day_of_month * np.pi / 30)\n",
+ "heat_demand = daily_pattern * temp_effect + np.random.normal(0, 10, len(timesteps))\n",
+ "heat_demand = np.clip(heat_demand, 80, 300)\n",
+ "\n",
+ "# Electricity price: time-of-use with volatility - high prices make CHP attractive\n",
+ "base_price = np.where((hour_of_day >= 7) & (hour_of_day <= 21), 0.32, 0.18)\n",
+ "elec_price = base_price * (1 + np.random.uniform(-0.15, 0.15, len(timesteps)))\n",
+ "\n",
+ "# Gas price: relatively stable and low\n",
+ "gas_price = 0.05 + np.random.uniform(-0.005, 0.005, len(timesteps))\n",
+ "\n",
+ "print(f'Heat demand: {heat_demand.min():.0f} - {heat_demand.max():.0f} kW')\n",
+ "print(f'Elec price: {elec_price.min():.3f} - {elec_price.max():.3f} €/kWh')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Visualize first week with plotly - using xarray and faceting\n",
+ "profiles = xr.Dataset(\n",
+ " {\n",
+ " 'Heat Demand [kW]': xr.DataArray(heat_demand[:168], dims=['time'], coords={'time': timesteps[:168]}),\n",
+ " 'Electricity Price [€/kWh]': xr.DataArray(elec_price[:168], dims=['time'], coords={'time': timesteps[:168]}),\n",
+ " }\n",
+ ")\n",
+ "\n",
+ "df = profiles.to_dataframe().reset_index().melt(id_vars='time', var_name='variable', value_name='value')\n",
+ "fig = px.line(df, x='time', y='value', facet_col='variable', height=300)\n",
+ "fig.update_yaxes(matches=None, showticklabels=True)\n",
+ "fig.for_each_annotation(lambda a: a.update(text=a.text.split('=')[-1]))\n",
+ "fig"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "7",
+ "metadata": {},
+ "source": [
+ "## Build the Base FlowSystem\n",
+ "\n",
+ "A typical district heating system with investment decisions:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "8",
+ "metadata": {},
+ "outputs": [],
+ "source": "def build_system(timesteps, heat_demand, elec_price, gas_price):\n \"\"\"Build a FlowSystem with investment optimization.\"\"\"\n fs = fx.FlowSystem(timesteps)\n fs.add_carriers(\n fx.Carrier('gas', '#3498db', 'kW'),\n fx.Carrier('electricity', '#f1c40f', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n )\n fs.add_elements(\n # Buses\n fx.Bus('Electricity', carrier='electricity'),\n fx.Bus(\n 'Heat', carrier='heat', imbalance_penalty_per_flow_hour=1e5\n ), # Allow for imbalance to prevent infeasibilities with fixed sizes\n fx.Bus('Gas', carrier='gas'),\n # Effects\n fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n # Gas Supply\n fx.Source(\n 'GasGrid',\n outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=gas_price)],\n ),\n # CHP with investment optimization\n fx.linear_converters.CHP(\n 'CHP',\n electrical_efficiency=0.38,\n thermal_efficiency=0.47,\n electrical_flow=fx.Flow(\n 'P_el',\n bus='Electricity',\n size=fx.InvestParameters(\n minimum_size=0,\n maximum_size=150,\n effects_of_investment_per_size={'costs': 25},\n ),\n relative_minimum=0.4,\n ),\n thermal_flow=fx.Flow('Q_th', bus='Heat'),\n fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n ),\n # Gas Boiler with investment optimization\n fx.linear_converters.Boiler(\n 'Boiler',\n thermal_efficiency=0.92,\n thermal_flow=fx.Flow(\n 'Q_th',\n bus='Heat',\n size=fx.InvestParameters(\n minimum_size=0,\n maximum_size=400,\n effects_of_investment_per_size={'costs': 8},\n ),\n ),\n fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n ),\n # Thermal Storage with investment optimization\n fx.Storage(\n 'Storage',\n capacity_in_flow_hours=fx.InvestParameters(\n minimum_size=0,\n maximum_size=1000,\n effects_of_investment_per_size={'costs': 0.5}, # Cheap storage\n ),\n initial_charge_state=0,\n eta_charge=0.98,\n eta_discharge=0.98,\n relative_loss_per_hour=0.005, # Low losses\n charging=fx.Flow('Charge', bus='Heat', size=150),\n discharging=fx.Flow('Discharge', bus='Heat', 
size=150),\n ),\n # Electricity Sales\n fx.Sink(\n 'ElecSales',\n inputs=[fx.Flow('P_el', bus='Electricity', size=200, effects_per_flow_hour=-elec_price)],\n ),\n # Heat Demand\n fx.Sink(\n 'HeatDemand',\n inputs=[fx.Flow('Q_th', bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n ),\n )\n\n return fs\n\n\n# Build the base system\nflow_system = build_system(timesteps, heat_demand, elec_price, gas_price)\nprint(f'Base system: {len(timesteps)} timesteps')"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "9",
+ "metadata": {},
+ "source": [
+ "## Technique 1: Resampling\n",
+ "\n",
+ "Reduce time resolution to speed up optimization:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "10",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "solver = fx.solvers.HighsSolver(mip_gap=0.01)\n",
+ "\n",
+ "# Resample from 1h to 4h resolution\n",
+ "fs_resampled = flow_system.transform.resample('4h')\n",
+ "\n",
+ "print(f'Original: {len(flow_system.timesteps)} timesteps')\n",
+ "print(f'Resampled: {len(fs_resampled.timesteps)} timesteps')\n",
+ "print(f'Reduction: {(1 - len(fs_resampled.timesteps) / len(flow_system.timesteps)) * 100:.0f}%')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "11",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Optimize resampled system\n",
+ "start = timeit.default_timer()\n",
+ "fs_resampled.optimize(solver)\n",
+ "time_resampled = timeit.default_timer() - start\n",
+ "\n",
+ "print(f'\\nResampled optimization: {time_resampled:.2f} seconds')\n",
+ "print(f'Cost: {fs_resampled.solution[\"costs\"].item():.2f} €')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "12",
+ "metadata": {},
+ "source": [
+ "## Technique 2: Two-Stage Optimization\n",
+ "\n",
+ "1. **Stage 1**: Size components with resampled data (fast)\n",
+ "2. **Stage 2**: Fix sizes and optimize dispatch at full resolution"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "13",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Stage 1: Sizing with resampled data\n",
+ "start = timeit.default_timer()\n",
+ "fs_sizing = flow_system.transform.resample('4h')\n",
+ "fs_sizing.optimize(solver)\n",
+ "time_stage1 = timeit.default_timer() - start\n",
+ "\n",
+ "print('=== Stage 1: Sizing ===')\n",
+ "print(f'Time: {time_stage1:.2f} seconds')\n",
+ "print('\\nOptimized sizes:')\n",
+ "for name, size in fs_sizing.statistics.sizes.items():\n",
+ " print(f' {name}: {float(size.item()):.1f}')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "14",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Stage 2: Dispatch at full resolution with fixed sizes\n",
+ "start = timeit.default_timer()\n",
+ "fs_dispatch = flow_system.transform.fix_sizes(fs_sizing.statistics.sizes)\n",
+ "fs_dispatch.optimize(solver)\n",
+ "time_stage2 = timeit.default_timer() - start\n",
+ "\n",
+ "print('=== Stage 2: Dispatch ===')\n",
+ "print(f'Time: {time_stage2:.2f} seconds')\n",
+ "print(f'Cost: {fs_dispatch.solution[\"costs\"].item():.2f} €')\n",
+ "print(f'\\nTotal two-stage time: {time_stage1 + time_stage2:.2f} seconds')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "15",
+ "metadata": {},
+ "source": [
+ "## Technique 3: Full Optimization (Baseline)\n",
+ "\n",
+ "For comparison, solve the full problem:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "16",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "start = timeit.default_timer()\n",
+ "fs_full = flow_system.copy()\n",
+ "fs_full.optimize(solver)\n",
+ "time_full = timeit.default_timer() - start\n",
+ "\n",
+ "print('=== Full Optimization ===')\n",
+ "print(f'Time: {time_full:.2f} seconds')\n",
+ "print(f'Cost: {fs_full.solution[\"costs\"].item():.2f} €')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "17",
+ "metadata": {},
+ "source": [
+ "## Compare Results"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "18",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Collect results\n",
+ "results = {\n",
+ " 'Full (baseline)': {\n",
+ " 'Time [s]': time_full,\n",
+ " 'Cost [€]': fs_full.solution['costs'].item(),\n",
+ " 'CHP Size [kW]': fs_full.statistics.sizes['CHP(P_el)'].item(),\n",
+ " 'Boiler Size [kW]': fs_full.statistics.sizes['Boiler(Q_th)'].item(),\n",
+ " 'Storage Size [kWh]': fs_full.statistics.sizes['Storage'].item(),\n",
+ " },\n",
+ " 'Resampled (4h)': {\n",
+ " 'Time [s]': time_resampled,\n",
+ " 'Cost [€]': fs_resampled.solution['costs'].item(),\n",
+ " 'CHP Size [kW]': fs_resampled.statistics.sizes['CHP(P_el)'].item(),\n",
+ " 'Boiler Size [kW]': fs_resampled.statistics.sizes['Boiler(Q_th)'].item(),\n",
+ " 'Storage Size [kWh]': fs_resampled.statistics.sizes['Storage'].item(),\n",
+ " },\n",
+ " 'Two-Stage': {\n",
+ " 'Time [s]': time_stage1 + time_stage2,\n",
+ " 'Cost [€]': fs_dispatch.solution['costs'].item(),\n",
+ " 'CHP Size [kW]': fs_dispatch.statistics.sizes['CHP(P_el)'].item(),\n",
+ " 'Boiler Size [kW]': fs_dispatch.statistics.sizes['Boiler(Q_th)'].item(),\n",
+ " 'Storage Size [kWh]': fs_dispatch.statistics.sizes['Storage'].item(),\n",
+ " },\n",
+ "}\n",
+ "\n",
+ "comparison = pd.DataFrame(results).T\n",
+ "\n",
+ "# Add relative metrics\n",
+ "baseline_cost = comparison.loc['Full (baseline)', 'Cost [€]']\n",
+ "baseline_time = comparison.loc['Full (baseline)', 'Time [s]']\n",
+ "comparison['Cost Gap [%]'] = ((comparison['Cost [€]'] - baseline_cost) / baseline_cost * 100).round(2)\n",
+ "comparison['Speedup'] = (baseline_time / comparison['Time [s]']).round(1)\n",
+ "\n",
+ "comparison.round(2)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "19",
+ "metadata": {},
+ "source": [
+ "## Visual Comparison: Heat Balance"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "20",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Full optimization heat balance\n",
+ "fs_full.statistics.plot.balance('Heat')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "21",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Two-stage optimization heat balance\n",
+ "fs_dispatch.statistics.plot.balance('Heat')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "22",
+ "metadata": {},
+ "source": [
+ "### Energy Flow Sankey (Full Optimization)\n",
+ "\n",
+ "A Sankey diagram visualizes the total energy flows:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "23",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "fs_full.statistics.plot.sankey.flows()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "24",
+ "metadata": {},
+ "source": [
+ "## When to Use Each Technique\n",
+ "\n",
+ "| Technique | Best For | Trade-off |\n",
+ "|-----------|----------|------------|\n",
+ "| **Full optimization** | Final results, small problems | Slowest, most accurate |\n",
+ "| **Resampling** | Quick screening, trend analysis | Fast, loses temporal detail |\n",
+ "| **Two-stage** | Investment decisions, large problems | Good balance of speed and accuracy |\n",
+ "| **Clustering** | Preserves extreme periods | Requires `tsam` package |\n",
+ "\n",
+ "### Resampling Options\n",
+ "\n",
+ "```python\n",
+ "# Different resolutions\n",
+ "fs_2h = flow_system.transform.resample('2h') # 2-hourly\n",
+ "fs_4h = flow_system.transform.resample('4h') # 4-hourly\n",
+ "fs_daily = flow_system.transform.resample('1D') # Daily\n",
+ "\n",
+ "# Different aggregation methods\n",
+ "fs_mean = flow_system.transform.resample('4h', method='mean') # Default\n",
+ "fs_max = flow_system.transform.resample('4h', method='max') # Preserve peaks\n",
+ "```\n",
+ "\n",
+ "### Two-Stage Workflow\n",
+ "\n",
+ "```python\n",
+ "# Stage 1: Sizing\n",
+ "fs_sizing = flow_system.transform.resample('4h')\n",
+ "fs_sizing.optimize(solver)\n",
+ "\n",
+ "# Stage 2: Dispatch\n",
+ "fs_dispatch = flow_system.transform.fix_sizes(fs_sizing.statistics.sizes)\n",
+ "fs_dispatch.optimize(solver)\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "25",
+ "metadata": {},
+ "source": [
+ "## Summary\n",
+ "\n",
+ "You learned how to:\n",
+ "\n",
+ "- Use **`transform.resample()`** to reduce time resolution\n",
+ "- Apply **two-stage optimization** for large investment problems\n",
+ "- Use **`transform.fix_sizes()`** to lock in investment decisions\n",
+ "- Compare **speed vs. accuracy** trade-offs\n",
+ "\n",
+ "### Key Takeaways\n",
+ "\n",
+ "1. **Start fast**: Use resampling for initial exploration\n",
+ "2. **Iterate**: Refine with two-stage optimization\n",
+ "3. **Validate**: Run full optimization for final results\n",
+ "4. **Monitor**: Check cost gaps to ensure acceptable accuracy\n",
+ "\n",
+ "### Further Reading\n",
+ "\n",
+ "- For clustering with typical periods, see `transform.cluster()` (requires `tsam` package)\n",
+ "- For time selection, see `transform.sel()` and `transform.isel()`"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": "3.11.0"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/notebooks/09-plotting-and-data-access.ipynb b/docs/notebooks/09-plotting-and-data-access.ipynb
new file mode 100644
index 000000000..091d5f1d4
--- /dev/null
+++ b/docs/notebooks/09-plotting-and-data-access.ipynb
@@ -0,0 +1,7775 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "0",
+ "metadata": {},
+ "source": "# Plotting\n\nAccess optimization results and create visualizations.\n\nThis notebook covers:\n\n- Loading saved FlowSystems from NetCDF files\n- Accessing data (flow rates, sizes, effects, charge states)\n- Time series plots (balance, flows, storage)\n- Aggregated plots (sizes, effects, duration curves)\n- Heatmaps with time reshaping\n- Sankey diagrams\n- Topology visualization\n- Color customization and export"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1",
+ "metadata": {},
+ "source": [
+ "## Setup"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "2",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:06.543191Z",
+ "start_time": "2025-12-13T14:13:00.434024Z"
+ }
+ },
+ "source": [
+ "from pathlib import Path\n",
+ "\n",
+ "import flixopt as fx\n",
+ "\n",
+ "fx.CONFIG.notebook()"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "flixopt.config.CONFIG"
+ ]
+ },
+ "execution_count": 1,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 1
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3",
+ "metadata": {},
+ "source": [
+ "## Generate Example Data\n",
+ "\n",
+ "First, run the script that generates three example FlowSystems with solutions:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "4",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:14.318678Z",
+ "start_time": "2025-12-13T14:13:06.637107Z"
+ }
+ },
+ "source": [
+ "# Run the generation script (only needed once, or to regenerate)\n",
+ "!python data/generate_example_systems.py"
+ ],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Creating simple_system...\r\n",
+ " Optimizing...\r\n",
+ " Saving to /Users/felix/PycharmProjects/flixopt_719231/docs/notebooks/data/simple_system.nc4...\r\n",
+ " Done. Objective: 558.83\r\n",
+ "\r\n",
+ "Creating complex_system...\r\n",
+ " Optimizing...\r\n",
+ "HighsMipSolverData::transformNewIntegerFeasibleSolution tmpSolver.run();\r\n",
+ "HighsMipSolverData::transformNewIntegerFeasibleSolution tmpSolver.run();\r\n",
+ " Saving to /Users/felix/PycharmProjects/flixopt_719231/docs/notebooks/data/complex_system.nc4...\r\n",
+ " Done. Objective: 302.36\r\n",
+ "\r\n",
+ "Creating multiperiod_system...\r\n",
+ " Optimizing...\r\n",
+ " Saving to /Users/felix/PycharmProjects/flixopt_719231/docs/notebooks/data/multiperiod_system.nc4...\r\n",
+ " Done. Objective: 19472.48\r\n",
+ "\r\n",
+ "All systems generated successfully!\r\n"
+ ]
+ }
+ ],
+ "execution_count": 2
+ },
+ {
+ "cell_type": "markdown",
+ "id": "5",
+ "metadata": {},
+ "source": [
+ "## 1. Loading Saved FlowSystems\n",
+ "\n",
+ "FlowSystems can be saved to and loaded from NetCDF files, preserving the full structure and solution:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "6",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:14.940793Z",
+ "start_time": "2025-12-13T14:13:14.377412Z"
+ }
+ },
+ "source": [
+ "DATA_DIR = Path('data')\n",
+ "\n",
+ "# Load the three example systems\n",
+ "simple = fx.FlowSystem.from_netcdf(DATA_DIR / 'simple_system.nc4')\n",
+ "complex_sys = fx.FlowSystem.from_netcdf(DATA_DIR / 'complex_system.nc4')\n",
+ "multiperiod = fx.FlowSystem.from_netcdf(DATA_DIR / 'multiperiod_system.nc4')\n",
+ "\n",
+ "print('Loaded systems:')\n",
+ "print(f' simple: {len(simple.components)} components, {len(simple.buses)} buses')\n",
+ "print(f' complex_sys: {len(complex_sys.components)} components, {len(complex_sys.buses)} buses')\n",
+ "print(f' multiperiod: {len(multiperiod.components)} components, dims={dict(multiperiod.solution.sizes)}')"
+ ],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Loaded systems:\n",
+ " simple: 4 components, 2 buses\n",
+ " complex_sys: 9 components, 3 buses\n",
+ " multiperiod: 4 components, dims={'scenario': 2, 'period': 3, 'time': 49}\n"
+ ]
+ }
+ ],
+ "execution_count": 3
+ },
+ {
+ "cell_type": "markdown",
+ "id": "7",
+ "metadata": {},
+ "source": "## 2. Quick Overview: Balance Plot\n\nLet's start with the most common visualization - a balance plot showing energy flows:"
+ },
+ {
+ "cell_type": "code",
+ "id": "8",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:15.234587Z",
+ "start_time": "2025-12-13T14:13:14.950674Z"
+ }
+ },
+ "source": [
+ "# Balance plot for the Heat bus - shows all inflows and outflows\n",
+ "simple.statistics.plot.balance('Heat')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 7kB\n",
+ "Dimensions: (time: 169)\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-...\n",
+ "Data variables:\n",
+ " Boiler(Heat) (time) float64 1kB -32.48 -29.31 ... -124.5 nan\n",
+ " ThermalStorage(Discharge) (time) float64 1kB -0.0 5.275e-13 ... nan\n",
+ " ThermalStorage(Charge) (time) float64 1kB 0.0 -3.748e-13 ... 100.0 nan\n",
+ " Office(Heat) (time) float64 1kB 32.48 29.31 ... 24.48 nan, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=Boiler(Heat)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('5ZuWpeU9QMD3U8WNBU89wHjXQkqFnk' ... '////8zwPW5+Ef5Hl/AAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Discharge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Discharge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAIAKPvjgg49iPby8nSEx72' ... 'AAAAAgvWP9SoFav2g9AAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Charge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Charge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAAAUfPDBB19avby8nSEx72' ... 'AAAAAAANj//////1hAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=Office(Heat)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Office(Heat)',\n",
+ " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Office(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('5ZuWpeU9QEDMU8WNBU89QGDXQkqFnk' ... 'AAAAA0QK7n4h/lezhAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'bargap': 0,\n",
+ " 'bargroupgap': 0,\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Heat (flow_rate)'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 4,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 4
+ },
+ {
+ "cell_type": "markdown",
+ "id": "9",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-12T12:06:35.534937Z",
+ "start_time": "2025-12-12T12:06:35.496736Z"
+ }
+ },
+ "source": "### Accessing Plot Data\n\nEvery plot returns a `PlotResult` with both the figure and underlying data. Use `.data.to_dataframe()` to get a pandas DataFrame:"
+ },
+ {
+ "cell_type": "code",
+ "id": "10",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:15.732085Z",
+ "start_time": "2025-12-13T14:13:15.577916Z"
+ }
+ },
+ "source": [
+ "# Get plot result and access the underlying data\n",
+ "result = simple.statistics.plot.balance('Heat', show=False)\n",
+ "\n",
+ "# Convert to DataFrame for easy viewing/export\n",
+ "df = result.data.to_dataframe()\n",
+ "df.head(10)"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ " Boiler(Heat) ThermalStorage(Discharge) \\\n",
+ "time \n",
+ "2024-01-15 00:00:00 -32.483571 -0.000000e+00 \n",
+ "2024-01-15 01:00:00 -29.308678 5.275242e-13 \n",
+ "2024-01-15 02:00:00 -33.238443 -7.086767e-13 \n",
+ "2024-01-15 03:00:00 -101.411593 -3.516828e-13 \n",
+ "2024-01-15 04:00:00 -128.829233 -5.613288e-13 \n",
+ "2024-01-15 05:00:00 -128.829315 -7.033655e-13 \n",
+ "2024-01-15 06:00:00 -0.000000 -3.789606e+01 \n",
+ "2024-01-15 07:00:00 -0.000000 -8.383717e+01 \n",
+ "2024-01-15 08:00:00 -0.000000 -7.765263e+01 \n",
+ "2024-01-15 09:00:00 -0.000000 -8.271280e+01 \n",
+ "\n",
+ " ThermalStorage(Charge) Office(Heat) \n",
+ "time \n",
+ "2024-01-15 00:00:00 0.000000e+00 32.483571 \n",
+ "2024-01-15 01:00:00 -3.747575e-13 29.308678 \n",
+ "2024-01-15 02:00:00 8.792069e-13 33.238443 \n",
+ "2024-01-15 03:00:00 6.379644e+01 37.615149 \n",
+ "2024-01-15 04:00:00 1.000000e+02 28.829233 \n",
+ "2024-01-15 05:00:00 1.000000e+02 28.829315 \n",
+ "2024-01-15 06:00:00 1.055048e-12 37.896064 \n",
+ "2024-01-15 07:00:00 7.033655e-13 83.837174 \n",
+ "2024-01-15 08:00:00 -7.673862e-13 77.652628 \n",
+ "2024-01-15 09:00:00 7.033655e-13 82.712800 "
+ ],
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " Boiler(Heat) | \n",
+ " ThermalStorage(Discharge) | \n",
+ " ThermalStorage(Charge) | \n",
+ " Office(Heat) | \n",
+ "
\n",
+ " \n",
+ " | time | \n",
+ " | \n",
+ " | \n",
+ " | \n",
+ " | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " | 2024-01-15 00:00:00 | \n",
+ " -32.483571 | \n",
+ " -0.000000e+00 | \n",
+ " 0.000000e+00 | \n",
+ " 32.483571 | \n",
+ "
\n",
+ " \n",
+ " | 2024-01-15 01:00:00 | \n",
+ " -29.308678 | \n",
+ " 5.275242e-13 | \n",
+ " -3.747575e-13 | \n",
+ " 29.308678 | \n",
+ "
\n",
+ " \n",
+ " | 2024-01-15 02:00:00 | \n",
+ " -33.238443 | \n",
+ " -7.086767e-13 | \n",
+ " 8.792069e-13 | \n",
+ " 33.238443 | \n",
+ "
\n",
+ " \n",
+ " | 2024-01-15 03:00:00 | \n",
+ " -101.411593 | \n",
+ " -3.516828e-13 | \n",
+ " 6.379644e+01 | \n",
+ " 37.615149 | \n",
+ "
\n",
+ " \n",
+ " | 2024-01-15 04:00:00 | \n",
+ " -128.829233 | \n",
+ " -5.613288e-13 | \n",
+ " 1.000000e+02 | \n",
+ " 28.829233 | \n",
+ "
\n",
+ " \n",
+ " | 2024-01-15 05:00:00 | \n",
+ " -128.829315 | \n",
+ " -7.033655e-13 | \n",
+ " 1.000000e+02 | \n",
+ " 28.829315 | \n",
+ "
\n",
+ " \n",
+ " | 2024-01-15 06:00:00 | \n",
+ " -0.000000 | \n",
+ " -3.789606e+01 | \n",
+ " 1.055048e-12 | \n",
+ " 37.896064 | \n",
+ "
\n",
+ " \n",
+ " | 2024-01-15 07:00:00 | \n",
+ " -0.000000 | \n",
+ " -8.383717e+01 | \n",
+ " 7.033655e-13 | \n",
+ " 83.837174 | \n",
+ "
\n",
+ " \n",
+ " | 2024-01-15 08:00:00 | \n",
+ " -0.000000 | \n",
+ " -7.765263e+01 | \n",
+ " -7.673862e-13 | \n",
+ " 77.652628 | \n",
+ "
\n",
+ " \n",
+ " | 2024-01-15 09:00:00 | \n",
+ " -0.000000 | \n",
+ " -8.271280e+01 | \n",
+ " 7.033655e-13 | \n",
+ " 82.712800 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ]
+ },
+ "execution_count": 5,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 5
+ },
+ {
+ "cell_type": "markdown",
+ "id": "11",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-12T12:06:35.617665Z",
+ "start_time": "2025-12-12T12:06:35.585811Z"
+ }
+ },
+ "source": "### Energy Totals\n\nGet total energy by flow using `flow_hours`:"
+ },
+ {
+ "cell_type": "code",
+ "id": "12",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:15.948455Z",
+ "start_time": "2025-12-13T14:13:15.924150Z"
+ }
+ },
+ "source": "import pandas as pd\n\n# Total energy per flow\ntotals = {var: float(simple.statistics.flow_hours[var].sum()) for var in simple.statistics.flow_hours.data_vars}\n\npd.Series(totals, name='Energy [kWh]').to_frame().T",
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ " GasGrid(Gas) Boiler(Gas) Boiler(Heat) ThermalStorage(Charge) \\\n",
+ "Energy [kWh] 8936.665406 8936.665406 8221.732173 3457.182735 \n",
+ "\n",
+ " ThermalStorage(Discharge) Office(Heat) \n",
+ "Energy [kWh] 3242.788948 8007.338386 "
+ ],
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " GasGrid(Gas) | \n",
+ " Boiler(Gas) | \n",
+ " Boiler(Heat) | \n",
+ " ThermalStorage(Charge) | \n",
+ " ThermalStorage(Discharge) | \n",
+ " Office(Heat) | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " | Energy [kWh] | \n",
+ " 8936.665406 | \n",
+ " 8936.665406 | \n",
+ " 8221.732173 | \n",
+ " 3457.182735 | \n",
+ " 3242.788948 | \n",
+ " 8007.338386 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ]
+ },
+ "execution_count": 6,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 6
+ },
+ {
+ "cell_type": "markdown",
+ "id": "13",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-12T12:06:35.754890Z",
+ "start_time": "2025-12-12T12:06:35.735084Z"
+ }
+ },
+ "source": "## 3. Time Series Plots"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "14",
+ "metadata": {},
+ "source": "### 3.1 Balance Plot\n\nShows inflows (positive) and outflows (negative) for a bus or component:"
+ },
+ {
+ "cell_type": "code",
+ "id": "15",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:16.412850Z",
+ "start_time": "2025-12-13T14:13:16.305115Z"
+ }
+ },
+ "source": [
+ "# Component balance (all flows of a component)\n",
+ "simple.statistics.plot.balance('ThermalStorage')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 4kB\n",
+ "Dimensions: (time: 169)\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-...\n",
+ "Data variables:\n",
+ " ThermalStorage(Charge) (time) float64 1kB -0.0 3.748e-13 ... -100.0 nan\n",
+ " ThermalStorage(Discharge) (time) float64 1kB 0.0 -5.275e-13 ... nan, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Charge)',\n",
+ " 'marker': {'color': '#D62728', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Charge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAIAUfPDBB19aPby8nSEx72' ... 'AAAAAAgNj//////1jAAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Discharge)',\n",
+ " 'marker': {'color': '#D62728', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Discharge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAAAKPvjgg49ivby8nSEx72' ... 'AAAAAgPWP9SoFav2i9AAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'bargap': 0,\n",
+ " 'bargroupgap': 0,\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'ThermalStorage (flow_rate)'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 7,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 7
+ },
+ {
+ "cell_type": "markdown",
+ "id": "16",
+ "metadata": {},
+ "source": "### 3.2 Carrier Balance\n\nShows all flows of a specific carrier across the entire system:"
+ },
+ {
+ "cell_type": "code",
+ "id": "17",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:16.630015Z",
+ "start_time": "2025-12-13T14:13:16.539450Z"
+ }
+ },
+ "source": [
+ "complex_sys.statistics.plot.carrier_balance('heat')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 4kB\n",
+ "Dimensions: (time: 73)\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 584B 2024-06-01 ... 2024-06-04\n",
+ "Data variables:\n",
+ " CHP(Heat) (time) float64 584B 0.0 0.0 0.0 0.0 ... 0.0 0.0 nan\n",
+ " HeatPump(Heat) (time) float64 584B 0.0 0.0 0.0 0.0 ... 0.0 0.0 nan\n",
+ " BackupBoiler(Heat) (time) float64 584B 20.0 26.01 25.43 ... 20.0 nan\n",
+ " HeatStorage(Discharge) (time) float64 584B 0.0 0.0 0.0 ... 0.0 nan\n",
+ " HeatStorage(Charge) (time) float64 584B -0.0 -0.0 -0.0 ... -0.0 nan\n",
+ " HeatDemand(Heat) (time) float64 584B -20.0 -26.01 ... -20.0 nan, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=CHP(Heat)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'CHP(Heat)',\n",
+ " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'CHP(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n",
+ " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n",
+ " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n",
+ " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n",
+ " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n",
+ " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n",
+ " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n",
+ " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n",
+ " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n",
+ " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n",
+ " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n",
+ " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n",
+ " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n",
+ " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n",
+ " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n",
+ " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n",
+ " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n",
+ " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n",
+ " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n",
+ " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n",
+ " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n",
+ " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n",
+ " '2024-06-02T20:00:00.000000000', '2024-06-02T21:00:00.000000000',\n",
+ " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n",
+ " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n",
+ " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n",
+ " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n",
+ " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n",
+ " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n",
+ " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n",
+ " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n",
+ " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n",
+ " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n",
+ " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n",
+ " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n",
+ " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n",
+ " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=HeatPump(Heat)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'HeatPump(Heat)',\n",
+ " 'marker': {'color': '#FFA15A', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'HeatPump(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n",
+ " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n",
+ " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n",
+ " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n",
+ " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n",
+ " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n",
+ " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n",
+ " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n",
+ " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n",
+ " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n",
+ " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n",
+ " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n",
+ " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n",
+ " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n",
+ " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n",
+ " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n",
+ " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n",
+ " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n",
+ " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n",
+ " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n",
+ " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n",
+ " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n",
+ " '2024-06-02T20:00:00.000000000', '2024-06-02T21:00:00.000000000',\n",
+ " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n",
+ " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n",
+ " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n",
+ " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n",
+ " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n",
+ " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n",
+ " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n",
+ " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n",
+ " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n",
+ " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n",
+ " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n",
+ " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n",
+ " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n",
+ " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=BackupBoiler(Heat)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'BackupBoiler(Heat)',\n",
+ " 'marker': {'color': '#19D3F3', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'BackupBoiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n",
+ " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n",
+ " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n",
+ " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n",
+ " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n",
+ " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n",
+ " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n",
+ " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n",
+ " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n",
+ " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n",
+ " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n",
+ " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n",
+ " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n",
+ " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n",
+ " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n",
+ " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n",
+ " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n",
+ " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n",
+ " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n",
+ " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n",
+ " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n",
+ " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n",
+ " '2024-06-02T20:00:00.000000000', '2024-06-02T21:00:00.000000000',\n",
+ " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n",
+ " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n",
+ " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n",
+ " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n",
+ " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n",
+ " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n",
+ " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n",
+ " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n",
+ " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n",
+ " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n",
+ " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n",
+ " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n",
+ " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n",
+ " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAANEBcQRe1SgI6QOU9Gisjbz' ... 'Dnhlw6QAAAAAAAADRAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=HeatStorage(Discharge)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'HeatStorage(Discharge)',\n",
+ " 'marker': {'color': '#FF6692', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'HeatStorage(Discharge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n",
+ " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n",
+ " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n",
+ " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n",
+ " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n",
+ " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n",
+ " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n",
+ " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n",
+ " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n",
+ " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n",
+ " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n",
+ " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n",
+ " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n",
+ " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n",
+ " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n",
+ " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n",
+ " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n",
+ " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n",
+ " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n",
+ " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n",
+ " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n",
+ " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n",
+ " '2024-06-02T20:00:00.000000000', '2024-06-02T21:00:00.000000000',\n",
+ " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n",
+ " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n",
+ " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n",
+ " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n",
+ " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n",
+ " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n",
+ " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n",
+ " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n",
+ " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n",
+ " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n",
+ " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n",
+ " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n",
+ " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n",
+ " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' ... 'RO7MQ7PQAAAAAAAAAAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=HeatStorage(Charge)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'HeatStorage(Charge)',\n",
+ " 'marker': {'color': '#FF6692', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'HeatStorage(Charge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n",
+ " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n",
+ " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n",
+ " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n",
+ " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n",
+ " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n",
+ " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n",
+ " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n",
+ " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n",
+ " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n",
+ " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n",
+ " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n",
+ " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n",
+ " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n",
+ " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n",
+ " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n",
+ " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n",
+ " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n",
+ " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n",
+ " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n",
+ " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n",
+ " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n",
+ " '2024-06-02T20:00:00.000000000', '2024-06-02T21:00:00.000000000',\n",
+ " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n",
+ " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n",
+ " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n",
+ " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n",
+ " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n",
+ " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n",
+ " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n",
+ " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n",
+ " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n",
+ " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n",
+ " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n",
+ " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n",
+ " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n",
+ " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'RO7MQ+vQAAAAAAAACAAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=HeatDemand(Heat)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'HeatDemand(Heat)',\n",
+ " 'marker': {'color': '#B6E880', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'HeatDemand(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n",
+ " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n",
+ " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n",
+ " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n",
+ " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n",
+ " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n",
+ " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n",
+ " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n",
+ " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n",
+ " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n",
+ " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n",
+ " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n",
+ " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n",
+ " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n",
+ " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n",
+ " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n",
+ " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n",
+ " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n",
+ " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n",
+ " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n",
+ " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n",
+ " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n",
+ " '2024-06-02T20:00:00.000000000', '2024-06-02T21:00:00.000000000',\n",
+ " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n",
+ " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n",
+ " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n",
+ " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n",
+ " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n",
+ " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n",
+ " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n",
+ " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n",
+ " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n",
+ " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n",
+ " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n",
+ " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n",
+ " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n",
+ " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAANMBcQRe1SgI6wOQ9Gisjbz' ... 'Dnhlw6wAAAAAAAADTAAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'bargap': 0,\n",
+ " 'bargroupgap': 0,\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Heat Balance (flow_rate)'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 8,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 8
+ },
+ {
+ "cell_type": "code",
+ "id": "18",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:16.765682Z",
+ "start_time": "2025-12-13T14:13:16.660109Z"
+ }
+ },
+ "source": [
+ "complex_sys.statistics.plot.carrier_balance('electricity')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 4kB\n",
+ "Dimensions: (time: 73)\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 584B 2024-06-01 ... 2024-06-04\n",
+ "Data variables:\n",
+ " ElectricityImport(El) (time) float64 584B 23.49 20.59 21.13 ... 17.12 nan\n",
+ " CHP(El) (time) float64 584B 0.0 0.0 0.0 0.0 ... 0.0 0.0 nan\n",
+ " ElectricityExport(El) (time) float64 584B -0.0 -0.0 -0.0 ... -0.0 -0.0 nan\n",
+ " HeatPump(El) (time) float64 584B -0.0 -0.0 -0.0 ... -0.0 -0.0 nan\n",
+ " ElDemand(El) (time) float64 584B -23.49 -20.59 ... -17.12 nan, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=ElectricityImport(El)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'ElectricityImport(El)',\n",
+ " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ElectricityImport(El)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n",
+ " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n",
+ " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n",
+ " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n",
+ " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n",
+ " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n",
+ " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n",
+ " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n",
+ " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n",
+ " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n",
+ " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n",
+ " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n",
+ " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n",
+ " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n",
+ " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n",
+ " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n",
+ " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n",
+ " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n",
+ " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n",
+ " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n",
+ " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n",
+ " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n",
+ " '2024-06-02T20:00:00.000000000', '2024-06-02T21:00:00.000000000',\n",
+ " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n",
+ " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n",
+ " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n",
+ " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n",
+ " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n",
+ " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n",
+ " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n",
+ " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n",
+ " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n",
+ " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n",
+ " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n",
+ " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n",
+ " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n",
+ " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('2HsanZJ8N0B/T9mTNpc0QB5Tg3x1IT' ... 'ANSU0wQAE5VciyHTFAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=CHP(El)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'CHP(El)',\n",
+ " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'CHP(El)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n",
+ " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n",
+ " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n",
+ " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n",
+ " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n",
+ " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n",
+ " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n",
+ " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n",
+ " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n",
+ " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n",
+ " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n",
+ " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n",
+ " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n",
+ " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n",
+ " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n",
+ " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n",
+ " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n",
+ " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n",
+ " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n",
+ " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n",
+ " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n",
+ " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n",
+ " '2024-06-02T20:00:00.000000000', '2024-06-02T21:00:00.000000000',\n",
+ " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n",
+ " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n",
+ " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n",
+ " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n",
+ " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n",
+ " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n",
+ " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n",
+ " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n",
+ " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n",
+ " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n",
+ " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n",
+ " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n",
+ " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n",
+ " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ElectricityExport(El)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ElectricityExport(El)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ElectricityExport(El)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n",
+ " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n",
+ " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n",
+ " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n",
+ " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n",
+ " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n",
+ " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n",
+ " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n",
+ " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n",
+ " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n",
+ " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n",
+ " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n",
+ " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n",
+ " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n",
+ " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n",
+ " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n",
+ " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n",
+ " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n",
+ " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n",
+ " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n",
+ " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n",
+ " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n",
+ " '2024-06-02T20:00:00.000000000', '2024-06-02T21:00:00.000000000',\n",
+ " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n",
+ " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n",
+ " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n",
+ " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n",
+ " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n",
+ " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n",
+ " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n",
+ " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n",
+ " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n",
+ " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n",
+ " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n",
+ " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n",
+ " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n",
+ " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'AAAAAAgAAAAAAAAACAAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=HeatPump(El)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'HeatPump(El)',\n",
+ " 'marker': {'color': '#FFA15A', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'HeatPump(El)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n",
+ " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n",
+ " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n",
+ " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n",
+ " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n",
+ " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n",
+ " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n",
+ " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n",
+ " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n",
+ " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n",
+ " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n",
+ " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n",
+ " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n",
+ " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n",
+ " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n",
+ " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n",
+ " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n",
+ " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n",
+ " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n",
+ " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n",
+ " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n",
+ " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n",
+ " '2024-06-02T20:00:00.000000000', '2024-06-02T21:00:00.000000000',\n",
+ " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n",
+ " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n",
+ " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n",
+ " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n",
+ " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n",
+ " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n",
+ " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n",
+ " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n",
+ " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n",
+ " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n",
+ " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n",
+ " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n",
+ " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n",
+ " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAIAAAAAAAAAAgAAAAAAAAA' ... 'AAAAAAgAAAAAAAAACAAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ElDemand(El)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ElDemand(El)',\n",
+ " 'marker': {'color': '#FF97FF', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ElDemand(El)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-06-01T00:00:00.000000000', '2024-06-01T01:00:00.000000000',\n",
+ " '2024-06-01T02:00:00.000000000', '2024-06-01T03:00:00.000000000',\n",
+ " '2024-06-01T04:00:00.000000000', '2024-06-01T05:00:00.000000000',\n",
+ " '2024-06-01T06:00:00.000000000', '2024-06-01T07:00:00.000000000',\n",
+ " '2024-06-01T08:00:00.000000000', '2024-06-01T09:00:00.000000000',\n",
+ " '2024-06-01T10:00:00.000000000', '2024-06-01T11:00:00.000000000',\n",
+ " '2024-06-01T12:00:00.000000000', '2024-06-01T13:00:00.000000000',\n",
+ " '2024-06-01T14:00:00.000000000', '2024-06-01T15:00:00.000000000',\n",
+ " '2024-06-01T16:00:00.000000000', '2024-06-01T17:00:00.000000000',\n",
+ " '2024-06-01T18:00:00.000000000', '2024-06-01T19:00:00.000000000',\n",
+ " '2024-06-01T20:00:00.000000000', '2024-06-01T21:00:00.000000000',\n",
+ " '2024-06-01T22:00:00.000000000', '2024-06-01T23:00:00.000000000',\n",
+ " '2024-06-02T00:00:00.000000000', '2024-06-02T01:00:00.000000000',\n",
+ " '2024-06-02T02:00:00.000000000', '2024-06-02T03:00:00.000000000',\n",
+ " '2024-06-02T04:00:00.000000000', '2024-06-02T05:00:00.000000000',\n",
+ " '2024-06-02T06:00:00.000000000', '2024-06-02T07:00:00.000000000',\n",
+ " '2024-06-02T08:00:00.000000000', '2024-06-02T09:00:00.000000000',\n",
+ " '2024-06-02T10:00:00.000000000', '2024-06-02T11:00:00.000000000',\n",
+ " '2024-06-02T12:00:00.000000000', '2024-06-02T13:00:00.000000000',\n",
+ " '2024-06-02T14:00:00.000000000', '2024-06-02T15:00:00.000000000',\n",
+ " '2024-06-02T16:00:00.000000000', '2024-06-02T17:00:00.000000000',\n",
+ " '2024-06-02T18:00:00.000000000', '2024-06-02T19:00:00.000000000',\n",
+ " '2024-06-02T20:00:00.000000000', '2024-06-02T21:00:00.000000000',\n",
+ " '2024-06-02T22:00:00.000000000', '2024-06-02T23:00:00.000000000',\n",
+ " '2024-06-03T00:00:00.000000000', '2024-06-03T01:00:00.000000000',\n",
+ " '2024-06-03T02:00:00.000000000', '2024-06-03T03:00:00.000000000',\n",
+ " '2024-06-03T04:00:00.000000000', '2024-06-03T05:00:00.000000000',\n",
+ " '2024-06-03T06:00:00.000000000', '2024-06-03T07:00:00.000000000',\n",
+ " '2024-06-03T08:00:00.000000000', '2024-06-03T09:00:00.000000000',\n",
+ " '2024-06-03T10:00:00.000000000', '2024-06-03T11:00:00.000000000',\n",
+ " '2024-06-03T12:00:00.000000000', '2024-06-03T13:00:00.000000000',\n",
+ " '2024-06-03T14:00:00.000000000', '2024-06-03T15:00:00.000000000',\n",
+ " '2024-06-03T16:00:00.000000000', '2024-06-03T17:00:00.000000000',\n",
+ " '2024-06-03T18:00:00.000000000', '2024-06-03T19:00:00.000000000',\n",
+ " '2024-06-03T20:00:00.000000000', '2024-06-03T21:00:00.000000000',\n",
+ " '2024-06-03T22:00:00.000000000', '2024-06-03T23:00:00.000000000',\n",
+ " '2024-06-04T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('2HsanZJ8N8B/T9mTNpc0wB5Tg3x1IT' ... 'ANSU0wwAE5VciyHTHAAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'bargap': 0,\n",
+ " 'bargroupgap': 0,\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Electricity Balance (flow_rate)'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 9,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 9
+ },
+ {
+ "cell_type": "markdown",
+ "id": "19",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-12T12:06:36.266666Z",
+ "start_time": "2025-12-12T12:06:36.198686Z"
+ }
+ },
+ "source": "### 3.3 Flow Rates\n\nPlot multiple flow rates together:"
+ },
+ {
+ "cell_type": "code",
+ "id": "20",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:16.863735Z",
+ "start_time": "2025-12-13T14:13:16.783096Z"
+ }
+ },
+ "source": [
+ "# All flows\n",
+ "simple.statistics.plot.flows()"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 9kB\n",
+ "Dimensions: (time: 169)\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-...\n",
+ "Data variables:\n",
+ " GasGrid(Gas) (time) float64 1kB 35.31 31.86 ... 135.3 nan\n",
+ " Boiler(Gas) (time) float64 1kB 35.31 31.86 ... 135.3 nan\n",
+ " Boiler(Heat) (time) float64 1kB 32.48 29.31 ... 124.5 nan\n",
+ " ThermalStorage(Charge) (time) float64 1kB 0.0 -3.748e-13 ... 100.0 nan\n",
+ " ThermalStorage(Discharge) (time) float64 1kB 0.0 -5.275e-13 ... nan\n",
+ " Office(Heat) (time) float64 1kB 32.48 29.31 ... 24.48 nan, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=GasGrid(Gas)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'GasGrid(Gas)',\n",
+ " 'line': {'color': '#636EFA', 'dash': 'solid'},\n",
+ " 'marker': {'symbol': 'circle'},\n",
+ " 'mode': 'lines',\n",
+ " 'name': 'GasGrid(Gas)',\n",
+ " 'showlegend': True,\n",
+ " 'type': 'scattergl',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('GuEHDXSnQUD261BXdds/QI2yoZ56EE' ... 'SmN701QKxDuYXg6WBAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=Boiler(Gas)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Boiler(Gas)',\n",
+ " 'line': {'color': '#EF553B', 'dash': 'solid'},\n",
+ " 'marker': {'symbol': 'circle'},\n",
+ " 'mode': 'lines',\n",
+ " 'name': 'Boiler(Gas)',\n",
+ " 'showlegend': True,\n",
+ " 'type': 'scattergl',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('GuEHDXSnQUD261BXdds/QI2yoZ56EE' ... 'SmN701QKxDuYXg6WBAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=Boiler(Heat)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'line': {'color': '#00CC96', 'dash': 'solid'},\n",
+ " 'marker': {'symbol': 'circle'},\n",
+ " 'mode': 'lines',\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'showlegend': True,\n",
+ " 'type': 'scattergl',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('5ZuWpeU9QED3U8WNBU89QHjXQkqFnk' ... '////8zQPW5+Ef5Hl9AAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Charge)',\n",
+ " 'line': {'color': '#AB63FA', 'dash': 'solid'},\n",
+ " 'marker': {'symbol': 'circle'},\n",
+ " 'mode': 'lines',\n",
+ " 'name': 'ThermalStorage(Charge)',\n",
+ " 'showlegend': True,\n",
+ " 'type': 'scattergl',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAAAUfPDBB19avby8nSEx72' ... 'AAAAAAANj//////1hAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Discharge)',\n",
+ " 'line': {'color': '#FFA15A', 'dash': 'solid'},\n",
+ " 'marker': {'symbol': 'circle'},\n",
+ " 'mode': 'lines',\n",
+ " 'name': 'ThermalStorage(Discharge)',\n",
+ " 'showlegend': True,\n",
+ " 'type': 'scattergl',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAAAKPvjgg49ivby8nSEx72' ... 'AAAAAgPWP9SoFav2i9AAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=Office(Heat)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Office(Heat)',\n",
+ " 'line': {'color': '#19D3F3', 'dash': 'solid'},\n",
+ " 'marker': {'symbol': 'circle'},\n",
+ " 'mode': 'lines',\n",
+ " 'name': 'Office(Heat)',\n",
+ " 'showlegend': True,\n",
+ " 'type': 'scattergl',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('5ZuWpeU9QEDMU8WNBU89QGDXQkqFnk' ... 'AAAAA0QK7n4h/lezhAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Flows (flow_rate)'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 10
+ },
+ {
+ "cell_type": "code",
+ "id": "21",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:16.936035Z",
+ "start_time": "2025-12-13T14:13:16.880022Z"
+ }
+ },
+ "source": [
+ "# Flows filtered by component\n",
+ "simple.statistics.plot.flows(component='Boiler')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 4kB\n",
+ "Dimensions: (time: 169)\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-01-22\n",
+ "Data variables:\n",
+ " Boiler(Gas) (time) float64 1kB 35.31 31.86 36.13 110.2 ... 21.74 135.3 nan\n",
+ " Boiler(Heat) (time) float64 1kB 32.48 29.31 33.24 101.4 ... 20.0 124.5 nan, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=Boiler(Gas)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Boiler(Gas)',\n",
+ " 'line': {'color': '#636EFA', 'dash': 'solid'},\n",
+ " 'marker': {'symbol': 'circle'},\n",
+ " 'mode': 'lines',\n",
+ " 'name': 'Boiler(Gas)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'type': 'scatter',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('GuEHDXSnQUD261BXdds/QI2yoZ56EE' ... 'SmN701QKxDuYXg6WBAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=Boiler(Heat)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'line': {'color': '#EF553B', 'dash': 'solid'},\n",
+ " 'marker': {'symbol': 'circle'},\n",
+ " 'mode': 'lines',\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'type': 'scatter',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('5ZuWpeU9QED3U8WNBU89QHjXQkqFnk' ... '////8zQPW5+Ef5Hl9AAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Flows (flow_rate)'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 11,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 11
+ },
+ {
+ "cell_type": "markdown",
+ "id": "32",
+ "metadata": {},
+ "source": [
+ "### 3.4 Storage Plot\n",
+ "\n",
+ "Combined view of storage charge state and flows:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "33",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:17.166751Z",
+ "start_time": "2025-12-13T14:13:16.985913Z"
+ }
+ },
+ "source": [
+ "simple.statistics.plot.storage('ThermalStorage')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 5kB\n",
+ "Dimensions: (time: 169)\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-...\n",
+ "Data variables:\n",
+ " ThermalStorage(Charge) (time) float64 1kB 0.0 -3.748e-13 ... 100.0 nan\n",
+ " ThermalStorage(Discharge) (time) float64 1kB -0.0 5.275e-13 ... nan\n",
+ " charge_state (time) float64 1kB 250.0 248.8 ... 102.5 200.0, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=ThermalStorage(Charge)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Charge)',\n",
+ " 'marker': {'color': '#D62728', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Charge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAAAUfPDBB19avby8nSEx72' ... 'AAAAAAANj//////1hAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ThermalStorage(Discharge)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Discharge)',\n",
+ " 'marker': {'color': '#D62728', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Discharge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAIAKPvjgg49iPby8nSEx72' ... 'AAAAAgvWP9SoFav2g9AAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': '',\n",
+ " 'line': {'color': 'black', 'width': 2},\n",
+ " 'marker': {'symbol': 'circle'},\n",
+ " 'mode': 'lines',\n",
+ " 'name': 'charge_state',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'type': 'scatter',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAABAb0AAAAAAABhvQDkzMzMz8G' ... 'LbxcFZQPDkQtTNoFlAAAAAAAAAaUA='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y2'}],\n",
+ " 'layout': {'bargap': 0,\n",
+ " 'bargroupgap': 0,\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'ThermalStorage Operation (flow_rate)'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}},\n",
+ " 'yaxis2': {'overlaying': 'y', 'showgrid': False, 'side': 'right', 'title': {'text': 'Charge State'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 12,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 12
+ },
+ {
+ "cell_type": "markdown",
+ "id": "34",
+ "metadata": {},
+ "source": [
+ "### 3.5 Charge States Plot\n",
+ "\n",
+ "Plot charge state time series directly:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "35",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:17.297322Z",
+ "start_time": "2025-12-13T14:13:17.214857Z"
+ }
+ },
+ "source": [
+ "simple.statistics.plot.charge_states('ThermalStorage')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 3kB\n",
+ "Dimensions: (time: 169)\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-01-22\n",
+ "Data variables:\n",
+ " ThermalStorage (time) float64 1kB 250.0 248.8 247.5 ... 103.0 102.5 200.0, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=ThermalStorage<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage',\n",
+ " 'line': {'color': '#636EFA', 'dash': 'solid'},\n",
+ " 'marker': {'symbol': 'circle'},\n",
+ " 'mode': 'lines',\n",
+ " 'name': 'ThermalStorage',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'type': 'scatter',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAABAb0AAAAAAABhvQDkzMzMz8G' ... 'LbxcFZQPDkQtTNoFlAAAAAAAAAaUA='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Storage Charge States'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'Charge State'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 13,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 13
+ },
+ {
+ "cell_type": "markdown",
+ "id": "36",
+ "metadata": {},
+ "source": [
+ "## 4. Aggregated Plots"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "37",
+ "metadata": {},
+ "source": [
+ "### 4.1 Sizes Plot\n",
+ "\n",
+ "Bar chart of component/flow sizes:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "38",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:17:12.906249Z",
+ "start_time": "2025-12-13T14:17:12.823893Z"
+ }
+ },
+ "source": "multiperiod.statistics.plot.sizes()",
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 208B\n",
+ "Dimensions: (period: 3, scenario: 2)\n",
+ "Coordinates:\n",
+ " * period (period) int64 24B 2024 2025 2026\n",
+ " * scenario (scenario) scenario=high_demand
period=2024
Size=%{y}',\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#30123b', 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['Boiler(Heat)'], dtype=object),\n",
+ " 'xaxis': 'x4',\n",
+ " 'y': {'bdata': 'PvP9oLpQWkA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y4'},\n",
+ " {'hovertemplate': 'Flow=%{x}
scenario=high_demand
period=2025
Size=%{y}',\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#30123b', 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['Boiler(Heat)'], dtype=object),\n",
+ " 'xaxis': 'x5',\n",
+ " 'y': {'bdata': 'PvP9oLpQWkA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y5'},\n",
+ " {'hovertemplate': 'Flow=%{x}
scenario=high_demand
period=2026
Size=%{y}',\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#30123b', 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['Boiler(Heat)'], dtype=object),\n",
+ " 'xaxis': 'x6',\n",
+ " 'y': {'bdata': 'PvP9oLpQWkA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y6'},\n",
+ " {'hovertemplate': 'Flow=%{x}
scenario=low_demand
period=2024
Size=%{y}',\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#30123b', 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['Boiler(Heat)'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': 'PvP9oLpQWkA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'Flow=%{x}
scenario=low_demand
period=2025
Size=%{y}',\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#30123b', 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['Boiler(Heat)'], dtype=object),\n",
+ " 'xaxis': 'x2',\n",
+ " 'y': {'bdata': 'PvP9oLpQWkA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y2'},\n",
+ " {'hovertemplate': 'Flow=%{x}
scenario=low_demand
period=2026
Size=%{y}',\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#30123b', 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['Boiler(Heat)'], dtype=object),\n",
+ " 'xaxis': 'x3',\n",
+ " 'y': {'bdata': 'PvP9oLpQWkA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y3'},\n",
+ " {'hovertemplate': 'Flow=%{x}
scenario=high_demand
period=2024
Size=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage',\n",
+ " 'marker': {'color': '#7a0402', 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['ThermalStorage'], dtype=object),\n",
+ " 'xaxis': 'x4',\n",
+ " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y4'},\n",
+ " {'hovertemplate': 'Flow=%{x}
scenario=high_demand
period=2025
Size=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage',\n",
+ " 'marker': {'color': '#7a0402', 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['ThermalStorage'], dtype=object),\n",
+ " 'xaxis': 'x5',\n",
+ " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y5'},\n",
+ " {'hovertemplate': 'Flow=%{x}
scenario=high_demand
period=2026
Size=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage',\n",
+ " 'marker': {'color': '#7a0402', 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['ThermalStorage'], dtype=object),\n",
+ " 'xaxis': 'x6',\n",
+ " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y6'},\n",
+ " {'hovertemplate': 'Flow=%{x}
scenario=low_demand
period=2024
Size=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage',\n",
+ " 'marker': {'color': '#7a0402', 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['ThermalStorage'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'Flow=%{x}
scenario=low_demand
period=2025
Size=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage',\n",
+ " 'marker': {'color': '#7a0402', 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['ThermalStorage'], dtype=object),\n",
+ " 'xaxis': 'x2',\n",
+ " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y2'},\n",
+ " {'hovertemplate': 'Flow=%{x}
scenario=low_demand
period=2026
Size=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage',\n",
+ " 'marker': {'color': '#7a0402', 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['ThermalStorage'], dtype=object),\n",
+ " 'xaxis': 'x3',\n",
+ " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y3'}],\n",
+ " 'layout': {'annotations': [{'font': {},\n",
+ " 'showarrow': False,\n",
+ " 'text': 'period=2024',\n",
+ " 'x': 0.15666666666666665,\n",
+ " 'xanchor': 'center',\n",
+ " 'xref': 'paper',\n",
+ " 'y': 1.0,\n",
+ " 'yanchor': 'bottom',\n",
+ " 'yref': 'paper'},\n",
+ " {'font': {},\n",
+ " 'showarrow': False,\n",
+ " 'text': 'period=2025',\n",
+ " 'x': 0.49,\n",
+ " 'xanchor': 'center',\n",
+ " 'xref': 'paper',\n",
+ " 'y': 1.0,\n",
+ " 'yanchor': 'bottom',\n",
+ " 'yref': 'paper'},\n",
+ " {'font': {},\n",
+ " 'showarrow': False,\n",
+ " 'text': 'period=2026',\n",
+ " 'x': 0.8233333333333333,\n",
+ " 'xanchor': 'center',\n",
+ " 'xref': 'paper',\n",
+ " 'y': 1.0,\n",
+ " 'yanchor': 'bottom',\n",
+ " 'yref': 'paper'},\n",
+ " {'font': {},\n",
+ " 'showarrow': False,\n",
+ " 'text': 'scenario=low_demand',\n",
+ " 'textangle': 90,\n",
+ " 'x': 0.98,\n",
+ " 'xanchor': 'left',\n",
+ " 'xref': 'paper',\n",
+ " 'y': 0.2425,\n",
+ " 'yanchor': 'middle',\n",
+ " 'yref': 'paper'},\n",
+ " {'font': {},\n",
+ " 'showarrow': False,\n",
+ " 'text': 'scenario=high_demand',\n",
+ " 'textangle': 90,\n",
+ " 'x': 0.98,\n",
+ " 'xanchor': 'left',\n",
+ " 'xref': 'paper',\n",
+ " 'y': 0.7575000000000001,\n",
+ " 'yanchor': 'middle',\n",
+ " 'yref': 'paper'}],\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'Flow'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Investment Sizes'},\n",
+ " 'xaxis': {'anchor': 'y',\n",
+ " 'categoryarray': [Boiler(Heat), ThermalStorage],\n",
+ " 'categoryorder': 'array',\n",
+ " 'domain': [0.0, 0.3133333333333333],\n",
+ " 'title': {'text': 'Flow'}},\n",
+ " 'xaxis2': {'anchor': 'y2',\n",
+ " 'categoryarray': [Boiler(Heat), ThermalStorage],\n",
+ " 'categoryorder': 'array',\n",
+ " 'domain': [0.3333333333333333, 0.6466666666666666],\n",
+ " 'matches': 'x',\n",
+ " 'title': {'text': 'Flow'}},\n",
+ " 'xaxis3': {'anchor': 'y3',\n",
+ " 'categoryarray': [Boiler(Heat), ThermalStorage],\n",
+ " 'categoryorder': 'array',\n",
+ " 'domain': [0.6666666666666666, 0.98],\n",
+ " 'matches': 'x',\n",
+ " 'title': {'text': 'Flow'}},\n",
+ " 'xaxis4': {'anchor': 'y4', 'domain': [0.0, 0.3133333333333333], 'matches': 'x', 'showticklabels': False},\n",
+ " 'xaxis5': {'anchor': 'y5',\n",
+ " 'domain': [0.3333333333333333, 0.6466666666666666],\n",
+ " 'matches': 'x',\n",
+ " 'showticklabels': False},\n",
+ " 'xaxis6': {'anchor': 'y6', 'domain': [0.6666666666666666, 0.98], 'matches': 'x', 'showticklabels': False},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 0.485], 'title': {'text': 'Size'}},\n",
+ " 'yaxis2': {'anchor': 'x2', 'domain': [0.0, 0.485], 'matches': 'y', 'showticklabels': False},\n",
+ " 'yaxis3': {'anchor': 'x3', 'domain': [0.0, 0.485], 'matches': 'y', 'showticklabels': False},\n",
+ " 'yaxis4': {'anchor': 'x4', 'domain': [0.515, 1.0], 'matches': 'y', 'title': {'text': 'Size'}},\n",
+ " 'yaxis5': {'anchor': 'x5', 'domain': [0.515, 1.0], 'matches': 'y', 'showticklabels': False},\n",
+ " 'yaxis6': {'anchor': 'x6', 'domain': [0.515, 1.0], 'matches': 'y', 'showticklabels': False}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 46,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 46
+ },
+ {
+ "cell_type": "markdown",
+ "id": "39",
+ "metadata": {},
+ "source": [
+ "### 4.2 Effects Plot\n",
+ "\n",
+ "Bar chart of effect totals by component:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "40",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:17.440231Z",
+ "start_time": "2025-12-13T14:13:17.355184Z"
+ }
+ },
+ "source": [
+ "simple.statistics.plot.effects(effect='costs')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 24B\n",
+ "Dimensions: (effect: 1, component: 1)\n",
+ "Coordinates:\n",
+ " * effect (effect) object 8B 'costs'\n",
+ " * component (component) object 8B 'GasGrid'\n",
+ "Data variables:\n",
+ " total (effect, component) float64 8B 558.8, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'component=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'GasGrid',\n",
+ " 'marker': {'color': '#a4fc3b', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'GasGrid',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['GasGrid'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': 'sDkY5qR2gUA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'bargap': 0,\n",
+ " 'bargroupgap': 0,\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'component'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'costs (total) by component'},\n",
+ " 'xaxis': {'anchor': 'y',\n",
+ " 'categoryarray': [GasGrid],\n",
+ " 'categoryorder': 'array',\n",
+ " 'domain': [0.0, 1.0],\n",
+ " 'title': {'text': 'component'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 15,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 15
+ },
+ {
+ "cell_type": "code",
+ "id": "41",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:17.547032Z",
+ "start_time": "2025-12-13T14:13:17.454197Z"
+ }
+ },
+ "source": [
+ "# Multi-effect system: compare costs and CO2\n",
+ "complex_sys.statistics.plot.effects(effect='costs')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 104B\n",
+ "Dimensions: (effect: 1, component: 6)\n",
+ "Coordinates:\n",
+ " * effect (effect) object 8B 'costs'\n",
+ " * component (component) object 48B 'CHP' ... 'HeatStorage'\n",
+ "Data variables:\n",
+ " total (effect, component) float64 48B 76.0 -297.4 102.9 420.8 0.0 0.0, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'component=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'CHP',\n",
+ " 'marker': {'color': '#30123b', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'CHP',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['CHP'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': 'AAAAAAAAU0A=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'component=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ElectricityExport',\n",
+ " 'marker': {'color': '#3c99f9', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ElectricityExport',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['ElectricityExport'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': 'QuE7D7GWcsA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'component=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ElectricityImport',\n",
+ " 'marker': {'color': '#49f683', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ElectricityImport',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['ElectricityImport'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': 'mB7bhVm8WUA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'component=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'GasGrid',\n",
+ " 'marker': {'color': '#dfda36', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'GasGrid',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['GasGrid'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': 'VVvjiWRNekA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'component=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'HeatPump',\n",
+ " 'marker': {'color': '#ee5a12', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'HeatPump',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['HeatPump'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'component=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'HeatStorage',\n",
+ " 'marker': {'color': '#7a0402', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'HeatStorage',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['HeatStorage'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'bargap': 0,\n",
+ " 'bargroupgap': 0,\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'component'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'costs (total) by component'},\n",
+ " 'xaxis': {'anchor': 'y',\n",
+ " 'categoryarray': [CHP, ElectricityExport,\n",
+ " ElectricityImport, GasGrid, HeatPump,\n",
+ " HeatStorage],\n",
+ " 'categoryorder': 'array',\n",
+ " 'domain': [0.0, 1.0],\n",
+ " 'title': {'text': 'component'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 16,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 16
+ },
+ {
+ "cell_type": "code",
+ "id": "42",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:17.616154Z",
+ "start_time": "2025-12-13T14:13:17.558702Z"
+ }
+ },
+ "source": [
+ "complex_sys.statistics.plot.effects(effect='CO2')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 104B\n",
+ "Dimensions: (effect: 1, component: 6)\n",
+ "Coordinates:\n",
+ " * effect (effect) object 8B 'CO2'\n",
+ " * component (component) object 48B 'CHP' ... 'HeatStorage'\n",
+ "Data variables:\n",
+ " total (effect, component) float64 48B 0.0 0.0 255.1 1.403e+03 0.0 0.0, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'component=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'CHP',\n",
+ " 'marker': {'color': '#30123b', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'CHP',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['CHP'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'component=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ElectricityExport',\n",
+ " 'marker': {'color': '#3c99f9', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ElectricityExport',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['ElectricityExport'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'component=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ElectricityImport',\n",
+ " 'marker': {'color': '#49f683', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ElectricityImport',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['ElectricityImport'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': 'PuZR52/jb0A=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'component=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'GasGrid',\n",
+ " 'marker': {'color': '#dfda36', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'GasGrid',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['GasGrid'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': 'HMySHSnrlUA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'component=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'HeatPump',\n",
+ " 'marker': {'color': '#ee5a12', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'HeatPump',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['HeatPump'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'component=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'HeatStorage',\n",
+ " 'marker': {'color': '#7a0402', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'HeatStorage',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['HeatStorage'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': 'AAAAAAAAAAA=', 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'bargap': 0,\n",
+ " 'bargroupgap': 0,\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'component'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'CO2 (total) by component'},\n",
+ " 'xaxis': {'anchor': 'y',\n",
+ " 'categoryarray': [CHP, ElectricityExport,\n",
+ " ElectricityImport, GasGrid, HeatPump,\n",
+ " HeatStorage],\n",
+ " 'categoryorder': 'array',\n",
+ " 'domain': [0.0, 1.0],\n",
+ " 'title': {'text': 'component'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 17,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 17
+ },
+ {
+ "cell_type": "markdown",
+ "id": "43",
+ "metadata": {},
+ "source": [
+ "### 4.3 Duration Curve\n",
+ "\n",
+ "Shows how often each power level is reached:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "44",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:17.659929Z",
+ "start_time": "2025-12-13T14:13:17.624261Z"
+ }
+ },
+ "source": [
+ "simple.statistics.plot.duration_curve('Boiler(Heat)')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 3kB\n",
+ "Dimensions: (duration: 169)\n",
+ "Coordinates:\n",
+ " * duration (duration) int64 1kB 0 1 2 3 4 5 6 ... 163 164 165 166 167 168\n",
+ "Data variables:\n",
+ " Boiler(Heat) (duration) float64 1kB nan 137.8 134.1 133.1 ... 0.0 0.0 0.0, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=Boiler(Heat)
duration=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'line': {'color': '#636EFA', 'dash': 'solid'},\n",
+ " 'marker': {'symbol': 'circle'},\n",
+ " 'mode': 'lines',\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'type': 'scatter',\n",
+ " 'x': {'bdata': ('AAABAAIAAwAEAAUABgAHAAgACQAKAA' ... '4AnwCgAKEAogCjAKQApQCmAKcAqAA='),\n",
+ " 'dtype': 'i2'},\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('/////////39oQtzNVzphQLt+ZyCBw2' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAA='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Duration Curve'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'Timesteps'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 18,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 18
+ },
+ {
+ "cell_type": "code",
+ "id": "45",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:17.711351Z",
+ "start_time": "2025-12-13T14:13:17.670270Z"
+ }
+ },
+ "source": [
+ "# Multiple variables\n",
+ "complex_sys.statistics.plot.duration_curve(['CHP(Heat)', 'HeatPump(Heat)', 'BackupBoiler(Heat)'])"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 2kB\n",
+ "Dimensions: (duration: 73)\n",
+ "Coordinates:\n",
+ " * duration (duration) int64 584B 0 1 2 3 4 5 ... 67 68 69 70 71 72\n",
+ "Data variables:\n",
+ " CHP(Heat) (duration) float64 584B nan 80.88 80.62 ... 0.0 0.0 0.0\n",
+ " HeatPump(Heat) (duration) float64 584B nan 0.0 0.0 0.0 ... 0.0 0.0 0.0\n",
+ " BackupBoiler(Heat) (duration) float64 584B nan 63.11 ... -8.993e-15, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=CHP(Heat)
duration=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'CHP(Heat)',\n",
+ " 'line': {'color': '#636EFA', 'dash': 'solid'},\n",
+ " 'marker': {'symbol': 'circle'},\n",
+ " 'mode': 'lines',\n",
+ " 'name': 'CHP(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'type': 'scatter',\n",
+ " 'x': {'bdata': ('AAECAwQFBgcICQoLDA0ODxAREhMUFR' ... 'Q1Njc4OTo7PD0+P0BBQkNERUZHSA=='),\n",
+ " 'dtype': 'i1'},\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('/////////39Gwcq9YjhUQOyIZIeOJ1' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAA='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=HeatPump(Heat)
duration=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'HeatPump(Heat)',\n",
+ " 'line': {'color': '#EF553B', 'dash': 'solid'},\n",
+ " 'marker': {'symbol': 'circle'},\n",
+ " 'mode': 'lines',\n",
+ " 'name': 'HeatPump(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'type': 'scatter',\n",
+ " 'x': {'bdata': ('AAECAwQFBgcICQoLDA0ODxAREhMUFR' ... 'Q1Njc4OTo7PD0+P0BBQkNERUZHSA=='),\n",
+ " 'dtype': 'i1'},\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('/////////38AAAAAAAAAAAAAAAAAAA' ... 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAA='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=BackupBoiler(Heat)
duration=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'BackupBoiler(Heat)',\n",
+ " 'line': {'color': '#00CC96', 'dash': 'solid'},\n",
+ " 'marker': {'symbol': 'circle'},\n",
+ " 'mode': 'lines',\n",
+ " 'name': 'BackupBoiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'type': 'scatter',\n",
+ " 'x': {'bdata': ('AAECAwQFBgcICQoLDA0ODxAREhMUFR' ... 'Q1Njc4OTo7PD0+P0BBQkNERUZHSA=='),\n",
+ " 'dtype': 'i1'},\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('/////////38h4dgzOo5PQDMD0m1cz0' ... 'AAAACwvAAAAAAAALi8AAAAAABABL0='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Duration Curve'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'Timesteps'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 19,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 19
+ },
+ {
+ "cell_type": "markdown",
+ "id": "46",
+ "metadata": {},
+ "source": [
+ "## 5. Heatmaps\n",
+ "\n",
+ "Heatmaps reshape time series into 2D grids (e.g., hour-of-day vs day):"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "47",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:17.799982Z",
+ "start_time": "2025-12-13T14:13:17.729391Z"
+ }
+ },
+ "source": [
+ "# Auto-reshape based on data frequency\n",
+ "simple.statistics.plot.heatmap('Boiler(Heat)')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 2kB\n",
+ "Dimensions: (timeframe: 8, timestep: 24)\n",
+ "Coordinates:\n",
+ " * timeframe (timeframe) object 64B '2024-01-15' '2024-01-16' ... '2024-01-22'\n",
+ " * timestep (timestep) object 192B '00:00' '01:00' ... '22:00' '23:00'\n",
+ "Data variables:\n",
+ " value (timestep, timeframe) float64 2kB 32.48 42.84 47.28 ... 124.5 nan, figure=Figure({\n",
+ " 'data': [{'coloraxis': 'coloraxis',\n",
+ " 'hovertemplate': 'timeframe: %{x}
timestep: %{y}
Boiler(Heat)|flow_rate: %{z}',\n",
+ " 'name': '0',\n",
+ " 'type': 'heatmap',\n",
+ " 'x': array(['2024-01-15', '2024-01-16', '2024-01-17', '2024-01-18', '2024-01-19',\n",
+ " '2024-01-20', '2024-01-21', '2024-01-22'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': array(['00:00', '01:00', '02:00', '03:00', '04:00', '05:00', '06:00', '07:00',\n",
+ " '08:00', '09:00', '10:00', '11:00', '12:00', '13:00', '14:00', '15:00',\n",
+ " '16:00', '17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],\n",
+ " dtype=object),\n",
+ " 'yaxis': 'y',\n",
+ " 'z': {'bdata': ('5ZuWpeU9QED8nmEA1mtFQOR8bxYopE' ... '//////M0D1ufhH+R5fQAAAAAAAAPh/'),\n",
+ " 'dtype': 'f8',\n",
+ " 'shape': '24, 8'}}],\n",
+ " 'layout': {'coloraxis': {'colorbar': {'title': {'text': 'Boiler(Heat)|flow_rate'}},\n",
+ " 'colorscale': [[0.0, '#30123b'],\n",
+ " [0.07142857142857142, '#4145ab'],\n",
+ " [0.14285714285714285, '#4675ed'],\n",
+ " [0.21428571428571427, '#39a2fc'],\n",
+ " [0.2857142857142857, '#1bcfd4'],\n",
+ " [0.35714285714285715, '#24eca6'],\n",
+ " [0.42857142857142855, '#61fc6c'], [0.5,\n",
+ " '#a4fc3b'], [0.5714285714285714,\n",
+ " '#d1e834'], [0.6428571428571429,\n",
+ " '#f3c63a'], [0.7142857142857143,\n",
+ " '#fe9b2d'], [0.7857142857142857,\n",
+ " '#f36315'], [0.8571428571428571,\n",
+ " '#d93806'], [0.9285714285714286,\n",
+ " '#b11901'], [1.0, '#7a0402']]},\n",
+ " 'margin': {'t': 60},\n",
+ " 'template': '...',\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'timeframe'}},\n",
+ " 'yaxis': {'anchor': 'x', 'autorange': 'reversed', 'domain': [0.0, 1.0], 'title': {'text': 'timestep'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 20,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 20
+ },
+ {
+ "cell_type": "code",
+ "id": "48",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:17.849042Z",
+ "start_time": "2025-12-13T14:13:17.808302Z"
+ }
+ },
+ "source": [
+ "# Storage charge state heatmap\n",
+ "simple.statistics.plot.heatmap('ThermalStorage')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 2kB\n",
+ "Dimensions: (timeframe: 8, timestep: 24)\n",
+ "Coordinates:\n",
+ " * timeframe (timeframe) object 64B '2024-01-15' '2024-01-16' ... '2024-01-22'\n",
+ " * timestep (timestep) object 192B '00:00' '01:00' ... '22:00' '23:00'\n",
+ "Data variables:\n",
+ " value (timestep, timeframe) float64 2kB 250.0 1.379e-14 ... 102.5 nan, figure=Figure({\n",
+ " 'data': [{'coloraxis': 'coloraxis',\n",
+ " 'hovertemplate': ('timeframe: %{x}
timestep: %' ... 'rge_state: %{z}'),\n",
+ " 'name': '0',\n",
+ " 'type': 'heatmap',\n",
+ " 'x': array(['2024-01-15', '2024-01-16', '2024-01-17', '2024-01-18', '2024-01-19',\n",
+ " '2024-01-20', '2024-01-21', '2024-01-22'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': array(['00:00', '01:00', '02:00', '03:00', '04:00', '05:00', '06:00', '07:00',\n",
+ " '08:00', '09:00', '10:00', '11:00', '12:00', '13:00', '14:00', '15:00',\n",
+ " '16:00', '17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],\n",
+ " dtype=object),\n",
+ " 'yaxis': 'y',\n",
+ " 'z': {'bdata': ('AAAAAABAb0DkBdNVug0PPZGJ+Pa5Lj' ... 'AAAAAAAADw5ELUzaBZQAAAAAAAAPh/'),\n",
+ " 'dtype': 'f8',\n",
+ " 'shape': '24, 8'}}],\n",
+ " 'layout': {'coloraxis': {'colorbar': {'title': {'text': 'ThermalStorage|charge_state'}},\n",
+ " 'colorscale': [[0.0, '#30123b'],\n",
+ " [0.07142857142857142, '#4145ab'],\n",
+ " [0.14285714285714285, '#4675ed'],\n",
+ " [0.21428571428571427, '#39a2fc'],\n",
+ " [0.2857142857142857, '#1bcfd4'],\n",
+ " [0.35714285714285715, '#24eca6'],\n",
+ " [0.42857142857142855, '#61fc6c'], [0.5,\n",
+ " '#a4fc3b'], [0.5714285714285714,\n",
+ " '#d1e834'], [0.6428571428571429,\n",
+ " '#f3c63a'], [0.7142857142857143,\n",
+ " '#fe9b2d'], [0.7857142857142857,\n",
+ " '#f36315'], [0.8571428571428571,\n",
+ " '#d93806'], [0.9285714285714286,\n",
+ " '#b11901'], [1.0, '#7a0402']]},\n",
+ " 'margin': {'t': 60},\n",
+ " 'template': '...',\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'timeframe'}},\n",
+ " 'yaxis': {'anchor': 'x', 'autorange': 'reversed', 'domain': [0.0, 1.0], 'title': {'text': 'timestep'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 21,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 21
+ },
+ {
+ "cell_type": "code",
+ "id": "49",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:17.900833Z",
+ "start_time": "2025-12-13T14:13:17.858196Z"
+ }
+ },
+ "source": [
+ "# Custom colorscale\n",
+ "simple.statistics.plot.heatmap('Office(Heat)', color_continuous_scale='Blues', title='Heat Demand Pattern')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 2kB\n",
+ "Dimensions: (timeframe: 8, timestep: 24)\n",
+ "Coordinates:\n",
+ " * timeframe (timeframe) object 64B '2024-01-15' '2024-01-16' ... '2024-01-22'\n",
+ " * timestep (timestep) object 192B '00:00' '01:00' ... '22:00' '23:00'\n",
+ "Data variables:\n",
+ " value (timestep, timeframe) float64 2kB 32.48 27.28 31.72 ... 24.48 nan, figure=Figure({\n",
+ " 'data': [{'coloraxis': 'coloraxis',\n",
+ " 'hovertemplate': 'timeframe: %{x}
timestep: %{y}
Office(Heat)|flow_rate: %{z}',\n",
+ " 'name': '0',\n",
+ " 'type': 'heatmap',\n",
+ " 'x': array(['2024-01-15', '2024-01-16', '2024-01-17', '2024-01-18', '2024-01-19',\n",
+ " '2024-01-20', '2024-01-21', '2024-01-22'], dtype=object),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': array(['00:00', '01:00', '02:00', '03:00', '04:00', '05:00', '06:00', '07:00',\n",
+ " '08:00', '09:00', '10:00', '11:00', '12:00', '13:00', '14:00', '15:00',\n",
+ " '16:00', '17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'],\n",
+ " dtype=object),\n",
+ " 'yaxis': 'y',\n",
+ " 'z': {'bdata': ('5ZuWpeU9QEDqSDirMEc7QB8FVNfUtz' ... 'AAAAAANECu5+If5Xs4QAAAAAAAAPh/'),\n",
+ " 'dtype': 'f8',\n",
+ " 'shape': '24, 8'}}],\n",
+ " 'layout': {'coloraxis': {'colorbar': {'title': {'text': 'Office(Heat)|flow_rate'}},\n",
+ " 'colorscale': [[0.0, 'rgb(247,251,255)'], [0.125,\n",
+ " 'rgb(222,235,247)'], [0.25,\n",
+ " 'rgb(198,219,239)'], [0.375,\n",
+ " 'rgb(158,202,225)'], [0.5,\n",
+ " 'rgb(107,174,214)'], [0.625,\n",
+ " 'rgb(66,146,198)'], [0.75,\n",
+ " 'rgb(33,113,181)'], [0.875,\n",
+ " 'rgb(8,81,156)'], [1.0,\n",
+ " 'rgb(8,48,107)']]},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Heat Demand Pattern'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'timeframe'}},\n",
+ " 'yaxis': {'anchor': 'x', 'autorange': 'reversed', 'domain': [0.0, 1.0], 'title': {'text': 'timestep'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 22,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 22
+ },
+ {
+ "cell_type": "markdown",
+ "id": "50",
+ "metadata": {},
+ "source": [
+ "## 6. Sankey Diagrams\n",
+ "\n",
+ "Sankey diagrams visualize energy flows through the system."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "51",
+ "metadata": {},
+ "source": [
+ "### 6.1 Flow Sankey\n",
+ "\n",
+ "Total energy flows:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "52",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:17.930662Z",
+ "start_time": "2025-12-13T14:13:17.908846Z"
+ }
+ },
+ "source": [
+ "simple.statistics.plot.sankey.flows()"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 1kB\n",
+ "Dimensions: (link: 6)\n",
+ "Coordinates:\n",
+ " * link (link) int64 48B 0 1 2 3 4 5\n",
+ " source (link) \n",
+ " "
+ ]
+ },
+ "execution_count": 23,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 23
+ },
+ {
+ "cell_type": "code",
+ "id": "53",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:17.970954Z",
+ "start_time": "2025-12-13T14:13:17.939809Z"
+ }
+ },
+ "source": [
+ "# Complex system with multiple carriers\n",
+ "complex_sys.statistics.plot.sankey.flows()"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 3kB\n",
+ "Dimensions: (link: 10)\n",
+ "Coordinates:\n",
+ " * link (link) int64 80B 0 1 2 3 4 5 6 7 8 9\n",
+ " source (link) \n",
+ " "
+ ]
+ },
+ "execution_count": 24,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 24
+ },
+ {
+ "cell_type": "markdown",
+ "id": "54",
+ "metadata": {},
+ "source": [
+ "### 6.2 Sizes Sankey\n",
+ "\n",
+ "Capacity/size allocation:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "55",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:17.993818Z",
+ "start_time": "2025-12-13T14:13:17.977340Z"
+ }
+ },
+ "source": [
+ "multiperiod.statistics.plot.sankey.sizes()"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 120B\n",
+ "Dimensions: (link: 1)\n",
+ "Coordinates:\n",
+ " * link (link) int64 8B 0\n",
+ " source (link) \n",
+ " "
+ ]
+ },
+ "execution_count": 25,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 25
+ },
+ {
+ "cell_type": "markdown",
+ "id": "56",
+ "metadata": {},
+ "source": [
+ "### 6.3 Peak Flow Sankey\n",
+ "\n",
+ "Maximum flow rates (peak power):"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "57",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.029364Z",
+ "start_time": "2025-12-13T14:13:18.001651Z"
+ }
+ },
+ "source": [
+ "simple.statistics.plot.sankey.peak_flow()"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 1kB\n",
+ "Dimensions: (link: 6)\n",
+ "Coordinates:\n",
+ " * link (link) int64 48B 0 1 2 3 4 5\n",
+ " source (link) \n",
+ " "
+ ]
+ },
+ "execution_count": 26,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 26
+ },
+ {
+ "cell_type": "markdown",
+ "id": "58",
+ "metadata": {},
+ "source": [
+ "### 6.4 Effects Sankey\n",
+ "\n",
+ "Cost/emission allocation:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "59",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.051137Z",
+ "start_time": "2025-12-13T14:13:18.037718Z"
+ }
+ },
+ "source": [
+ "simple.statistics.plot.sankey.effects(select={'effect': 'costs'})"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 184B\n",
+ "Dimensions: (link: 1)\n",
+ "Coordinates:\n",
+ " * link (link) int64 8B 0\n",
+ " source (link) \n",
+ " "
+ ]
+ },
+ "execution_count": 27,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 27
+ },
+ {
+ "cell_type": "code",
+ "id": "60",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.072870Z",
+ "start_time": "2025-12-13T14:13:18.057665Z"
+ }
+ },
+ "source": [
+ "# CO2 allocation in complex system\n",
+ "complex_sys.statistics.plot.sankey.effects(select={'effect': 'CO2'})"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 488B\n",
+ "Dimensions: (link: 2)\n",
+ "Coordinates:\n",
+ " * link (link) int64 16B 0 1\n",
+ " source (link) \n",
+ " "
+ ]
+ },
+ "execution_count": 28,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 28
+ },
+ {
+ "cell_type": "markdown",
+ "id": "61",
+ "metadata": {},
+ "source": [
+ "### 6.5 Filtering with `select`\n",
+ "\n",
+ "Filter Sankey to specific buses or carriers:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "62",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.102271Z",
+ "start_time": "2025-12-13T14:13:18.087615Z"
+ }
+ },
+ "source": [
+ "# Only heat flows\n",
+ "complex_sys.statistics.plot.sankey.flows(select={'bus': 'Heat'})"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 576B\n",
+ "Dimensions: (link: 3)\n",
+ "Coordinates:\n",
+ " * link (link) int64 24B 0 1 2\n",
+ " source (link) \n",
+ " "
+ ]
+ },
+ "execution_count": 29,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 29
+ },
+ {
+ "cell_type": "markdown",
+ "id": "63",
+ "metadata": {},
+ "source": [
+ "## 7. Topology Visualization\n",
+ "\n",
+ "Visualize the system structure (no solution data required)."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "64",
+ "metadata": {},
+ "source": [
+ "### 7.1 Topology Plot\n",
+ "\n",
+ "Sankey-style network diagram:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "65",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.129663Z",
+ "start_time": "2025-12-13T14:13:18.109005Z"
+ }
+ },
+ "source": [
+ "simple.topology.plot()"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 1kB\n",
+ "Dimensions: (link: 6)\n",
+ "Coordinates:\n",
+ " * link (link) ',\n",
+ " 'label': [Boiler(Gas), Boiler(Heat), GasGrid(Gas),\n",
+ " Office(Heat), ThermalStorage(Charge),\n",
+ " ThermalStorage(Discharge)],\n",
+ " 'source': [5, 4, 0, 1, 1, 2],\n",
+ " 'target': [4, 1, 5, 3, 2, 1],\n",
+ " 'value': [1, 1, 1, 1, 1, 1]},\n",
+ " 'node': {'color': [#636EFA, #D62728, #00CC96, #AB63FA, #EF553B,\n",
+ " #1F77B4],\n",
+ " 'customdata': [Source('GasGrid')
outputs:
*\n",
+ " Flow('GasGrid(Gas)', bus='Gas', size=500.0,\n",
+ " effects_per_flow_hour={'costs': ~0.1}),\n",
+ " Bus('Heat', carrier='heat')
inputs:
\n",
+ " * Flow('Boiler(Heat)', bus='Heat',\n",
+ " size=150.0)
*\n",
+ " Flow('ThermalStorage(Discharge)', bus='Heat',\n",
+ " size=100.0,\n",
+ " status_parameters=StatusParameters())
\n",
+ " outputs:
*\n",
+ " Flow('ThermalStorage(Charge)', bus='Heat',\n",
+ " size=100.0,\n",
+ " status_parameters=StatusParameters())
\n",
+ " * Flow('Office(Heat)', bus='Heat', size=1.0,\n",
+ " fixed_relative_profile=20.0-92.3),\n",
+ " Storage('ThermalStorage',\n",
+ " capacity_in_flow_hours=500.0,\n",
+ " initial_charge_state=250.0,\n",
+ " minimal_final_charge_state=200.0,\n",
+ " eta_charge=1.0, eta_discharge=1.0,\n",
+ " relative_loss_per_hour=0.0)
inputs:
\n",
+ " * Flow('ThermalStorage(Charge)', bus='Heat',\n",
+ " size=100.0,\n",
+ " status_parameters=StatusParameters())
\n",
+ " outputs:
*\n",
+ " Flow('ThermalStorage(Discharge)', bus='Heat',\n",
+ " size=100.0,\n",
+ " status_parameters=StatusParameters()),\n",
+ " Sink('Office')
inputs:
*\n",
+ " Flow('Office(Heat)', bus='Heat', size=1.0,\n",
+ " fixed_relative_profile=20.0-92.3),\n",
+ " Boiler('Boiler', thermal_efficiency=0.9,\n",
+ " fuel_flow=Flow('Boiler(Gas)', bus='Gas'),\n",
+ " thermal_flow=Flow('Boiler(Heat)', bus='Heat',\n",
+ " size=150.0))
inputs:
*\n",
+ " Flow('Boiler(Gas)', bus='Gas')
\n",
+ " outputs:
* Flow('Boiler(Heat)',\n",
+ " bus='Heat', size=150.0), Bus('Gas',\n",
+ " carrier='gas')
inputs:
*\n",
+ " Flow('GasGrid(Gas)', bus='Gas', size=500.0,\n",
+ " effects_per_flow_hour={'costs': ~0.1})
\n",
+ " outputs:
* Flow('Boiler(Gas)',\n",
+ " bus='Gas')],\n",
+ " 'hovertemplate': '%{customdata}',\n",
+ " 'label': [GasGrid, Heat, ThermalStorage, Office, Boiler,\n",
+ " Gas],\n",
+ " 'line': {'color': 'black', 'width': 0.5},\n",
+ " 'pad': 15,\n",
+ " 'thickness': 20},\n",
+ " 'type': 'sankey'}],\n",
+ " 'layout': {'template': '...', 'title': {'text': 'Flow System Topology'}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 30,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 30
+ },
+ {
+ "cell_type": "code",
+ "id": "66",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.157403Z",
+ "start_time": "2025-12-13T14:13:18.136357Z"
+ }
+ },
+ "source": [
+ "complex_sys.topology.plot(title='Complex System Topology')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 3kB\n",
+ "Dimensions: (link: 14)\n",
+ "Coordinates:\n",
+ " * link (link) ',\n",
+ " 'label': [BackupBoiler(Gas), BackupBoiler(Heat), CHP(El),\n",
+ " CHP(Gas), CHP(Heat), ElDemand(El),\n",
+ " ElectricityExport(El), ElectricityImport(El),\n",
+ " GasGrid(Gas), HeatDemand(Heat), HeatPump(El),\n",
+ " HeatPump(Heat), HeatStorage(Charge),\n",
+ " HeatStorage(Discharge)],\n",
+ " 'source': [11, 1, 9, 11, 9, 0, 0, 10, 2, 6, 0, 3, 6, 8],\n",
+ " 'target': [1, 6, 0, 9, 6, 4, 5, 0, 11, 7, 3, 6, 8, 6],\n",
+ " 'value': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]},\n",
+ " 'node': {'color': [#FECB52, #19D3F3, #636EFA, #FFA15A, #FF97FF,\n",
+ " #00CC96, #D62728, #B6E880, #FF6692, #AB63FA,\n",
+ " #EF553B, #1F77B4],\n",
+ " 'customdata': [Bus('Electricity',\n",
+ " carrier='electricity')
inputs:
*\n",
+ " Flow('ElectricityImport(El)',\n",
+ " bus='Electricity', size=100.0,\n",
+ " effects_per_flow_hour={'costs': 0.1-0.2,\n",
+ " 'CO2': 0.3-0.4})
* Flow('CHP(El)',\n",
+ " bus='Electricity', size=80.0,\n",
+ " status_parameters=StatusParameters())
\n",
+ " outputs:
*\n",
+ " Flow('ElectricityExport(El)',\n",
+ " bus='Electricity', size=50.0,\n",
+ " effects_per_flow_hour={'costs': -0.2--\n",
+ " 0.1})
* Flow('HeatPump(El)',\n",
+ " bus='Electricity')
*\n",
+ " Flow('ElDemand(El)', bus='Electricity',\n",
+ " size=1.0, fixed_relative_profile=10.0-42.3),\n",
+ " Boiler('BackupBoiler',\n",
+ " thermal_efficiency=0.9,\n",
+ " fuel_flow=Flow('BackupBoiler(Gas)',\n",
+ " bus='Gas'),\n",
+ " thermal_flow=Flow('BackupBoiler(Heat)',\n",
+ " bus='Heat', size=80.0))
inputs:
*\n",
+ " Flow('BackupBoiler(Gas)', bus='Gas')
\n",
+ " outputs:
* Flow('BackupBoiler(Heat)',\n",
+ " bus='Heat', size=80.0), Source('GasGrid')
\n",
+ " outputs:
* Flow('GasGrid(Gas)',\n",
+ " bus='Gas', size=300.0,\n",
+ " effects_per_flow_hour={'costs': 0.1, 'CO2':\n",
+ " 0.2}), HeatPump('HeatPump', cop=3.5,\n",
+ " electrical_flow=Flow('HeatPump(El)',\n",
+ " bus='Electricity'),\n",
+ " thermal_flow=Flow('HeatPump(Heat)',\n",
+ " bus='Heat', size=InvestP...)
inputs:
\n",
+ " * Flow('HeatPump(El)', bus='Electricity')
\n",
+ " outputs:
* Flow('HeatPump(Heat)',\n",
+ " bus='Heat',\n",
+ " size=InvestParameters(minimum_size=0.0,\n",
+ " maximum_size...), Sink('ElDemand')
\n",
+ " inputs:
* Flow('ElDemand(El)',\n",
+ " bus='Electricity', size=1.0,\n",
+ " fixed_relative_profile=10.0-42.3),\n",
+ " Sink('ElectricityExport')
inputs:
\n",
+ " * Flow('ElectricityExport(El)',\n",
+ " bus='Electricity', size=50.0,\n",
+ " effects_per_flow_hour={'costs': -0.2--0.1}),\n",
+ " Bus('Heat', carrier='heat')
inputs:
\n",
+ " * Flow('CHP(Heat)', bus='Heat', size=85.0,\n",
+ " status_parameters=StatusParameters())
\n",
+ " * Flow('HeatPump(Heat)', bus='Heat',\n",
+ " size=InvestParameters(minimum_size=0.0,\n",
+ " maximum_size...)
*\n",
+ " Flow('BackupBoiler(Heat)', bus='Heat',\n",
+ " size=80.0)
*\n",
+ " Flow('HeatStorage(Discharge)', bus='Heat',\n",
+ " size=50.0,\n",
+ " status_parameters=StatusParameters())
\n",
+ " outputs:
* Flow('HeatStorage(Charge)',\n",
+ " bus='Heat', size=50.0,\n",
+ " status_parameters=StatusParameters())
\n",
+ " * Flow('HeatDemand(Heat)', bus='Heat',\n",
+ " size=1.0, fixed_relative_profile=20.0-87.5),\n",
+ " Sink('HeatDemand')
inputs:
*\n",
+ " Flow('HeatDemand(Heat)', bus='Heat',\n",
+ " size=1.0, fixed_relative_profile=20.0-87.5),\n",
+ " Storage('HeatStorage', capacity_in_flow_hours\n",
+ " =InvestParameters(minimum_size=0.0,\n",
+ " maximum_size..., eta_charge=1.0,\n",
+ " eta_discharge=1.0)
inputs:
*\n",
+ " Flow('HeatStorage(Charge)', bus='Heat',\n",
+ " size=50.0,\n",
+ " status_parameters=StatusParameters())
\n",
+ " outputs:
*\n",
+ " Flow('HeatStorage(Discharge)', bus='Heat',\n",
+ " size=50.0,\n",
+ " status_parameters=StatusParameters()),\n",
+ " LinearConverter('CHP', status_parameters=Stat\n",
+ " usParameters(effects_per_active_hour={'cost..\n",
+ " ., piecewise_conversion=PiecewiseConversion(p\n",
+ " iecewises={'Gas': Piecewis...)
\n",
+ " inputs:
* Flow('CHP(Gas)', bus='Gas',\n",
+ " size=200.0,\n",
+ " status_parameters=StatusParameters())
\n",
+ " outputs:
* Flow('CHP(El)',\n",
+ " bus='Electricity', size=80.0,\n",
+ " status_parameters=StatusParameters())
\n",
+ " * Flow('CHP(Heat)', bus='Heat', size=85.0,\n",
+ " status_parameters=StatusParameters()),\n",
+ " Source('ElectricityImport')
outputs:
\n",
+ " * Flow('ElectricityImport(El)',\n",
+ " bus='Electricity', size=100.0,\n",
+ " effects_per_flow_hour={'costs': 0.1-0.2,\n",
+ " 'CO2': 0.3-0.4}), Bus('Gas',\n",
+ " carrier='gas')
inputs:
*\n",
+ " Flow('GasGrid(Gas)', bus='Gas', size=300.0,\n",
+ " effects_per_flow_hour={'costs': 0.1, 'CO2':\n",
+ " 0.2})
outputs:
* Flow('CHP(Gas)',\n",
+ " bus='Gas', size=200.0,\n",
+ " status_parameters=StatusParameters())
\n",
+ " * Flow('BackupBoiler(Gas)', bus='Gas')],\n",
+ " 'hovertemplate': '%{customdata}',\n",
+ " 'label': [Electricity, BackupBoiler, GasGrid, HeatPump,\n",
+ " ElDemand, ElectricityExport, Heat, HeatDemand,\n",
+ " HeatStorage, CHP, ElectricityImport, Gas],\n",
+ " 'line': {'color': 'black', 'width': 0.5},\n",
+ " 'pad': 15,\n",
+ " 'thickness': 20},\n",
+ " 'type': 'sankey'}],\n",
+ " 'layout': {'template': '...', 'title': {'text': 'Complex System Topology'}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 31,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 31
+ },
+ {
+ "cell_type": "markdown",
+ "id": "67",
+ "metadata": {},
+ "source": [
+ "### 7.2 Topology Info\n",
+ "\n",
+ "Get node and edge information programmatically:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "68",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.168871Z",
+ "start_time": "2025-12-13T14:13:18.165083Z"
+ }
+ },
+ "source": [
+ "nodes, edges = simple.topology.infos()\n",
+ "\n",
+ "print('Nodes:')\n",
+ "for label, info in nodes.items():\n",
+ " print(f' {label}: {info[\"class\"]}')\n",
+ "\n",
+ "print('\\nEdges (flows):')\n",
+ "for label, info in edges.items():\n",
+ " print(f' {info[\"start\"]} -> {info[\"end\"]}: {label}')"
+ ],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Nodes:\n",
+ " GasGrid: Component\n",
+ " Boiler: Component\n",
+ " ThermalStorage: Component\n",
+ " Office: Component\n",
+ " Gas: Bus\n",
+ " Heat: Bus\n",
+ "\n",
+ "Edges (flows):\n",
+ " Gas -> Boiler: Boiler(Gas)\n",
+ " Boiler -> Heat: Boiler(Heat)\n",
+ " GasGrid -> Gas: GasGrid(Gas)\n",
+ " Heat -> Office: Office(Heat)\n",
+ " Heat -> ThermalStorage: ThermalStorage(Charge)\n",
+ " ThermalStorage -> Heat: ThermalStorage(Discharge)\n"
+ ]
+ }
+ ],
+ "execution_count": 32
+ },
+ {
+ "cell_type": "markdown",
+ "id": "69",
+ "metadata": {},
+ "source": [
+ "## 8. Multi-Period/Scenario Data\n",
+ "\n",
+ "Working with multi-dimensional results:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "70",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.194588Z",
+ "start_time": "2025-12-13T14:13:18.191374Z"
+ }
+ },
+ "source": [
+ "print('Multiperiod system dimensions:')\n",
+ "print(f' Periods: {list(multiperiod.periods)}')\n",
+ "print(f' Scenarios: {list(multiperiod.scenarios)}')\n",
+ "print(f' Solution dims: {dict(multiperiod.solution.sizes)}')"
+ ],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Multiperiod system dimensions:\n",
+ " Periods: [2024, 2025, 2026]\n",
+ " Scenarios: ['high_demand', 'low_demand']\n",
+ " Solution dims: {'scenario': 2, 'period': 3, 'time': 49}\n"
+ ]
+ }
+ ],
+ "execution_count": 33
+ },
+ {
+ "cell_type": "code",
+ "id": "71",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.325331Z",
+ "start_time": "2025-12-13T14:13:18.199791Z"
+ }
+ },
+ "source": [
+ "# Balance plot with faceting by scenario\n",
+ "multiperiod.statistics.plot.balance('Heat')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 10kB\n",
+ "Dimensions: (time: 49, period: 3, scenario: 2)\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 392B 2024-01-01 ... 2024...\n",
+ " * period (period) int64 24B 2024 2025 2026\n",
+ " * scenario (scenario) scena' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x4',\n",
+ " 'y': {'bdata': ('5JuWpeU9RsDiqeLGgqdEwF3XQkqFnk' ... 'rxMNlDwFu20eeOpEfAAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y4'},\n",
+ " {'hovertemplate': ('variable=Boiler(Heat)
scena' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x5',\n",
+ " 'y': {'bdata': ('5JuWpeU9RsDiqeLGgqdEwF3XQkqFnk' ... 'rxMNlDwFu20eeOpEfAAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y5'},\n",
+ " {'hovertemplate': ('variable=Boiler(Heat)
scena' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x6',\n",
+ " 'y': {'bdata': ('5JuWpeU9RsDiqeLGgqdEwFvXQkqFnk' ... 'rxMNlDwFy20eeOpEfAAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y6'},\n",
+ " {'hovertemplate': ('variable=Boiler(Heat)
scena' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('EgPMGubHPsD7i30z/HU4wBwgRYDluD' ... 'Vm3JI8wDayyUAFXDnAAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': ('variable=Boiler(Heat)
scena' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x2',\n",
+ " 'y': {'bdata': ('EgPMGubHPsD7i30z/HU4wBwgRYDluD' ... 'Vm3JI8wDayyUAFXDnAAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y2'},\n",
+ " {'hovertemplate': ('variable=Boiler(Heat)
scena' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x3',\n",
+ " 'y': {'bdata': ('EgPMGubHPsD7i30z/HU4wBwgRYDluD' ... 'Vm3JI8wDayyUAFXDnAAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y3'},\n",
+ " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'ThermalStorage(Discharge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Discharge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x4',\n",
+ " 'y': {'bdata': ('iAK1fqVASD1j/UqBWr9nPQo++OCDj2' ... 'jgg89hPWP9SoFav2g9AAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y4'},\n",
+ " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'ThermalStorage(Discharge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Discharge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x5',\n",
+ " 'y': {'bdata': ('iAK1fqVASD1j/UqBWr9nPQo++OCDj2' ... 'qBWr9oPWP9SoFav2g9AAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y5'},\n",
+ " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'ThermalStorage(Discharge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Discharge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x6',\n",
+ " 'y': {'bdata': ('iAK1fqVASD1j/UqBWr9oPby8nSExr2' ... 'qBWr9oPQo++OCDz2E9AAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y6'},\n",
+ " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'ThermalStorage(Discharge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Discharge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAIC3nSExb8dkPbedITFvx2' ... 'Exb8dkPbedITFvx2Q9AAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'ThermalStorage(Discharge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Discharge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x2',\n",
+ " 'y': {'bdata': ('AAAAAAAAAIC3nSExb8dkPbedITFvx2' ... 'Exb8dkPbedITFvx2Q9AAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y2'},\n",
+ " {'hovertemplate': ('variable=ThermalStorage(Discha' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'ThermalStorage(Discharge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Discharge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x3',\n",
+ " 'y': {'bdata': ('AAAAAAAAAIC3nSExb8dkPbedITFvx2' ... 'Exb8dkPbedITFvp2U9AAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y3'},\n",
+ " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'ThermalStorage(Charge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Charge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x4',\n",
+ " 'y': {'bdata': ('iAK1fqVASb1j/UqBWr9ovQo++OCDT2' ... 'jgg49ivWP9SoFav2m9AAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y4'},\n",
+ " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'ThermalStorage(Charge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Charge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x5',\n",
+ " 'y': {'bdata': ('iAK1fqVASb1j/UqBWr9ovQo++OCDT2' ... 'qBWr9pvWP9SoFav2m9AAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y5'},\n",
+ " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'ThermalStorage(Charge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Charge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x6',\n",
+ " 'y': {'bdata': ('iAK1fqVASb1j/UqBWr9pvby8nSEx72' ... 'qBWr9pvQo++OCDj2K9AAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y6'},\n",
+ " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'ThermalStorage(Charge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Charge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAAC3nSExb6dlvbedITFvp2' ... 'Exb6dlvbedITFvp2W9AAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'ThermalStorage(Charge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Charge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x2',\n",
+ " 'y': {'bdata': ('AAAAAAAAAAC3nSExb6dlvbedITFvp2' ... 'Exb6dlvbedITFvp2W9AAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y2'},\n",
+ " {'hovertemplate': ('variable=ThermalStorage(Charge' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'ThermalStorage(Charge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Charge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x3',\n",
+ " 'y': {'bdata': ('AAAAAAAAAAC3nSExb6dlvbedITFvp2' ... 'Exb6dlvbedITFvh2a9AAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y3'},\n",
+ " {'hovertemplate': ('variable=Building(Heat)
sce' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'Building(Heat)',\n",
+ " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Building(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x4',\n",
+ " 'y': {'bdata': ('5ZuWpeU9RkDmqeLGgqdEQGDXQkqFnk' ... 'rxMNlDQF+20eeOpEdAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y4'},\n",
+ " {'hovertemplate': ('variable=Building(Heat)
sce' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'Building(Heat)',\n",
+ " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Building(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x5',\n",
+ " 'y': {'bdata': ('5ZuWpeU9RkDmqeLGgqdEQGDXQkqFnk' ... 'rxMNlDQF+20eeOpEdAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y5'},\n",
+ " {'hovertemplate': ('variable=Building(Heat)
sce' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'Building(Heat)',\n",
+ " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Building(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x6',\n",
+ " 'y': {'bdata': ('5ZuWpeU9RkDmqeLGgqdEQGDXQkqFnk' ... 'rxMNlDQF+20eeOpEdAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y6'},\n",
+ " {'hovertemplate': ('variable=Building(Heat)
sce' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'Building(Heat)',\n",
+ " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Building(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('EgPMGubHPkACjH0z/HU4QCMgRYDluD' ... 'Vm3JI8QD2yyUAFXDlAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': ('variable=Building(Heat)
sce' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'Building(Heat)',\n",
+ " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Building(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x2',\n",
+ " 'y': {'bdata': ('EgPMGubHPkACjH0z/HU4QCMgRYDluD' ... 'Vm3JI8QD2yyUAFXDlAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y2'},\n",
+ " {'hovertemplate': ('variable=Building(Heat)
sce' ... '}
value=%{y}'),\n",
+ " 'legendgroup': 'Building(Heat)',\n",
+ " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Building(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': False,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x3',\n",
+ " 'y': {'bdata': ('EgPMGubHPkACjH0z/HU4QCMgRYDluD' ... 'Vm3JI8QD2yyUAFXDlAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y3'}],\n",
+ " 'layout': {'annotations': [{'font': {},\n",
+ " 'showarrow': False,\n",
+ " 'text': 'period=2024',\n",
+ " 'x': 0.15666666666666665,\n",
+ " 'xanchor': 'center',\n",
+ " 'xref': 'paper',\n",
+ " 'y': 1.0,\n",
+ " 'yanchor': 'bottom',\n",
+ " 'yref': 'paper'},\n",
+ " {'font': {},\n",
+ " 'showarrow': False,\n",
+ " 'text': 'period=2025',\n",
+ " 'x': 0.49,\n",
+ " 'xanchor': 'center',\n",
+ " 'xref': 'paper',\n",
+ " 'y': 1.0,\n",
+ " 'yanchor': 'bottom',\n",
+ " 'yref': 'paper'},\n",
+ " {'font': {},\n",
+ " 'showarrow': False,\n",
+ " 'text': 'period=2026',\n",
+ " 'x': 0.8233333333333333,\n",
+ " 'xanchor': 'center',\n",
+ " 'xref': 'paper',\n",
+ " 'y': 1.0,\n",
+ " 'yanchor': 'bottom',\n",
+ " 'yref': 'paper'},\n",
+ " {'font': {},\n",
+ " 'showarrow': False,\n",
+ " 'text': 'scenario=low_demand',\n",
+ " 'textangle': 90,\n",
+ " 'x': 0.98,\n",
+ " 'xanchor': 'left',\n",
+ " 'xref': 'paper',\n",
+ " 'y': 0.2425,\n",
+ " 'yanchor': 'middle',\n",
+ " 'yref': 'paper'},\n",
+ " {'font': {},\n",
+ " 'showarrow': False,\n",
+ " 'text': 'scenario=high_demand',\n",
+ " 'textangle': 90,\n",
+ " 'x': 0.98,\n",
+ " 'xanchor': 'left',\n",
+ " 'xref': 'paper',\n",
+ " 'y': 0.7575000000000001,\n",
+ " 'yanchor': 'middle',\n",
+ " 'yref': 'paper'}],\n",
+ " 'bargap': 0,\n",
+ " 'bargroupgap': 0,\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Heat (flow_rate)'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 0.3133333333333333], 'title': {'text': 'time'}},\n",
+ " 'xaxis2': {'anchor': 'y2',\n",
+ " 'domain': [0.3333333333333333, 0.6466666666666666],\n",
+ " 'matches': 'x',\n",
+ " 'title': {'text': 'time'}},\n",
+ " 'xaxis3': {'anchor': 'y3', 'domain': [0.6666666666666666, 0.98], 'matches': 'x', 'title': {'text': 'time'}},\n",
+ " 'xaxis4': {'anchor': 'y4', 'domain': [0.0, 0.3133333333333333], 'matches': 'x', 'showticklabels': False},\n",
+ " 'xaxis5': {'anchor': 'y5',\n",
+ " 'domain': [0.3333333333333333, 0.6466666666666666],\n",
+ " 'matches': 'x',\n",
+ " 'showticklabels': False},\n",
+ " 'xaxis6': {'anchor': 'y6', 'domain': [0.6666666666666666, 0.98], 'matches': 'x', 'showticklabels': False},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 0.485], 'title': {'text': 'value'}},\n",
+ " 'yaxis2': {'anchor': 'x2', 'domain': [0.0, 0.485], 'matches': 'y', 'showticklabels': False},\n",
+ " 'yaxis3': {'anchor': 'x3', 'domain': [0.0, 0.485], 'matches': 'y', 'showticklabels': False},\n",
+ " 'yaxis4': {'anchor': 'x4', 'domain': [0.515, 1.0], 'matches': 'y', 'title': {'text': 'value'}},\n",
+ " 'yaxis5': {'anchor': 'x5', 'domain': [0.515, 1.0], 'matches': 'y', 'showticklabels': False},\n",
+ " 'yaxis6': {'anchor': 'x6', 'domain': [0.515, 1.0], 'matches': 'y', 'showticklabels': False}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 34,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 34
+ },
+ {
+ "cell_type": "code",
+ "id": "72",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.395048Z",
+ "start_time": "2025-12-13T14:13:18.341709Z"
+ }
+ },
+ "source": [
+ "# Filter to specific scenario/period\n",
+ "multiperiod.statistics.plot.balance('Heat', select={'scenario': 'high_demand', 'period': 2024})"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 2kB\n",
+ "Dimensions: (time: 49)\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 392B 2024-01-01 ... 2024...\n",
+ "Data variables:\n",
+ " Boiler(Heat) (time) float64 392B -44.48 -41.31 ... -47.29 nan\n",
+ " ThermalStorage(Discharge) (time) float64 392B 1.723e-13 6.749e-13 ... nan\n",
+ " ThermalStorage(Charge) (time) float64 392B -1.794e-13 -7.034e-13 ... nan\n",
+ " Building(Heat) (time) float64 392B 44.48 41.31 ... 47.29 nan, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=Boiler(Heat)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#EF553B', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('5JuWpeU9RsDiqeLGgqdEwF3XQkqFnk' ... 'rxMNlDwFu20eeOpEfAAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Discharge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Discharge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('iAK1fqVASD1j/UqBWr9nPQo++OCDj2' ... 'jgg89hPWP9SoFav2g9AAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Charge)',\n",
+ " 'marker': {'color': '#00CC96', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Charge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('iAK1fqVASb1j/UqBWr9ovQo++OCDT2' ... 'jgg49ivWP9SoFav2m9AAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=Building(Heat)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Building(Heat)',\n",
+ " 'marker': {'color': '#AB63FA', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Building(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-01T00:00:00.000000000', '2024-01-01T01:00:00.000000000',\n",
+ " '2024-01-01T02:00:00.000000000', '2024-01-01T03:00:00.000000000',\n",
+ " '2024-01-01T04:00:00.000000000', '2024-01-01T05:00:00.000000000',\n",
+ " '2024-01-01T06:00:00.000000000', '2024-01-01T07:00:00.000000000',\n",
+ " '2024-01-01T08:00:00.000000000', '2024-01-01T09:00:00.000000000',\n",
+ " '2024-01-01T10:00:00.000000000', '2024-01-01T11:00:00.000000000',\n",
+ " '2024-01-01T12:00:00.000000000', '2024-01-01T13:00:00.000000000',\n",
+ " '2024-01-01T14:00:00.000000000', '2024-01-01T15:00:00.000000000',\n",
+ " '2024-01-01T16:00:00.000000000', '2024-01-01T17:00:00.000000000',\n",
+ " '2024-01-01T18:00:00.000000000', '2024-01-01T19:00:00.000000000',\n",
+ " '2024-01-01T20:00:00.000000000', '2024-01-01T21:00:00.000000000',\n",
+ " '2024-01-01T22:00:00.000000000', '2024-01-01T23:00:00.000000000',\n",
+ " '2024-01-02T00:00:00.000000000', '2024-01-02T01:00:00.000000000',\n",
+ " '2024-01-02T02:00:00.000000000', '2024-01-02T03:00:00.000000000',\n",
+ " '2024-01-02T04:00:00.000000000', '2024-01-02T05:00:00.000000000',\n",
+ " '2024-01-02T06:00:00.000000000', '2024-01-02T07:00:00.000000000',\n",
+ " '2024-01-02T08:00:00.000000000', '2024-01-02T09:00:00.000000000',\n",
+ " '2024-01-02T10:00:00.000000000', '2024-01-02T11:00:00.000000000',\n",
+ " '2024-01-02T12:00:00.000000000', '2024-01-02T13:00:00.000000000',\n",
+ " '2024-01-02T14:00:00.000000000', '2024-01-02T15:00:00.000000000',\n",
+ " '2024-01-02T16:00:00.000000000', '2024-01-02T17:00:00.000000000',\n",
+ " '2024-01-02T18:00:00.000000000', '2024-01-02T19:00:00.000000000',\n",
+ " '2024-01-02T20:00:00.000000000', '2024-01-02T21:00:00.000000000',\n",
+ " '2024-01-02T22:00:00.000000000', '2024-01-02T23:00:00.000000000',\n",
+ " '2024-01-03T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('5ZuWpeU9RkDmqeLGgqdEQGDXQkqFnk' ... 'rxMNlDQF+20eeOpEdAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'bargap': 0,\n",
+ " 'bargroupgap': 0,\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Heat (flow_rate)'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 35,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 35
+ },
+ {
+ "cell_type": "code",
+ "id": "73",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.481894Z",
+ "start_time": "2025-12-13T14:13:18.459661Z"
+ }
+ },
+ "source": [
+ "# Sankey aggregates across all dimensions by default\n",
+ "multiperiod.statistics.plot.sankey.flows()"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 592B\n",
+ "Dimensions: (link: 4)\n",
+ "Coordinates:\n",
+ " * link (link) int64 32B 0 1 2 3\n",
+ " source (link) \n",
+ " "
+ ]
+ },
+ "execution_count": 36,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 36
+ },
+ {
+ "cell_type": "markdown",
+ "id": "74",
+ "metadata": {},
+ "source": [
+ "## 9. Color Customization\n",
+ "\n",
+ "Colors can be customized in multiple ways:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "75",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.553613Z",
+ "start_time": "2025-12-13T14:13:18.488703Z"
+ }
+ },
+ "source": [
+ "# Using a colorscale name\n",
+ "simple.statistics.plot.balance('Heat', colors='Set2')"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 7kB\n",
+ "Dimensions: (time: 169)\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-...\n",
+ "Data variables:\n",
+ " Boiler(Heat) (time) float64 1kB -32.48 -29.31 ... -124.5 nan\n",
+ " ThermalStorage(Discharge) (time) float64 1kB -0.0 5.275e-13 ... nan\n",
+ " ThermalStorage(Charge) (time) float64 1kB 0.0 -3.748e-13 ... 100.0 nan\n",
+ " Office(Heat) (time) float64 1kB 32.48 29.31 ... 24.48 nan, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=Boiler(Heat)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#66c2a5', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('5ZuWpeU9QMD3U8WNBU89wHjXQkqFnk' ... '////8zwPW5+Ef5Hl/AAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Discharge)',\n",
+ " 'marker': {'color': '#fc8d62', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Discharge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAIAKPvjgg49iPby8nSEx72' ... 'AAAAAgvWP9SoFav2g9AAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Charge)',\n",
+ " 'marker': {'color': '#8da0cb', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Charge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAAAUfPDBB19avby8nSEx72' ... 'AAAAAAANj//////1hAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=Office(Heat)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Office(Heat)',\n",
+ " 'marker': {'color': '#e78ac3', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Office(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('5ZuWpeU9QEDMU8WNBU89QGDXQkqFnk' ... 'AAAAA0QK7n4h/lezhAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'bargap': 0,\n",
+ " 'bargroupgap': 0,\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Heat (flow_rate)'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 37,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 37
+ },
+ {
+ "cell_type": "code",
+ "id": "76",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.619651Z",
+ "start_time": "2025-12-13T14:13:18.562286Z"
+ }
+ },
+ "source": [
+ "# Using a list of colors\n",
+ "simple.statistics.plot.balance('Heat', colors=['#e41a1c', '#377eb8', '#4daf4a', '#984ea3'])"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 7kB\n",
+ "Dimensions: (time: 169)\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-...\n",
+ "Data variables:\n",
+ " Boiler(Heat) (time) float64 1kB -32.48 -29.31 ... -124.5 nan\n",
+ " ThermalStorage(Discharge) (time) float64 1kB -0.0 5.275e-13 ... nan\n",
+ " ThermalStorage(Charge) (time) float64 1kB 0.0 -3.748e-13 ... 100.0 nan\n",
+ " Office(Heat) (time) float64 1kB 32.48 29.31 ... 24.48 nan, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=Boiler(Heat)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': '#e41a1c', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('5ZuWpeU9QMD3U8WNBU89wHjXQkqFnk' ... '////8zwPW5+Ef5Hl/AAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Discharge)',\n",
+ " 'marker': {'color': '#377eb8', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Discharge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAIAKPvjgg49iPby8nSEx72' ... 'AAAAAgvWP9SoFav2g9AAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ThermalStorage(Charge)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Charge)',\n",
+ " 'marker': {'color': '#4daf4a', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Charge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAAAUfPDBB19avby8nSEx72' ... 'AAAAAAANj//////1hAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=Office(Heat)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Office(Heat)',\n",
+ " 'marker': {'color': '#984ea3', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Office(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('5ZuWpeU9QEDMU8WNBU89QGDXQkqFnk' ... 'AAAAA0QK7n4h/lezhAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'bargap': 0,\n",
+ " 'bargroupgap': 0,\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Heat (flow_rate)'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 38,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 38
+ },
+ {
+ "cell_type": "code",
+ "id": "77",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.672843Z",
+ "start_time": "2025-12-13T14:13:18.628572Z"
+ }
+ },
+ "source": [
+ "# Using a dictionary for specific labels\n",
+ "simple.statistics.plot.balance(\n",
+ " 'Heat',\n",
+ " colors={\n",
+ " 'Boiler(Heat)': 'orangered',\n",
+ " 'ThermalStorage(Charge)': 'steelblue',\n",
+ " 'ThermalStorage(Discharge)': 'lightblue',\n",
+ " 'Office(Heat)': 'forestgreen',\n",
+ " },\n",
+ ")"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "PlotResult(data= Size: 7kB\n",
+ "Dimensions: (time: 169)\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 1kB 2024-01-15 ... 2024-...\n",
+ "Data variables:\n",
+ " Boiler(Heat) (time) float64 1kB -32.48 -29.31 ... -124.5 nan\n",
+ " ThermalStorage(Discharge) (time) float64 1kB -0.0 5.275e-13 ... nan\n",
+ " ThermalStorage(Charge) (time) float64 1kB 0.0 -3.748e-13 ... 100.0 nan\n",
+ " Office(Heat) (time) float64 1kB 32.48 29.31 ... 24.48 nan, figure=Figure({\n",
+ " 'data': [{'hovertemplate': 'variable=Boiler(Heat)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'Boiler(Heat)',\n",
+ " 'marker': {'color': 'orangered', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Boiler(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('5ZuWpeU9QMD3U8WNBU89wHjXQkqFnk' ... '////8zwPW5+Ef5Hl/AAAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ThermalStorage(Discharge)
time=%{x}
value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Discharge)',\n",
+ " 'marker': {'color': 'lightblue', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Discharge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAIAKPvjgg49iPby8nSEx72' ... 'AAAAAgvWP9SoFav2g9AAAAAAAA+P8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=ThermalStorage(Charge)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'ThermalStorage(Charge)',\n",
+ " 'marker': {'color': 'steelblue', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'ThermalStorage(Charge)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('AAAAAAAAAAAUfPDBB19avby8nSEx72' ... 'AAAAAAANj//////1hAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'},\n",
+ " {'hovertemplate': 'variable=Office(Heat)<br>time=%{x}<br>value=%{y}',\n",
+ " 'legendgroup': 'Office(Heat)',\n",
+ " 'marker': {'color': 'forestgreen', 'line': {'width': 0}, 'pattern': {'shape': ''}},\n",
+ " 'name': 'Office(Heat)',\n",
+ " 'orientation': 'v',\n",
+ " 'showlegend': True,\n",
+ " 'textposition': 'auto',\n",
+ " 'type': 'bar',\n",
+ " 'x': array(['2024-01-15T00:00:00.000000000', '2024-01-15T01:00:00.000000000',\n",
+ " '2024-01-15T02:00:00.000000000', '2024-01-15T03:00:00.000000000',\n",
+ " '2024-01-15T04:00:00.000000000', '2024-01-15T05:00:00.000000000',\n",
+ " '2024-01-15T06:00:00.000000000', '2024-01-15T07:00:00.000000000',\n",
+ " '2024-01-15T08:00:00.000000000', '2024-01-15T09:00:00.000000000',\n",
+ " '2024-01-15T10:00:00.000000000', '2024-01-15T11:00:00.000000000',\n",
+ " '2024-01-15T12:00:00.000000000', '2024-01-15T13:00:00.000000000',\n",
+ " '2024-01-15T14:00:00.000000000', '2024-01-15T15:00:00.000000000',\n",
+ " '2024-01-15T16:00:00.000000000', '2024-01-15T17:00:00.000000000',\n",
+ " '2024-01-15T18:00:00.000000000', '2024-01-15T19:00:00.000000000',\n",
+ " '2024-01-15T20:00:00.000000000', '2024-01-15T21:00:00.000000000',\n",
+ " '2024-01-15T22:00:00.000000000', '2024-01-15T23:00:00.000000000',\n",
+ " '2024-01-16T00:00:00.000000000', '2024-01-16T01:00:00.000000000',\n",
+ " '2024-01-16T02:00:00.000000000', '2024-01-16T03:00:00.000000000',\n",
+ " '2024-01-16T04:00:00.000000000', '2024-01-16T05:00:00.000000000',\n",
+ " '2024-01-16T06:00:00.000000000', '2024-01-16T07:00:00.000000000',\n",
+ " '2024-01-16T08:00:00.000000000', '2024-01-16T09:00:00.000000000',\n",
+ " '2024-01-16T10:00:00.000000000', '2024-01-16T11:00:00.000000000',\n",
+ " '2024-01-16T12:00:00.000000000', '2024-01-16T13:00:00.000000000',\n",
+ " '2024-01-16T14:00:00.000000000', '2024-01-16T15:00:00.000000000',\n",
+ " '2024-01-16T16:00:00.000000000', '2024-01-16T17:00:00.000000000',\n",
+ " '2024-01-16T18:00:00.000000000', '2024-01-16T19:00:00.000000000',\n",
+ " '2024-01-16T20:00:00.000000000', '2024-01-16T21:00:00.000000000',\n",
+ " '2024-01-16T22:00:00.000000000', '2024-01-16T23:00:00.000000000',\n",
+ " '2024-01-17T00:00:00.000000000', '2024-01-17T01:00:00.000000000',\n",
+ " '2024-01-17T02:00:00.000000000', '2024-01-17T03:00:00.000000000',\n",
+ " '2024-01-17T04:00:00.000000000', '2024-01-17T05:00:00.000000000',\n",
+ " '2024-01-17T06:00:00.000000000', '2024-01-17T07:00:00.000000000',\n",
+ " '2024-01-17T08:00:00.000000000', '2024-01-17T09:00:00.000000000',\n",
+ " '2024-01-17T10:00:00.000000000', '2024-01-17T11:00:00.000000000',\n",
+ " '2024-01-17T12:00:00.000000000', '2024-01-17T13:00:00.000000000',\n",
+ " '2024-01-17T14:00:00.000000000', '2024-01-17T15:00:00.000000000',\n",
+ " '2024-01-17T16:00:00.000000000', '2024-01-17T17:00:00.000000000',\n",
+ " '2024-01-17T18:00:00.000000000', '2024-01-17T19:00:00.000000000',\n",
+ " '2024-01-17T20:00:00.000000000', '2024-01-17T21:00:00.000000000',\n",
+ " '2024-01-17T22:00:00.000000000', '2024-01-17T23:00:00.000000000',\n",
+ " '2024-01-18T00:00:00.000000000', '2024-01-18T01:00:00.000000000',\n",
+ " '2024-01-18T02:00:00.000000000', '2024-01-18T03:00:00.000000000',\n",
+ " '2024-01-18T04:00:00.000000000', '2024-01-18T05:00:00.000000000',\n",
+ " '2024-01-18T06:00:00.000000000', '2024-01-18T07:00:00.000000000',\n",
+ " '2024-01-18T08:00:00.000000000', '2024-01-18T09:00:00.000000000',\n",
+ " '2024-01-18T10:00:00.000000000', '2024-01-18T11:00:00.000000000',\n",
+ " '2024-01-18T12:00:00.000000000', '2024-01-18T13:00:00.000000000',\n",
+ " '2024-01-18T14:00:00.000000000', '2024-01-18T15:00:00.000000000',\n",
+ " '2024-01-18T16:00:00.000000000', '2024-01-18T17:00:00.000000000',\n",
+ " '2024-01-18T18:00:00.000000000', '2024-01-18T19:00:00.000000000',\n",
+ " '2024-01-18T20:00:00.000000000', '2024-01-18T21:00:00.000000000',\n",
+ " '2024-01-18T22:00:00.000000000', '2024-01-18T23:00:00.000000000',\n",
+ " '2024-01-19T00:00:00.000000000', '2024-01-19T01:00:00.000000000',\n",
+ " '2024-01-19T02:00:00.000000000', '2024-01-19T03:00:00.000000000',\n",
+ " '2024-01-19T04:00:00.000000000', '2024-01-19T05:00:00.000000000',\n",
+ " '2024-01-19T06:00:00.000000000', '2024-01-19T07:00:00.000000000',\n",
+ " '2024-01-19T08:00:00.000000000', '2024-01-19T09:00:00.000000000',\n",
+ " '2024-01-19T10:00:00.000000000', '2024-01-19T11:00:00.000000000',\n",
+ " '2024-01-19T12:00:00.000000000', '2024-01-19T13:00:00.000000000',\n",
+ " '2024-01-19T14:00:00.000000000', '2024-01-19T15:00:00.000000000',\n",
+ " '2024-01-19T16:00:00.000000000', '2024-01-19T17:00:00.000000000',\n",
+ " '2024-01-19T18:00:00.000000000', '2024-01-19T19:00:00.000000000',\n",
+ " '2024-01-19T20:00:00.000000000', '2024-01-19T21:00:00.000000000',\n",
+ " '2024-01-19T22:00:00.000000000', '2024-01-19T23:00:00.000000000',\n",
+ " '2024-01-20T00:00:00.000000000', '2024-01-20T01:00:00.000000000',\n",
+ " '2024-01-20T02:00:00.000000000', '2024-01-20T03:00:00.000000000',\n",
+ " '2024-01-20T04:00:00.000000000', '2024-01-20T05:00:00.000000000',\n",
+ " '2024-01-20T06:00:00.000000000', '2024-01-20T07:00:00.000000000',\n",
+ " '2024-01-20T08:00:00.000000000', '2024-01-20T09:00:00.000000000',\n",
+ " '2024-01-20T10:00:00.000000000', '2024-01-20T11:00:00.000000000',\n",
+ " '2024-01-20T12:00:00.000000000', '2024-01-20T13:00:00.000000000',\n",
+ " '2024-01-20T14:00:00.000000000', '2024-01-20T15:00:00.000000000',\n",
+ " '2024-01-20T16:00:00.000000000', '2024-01-20T17:00:00.000000000',\n",
+ " '2024-01-20T18:00:00.000000000', '2024-01-20T19:00:00.000000000',\n",
+ " '2024-01-20T20:00:00.000000000', '2024-01-20T21:00:00.000000000',\n",
+ " '2024-01-20T22:00:00.000000000', '2024-01-20T23:00:00.000000000',\n",
+ " '2024-01-21T00:00:00.000000000', '2024-01-21T01:00:00.000000000',\n",
+ " '2024-01-21T02:00:00.000000000', '2024-01-21T03:00:00.000000000',\n",
+ " '2024-01-21T04:00:00.000000000', '2024-01-21T05:00:00.000000000',\n",
+ " '2024-01-21T06:00:00.000000000', '2024-01-21T07:00:00.000000000',\n",
+ " '2024-01-21T08:00:00.000000000', '2024-01-21T09:00:00.000000000',\n",
+ " '2024-01-21T10:00:00.000000000', '2024-01-21T11:00:00.000000000',\n",
+ " '2024-01-21T12:00:00.000000000', '2024-01-21T13:00:00.000000000',\n",
+ " '2024-01-21T14:00:00.000000000', '2024-01-21T15:00:00.000000000',\n",
+ " '2024-01-21T16:00:00.000000000', '2024-01-21T17:00:00.000000000',\n",
+ " '2024-01-21T18:00:00.000000000', '2024-01-21T19:00:00.000000000',\n",
+ " '2024-01-21T20:00:00.000000000', '2024-01-21T21:00:00.000000000',\n",
+ " '2024-01-21T22:00:00.000000000', '2024-01-21T23:00:00.000000000',\n",
+ " '2024-01-22T00:00:00.000000000'], dtype='datetime64[ns]'),\n",
+ " 'xaxis': 'x',\n",
+ " 'y': {'bdata': ('5ZuWpeU9QEDMU8WNBU89QGDXQkqFnk' ... 'AAAAA0QK7n4h/lezhAAAAAAAAA+H8='),\n",
+ " 'dtype': 'f8'},\n",
+ " 'yaxis': 'y'}],\n",
+ " 'layout': {'bargap': 0,\n",
+ " 'bargroupgap': 0,\n",
+ " 'barmode': 'relative',\n",
+ " 'legend': {'title': {'text': 'variable'}, 'tracegroupgap': 0},\n",
+ " 'template': '...',\n",
+ " 'title': {'text': 'Heat (flow_rate)'},\n",
+ " 'xaxis': {'anchor': 'y', 'domain': [0.0, 1.0], 'title': {'text': 'time'}},\n",
+ " 'yaxis': {'anchor': 'x', 'domain': [0.0, 1.0], 'title': {'text': 'value'}}}\n",
+ "}))"
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "execution_count": 39,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 39
+ },
+ {
+ "cell_type": "markdown",
+ "id": "78",
+ "metadata": {},
+ "source": [
+ "## 10. Exporting Results\n",
+ "\n",
+ "Plots return a `PlotResult` with data and figure that can be exported:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "79",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.710193Z",
+ "start_time": "2025-12-13T14:13:18.681521Z"
+ }
+ },
+ "source": [
+ "# Get plot result\n",
+ "result = simple.statistics.plot.balance('Heat')\n",
+ "\n",
+ "print('PlotResult contains:')\n",
+ "print(f' data: {type(result.data).__name__} with vars {list(result.data.data_vars)}')\n",
+ "print(f' figure: {type(result.figure).__name__}')"
+ ],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "PlotResult contains:\n",
+ " data: Dataset with vars ['Boiler(Heat)', 'ThermalStorage(Discharge)', 'ThermalStorage(Charge)', 'Office(Heat)']\n",
+ " figure: Figure\n"
+ ]
+ }
+ ],
+ "execution_count": 40
+ },
+ {
+ "cell_type": "code",
+ "id": "80",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.736577Z",
+ "start_time": "2025-12-13T14:13:18.723621Z"
+ }
+ },
+ "source": [
+ "# Export data to pandas DataFrame\n",
+ "df = result.data.to_dataframe()\n",
+ "df.head()"
+ ],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ " Boiler(Heat) ThermalStorage(Discharge) \\\n",
+ "time \n",
+ "2024-01-15 00:00:00 -32.483571 -0.000000e+00 \n",
+ "2024-01-15 01:00:00 -29.308678 5.275242e-13 \n",
+ "2024-01-15 02:00:00 -33.238443 -7.086767e-13 \n",
+ "2024-01-15 03:00:00 -101.411593 -3.516828e-13 \n",
+ "2024-01-15 04:00:00 -128.829233 -5.613288e-13 \n",
+ "\n",
+ " ThermalStorage(Charge) Office(Heat) \n",
+ "time \n",
+ "2024-01-15 00:00:00 0.000000e+00 32.483571 \n",
+ "2024-01-15 01:00:00 -3.747575e-13 29.308678 \n",
+ "2024-01-15 02:00:00 8.792069e-13 33.238443 \n",
+ "2024-01-15 03:00:00 6.379644e+01 37.615149 \n",
+ "2024-01-15 04:00:00 1.000000e+02 28.829233 "
+ ],
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " Boiler(Heat) | \n",
+ " ThermalStorage(Discharge) | \n",
+ " ThermalStorage(Charge) | \n",
+ " Office(Heat) | \n",
+ "
\n",
+ " \n",
+ " | time | \n",
+ " | \n",
+ " | \n",
+ " | \n",
+ " | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " | 2024-01-15 00:00:00 | \n",
+ " -32.483571 | \n",
+ " -0.000000e+00 | \n",
+ " 0.000000e+00 | \n",
+ " 32.483571 | \n",
+ "
\n",
+ " \n",
+ " | 2024-01-15 01:00:00 | \n",
+ " -29.308678 | \n",
+ " 5.275242e-13 | \n",
+ " -3.747575e-13 | \n",
+ " 29.308678 | \n",
+ "
\n",
+ " \n",
+ " | 2024-01-15 02:00:00 | \n",
+ " -33.238443 | \n",
+ " -7.086767e-13 | \n",
+ " 8.792069e-13 | \n",
+ " 33.238443 | \n",
+ "
\n",
+ " \n",
+ " | 2024-01-15 03:00:00 | \n",
+ " -101.411593 | \n",
+ " -3.516828e-13 | \n",
+ " 6.379644e+01 | \n",
+ " 37.615149 | \n",
+ "
\n",
+ " \n",
+ " | 2024-01-15 04:00:00 | \n",
+ " -128.829233 | \n",
+ " -5.613288e-13 | \n",
+ " 1.000000e+02 | \n",
+ " 28.829233 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ]
+ },
+ "execution_count": 41,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": 41
+ },
+ {
+ "cell_type": "code",
+ "id": "81",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2025-12-13T14:13:18.774445Z",
+ "start_time": "2025-12-13T14:13:18.771181Z"
+ }
+ },
+ "source": [
+ "# Export figure to HTML (interactive)\n",
+ "# result.figure.write_html('balance_plot.html')\n",
+ "\n",
+ "# Export figure to image\n",
+ "# result.figure.write_image('balance_plot.png', scale=2)"
+ ],
+ "outputs": [],
+ "execution_count": 42
+ },
+ {
+ "cell_type": "markdown",
+ "id": "85",
+ "metadata": {},
+ "source": [
+ "## Summary\n",
+ "\n",
+ "### Data Access\n",
+ "\n",
+ "| Property | Description |\n",
+ "|----------|-------------|\n",
+ "| `statistics.flow_rates` | Time series of flow rates (power) |\n",
+ "| `statistics.flow_hours` | Energy values (rate × duration) |\n",
+ "| `statistics.sizes` | Component/flow capacities |\n",
+ "| `statistics.charge_states` | Storage charge levels |\n",
+ "| `statistics.temporal_effects` | Effects per timestep |\n",
+ "| `statistics.periodic_effects` | Effects per period |\n",
+ "| `statistics.total_effects` | Aggregated effect totals |\n",
+ "| `topology.carrier_colors` | Cached carrier color mapping |\n",
+ "| `topology.component_colors` | Cached component color mapping |\n",
+ "| `topology.bus_colors` | Cached bus color mapping |\n",
+ "\n",
+ "### Plot Methods\n",
+ "\n",
+ "| Method | Description |\n",
+ "|--------|-------------|\n",
+ "| `plot.balance(node)` | Stacked bar of in/outflows |\n",
+ "| `plot.carrier_balance(carrier)` | Balance for all flows of a carrier |\n",
+ "| `plot.flows(variables)` | Time series line/area plot |\n",
+ "| `plot.storage(component)` | Combined charge state and flows |\n",
+ "| `plot.charge_states(component)` | Charge state time series |\n",
+ "| `plot.sizes()` | Bar chart of sizes |\n",
+ "| `plot.effects(effect)` | Bar chart of effect contributions |\n",
+ "| `plot.duration_curve(variables)` | Sorted duration curve |\n",
+ "| `plot.heatmap(variable)` | 2D time-reshaped heatmap |\n",
+ "| `plot.sankey.flows()` | Energy flow Sankey |\n",
+ "| `plot.sankey.sizes()` | Capacity Sankey |\n",
+ "| `plot.sankey.peak_flow()` | Peak power Sankey |\n",
+ "| `plot.sankey.effects(effect)` | Effect allocation Sankey |\n",
+ "| `topology.plot()` | System structure diagram |"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": "3.11"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/notebooks/10-transmission.ipynb b/docs/notebooks/10-transmission.ipynb
new file mode 100644
index 000000000..898d092c0
--- /dev/null
+++ b/docs/notebooks/10-transmission.ipynb
@@ -0,0 +1,418 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "0",
+ "metadata": {},
+ "source": [
+ "# Transmission\n",
+ "\n",
+ "Model energy or material transport between locations with losses.\n",
+ "\n",
+ "This notebook covers:\n",
+ "\n",
+ "- **Transmission component**: Connecting sites with pipelines, cables, or conveyors\n",
+ "- **Transmission losses**: Relative losses (proportional) and absolute losses (fixed)\n",
+ "- **Bidirectional flow**: Two-way transmission with flow direction constraints\n",
+ "- **Capacity optimization**: Sizing transmission infrastructure"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1",
+ "metadata": {},
+ "source": [
+ "## Setup"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import plotly.express as px\n",
+ "import xarray as xr\n",
+ "\n",
+ "import flixopt as fx\n",
+ "\n",
+ "fx.CONFIG.notebook()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3",
+ "metadata": {},
+ "source": [
+ "## The Problem: Connecting Two Sites\n",
+ "\n",
+ "Consider a district heating network with two sites:\n",
+ "\n",
+ "- **Site A**: Has a large gas boiler (cheap production)\n",
+ "- **Site B**: Has a smaller electric boiler (expensive, but flexible)\n",
+ "\n",
+ "A district heating pipe connects both sites. The question: How should heat flow between sites to minimize total costs?\n",
+ "\n",
+ "### Transmission Characteristics\n",
+ "\n",
+ "| Parameter | Value | Description |\n",
+ "|-----------|-------|-------------|\n",
+ "| Relative losses | 5% | Heat loss proportional to flow (pipe heat loss) |\n",
+ "| Capacity | 200 kW | Maximum transmission rate |\n",
+ "| Bidirectional | Yes | Heat can flow A→B or B→A |"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4",
+ "metadata": {},
+ "source": [
+ "## Define Time Series Data"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "5",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# One week simulation\n",
+ "timesteps = pd.date_range('2024-01-22', periods=168, freq='h')\n",
+ "hours = np.arange(168)\n",
+ "hour_of_day = hours % 24\n",
+ "\n",
+ "# Site A: Industrial facility with steady demand\n",
+ "demand_a_base = 150\n",
+ "demand_a_variation = 30 * np.sin(hour_of_day * np.pi / 12) # Day/night cycle\n",
+ "demand_a = demand_a_base + demand_a_variation\n",
+ "\n",
+ "# Site B: Office building with peak during work hours\n",
+ "demand_b = np.where(\n",
+ " (hour_of_day >= 8) & (hour_of_day <= 18),\n",
+ " 180, # Daytime: 180 kW\n",
+ " 80, # Nighttime: 80 kW\n",
+ ")\n",
+ "# Add weekly pattern (lower on weekends)\n",
+ "day_of_week = (hours // 24) % 7\n",
+ "demand_b = np.where(day_of_week >= 5, demand_b * 0.6, demand_b) # Weekend reduction"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Visualize demand profiles\n",
+ "fig = px.line(\n",
+ " x=timesteps.tolist() * 2,\n",
+ " y=np.concatenate([demand_a, demand_b]),\n",
+ " color=['Site A (Industrial)'] * 168 + ['Site B (Office)'] * 168,\n",
+ " title='Heat Demand at Both Sites',\n",
+ " labels={'x': 'Time', 'y': 'Heat Demand [kW]', 'color': 'Site'},\n",
+ ")\n",
+ "fig"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "7",
+ "metadata": {},
+ "source": [
+ "## Example 1: Unidirectional Transmission\n",
+ "\n",
+ "Start with a simple case: heat flows only from Site A to Site B."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "8",
+ "metadata": {},
+ "outputs": [],
+ "source": "fs_unidirectional = fx.FlowSystem(timesteps)\nfs_unidirectional.add_carriers(\n fx.Carrier('gas', '#3498db', 'kW'),\n fx.Carrier('electricity', '#f1c40f', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n)\nfs_unidirectional.add_elements(\n # === Buses (one per site) ===\n fx.Bus('Heat_A', carrier='heat'), # Site A heat network\n fx.Bus('Heat_B', carrier='heat'), # Site B heat network\n fx.Bus('Gas', carrier='gas'), # Gas supply network\n fx.Bus('Electricity', carrier='electricity'), # Electricity grid\n # === Effect ===\n fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n # === External supplies ===\n fx.Source('GasSupply', outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n fx.Source('ElecGrid', outputs=[fx.Flow('Elec', bus='Electricity', size=500, effects_per_flow_hour=0.25)]),\n # === Site A: Large gas boiler (cheap) ===\n fx.LinearConverter(\n 'GasBoiler_A',\n inputs=[fx.Flow('Gas', bus='Gas', size=500)],\n outputs=[fx.Flow('Heat', bus='Heat_A', size=400)],\n conversion_factors=[{'Gas': 1, 'Heat': 0.92}], # 92% efficiency\n ),\n # === Site B: Small electric boiler (expensive but flexible) ===\n fx.LinearConverter(\n 'ElecBoiler_B',\n inputs=[fx.Flow('Elec', bus='Electricity', size=250)],\n outputs=[fx.Flow('Heat', bus='Heat_B', size=250)],\n conversion_factors=[{'Elec': 1, 'Heat': 0.99}], # 99% efficiency\n ),\n # === Transmission: A → B (unidirectional) ===\n fx.Transmission(\n 'Pipe_A_to_B',\n in1=fx.Flow('from_A', bus='Heat_A', size=200), # Input from Site A\n out1=fx.Flow('to_B', bus='Heat_B', size=200), # Output to Site B\n relative_losses=0.05, # 5% heat loss in pipe\n ),\n # === Demands ===\n fx.Sink('Demand_A', inputs=[fx.Flow('Heat', bus='Heat_A', size=1, fixed_relative_profile=demand_a)]),\n fx.Sink('Demand_B', inputs=[fx.Flow('Heat', bus='Heat_B', size=1, fixed_relative_profile=demand_b)]),\n)\n\nfs_unidirectional.optimize(fx.solvers.HighsSolver())"
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "9",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# View results\n",
+ "print(f'Total cost: {fs_unidirectional.solution[\"costs\"].item():.2f} €')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "10",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Heat balance at Site A\n",
+ "fs_unidirectional.statistics.plot.balance('Heat_A')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "11",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Heat balance at Site B\n",
+ "fs_unidirectional.statistics.plot.balance('Heat_B')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "12",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Energy flow overview\n",
+ "fs_unidirectional.statistics.plot.sankey.flows()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "13",
+ "metadata": {},
+ "source": [
+ "### Observations\n",
+ "\n",
+ "- The optimizer uses the **cheaper gas boiler at Site A** as much as possible\n",
+ "- Heat is transmitted to Site B (despite 5% losses) because gas is much cheaper than electricity\n",
+ "- The electric boiler at Site B only runs when transmission capacity is insufficient"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "14",
+ "metadata": {},
+ "source": [
+ "## Example 2: Bidirectional Transmission\n",
+ "\n",
+ "Now allow heat to flow in **both directions**. This is useful when:\n",
+ "- Both sites have generation capacity\n",
+ "- Demand patterns differ between sites\n",
+ "- Prices or availability vary over time"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "15",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Add a heat pump at Site B (cheaper during certain hours)\n",
+ "# Electricity price varies: cheap at night, expensive during day\n",
+ "elec_price = np.where(\n",
+ " (hour_of_day >= 22) | (hour_of_day <= 6),\n",
+ " 0.08, # Night: 0.08 €/kWh\n",
+ " 0.25, # Day: 0.25 €/kWh\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "16",
+ "metadata": {},
+ "outputs": [],
+ "source": "fs_bidirectional = fx.FlowSystem(timesteps)\nfs_bidirectional.add_carriers(\n fx.Carrier('gas', '#3498db', 'kW'),\n fx.Carrier('electricity', '#f1c40f', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n)\nfs_bidirectional.add_elements(\n # === Buses ===\n fx.Bus('Heat_A', carrier='heat'),\n fx.Bus('Heat_B', carrier='heat'),\n fx.Bus('Gas', carrier='gas'),\n fx.Bus('Electricity', carrier='electricity'),\n # === Effect ===\n fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n # === External supplies ===\n fx.Source('GasSupply', outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n fx.Source('ElecGrid', outputs=[fx.Flow('Elec', bus='Electricity', size=500, effects_per_flow_hour=elec_price)]),\n # === Site A: Gas boiler ===\n fx.LinearConverter(\n 'GasBoiler_A',\n inputs=[fx.Flow('Gas', bus='Gas', size=500)],\n outputs=[fx.Flow('Heat', bus='Heat_A', size=400)],\n conversion_factors=[{'Gas': 1, 'Heat': 0.92}],\n ),\n # === Site B: Heat pump (efficient with variable electricity price) ===\n fx.LinearConverter(\n 'HeatPump_B',\n inputs=[fx.Flow('Elec', bus='Electricity', size=100)],\n outputs=[fx.Flow('Heat', bus='Heat_B', size=350)],\n conversion_factors=[{'Elec': 1, 'Heat': 3.5}], # COP = 3.5\n ),\n # === BIDIRECTIONAL Transmission ===\n fx.Transmission(\n 'Pipe_AB',\n # Direction 1: A → B\n in1=fx.Flow('from_A', bus='Heat_A', size=200),\n out1=fx.Flow('to_B', bus='Heat_B', size=200),\n # Direction 2: B → A\n in2=fx.Flow('from_B', bus='Heat_B', size=200),\n out2=fx.Flow('to_A', bus='Heat_A', size=200),\n relative_losses=0.05,\n prevent_simultaneous_flows_in_both_directions=True, # Can't flow both ways at once\n ),\n # === Demands ===\n fx.Sink('Demand_A', inputs=[fx.Flow('Heat', bus='Heat_A', size=1, fixed_relative_profile=demand_a)]),\n fx.Sink('Demand_B', inputs=[fx.Flow('Heat', bus='Heat_B', size=1, fixed_relative_profile=demand_b)]),\n)\n\nfs_bidirectional.optimize(fx.solvers.HighsSolver())"
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "17",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Compare costs\n",
+ "print(f'Unidirectional cost: {fs_unidirectional.solution[\"costs\"].item():.2f} €')\n",
+ "print(f'Bidirectional cost: {fs_bidirectional.solution[\"costs\"].item():.2f} €')\n",
+ "savings = fs_unidirectional.solution['costs'].item() - fs_bidirectional.solution['costs'].item()\n",
+ "print(f'Savings from bidirectional: {savings:.2f} €')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "18",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Visualize transmission flows in both directions using xarray\n",
+ "flow_data = xr.Dataset(\n",
+ " {\n",
+ " 'A_to_B': fs_bidirectional.solution['Pipe_AB(from_A)|flow_rate'],\n",
+ " 'B_to_A': fs_bidirectional.solution['Pipe_AB(from_B)|flow_rate'],\n",
+ " }\n",
+ ")\n",
+ "\n",
+ "fig = px.line(\n",
+ " x=list(flow_data['time'].values) * 2,\n",
+ " y=np.concatenate([flow_data['A_to_B'].values, flow_data['B_to_A'].values]),\n",
+ " color=['A → B'] * len(flow_data['time']) + ['B → A'] * len(flow_data['time']),\n",
+ " title='Transmission Flow Direction Over Time',\n",
+ " labels={'x': 'Time', 'y': 'Flow Rate [kW]', 'color': 'Direction'},\n",
+ ")\n",
+ "fig"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "19",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Heat balance at Site B showing bidirectional flows\n",
+ "fs_bidirectional.statistics.plot.balance('Heat_B')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "20",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Energy flow overview\n",
+ "fs_bidirectional.statistics.plot.sankey.flows()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "21",
+ "metadata": {},
+ "source": [
+ "### Observations\n",
+ "\n",
+ "- During **cheap electricity hours** (night): Heat pump at Site B produces heat, some flows to Site A\n",
+ "- During **expensive electricity hours** (day): Gas boiler at Site A supplies both sites\n",
+ "- The bidirectional transmission enables **load shifting** and **arbitrage** between sites"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "22",
+ "metadata": {},
+ "source": [
+ "## Example 3: Transmission Capacity Optimization\n",
+ "\n",
+ "What's the **optimal pipe capacity**? Let the optimizer decide."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "23",
+ "metadata": {},
+ "outputs": [],
+ "source": "# Daily amortized pipe cost (simplified)\nPIPE_COST_PER_KW = 0.05 # €/kW/day capacity cost\n\nfs_invest = fx.FlowSystem(timesteps)\nfs_invest.add_carriers(\n fx.Carrier('gas', '#3498db', 'kW'),\n fx.Carrier('electricity', '#f1c40f', 'kW'),\n fx.Carrier('heat', '#e74c3c', 'kW'),\n)\nfs_invest.add_elements(\n # === Buses ===\n fx.Bus('Heat_A', carrier='heat'),\n fx.Bus('Heat_B', carrier='heat'),\n fx.Bus('Gas', carrier='gas'),\n fx.Bus('Electricity', carrier='electricity'),\n # === Effect ===\n fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n # === External supplies ===\n fx.Source('GasSupply', outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n fx.Source('ElecGrid', outputs=[fx.Flow('Elec', bus='Electricity', size=500, effects_per_flow_hour=elec_price)]),\n # === Site A: Gas boiler ===\n fx.LinearConverter(\n 'GasBoiler_A',\n inputs=[fx.Flow('Gas', bus='Gas', size=500)],\n outputs=[fx.Flow('Heat', bus='Heat_A', size=400)],\n conversion_factors=[{'Gas': 1, 'Heat': 0.92}],\n ),\n # === Site B: Heat pump ===\n fx.LinearConverter(\n 'HeatPump_B',\n inputs=[fx.Flow('Elec', bus='Electricity', size=100)],\n outputs=[fx.Flow('Heat', bus='Heat_B', size=350)],\n conversion_factors=[{'Elec': 1, 'Heat': 3.5}],\n ),\n # === Site B: Backup electric boiler ===\n fx.LinearConverter(\n 'ElecBoiler_B',\n inputs=[fx.Flow('Elec', bus='Electricity', size=200)],\n outputs=[fx.Flow('Heat', bus='Heat_B', size=200)],\n conversion_factors=[{'Elec': 1, 'Heat': 0.99}],\n ),\n # === Transmission with INVESTMENT OPTIMIZATION ===\n # Investment parameters are passed via 'size' parameter\n fx.Transmission(\n 'Pipe_AB',\n in1=fx.Flow(\n 'from_A',\n bus='Heat_A',\n size=fx.InvestParameters(\n effects_of_investment_per_size={'costs': PIPE_COST_PER_KW * 7}, # Weekly cost\n minimum_size=0,\n maximum_size=300,\n ),\n ),\n out1=fx.Flow('to_B', bus='Heat_B'),\n in2=fx.Flow(\n 'from_B',\n bus='Heat_B',\n size=fx.InvestParameters(\n 
effects_of_investment_per_size={'costs': PIPE_COST_PER_KW * 7},\n minimum_size=0,\n maximum_size=300,\n ),\n ),\n out2=fx.Flow('to_A', bus='Heat_A'),\n relative_losses=0.05,\n balanced=True, # Same capacity in both directions\n prevent_simultaneous_flows_in_both_directions=True,\n ),\n # === Demands ===\n fx.Sink('Demand_A', inputs=[fx.Flow('Heat', bus='Heat_A', size=1, fixed_relative_profile=demand_a)]),\n fx.Sink('Demand_B', inputs=[fx.Flow('Heat', bus='Heat_B', size=1, fixed_relative_profile=demand_b)]),\n)\n\nfs_invest.optimize(fx.solvers.HighsSolver())"
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "24",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Results\n",
+ "optimal_capacity = fs_invest.solution['Pipe_AB(from_A)|size'].item()\n",
+ "total_cost = fs_invest.solution['costs'].item()\n",
+ "\n",
+ "print(f'Optimal pipe capacity: {optimal_capacity:.1f} kW')\n",
+ "print(f'Total cost: {total_cost:.2f} €')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "25",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Effect breakdown by component\n",
+ "fs_invest.statistics.plot.effects()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "26",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Energy flows\n",
+ "fs_invest.statistics.plot.sankey.flows()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "27",
+ "metadata": {},
+ "source": "## Key Concepts\n\n### Transmission Component Structure\n\n```python\nfx.Transmission(\n label='pipe_name',\n # Direction 1: A → B\n in1=fx.Flow('from_A', bus='Bus_A', size=100),\n out1=fx.Flow('to_B', bus='Bus_B', size=100),\n # Direction 2: B → A (optional - omit for unidirectional)\n in2=fx.Flow('from_B', bus='Bus_B', size=100),\n out2=fx.Flow('to_A', bus='Bus_A', size=100),\n # Loss parameters\n relative_losses=0.05, # 5% proportional loss\n absolute_losses=10, # 10 kW fixed loss when active (optional)\n # Operational constraints\n prevent_simultaneous_flows_in_both_directions=True,\n balanced=True, # Same capacity both directions (needs InvestParameters)\n)\n```\n\n### Loss Types\n\n| Loss Type | Formula | Use Case |\n|-----------|---------|----------|\n| **Relative** | `out = in × (1 - loss)` | Heat pipes, electrical lines |\n| **Absolute** | `out = in - loss` (when active) | Pump energy, standby losses |\n\n### Bidirectional vs Unidirectional\n\n| Configuration | Parameters | Use Case |\n|---------------|------------|----------|\n| **Unidirectional** | `in1`, `out1` only | One-way pipelines, conveyors |\n| **Bidirectional** | `in1`, `out1`, `in2`, `out2` | Power lines, reversible pipes |\n\n### Investment Optimization\n\nUse `InvestParameters` as the `size` parameter for capacity optimization:\n\n```python\nin1=fx.Flow(\n 'from_A', \n bus='Bus_A',\n size=fx.InvestParameters( # Pass InvestParameters as size\n effects_of_investment_per_size={'costs': cost_per_kw},\n minimum_size=0,\n maximum_size=500,\n ),\n)\n```"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "28",
+ "metadata": {},
+ "source": [
+ "## Common Use Cases\n",
+ "\n",
+ "| Application | Typical Losses | Notes |\n",
+ "|-------------|---------------|-------|\n",
+ "| **District heating pipe** | 2-10% relative | Temperature-dependent |\n",
+ "| **High voltage line** | 1-5% relative | Distance-dependent |\n",
+ "| **Natural gas pipeline** | 0.5-2% relative | Compressor energy as absolute loss |\n",
+ "| **Conveyor belt** | Fixed absolute | Motor energy consumption |\n",
+ "| **Hydrogen pipeline** | 1-3% relative | Compression losses |"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "29",
+ "metadata": {},
+ "source": [
+ "## Summary\n",
+ "\n",
+ "You learned how to:\n",
+ "\n",
+ "- Create **unidirectional transmission** between two buses\n",
+ "- Model **bidirectional transmission** with flow direction constraints\n",
+ "- Apply **relative and absolute losses** to transmission\n",
+ "- Optimize **transmission capacity** using InvestParameters\n",
+ "- Analyze **multi-site energy systems** with interconnections\n",
+ "\n",
+ "### Next Steps\n",
+ "\n",
+ "- **[07-scenarios-and-periods](07-scenarios-and-periods.ipynb)**: Multi-year planning with uncertainty\n",
+ "- **[08-large-scale-optimization](08-large-scale-optimization.ipynb)**: Computational efficiency techniques"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": "3.10.0"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/notebooks/data/generate_example_systems.py b/docs/notebooks/data/generate_example_systems.py
new file mode 100644
index 000000000..556463302
--- /dev/null
+++ b/docs/notebooks/data/generate_example_systems.py
@@ -0,0 +1,345 @@
+"""Generate example FlowSystem files for the plotting notebook.
+
+This script creates three FlowSystems of varying complexity:
+1. simple_system - Basic heat system (boiler + storage + sink)
+2. complex_system - Multi-carrier with multiple effects and piecewise efficiency
+3. multiperiod_system - System with periods and scenarios
+
+Run this script to regenerate the example data files.
+"""
+
+from pathlib import Path
+
+import numpy as np
+import pandas as pd
+
+import flixopt as fx
+
+# Output directory (same as this script)
+try:
+ OUTPUT_DIR = Path(__file__).parent
+except NameError:
+ # Running in notebook context (e.g., mkdocs-jupyter)
+ OUTPUT_DIR = Path('docs/notebooks/data')
+
+
+def create_simple_system() -> fx.FlowSystem:
+ """Create a simple heat system with boiler, storage, and demand.
+
+ Components:
+ - Gas boiler (150 kW)
+ - Thermal storage (500 kWh)
+ - Office heat demand
+
+ One week, hourly resolution.
+ """
+ # One week, hourly
+ timesteps = pd.date_range('2024-01-15', periods=168, freq='h')
+
+ # Create demand pattern
+ hours = np.arange(168)
+ hour_of_day = hours % 24
+ day_of_week = (hours // 24) % 7
+
+ base_demand = np.where((hour_of_day >= 7) & (hour_of_day <= 18), 80, 30)
+ weekend_factor = np.where(day_of_week >= 5, 0.5, 1.0)
+
+ np.random.seed(42)
+ heat_demand = base_demand * weekend_factor + np.random.normal(0, 5, len(hours))
+ heat_demand = np.clip(heat_demand, 20, 100)
+
+ # Time-varying gas price
+ gas_price = np.where((hour_of_day >= 6) & (hour_of_day <= 22), 0.08, 0.05)
+
+ fs = fx.FlowSystem(timesteps)
+ fs.add_carriers(
+ fx.Carrier('gas', '#3498db', 'kW'),
+ fx.Carrier('heat', '#e74c3c', 'kW'),
+ )
+ fs.add_elements(
+ fx.Bus('Gas', carrier='gas'),
+ fx.Bus('Heat', carrier='heat'),
+ fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),
+ fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=gas_price)]),
+ fx.linear_converters.Boiler(
+ 'Boiler',
+ thermal_efficiency=0.92,
+ thermal_flow=fx.Flow('Heat', bus='Heat', size=150),
+ fuel_flow=fx.Flow('Gas', bus='Gas'),
+ ),
+ fx.Storage(
+ 'ThermalStorage',
+ capacity_in_flow_hours=500,
+ initial_charge_state=250,
+ minimal_final_charge_state=200,
+ eta_charge=0.98,
+ eta_discharge=0.98,
+ relative_loss_per_hour=0.005,
+ charging=fx.Flow('Charge', bus='Heat', size=100),
+ discharging=fx.Flow('Discharge', bus='Heat', size=100),
+ ),
+ fx.Sink('Office', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
+ )
+ return fs
+
+
+def create_complex_system() -> fx.FlowSystem:
+ """Create a complex multi-carrier system with multiple effects.
+
+ Components:
+ - Gas grid (with CO2 emissions)
+ - Electricity grid (with time-varying price and CO2)
+ - CHP with piecewise efficiency
+ - Heat pump
+ - Gas boiler (backup)
+ - Thermal storage
+ - Heat demand
+
+ Effects: costs (objective), CO2
+
+ Three days, hourly resolution.
+ """
+ timesteps = pd.date_range('2024-06-01', periods=72, freq='h')
+ hours = np.arange(72)
+ hour_of_day = hours % 24
+
+ # Demand profiles
+ np.random.seed(123)
+ heat_demand = 50 + 30 * np.sin(2 * np.pi * hour_of_day / 24 - np.pi / 2) + np.random.normal(0, 5, 72)
+ heat_demand = np.clip(heat_demand, 20, 100)
+
+ electricity_demand = 20 + 15 * np.sin(2 * np.pi * hour_of_day / 24) + np.random.normal(0, 3, 72)
+ electricity_demand = np.clip(electricity_demand, 10, 50)
+
+ # Price profiles
+ electricity_price = np.where((hour_of_day >= 8) & (hour_of_day <= 20), 0.25, 0.12)
+ gas_price = 0.06
+
+ # CO2 factors (kg/kWh)
+ electricity_co2 = np.where((hour_of_day >= 8) & (hour_of_day <= 20), 0.4, 0.3) # Higher during peak
+ gas_co2 = 0.2
+
+ fs = fx.FlowSystem(timesteps)
+ fs.add_carriers(
+ fx.Carrier('gas', '#3498db', 'kW'),
+ fx.Carrier('electricity', '#f1c40f', 'kW'),
+ fx.Carrier('heat', '#e74c3c', 'kW'),
+ )
+ fs.add_elements(
+ # Buses
+ fx.Bus('Gas', carrier='gas'),
+ fx.Bus('Electricity', carrier='electricity'),
+ fx.Bus('Heat', carrier='heat'),
+ # Effects
+ fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),
+ fx.Effect('CO2', 'kg', 'CO2 Emissions'),
+ # Gas supply
+ fx.Source(
+ 'GasGrid',
+ outputs=[fx.Flow('Gas', bus='Gas', size=300, effects_per_flow_hour={'costs': gas_price, 'CO2': gas_co2})],
+ ),
+ # Electricity grid (import and export)
+ fx.Source(
+ 'ElectricityImport',
+ outputs=[
+ fx.Flow(
+ 'El',
+ bus='Electricity',
+ size=100,
+ effects_per_flow_hour={'costs': electricity_price, 'CO2': electricity_co2},
+ )
+ ],
+ ),
+ fx.Sink(
+ 'ElectricityExport',
+ inputs=[
+ fx.Flow('El', bus='Electricity', size=50, effects_per_flow_hour={'costs': -electricity_price * 0.8})
+ ],
+ ),
+ # CHP with piecewise efficiency (efficiency varies with load)
+ fx.LinearConverter(
+ 'CHP',
+ inputs=[fx.Flow('Gas', bus='Gas', size=200)],
+ outputs=[fx.Flow('El', bus='Electricity', size=80), fx.Flow('Heat', bus='Heat', size=85)],
+ piecewise_conversion=fx.PiecewiseConversion(
+ {
+ 'Gas': fx.Piecewise(
+ [
+ fx.Piece(start=80, end=160), # Part load
+ fx.Piece(start=160, end=200), # Full load
+ ]
+ ),
+ 'El': fx.Piecewise(
+ [
+ fx.Piece(start=25, end=60), # ~31-38% electrical efficiency
+ fx.Piece(start=60, end=80), # ~38-40% electrical efficiency
+ ]
+ ),
+ 'Heat': fx.Piecewise(
+ [
+ fx.Piece(start=35, end=70), # ~44% thermal efficiency
+ fx.Piece(start=70, end=85), # ~43% thermal efficiency
+ ]
+ ),
+ }
+ ),
+ status_parameters=fx.StatusParameters(effects_per_active_hour={'costs': 2}),
+ ),
+ # Heat pump (with investment)
+ fx.linear_converters.HeatPump(
+ 'HeatPump',
+ thermal_flow=fx.Flow(
+ 'Heat',
+ bus='Heat',
+ size=fx.InvestParameters(
+ effects_of_investment={'costs': 500},
+ effects_of_investment_per_size={'costs': 100},
+ maximum_size=60,
+ ),
+ ),
+ electrical_flow=fx.Flow('El', bus='Electricity'),
+ cop=3.5,
+ ),
+ # Backup boiler
+ fx.linear_converters.Boiler(
+ 'BackupBoiler',
+ thermal_flow=fx.Flow('Heat', bus='Heat', size=80),
+ fuel_flow=fx.Flow('Gas', bus='Gas'),
+ thermal_efficiency=0.90,
+ ),
+ # Thermal storage (with investment)
+ fx.Storage(
+ 'HeatStorage',
+ capacity_in_flow_hours=fx.InvestParameters(
+ effects_of_investment={'costs': 200},
+ effects_of_investment_per_size={'costs': 10},
+ maximum_size=300,
+ ),
+ eta_charge=0.95,
+ eta_discharge=0.95,
+ charging=fx.Flow('Charge', bus='Heat', size=50),
+ discharging=fx.Flow('Discharge', bus='Heat', size=50),
+ ),
+ # Demands
+ fx.Sink('HeatDemand', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
+ fx.Sink(
+ 'ElDemand', inputs=[fx.Flow('El', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)]
+ ),
+ )
+ return fs
+
+
+def create_multiperiod_system() -> fx.FlowSystem:
+ """Create a system with multiple periods and scenarios.
+
+ Same structure as simple system but with:
+ - 3 planning periods (years 2024, 2025, 2026)
+ - 2 scenarios (high demand, low demand)
+
+ Each period: 48 hours (2 days representative)
+ """
+ timesteps = pd.date_range('2024-01-01', periods=48, freq='h')
+ hour_of_day = np.arange(48) % 24
+
+ # Period definitions (years)
+ periods = pd.Index([2024, 2025, 2026], name='period')
+
+ # Scenario definitions
+ scenarios = pd.Index(['high_demand', 'low_demand'], name='scenario')
+ scenario_weights = np.array([0.3, 0.7])
+
+ # Base demand pattern (hourly)
+ base_pattern = np.where((hour_of_day >= 7) & (hour_of_day <= 18), 80.0, 35.0)
+
+ # Scenario-specific scaling
+ np.random.seed(42)
+ high_demand = base_pattern * 1.2 + np.random.normal(0, 5, 48)
+ low_demand = base_pattern * 0.85 + np.random.normal(0, 3, 48)
+
+ # Create DataFrame with scenario columns
+ heat_demand = pd.DataFrame(
+ {
+ 'high_demand': np.clip(high_demand, 20, 120),
+ 'low_demand': np.clip(low_demand, 15, 90),
+ },
+ index=timesteps,
+ )
+
+ # Gas price varies by period (rising costs)
+ gas_prices = np.array([0.06, 0.08, 0.10]) # Per period
+
+ fs = fx.FlowSystem(
+ timesteps,
+ periods=periods,
+ scenarios=scenarios,
+ scenario_weights=scenario_weights,
+ )
+ fs.add_carriers(
+ fx.Carrier('gas', '#3498db', 'kW'),
+ fx.Carrier('heat', '#e74c3c', 'kW'),
+ )
+ fs.add_elements(
+ fx.Bus('Gas', carrier='gas'),
+ fx.Bus('Heat', carrier='heat'),
+ fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),
+ fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=gas_prices)]),
+ fx.linear_converters.Boiler(
+ 'Boiler',
+ thermal_efficiency=0.92,
+ thermal_flow=fx.Flow(
+ 'Heat',
+ bus='Heat',
+ size=fx.InvestParameters(
+ effects_of_investment={'costs': 1000},
+ effects_of_investment_per_size={'costs': 50},
+ maximum_size=250,
+ ),
+ ),
+ fuel_flow=fx.Flow('Gas', bus='Gas'),
+ ),
+ fx.Storage(
+ 'ThermalStorage',
+ capacity_in_flow_hours=fx.InvestParameters(
+ effects_of_investment={'costs': 500},
+ effects_of_investment_per_size={'costs': 15},
+ maximum_size=400,
+ ),
+ eta_charge=0.98,
+ eta_discharge=0.98,
+ charging=fx.Flow('Charge', bus='Heat', size=80),
+ discharging=fx.Flow('Discharge', bus='Heat', size=80),
+ ),
+ fx.Sink('Building', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
+ )
+ return fs
+
+
+def main():
+ """Generate all example systems and save to netCDF."""
+ solver = fx.solvers.HighsSolver(log_to_console=False)
+
+ systems = [
+ ('simple_system', create_simple_system),
+ ('complex_system', create_complex_system),
+ ('multiperiod_system', create_multiperiod_system),
+ ]
+
+ for name, create_func in systems:
+ print(f'Creating {name}...')
+ fs = create_func()
+
+ print(' Optimizing...')
+ fs.optimize(solver)
+
+ output_path = OUTPUT_DIR / f'{name}.nc4'
+ print(f' Saving to {output_path}...')
+ fs.to_netcdf(output_path, overwrite=True)
+
+ print(f' Done. Objective: {fs.solution["objective"].item():.2f}')
+ print()
+
+ print('All systems generated successfully!')
+
+
+if __name__ == '__main__':
+ main()
diff --git a/docs/notebooks/index.md b/docs/notebooks/index.md
new file mode 100644
index 000000000..067d5247b
--- /dev/null
+++ b/docs/notebooks/index.md
@@ -0,0 +1,62 @@
+# Examples
+
+Learn flixopt through practical examples organized by topic. Each notebook includes a real-world user story and progressively builds your understanding.
+
+## Basics
+
+| Notebook | Description |
+|----------|-------------|
+| [01-Quickstart](01-quickstart.ipynb) | Minimal working example - heat a workshop with a gas boiler |
+| [02-Heat System](02-heat-system.ipynb) | District heating with thermal storage and time-varying prices |
+
+## Investment
+
+| Notebook | Description |
+|----------|-------------|
+| [03-Sizing](03-investment-optimization.ipynb) | Size a solar heating system - let the optimizer decide equipment sizes |
+| [04-Constraints](04-operational-constraints.ipynb) | Industrial boiler with startup costs, minimum uptime, and load constraints |
+
+## Advanced
+
+| Notebook | Description |
+|----------|-------------|
+| [05-Multi-Carrier](05-multi-carrier-system.ipynb) | Hospital with CHP producing both electricity and heat |
+| [10-Transmission](10-transmission.ipynb) | Connect sites with pipelines or cables, including losses and bidirectional flow |
+
+## Non-Linear Modeling
+
+| Notebook | Description |
+|----------|-------------|
+| [06a-Time-Varying](06a-time-varying-parameters.ipynb) | Heat pump with temperature-dependent COP |
+| [06b-Piecewise Conversion](06b-piecewise-conversion.ipynb) | Gas engine with load-dependent efficiency curves |
+| [06c-Piecewise Effects](06c-piecewise-effects.ipynb) | Economies of scale in investment costs |
+
+## Scaling
+
+| Notebook | Description |
+|----------|-------------|
+| [07-Scenarios](07-scenarios-and-periods.ipynb) | Multi-year planning with uncertain demand scenarios |
+| [08-Large-Scale](08-large-scale-optimization.ipynb) | Speed up large problems with resampling and two-stage optimization |
+
+## Results
+
+| Notebook | Description |
+|----------|-------------|
+| [09-Plotting](09-plotting-and-data-access.ipynb) | Access optimization results and create visualizations |
+
+## Key Concepts
+
+| Concept | Introduced In |
+|---------|---------------|
+| `FlowSystem`, `Bus`, `Flow` | Quickstart |
+| `Storage`, time-varying prices | Heat System |
+| `InvestParameters`, optimal sizing | Sizing |
+| `StatusParameters`, startup costs | Constraints |
+| Multi-carrier, CHP | Multi-Carrier |
+| `Transmission`, losses, bidirectional | Transmission |
+| Time-varying `conversion_factors` | Time-Varying Parameters |
+| `PiecewiseConversion`, part-load efficiency | Piecewise Conversion |
+| `PiecewiseEffects`, economies of scale | Piecewise Effects |
+| Periods, scenarios, weights | Scenarios |
+| `transform.resample()`, `fix_sizes()` | Large-Scale |
+| `statistics`, `topology`, plotting | Plotting |
diff --git a/docs/overrides/main.html b/docs/overrides/main.html
new file mode 100644
index 000000000..b245acdaa
--- /dev/null
+++ b/docs/overrides/main.html
@@ -0,0 +1,11 @@
+{% extends "base.html" %}
+
+{% block content %}
+{% if page.nb_url %}
+<a href="{{ page.nb_url }}" title="Download notebook" class="md-content__button md-icon" download>
+ {% include ".icons/material/download.svg" %}
+</a>
+{% endif %}
+
+{{ super() }}
+{% endblock content %}
diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css
index 78946b9ad..ee551b3bd 100644
--- a/docs/stylesheets/extra.css
+++ b/docs/stylesheets/extra.css
@@ -763,6 +763,188 @@ button:focus-visible {
scrollbar-color: var(--md-default-fg-color--lighter) var(--md-default-bg-color);
}
+/* ============================================================================
+ Color Swatches for Carrier Documentation
+ ========================================================================= */
+
+/* Inline color swatch - a small colored square */
+.color-swatch {
+ display: inline-block;
+ width: 1em;
+ height: 1em;
+ border-radius: 3px;
+ vertical-align: middle;
+ margin-right: 0.3em;
+ border: 1px solid rgba(0, 0, 0, 0.15);
+ box-shadow: 0 1px 2px rgba(0, 0, 0, 0.1);
+}
+
+[data-md-color-scheme="slate"] .color-swatch {
+ border-color: rgba(255, 255, 255, 0.2);
+}
+
+/* ============================================================================
+ Jupyter Notebook Styling (syncs with dark/light theme)
+ ========================================================================= */
+
+/* Override Jupyter notebook syntax highlighting to match Material theme */
+/* Use Material's CSS variables for consistent colors */
+.highlight-ipynb { background: var(--md-code-bg-color) !important; color: var(--md-code-fg-color) !important; }
+
+/* Comments */
+.highlight-ipynb .c, .highlight-ipynb .c1, .highlight-ipynb .ch,
+.highlight-ipynb .cm, .highlight-ipynb .cp, .highlight-ipynb .cpf,
+.highlight-ipynb .cs { color: var(--md-code-hl-comment-color, var(--md-default-fg-color--light)) !important; font-style: italic; }
+
+/* Keywords */
+.highlight-ipynb .k, .highlight-ipynb .kd,
+.highlight-ipynb .kn, .highlight-ipynb .kp, .highlight-ipynb .kr,
+.highlight-ipynb .kt { color: var(--md-code-hl-keyword-color, #3f6ec6) !important; }
+
+/* Strings */
+.highlight-ipynb .s, .highlight-ipynb .s1, .highlight-ipynb .s2,
+.highlight-ipynb .sa, .highlight-ipynb .sb, .highlight-ipynb .sc,
+.highlight-ipynb .sd, .highlight-ipynb .se, .highlight-ipynb .sh,
+.highlight-ipynb .si, .highlight-ipynb .sl, .highlight-ipynb .sr,
+.highlight-ipynb .ss, .highlight-ipynb .sx { color: var(--md-code-hl-string-color, #1c7d4d) !important; }
+
+/* Numbers */
+.highlight-ipynb .m, .highlight-ipynb .mb, .highlight-ipynb .mf,
+.highlight-ipynb .mh, .highlight-ipynb .mi, .highlight-ipynb .mo,
+.highlight-ipynb .il { color: var(--md-code-hl-number-color, #d52a2a) !important; }
+
+/* Functions */
+.highlight-ipynb .nf, .highlight-ipynb .fm { color: var(--md-code-hl-function-color, #a846b9) !important; }
+
+/* Constants/Builtins */
+.highlight-ipynb .nb, .highlight-ipynb .bp,
+.highlight-ipynb .kc { color: var(--md-code-hl-constant-color, #6e59d9) !important; }
+
+/* Special */
+.highlight-ipynb .nc, .highlight-ipynb .ne, .highlight-ipynb .nd,
+.highlight-ipynb .ni { color: var(--md-code-hl-special-color, #db1457) !important; }
+
+/* Names/variables */
+.highlight-ipynb .n, .highlight-ipynb .nn, .highlight-ipynb .na,
+.highlight-ipynb .nv, .highlight-ipynb .no { color: var(--md-code-hl-name-color, var(--md-code-fg-color)) !important; }
+
+/* Operators */
+.highlight-ipynb .o, .highlight-ipynb .ow { color: var(--md-code-hl-operator-color, var(--md-default-fg-color--light)) !important; }
+
+/* Punctuation */
+.highlight-ipynb .p, .highlight-ipynb .pm { color: var(--md-code-hl-punctuation-color, var(--md-default-fg-color--light)) !important; }
+
+/* Errors */
+.highlight-ipynb .err { color: var(--md-code-hl-special-color, #db1457) !important; }
+
+/* Notebook container */
+.jupyter-wrapper {
+ margin: 1rem 0;
+}
+
+/* Code cell styling - clean and modern */
+.jupyter-wrapper .jp-CodeCell {
+ border-radius: 0.4rem;
+ margin: 0.5rem 0;
+ border: 1px solid var(--md-default-fg-color--lightest);
+ overflow: hidden;
+}
+
+/* Input cells (code) */
+.jupyter-wrapper .jp-CodeCell .jp-InputArea {
+ background-color: var(--md-code-bg-color);
+ border: none;
+}
+
+.jupyter-wrapper .jp-InputArea pre {
+ margin: 0;
+ padding: 0.6rem 0.8rem;
+ font-size: 0.55rem;
+ line-height: 1.4;
+}
+
+/* Output cells */
+.jupyter-wrapper .jp-OutputArea pre {
+ font-size: 0.55rem;
+ margin: 0;
+}
+
+/* Cell prompts (In [1]:, Out [1]:) - hide for cleaner look */
+.jupyter-wrapper .jp-InputPrompt,
+.jupyter-wrapper .jp-OutputPrompt {
+ display: none;
+}
+
+/* Markdown cells - blend with page, no background */
+.jupyter-wrapper .jp-MarkdownCell {
+ background: transparent;
+ border: none;
+ margin: 0;
+}
+
+.jupyter-wrapper .jp-RenderedMarkdown {
+ padding: 0.5rem 0;
+}
+
+/* Tables in notebooks */
+.jupyter-wrapper table {
+ font-size: 0.55rem;
+ margin: 0;
+ border-collapse: collapse;
+}
+
+.jupyter-wrapper table th,
+.jupyter-wrapper table td {
+ padding: 0.3rem 0.6rem;
+ border: 1px solid var(--md-default-fg-color--lightest);
+}
+
+.jupyter-wrapper table th {
+ background-color: var(--md-default-fg-color--lightest);
+ font-weight: 600;
+}
+
+/* Images and plots */
+.jupyter-wrapper .jp-RenderedImage img,
+.jupyter-wrapper .jp-RenderedImage svg {
+ max-width: 100%;
+ height: auto;
+ display: block;
+ margin: 0 auto;
+}
+
+/* Dark mode adjustments */
+[data-md-color-scheme="slate"] .jupyter-wrapper .jp-CodeCell {
+ border-color: rgba(255, 255, 255, 0.1);
+}
+
+[data-md-color-scheme="slate"] .jupyter-wrapper table th {
+ background-color: rgba(255, 255, 255, 0.05);
+}
+
+[data-md-color-scheme="slate"] .jupyter-wrapper table th,
+[data-md-color-scheme="slate"] .jupyter-wrapper table td {
+ border-color: rgba(255, 255, 255, 0.1);
+}
+
+/* Plotly charts - ensure proper sizing */
+.jupyter-wrapper .plotly-graph-div {
+ margin: 0 auto;
+}
+
+/* Error output styling */
+.jupyter-wrapper .jp-RenderedText[data-mime-type="application/vnd.jupyter.stderr"] {
+ background-color: rgba(255, 0, 0, 0.05);
+ color: #c7254e;
+ padding: 0.5rem;
+ border-radius: 0.3rem;
+}
+
+[data-md-color-scheme="slate"] .jupyter-wrapper .jp-RenderedText[data-mime-type="application/vnd.jupyter.stderr"] {
+ background-color: rgba(255, 0, 0, 0.15);
+ color: #ff6b6b;
+}
+
/* ============================================================================
Footer Alignment Fix
========================================================================= */
diff --git a/docs/user-guide/building-models/choosing-components.md b/docs/user-guide/building-models/choosing-components.md
new file mode 100644
index 000000000..5f07e82dc
--- /dev/null
+++ b/docs/user-guide/building-models/choosing-components.md
@@ -0,0 +1,381 @@
+# Choosing Components
+
+This guide helps you select the right flixOpt component for your modeling needs.
+
+## Decision Tree
+
+```mermaid
+graph TD
+ A[What does this element do?] --> B{Brings energy INTO system?}
+ B -->|Yes| C[Source]
+ B -->|No| D{Takes energy OUT of system?}
+ D -->|Yes| E[Sink]
+ D -->|No| F{Converts energy type?}
+ F -->|Yes| G[LinearConverter]
+ F -->|No| H{Stores energy?}
+ H -->|Yes| I[Storage]
+ H -->|No| J{Transports between locations?}
+ J -->|Yes| K[Transmission]
+ J -->|No| L[Consider custom constraints]
+```
+
+## Component Comparison
+
+| Component | Purpose | Inputs | Outputs | Key Parameters |
+|-----------|---------|--------|---------|----------------|
+| **Source** | External supply | None | 1+ flows | `effects_per_flow_hour` |
+| **Sink** | Demand/export | 1+ flows | None | `fixed_relative_profile` |
+| **SourceAndSink** | Bidirectional exchange | 1+ flows | 1+ flows | Both input and output |
+| **LinearConverter** | Transform energy | 1+ flows | 1+ flows | `conversion_factors` |
+| **Storage** | Time-shift energy | charge flow | discharge flow | `capacity_in_flow_hours` |
+| **Transmission** | Transport energy | in1, in2 | out1, out2 | `relative_losses` |
+
+## Detailed Component Guide
+
+### Source
+
+**Use when:** Purchasing or importing energy/material from outside your system boundary.
+
+```python
+fx.Source(
+ 'GridElectricity',
+ outputs=[fx.Flow('Elec', bus='Electricity', size=1000, effects_per_flow_hour=0.25)]
+)
+```
+
+**Typical applications:**
+- Grid electricity connection
+- Natural gas supply
+- Raw material supply
+- Fuel delivery
+
+**Key parameters:**
+
+| Parameter | Purpose |
+|-----------|---------|
+| `outputs` | List of flows leaving this source |
+| `effects_per_flow_hour` | Cost/emissions per unit |
+| `invest_parameters` | For optimizing connection capacity |
+
+---
+
+### Sink
+
+**Use when:** Energy/material leaves your system (demand, export, waste).
+
+```python
+# Fixed demand (must be met)
+fx.Sink(
+ 'Building',
+ inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=demand)]
+)
+
+# Optional export (can sell if profitable)
+fx.Sink(
+ 'Export',
+ inputs=[fx.Flow('Elec', bus='Electricity', size=100, effects_per_flow_hour=-0.15)]
+)
+```
+
+**Typical applications:**
+- Heat/electricity demand
+- Product output
+- Grid export
+- Waste disposal
+
+**Key parameters:**
+
+| Parameter | Purpose |
+|-----------|---------|
+| `inputs` | List of flows entering this sink |
+| `fixed_relative_profile` | Demand profile (on flow) |
+| `effects_per_flow_hour` | Negative = revenue |
+
+---
+
+### SourceAndSink
+
+**Use when:** Bidirectional exchange at a single point (buy AND sell from same connection).
+
+```python
+fx.SourceAndSink(
+ 'GridConnection',
+ inputs=[fx.Flow('import', bus='Electricity', size=500, effects_per_flow_hour=0.25)],
+ outputs=[fx.Flow('export', bus='Electricity', size=500, effects_per_flow_hour=-0.15)],
+ prevent_simultaneous_flow_rates=True, # Can't buy and sell at same time
+)
+```
+
+**Typical applications:**
+- Electricity grid (buy/sell)
+- Gas grid with injection capability
+- Material exchange with warehouse
+
+---
+
+### LinearConverter
+
+**Use when:** Transforming one energy type to another with a linear relationship.
+
+```python
+# Single input, single output
+fx.LinearConverter(
+ 'Boiler',
+ inputs=[fx.Flow('Gas', bus='Gas', size=500)],
+ outputs=[fx.Flow('Heat', bus='Heat', size=450)],
+ conversion_factors=[{'Gas': 1, 'Heat': 0.9}],
+)
+
+# Multiple outputs (CHP)
+fx.LinearConverter(
+ 'CHP',
+ inputs=[fx.Flow('Gas', bus='Gas', size=300)],
+ outputs=[
+ fx.Flow('Elec', bus='Electricity', size=100),
+ fx.Flow('Heat', bus='Heat', size=150),
+ ],
+ conversion_factors=[{'Gas': 1, 'Elec': 0.35, 'Heat': 0.50}],
+)
+
+# Multiple inputs
+fx.LinearConverter(
+ 'CoFiringBoiler',
+ inputs=[
+ fx.Flow('Gas', bus='Gas', size=200),
+ fx.Flow('Biomass', bus='Biomass', size=100),
+ ],
+ outputs=[fx.Flow('Heat', bus='Heat', size=270)],
+ conversion_factors=[{'Gas': 1, 'Biomass': 1, 'Heat': 0.9}],
+)
+```
+
+**Typical applications:**
+- Boilers (fuel → heat)
+- Heat pumps (electricity → heat)
+- Chillers (electricity → cooling)
+- Turbines (fuel → electricity)
+- CHPs (fuel → electricity + heat)
+- Electrolyzers (electricity → hydrogen)
+
+**Key parameters:**
+
+| Parameter | Purpose |
+|-----------|---------|
+| `conversion_factors` | Efficiency relationship |
+| `piecewise_conversion` | Non-linear efficiency curve |
+| `status_parameters` | On/off behavior, startup costs |
+
+#### Pre-built Converters
+
+flixOpt includes ready-to-use converters in `flixopt.linear_converters`:
+
+| Class | Description | Key Parameters |
+|-------|-------------|----------------|
+| `Boiler` | Fuel → Heat | `thermal_efficiency` |
+| `HeatPump` | Electricity → Heat | `cop` |
+| `HeatPumpWithSource` | Elec + Ambient → Heat | `cop`, source flow |
+| `CHP` | Fuel → Elec + Heat | `electrical_efficiency`, `thermal_efficiency` |
+| `Chiller` | Electricity → Cooling | `cop` |
+
+```python
+from flixopt.linear_converters import Boiler, HeatPump
+
+boiler = Boiler(
+ 'GasBoiler',
+ thermal_efficiency=0.92,
+ fuel_flow=fx.Flow('gas', bus='Gas', size=500, effects_per_flow_hour=0.05),
+ thermal_flow=fx.Flow('heat', bus='Heat', size=460),
+)
+```
+
+---
+
+### Storage
+
+**Use when:** Storing energy for later use.
+
+```python
+fx.Storage(
+ 'Battery',
+ charging=fx.Flow('charge', bus='Electricity', size=100),
+ discharging=fx.Flow('discharge', bus='Electricity', size=100),
+ capacity_in_flow_hours=4, # 4 hours at full rate = 400 kWh
+ eta_charge=0.95,
+ eta_discharge=0.95,
+ relative_loss_per_hour=0.001,
+ initial_charge_state=0.5,
+)
+```
+
+**Typical applications:**
+- Batteries (electrical)
+- Thermal tanks (heat/cold)
+- Hydrogen storage
+- Material buffers
+
+**Key parameters:**
+
+| Parameter | Purpose |
+|-----------|---------|
+| `charging`, `discharging` | Flows for in/out |
+| `capacity_in_flow_hours` | Size (or use `InvestParameters`) |
+| `eta_charge`, `eta_discharge` | Round-trip efficiency |
+| `relative_loss_per_hour` | Standing losses |
+| `initial_charge_state` | Starting level (0-1 or `'equals_final'`) |
+
+---
+
+### Transmission
+
+**Use when:** Transporting energy between different locations.
+
+```python
+# Unidirectional
+fx.Transmission(
+ 'HeatPipe',
+ in1=fx.Flow('from_A', bus='Heat_A', size=200),
+ out1=fx.Flow('to_B', bus='Heat_B', size=200),
+ relative_losses=0.05,
+)
+
+# Bidirectional
+fx.Transmission(
+ 'PowerLine',
+ in1=fx.Flow('A_to_B', bus='Elec_A', size=100),
+ out1=fx.Flow('at_B', bus='Elec_B', size=100),
+ in2=fx.Flow('B_to_A', bus='Elec_B', size=100),
+ out2=fx.Flow('at_A', bus='Elec_A', size=100),
+ relative_losses=0.03,
+ prevent_simultaneous_flows_in_both_directions=True,
+)
+```
+
+**Typical applications:**
+- District heating pipes
+- Power transmission lines
+- Gas pipelines
+- Conveyor belts
+
+**Key parameters:**
+
+| Parameter | Purpose |
+|-----------|---------|
+| `in1`, `out1` | Primary direction flows |
+| `in2`, `out2` | Reverse direction (optional) |
+| `relative_losses` | Proportional losses |
+| `absolute_losses` | Fixed losses when active |
+| `balanced` | Same capacity both ways |
+
+## Feature Combinations
+
+### Investment Optimization
+
+Add `InvestParameters` to flows to let the optimizer choose sizes:
+
+```python
+fx.Flow(
+ 'Heat',
+ bus='Heat',
+ invest_parameters=fx.InvestParameters(
+ effects_of_investment_per_size={'costs': 100}, # €/kW
+ minimum_size=0,
+ maximum_size=1000,
+ )
+)
+```
+
+Works with: Source, Sink, LinearConverter, Storage, Transmission
+
+### Operational Constraints
+
+Add `StatusParameters` to flows for on/off behavior:
+
+```python
+fx.Flow(
+ 'Heat',
+ bus='Heat',
+ size=500,
+ status_parameters=fx.StatusParameters(
+ effects_per_switch_on={'costs': 50}, # Startup cost
+ on_hours_min=2, # Minimum runtime
+ off_hours_min=1, # Minimum downtime
+ )
+)
+```
+
+Works with: All components with flows
+
+### Non-Linear Efficiency
+
+Use `PiecewiseConversion` for load-dependent efficiency:
+
+```python
+fx.LinearConverter(
+ 'GasEngine',
+ inputs=[fx.Flow('Fuel', bus='Gas')],
+ outputs=[fx.Flow('Elec', bus='Electricity')],
+ piecewise_conversion=fx.PiecewiseConversion({
+ 'Fuel': fx.Piecewise([fx.Piece(100, 200), fx.Piece(200, 300)]),
+ 'Elec': fx.Piecewise([fx.Piece(35, 80), fx.Piece(80, 110)]),
+ }),
+)
+```
+
+Works with: LinearConverter
+
+## Common Modeling Patterns
+
+### Pattern: Parallel Redundant Units
+
+Model N identical units that can operate independently:
+
+```python
+for i in range(3):
+ flow_system.add_elements(
+ fx.LinearConverter(
+ f'Boiler_{i}',
+ inputs=[fx.Flow('Gas', bus='Gas', size=100)],
+ outputs=[fx.Flow('Heat', bus='Heat', size=90)],
+ conversion_factors=[{'Gas': 1, 'Heat': 0.9}],
+ )
+ )
+```
+
+### Pattern: Heat Recovery
+
+Model waste heat recovery from one process to another:
+
+```python
+# Process that generates waste heat
+process = fx.LinearConverter(
+ 'Process',
+ inputs=[fx.Flow('Elec', bus='Electricity', size=100)],
+ outputs=[
+ fx.Flow('Product', bus='Products', size=80),
+ fx.Flow('WasteHeat', bus='Heat', size=20), # Recovered heat
+ ],
+ conversion_factors=[{'Elec': 1, 'Product': 0.8, 'WasteHeat': 0.2}],
+)
+```
+
+### Pattern: Fuel Switching
+
+Model a component that can use multiple fuels:
+
+```python
+flex_boiler = fx.LinearConverter(
+ 'FlexBoiler',
+ inputs=[
+ fx.Flow('Gas', bus='Gas', size=200, effects_per_flow_hour=0.05),
+ fx.Flow('Oil', bus='Oil', size=200, effects_per_flow_hour=0.08),
+ ],
+ outputs=[fx.Flow('Heat', bus='Heat', size=180)],
+ conversion_factors=[{'Gas': 1, 'Oil': 1, 'Heat': 0.9}],
+)
+```
+
+## Next Steps
+
+- **[Building Models](index.md)** — Step-by-step modeling guide
+- **[Examples](../../notebooks/index.md)** — Working code examples
+- **[Mathematical Notation](../mathematical-notation/index.md)** — Constraint formulations
diff --git a/docs/user-guide/building-models/index.md b/docs/user-guide/building-models/index.md
index 27808ea56..11ff4081d 100644
--- a/docs/user-guide/building-models/index.md
+++ b/docs/user-guide/building-models/index.md
@@ -1,20 +1,378 @@
# Building Models
-!!! note "Under Development"
- This section is being expanded with detailed tutorials.
+This guide walks you through constructing FlowSystem models step by step. By the end, you'll understand how to translate real-world energy systems into flixOpt models.
-Learn how to construct FlowSystem models step by step:
+## Overview
-- Defining time horizons and dimensions
-- Creating buses and flows
-- Adding components (Sources, Sinks, Converters, Storage)
-- Configuring effects and objectives
-- Using advanced features (Investment, On/Off, Piecewise)
+Building a model follows a consistent pattern:
-## Getting Started
+```python
+import pandas as pd
+import flixopt as fx
-For now, see:
+# 1. Define time horizon
+timesteps = pd.date_range('2024-01-01', periods=24, freq='h')
-- **[Core Concepts](../core-concepts.md)** - Understand the fundamental building blocks
-- **[Examples](../../examples/index.md)** - Working code you can learn from
-- **[Mathematical Notation](../mathematical-notation/index.md)** - Detailed specifications of each element
+# 2. Create the FlowSystem
+flow_system = fx.FlowSystem(timesteps)
+
+# 3. Add elements
+flow_system.add_elements(
+ # Buses, Components, Effects...
+)
+
+# 4. Optimize
+flow_system.optimize(fx.solvers.HighsSolver())
+```
+
+## Step 1: Define Your Time Horizon
+
+Every FlowSystem needs a time definition. Use pandas DatetimeIndex:
+
+```python
+# Hourly data for one week
+timesteps = pd.date_range('2024-01-01', periods=168, freq='h')
+
+# 15-minute intervals for one day
+timesteps = pd.date_range('2024-01-01', periods=96, freq='15min')
+
+# Custom timestamps (e.g., from your data)
+timesteps = pd.DatetimeIndex(your_data.index)
+```
+
+!!! tip "Time Resolution"
+ Higher resolution (more timesteps) gives more accurate results but increases computation time. Start with hourly data and refine if needed.
+
+## Step 2: Create Buses
+
+Buses are connection points where energy flows meet. Every bus enforces a balance: inputs = outputs.
+
+```python
+# Basic buses
+heat_bus = fx.Bus('Heat')
+electricity_bus = fx.Bus('Electricity')
+
+# With carrier (enables automatic coloring in plots)
+heat_bus = fx.Bus('Heat', carrier='heat')
+gas_bus = fx.Bus('Gas', carrier='gas')
+```
+
+### When to Create a Bus
+
+| Scenario | Bus Needed? |
+|----------|-------------|
+| Multiple components share a resource | Yes |
+| Need to track balance at a location | Yes |
+| Component has external input (grid, fuel) | Often no - use `bus=None` |
+| Component transforms A → B | Yes, one bus per carrier |
+
+### Bus Balance Modes
+
+By default, buses require exact balance. For systems with unavoidable imbalances:
+
+```python
+# Allow small imbalances with penalty
+heat_bus = fx.Bus(
+ 'Heat',
+ imbalance_penalty_per_flow_hour=1000, # High cost discourages imbalance
+)
+```
+
+## Step 3: Add Components
+
+Components are the equipment in your system. Choose based on function:
+
+### Sources — External Inputs
+
+Use for **purchasing** energy or materials from outside:
+
+```python
+# Grid electricity with time-varying price
+grid = fx.Source(
+ 'Grid',
+ outputs=[fx.Flow('Elec', bus='Electricity', size=1000, effects_per_flow_hour=price_profile)]
+)
+
+# Natural gas with fixed price
+gas_supply = fx.Source(
+ 'GasSupply',
+ outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=0.05)]
+)
+```
+
+### Sinks — Demands
+
+Use for **consuming** energy or materials (demands, exports):
+
+```python
+# Heat demand (must be met exactly)
+building = fx.Sink(
+ 'Building',
+ inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=demand_profile)]
+)
+
+# Optional export (can sell but not required)
+export = fx.Sink(
+ 'Export',
+ inputs=[fx.Flow('Elec', bus='Electricity', size=100, effects_per_flow_hour=-0.15)] # Negative = revenue
+)
+```
+
+### LinearConverter — Transformations
+
+Use for **converting** one form of energy to another:
+
+```python
+# Gas boiler: Gas → Heat
+boiler = fx.LinearConverter(
+ 'Boiler',
+ inputs=[fx.Flow('Gas', bus='Gas', size=500)],
+ outputs=[fx.Flow('Heat', bus='Heat', size=450)],
+ conversion_factors=[{'Gas': 1, 'Heat': 0.9}], # 90% efficiency
+)
+
+# Heat pump: Electricity → Heat
+heat_pump = fx.LinearConverter(
+ 'HeatPump',
+ inputs=[fx.Flow('Elec', bus='Electricity', size=100)],
+ outputs=[fx.Flow('Heat', bus='Heat', size=350)],
+ conversion_factors=[{'Elec': 1, 'Heat': 3.5}], # COP = 3.5
+)
+
+# CHP: Gas → Electricity + Heat (multiple outputs)
+chp = fx.LinearConverter(
+ 'CHP',
+ inputs=[fx.Flow('Gas', bus='Gas', size=300)],
+ outputs=[
+ fx.Flow('Elec', bus='Electricity', size=100),
+ fx.Flow('Heat', bus='Heat', size=150),
+ ],
+ conversion_factors=[{'Gas': 1, 'Elec': 0.35, 'Heat': 0.50}],
+)
+```
+
+### Storage — Time-Shifting
+
+Use for **storing** energy or materials:
+
+```python
+# Thermal storage
+tank = fx.Storage(
+ 'ThermalTank',
+ charging=fx.Flow('charge', bus='Heat', size=200),
+ discharging=fx.Flow('discharge', bus='Heat', size=200),
+ capacity_in_flow_hours=10, # 10 hours at full charge/discharge rate
+ eta_charge=0.95,
+ eta_discharge=0.95,
+ relative_loss_per_hour=0.01, # 1% loss per hour
+ initial_charge_state=0.5, # Start 50% full
+)
+```
+
+### Transmission — Transport Between Locations
+
+Use for **connecting** different locations:
+
+```python
+# District heating pipe
+pipe = fx.Transmission(
+ 'HeatPipe',
+ in1=fx.Flow('from_A', bus='Heat_A', size=200),
+ out1=fx.Flow('to_B', bus='Heat_B', size=200),
+ relative_losses=0.05, # 5% loss
+)
+```
+
+## Step 4: Configure Effects
+
+Effects track metrics like costs, emissions, or energy use. One must be the objective:
+
+```python
+# Operating costs (minimize this)
+costs = fx.Effect(
+ 'costs',
+ '€',
+ 'Operating Costs',
+ is_standard=True, # Included by default in all effect allocations
+ is_objective=True, # This is what we minimize
+)
+
+# CO2 emissions (track or constrain)
+co2 = fx.Effect(
+ 'CO2',
+ 'kg',
+ 'CO2 Emissions',
+ maximum_temporal=1000, # Constraint: max 1000 kg total
+)
+```
+
+### Linking Effects to Flows
+
+Effects are typically assigned per flow hour:
+
+```python
+# Gas costs 0.05 €/kWh
+fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour={'costs': 0.05, 'CO2': 0.2})
+
+# Shorthand when only one effect (the standard one)
+fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=0.05)
+```
+
+## Step 5: Add Everything to FlowSystem
+
+Use `add_elements()` with all elements:
+
+```python
+flow_system = fx.FlowSystem(timesteps)
+
+flow_system.add_elements(
+ # Buses
+ fx.Bus('Heat', carrier='heat'),
+ fx.Bus('Gas', carrier='gas'),
+
+ # Effects
+ fx.Effect('costs', '€', is_standard=True, is_objective=True),
+
+ # Components
+ fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=0.05)]),
+ fx.LinearConverter(
+ 'Boiler',
+ inputs=[fx.Flow('Gas', bus='Gas', size=500)],
+ outputs=[fx.Flow('Heat', bus='Heat', size=450)],
+ conversion_factors=[{'Gas': 1, 'Heat': 0.9}],
+ ),
+ fx.Sink('Building', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=demand)]),
+)
+```
+
+## Common Patterns
+
+### Pattern 1: Simple Conversion System
+
+Gas → Boiler → Heat
+
+```python
+flow_system.add_elements(
+ fx.Bus('Heat'), fx.Bus('Gas'),
+ fx.Effect('costs', '€', is_standard=True, is_objective=True),
+ fx.Source('GasGrid', outputs=[fx.Flow('gas', bus='Gas', size=500, effects_per_flow_hour=0.05)]),
+ fx.LinearConverter(
+ 'Boiler',
+ inputs=[fx.Flow('gas', bus='Gas', size=500)],
+ outputs=[fx.Flow('heat', bus='Heat', size=450)],
+ conversion_factors=[{'gas': 1, 'heat': 0.9}],
+ ),
+ fx.Sink('Demand', inputs=[fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=demand)]),
+)
+```
+
+### Pattern 2: Multiple Generation Options
+
+Choose between boiler, heat pump, or both:
+
+```python
+flow_system.add_elements(
+ fx.Bus('Heat'),
+ fx.Effect('costs', '€', is_standard=True, is_objective=True),
+
+ # Option 1: Gas boiler (cheap gas, moderate efficiency)
+ fx.LinearConverter('Boiler', ...),
+
+ # Option 2: Heat pump (expensive electricity, high efficiency)
+ fx.LinearConverter('HeatPump', ...),
+
+ # Demand
+ fx.Sink('Building', ...),
+)
+```
+
+The optimizer chooses the cheapest mix at each timestep.
+
+### Pattern 3: System with Storage
+
+Add flexibility through storage:
+
+```python
+flow_system.add_elements(
+ fx.Bus('Heat'),
+ fx.Effect('costs', '€', is_standard=True, is_objective=True),
+
+ # Generation
+ fx.LinearConverter('Boiler', ...),
+
+ # Storage (can shift load in time)
+ fx.Storage('Tank', ...),
+
+ # Demand
+ fx.Sink('Building', ...),
+)
+```
+
+## Component Selection Guide
+
+| I need to... | Use this component |
+|-------------|-------------------|
+| Buy/import energy | `Source` |
+| Sell/export energy | `Sink` with negative effects |
+| Meet a demand | `Sink` with `fixed_relative_profile` |
+| Convert energy type | `LinearConverter` |
+| Store energy | `Storage` |
+| Transport between sites | `Transmission` |
+| Model combined heat & power | `LinearConverter` with multiple outputs |
+
+For detailed component selection, see [Choosing Components](choosing-components.md).
+
+## Input Data Types
+
+flixOpt accepts various data formats for parameters:
+
+| Input Type | Example | Use Case |
+|-----------|---------|----------|
+| Scalar | `0.05` | Constant value |
+| NumPy array | `np.array([...])` | Time-varying, matches timesteps |
+| Pandas Series | `pd.Series([...], index=timesteps)` | Time-varying with labels |
+| TimeSeriesData | `fx.TimeSeriesData(...)` | Advanced: aggregation metadata |
+
+```python
+# All equivalent for a constant efficiency
+efficiency = 0.9
+efficiency = np.full(len(timesteps), 0.9)
+efficiency = pd.Series(0.9, index=timesteps)
+
+# Time-varying price
+price = np.where(hour_of_day >= 8, 0.25, 0.10)
+```
+
+## Debugging Tips
+
+### Check Bus Balance
+
+If optimization fails with infeasibility:
+
+1. Ensure demands can be met by available generation
+2. Check that flow sizes are large enough
+3. Add `imbalance_penalty_per_flow_hour` to identify problematic buses
+
+### Verify Element Registration
+
+```python
+# List all elements
+print(flow_system.components.keys())
+print(flow_system.buses.keys())
+print(flow_system.effects.keys())
+```
+
+### Inspect Model Before Solving
+
+```python
+flow_system.build_model()
+print(f"Variables: {len(flow_system.model.variables)}")
+print(f"Constraints: {len(flow_system.model.constraints)}")
+```
+
+## Next Steps
+
+- **[Choosing Components](choosing-components.md)** — Decision tree for component selection
+- **[Core Concepts](../core-concepts.md)** — Deeper understanding of fundamentals
+- **[Examples](../../notebooks/index.md)** — Working code examples
+- **[Mathematical Notation](../mathematical-notation/index.md)** — Detailed constraint formulations
diff --git a/docs/user-guide/core-concepts.md b/docs/user-guide/core-concepts.md
index 3bccb554c..47e4a882c 100644
--- a/docs/user-guide/core-concepts.md
+++ b/docs/user-guide/core-concepts.md
@@ -31,6 +31,17 @@ $$\sum inputs = \sum outputs$$
This balance constraint is what makes your model physically meaningful — energy can't appear or disappear.
+### Carriers
+
+Buses can be assigned a **carrier** — a type of energy or material (electricity, heat, gas, etc.). Carriers enable automatic coloring in plots and help organize your system semantically:
+
+```python
+heat_bus = fx.Bus('HeatNetwork', carrier='heat') # Uses default heat color
+elec_bus = fx.Bus('Grid', carrier='electricity')
+```
+
+See [Color Management](results-plotting.md#color-management) for details.
+
## Flows: What Moves Between Elements
A [`Flow`][flixopt.elements.Flow] represents the movement of energy or material. Every flow connects a component to a bus, with a defined direction.
@@ -127,23 +138,29 @@ Define your system structure, parameters, and time series data.
### 2. Run the Optimization
-Create an [`Optimization`][flixopt.optimization.Optimization] and solve it:
+Optimize your FlowSystem with a solver:
```python
-optimization = fx.Optimization('my_model', flow_system)
-results = optimization.solve(fx.solvers.HighsSolver())
+flow_system.optimize(fx.solvers.HighsSolver())
```
### 3. Analyze Results
-The [`Results`][flixopt.results.Results] object contains all solution data:
+Access solution data directly from the FlowSystem:
```python
-# Access component results
-boiler_output = results['Boiler'].node_balance()
+# Access component solutions
+boiler = flow_system.components['Boiler']
+print(boiler.solution)
# Get total costs
-total_costs = results.solution['Costs']
+total_costs = flow_system.solution['costs|total']
+
+# Use statistics for aggregated data
+print(flow_system.statistics.flow_hours)
+
+# Plot results
+flow_system.statistics.plot.balance('HeatBus')
```
@@ -163,6 +180,55 @@ total_costs = results.solution['Costs']
| **Effect** | Metric to track/optimize | Costs, emissions, energy use |
| **FlowSystem** | Complete model | Your entire system |
+## FlowSystem API at a Glance
+
+The `FlowSystem` is the central object in flixOpt. After building your model, all operations are accessed through the FlowSystem and its **accessors**:
+
+```python
+flow_system = fx.FlowSystem(timesteps)
+flow_system.add_elements(...)
+
+# Optimize
+flow_system.optimize(solver)
+
+# Access results
+flow_system.solution # Raw xarray Dataset
+flow_system.statistics.flow_hours # Aggregated statistics
+flow_system.statistics.plot.balance() # Visualization
+
+# Transform (returns new FlowSystem)
+fs_subset = flow_system.transform.sel(time=slice(...))
+
+# Inspect structure
+flow_system.topology.plot()
+```
+
+### Accessor Overview
+
+| Accessor | Purpose | Key Methods |
+|----------|---------|-------------|
+| **`solution`** | Raw optimization results | xarray Dataset with all variables |
+| **`statistics`** | Aggregated data | `flow_rates`, `flow_hours`, `sizes`, `charge_states`, `total_effects` |
+| **`statistics.plot`** | Visualization | `balance()`, `heatmap()`, `sankey()`, `effects()`, `storage()` |
+| **`transform`** | Create modified copies | `sel()`, `isel()`, `resample()`, `cluster()` |
+| **`topology`** | Network structure | `plot()`, `start_app()`, `infos()` |
+
+### Element Access
+
+Access elements directly from the FlowSystem:
+
+```python
+# Access by label
+flow_system.components['Boiler'] # Get a component
+flow_system.buses['Heat'] # Get a bus
+flow_system.flows['Boiler(Q_th)'] # Get a flow
+flow_system.effects['costs'] # Get an effect
+
+# Element-specific solutions
+flow_system.components['Boiler'].solution
+flow_system.flows['Boiler(Q_th)'].solution
+```
+
## Beyond Energy Systems
While our example used a heating system, flixOpt works for any flow-based optimization:
@@ -177,7 +243,7 @@ While our example used a heating system, flixOpt works for any flow-based optimi
## Next Steps
- **[Building Models](building-models/index.md)** — Step-by-step guide to constructing models
-- **[Examples](../examples/index.md)** — Working code for common scenarios
+- **[Examples](../notebooks/index.md)** — Working code for common scenarios
- **[Mathematical Notation](mathematical-notation/index.md)** — Detailed constraint formulations
## Advanced: Extending with linopy
@@ -185,12 +251,17 @@ While our example used a heating system, flixOpt works for any flow-based optimi
flixOpt is built on [linopy](https://github.com/PyPSA/linopy). You can access and extend the underlying optimization model for custom constraints:
```python
-# Access the linopy model after building
-optimization.do_modeling()
-model = optimization.model
+# Build the model (without solving)
+flow_system.build_model()
+
+# Access the linopy model
+model = flow_system.model
# Add custom constraints using linopy API
model.add_constraints(...)
+
+# Then solve
+flow_system.solve(fx.solvers.HighsSolver())
```
This allows advanced users to add domain-specific constraints while keeping flixOpt's convenience for standard modeling.
diff --git a/docs/user-guide/index.md b/docs/user-guide/index.md
index bfb288ea4..d079f645f 100644
--- a/docs/user-guide/index.md
+++ b/docs/user-guide/index.md
@@ -59,14 +59,14 @@ This guide follows a sequential learning path:
### Getting Started
- [Quick Start](../home/quick-start.md) - Build your first model in 5 minutes
-- [Minimal Example](../examples/00-Minimal Example.md) - Simplest possible model
+- [Minimal Example](../notebooks/01-quickstart.ipynb) - Simplest possible model
- [Core Concepts](core-concepts.md) - Understand the fundamentals
### Reference
- [Mathematical Notation](mathematical-notation/index.md) - Detailed specifications
- [API Reference](../api-reference/index.md) - Complete class documentation
-- [Examples](../examples/index.md) - Working code to learn from
+- [Examples](../notebooks/index.md) - Working code to learn from
### Help
diff --git a/docs/user-guide/mathematical-notation/elements/Bus.md b/docs/user-guide/mathematical-notation/elements/Bus.md
index 464381fe8..ca089bfec 100644
--- a/docs/user-guide/mathematical-notation/elements/Bus.md
+++ b/docs/user-guide/mathematical-notation/elements/Bus.md
@@ -2,6 +2,29 @@
A Bus is where flows meet and must balance — inputs equal outputs at every timestep.
+## Carriers
+
+Buses can optionally be assigned a **carrier** — a type of energy or material (e.g., electricity, heat, gas). Carriers enable:
+
+- **Automatic coloring** in plots based on energy type
+- **Unit tracking** for better result visualization
+- **Semantic grouping** of buses by type
+
+```python
+# Assign a carrier by name (uses CONFIG.Carriers defaults)
+heat_bus = fx.Bus('HeatNetwork', carrier='heat')
+elec_bus = fx.Bus('Grid', carrier='electricity')
+
+# Or register custom carriers on the FlowSystem
+biogas = fx.Carrier('biogas', color='#228B22', unit='kW', description='Biogas fuel')
+flow_system.add_carrier(biogas)
+gas_bus = fx.Bus('BiogasNetwork', carrier='biogas')
+```
+
+See [Color Management](../../../user-guide/results-plotting.md#color-management) for more on how carriers affect visualization.
+
+---
+
## Basic: Balance Equation
$$
diff --git a/docs/user-guide/migration-guide-v5.md b/docs/user-guide/migration-guide-v5.md
new file mode 100644
index 000000000..856571a61
--- /dev/null
+++ b/docs/user-guide/migration-guide-v5.md
@@ -0,0 +1,428 @@
+# Migration Guide: v4.x → v5.0.0
+
+!!! tip "Quick Start"
+ ```bash
+ pip install --upgrade flixopt
+ ```
+ The new API is simpler and more intuitive. Review this guide to update your code.
+
+---
+
+## Overview
+
+v5.0.0 introduces a streamlined API for optimization and results access. The key changes are:
+
+| Aspect | Old API (v4.x) | New API (v5.0.0) |
+|--------|----------------|------------------|
+| **Optimization** | `fx.Optimization` class | `FlowSystem.optimize()` method |
+| **Results access** | `element.submodel.variable.solution` | `flow_system.solution['variable_name']` |
+| **Results storage** | `Results` class | `xarray.Dataset` on `flow_system.solution` |
+
+---
+
+## 💥 Breaking Changes in v5.0.0
+
+### Optimization API
+
+The `Optimization` class is deprecated and will be removed in a future version. Use `FlowSystem.optimize()` directly.
+
+=== "v4.x (Old)"
+ ```python
+ import flixopt as fx
+
+ # Create flow system
+ flow_system = fx.FlowSystem(timesteps)
+ flow_system.add_elements(...)
+
+ # Create Optimization object
+ optimization = fx.Optimization('my_model', flow_system)
+ optimization.do_modeling()
+ optimization.solve(fx.solvers.HighsSolver())
+
+ # Access results via Optimization object
+ results = optimization.results
+ costs = results.model['costs'].solution.item()
+ ```
+
+=== "v5.0.0 (New)"
+ ```python
+ import flixopt as fx
+
+ # Create flow system
+ flow_system = fx.FlowSystem(timesteps)
+ flow_system.add_elements(...)
+
+ # Optimize directly on FlowSystem
+ flow_system.optimize(fx.solvers.HighsSolver())
+
+ # Access results via flow_system.solution
+ costs = flow_system.solution['costs'].item()
+ ```
+
+!!! note "Two-step alternative"
+ If you need access to the model before solving:
+ ```python
+ flow_system.build_model() # Creates flow_system.model
+ flow_system.solve(fx.solvers.HighsSolver())
+ ```
+
+---
+
+### Results Access
+
+Results are now accessed via `flow_system.solution`, which is an `xarray.Dataset`.
+
+#### Effect Values
+
+=== "v4.x (Old)"
+ ```python
+ # Via element reference
+ costs = flow_system.effects['costs']
+ total_costs = costs.submodel.total.solution.item()
+
+ # Or via results object
+ total_costs = optimization.results.model['costs'].solution.item()
+ ```
+
+=== "v5.0.0 (New)"
+ ```python
+ # Direct access via solution Dataset
+ total_costs = flow_system.solution['costs'].item()
+
+ # Temporal and periodic components
+ temporal_costs = flow_system.solution['costs(temporal)'].values
+ periodic_costs = flow_system.solution['costs(periodic)'].values
+ per_timestep = flow_system.solution['costs(temporal)|per_timestep'].values
+ ```
+
+#### Flow Rates
+
+=== "v4.x (Old)"
+ ```python
+ boiler = flow_system.components['Boiler']
+ flow_rate = boiler.thermal_flow.submodel.flow_rate.solution.values
+ ```
+
+=== "v5.0.0 (New)"
+ ```python
+ flow_rate = flow_system.solution['Boiler(Q_th)|flow_rate'].values
+ ```
+
+#### Investment Variables
+
+=== "v4.x (Old)"
+ ```python
+ boiler = flow_system.components['Boiler']
+ size = boiler.thermal_flow.submodel.investment.size.solution.item()
+ invested = boiler.thermal_flow.submodel.investment.invested.solution.item()
+ ```
+
+=== "v5.0.0 (New)"
+ ```python
+ size = flow_system.solution['Boiler(Q_th)|size'].item()
+ invested = flow_system.solution['Boiler(Q_th)|invested'].item()
+ ```
+
+#### Status Variables
+
+=== "v4.x (Old)"
+ ```python
+ boiler = flow_system.components['Boiler']
+ status = boiler.thermal_flow.submodel.status.status.solution.values
+ startup = boiler.thermal_flow.submodel.status.startup.solution.values
+ shutdown = boiler.thermal_flow.submodel.status.shutdown.solution.values
+ ```
+
+=== "v5.0.0 (New)"
+ ```python
+ status = flow_system.solution['Boiler(Q_th)|status'].values
+ startup = flow_system.solution['Boiler(Q_th)|startup'].values
+ shutdown = flow_system.solution['Boiler(Q_th)|shutdown'].values
+ ```
+
+#### Storage Variables
+
+=== "v4.x (Old)"
+ ```python
+ storage = flow_system.components['Speicher']
+ charge_state = storage.submodel.charge_state.solution.values
+ netto_discharge = storage.submodel.netto_discharge.solution.values
+ ```
+
+=== "v5.0.0 (New)"
+ ```python
+ charge_state = flow_system.solution['Speicher|charge_state'].values
+ netto_discharge = flow_system.solution['Speicher|netto_discharge'].values
+ final_charge = flow_system.solution['Speicher|charge_state|final'].item()
+ ```
+
+---
+
+## Variable Naming Convention
+
+The new API uses a consistent naming pattern:
+
+```text
+ComponentLabel(FlowLabel)|variable_name
+```
+
+### Pattern Reference
+
+| Variable Type | Pattern | Example |
+|--------------|---------|---------|
+| **Flow rate** | `Component(Flow)\|flow_rate` | `Boiler(Q_th)\|flow_rate` |
+| **Size** | `Component(Flow)\|size` | `Boiler(Q_th)\|size` |
+| **Invested** | `Component(Flow)\|invested` | `Boiler(Q_th)\|invested` |
+| **Status** | `Component(Flow)\|status` | `Boiler(Q_th)\|status` |
+| **Startup** | `Component(Flow)\|startup` | `Boiler(Q_th)\|startup` |
+| **Shutdown** | `Component(Flow)\|shutdown` | `Boiler(Q_th)\|shutdown` |
+| **Inactive** | `Component(Flow)\|inactive` | `Boiler(Q_th)\|inactive` |
+| **Active hours** | `Component(Flow)\|active_hours` | `Boiler(Q_th)\|active_hours` |
+| **Total flow** | `Component(Flow)\|total_flow_hours` | `Boiler(Q_th)\|total_flow_hours` |
+| **Storage charge** | `Storage\|charge_state` | `Speicher\|charge_state` |
+| **Storage final** | `Storage\|charge_state\|final` | `Speicher\|charge_state\|final` |
+| **Netto discharge** | `Storage\|netto_discharge` | `Speicher\|netto_discharge` |
+
+### Effects Pattern
+
+| Variable Type | Pattern | Example |
+|--------------|---------|---------|
+| **Total** | `effect_label` | `costs` |
+| **Temporal** | `effect_label(temporal)` | `costs(temporal)` |
+| **Periodic** | `effect_label(periodic)` | `costs(periodic)` |
+| **Per timestep** | `effect_label(temporal)\|per_timestep` | `costs(temporal)\|per_timestep` |
+| **Contribution** | `Component(Flow)->effect(temporal)` | `Gastarif(Q_Gas)->costs(temporal)` |
+
+---
+
+## Discovering Variable Names
+
+Use these methods to find available variable names:
+
+```python
+# List all variables in the solution
+print(list(flow_system.solution.data_vars))
+
+# Filter for specific patterns
+costs_vars = [v for v in flow_system.solution.data_vars if 'costs' in v]
+boiler_vars = [v for v in flow_system.solution.data_vars if 'Boiler' in v]
+```
+
+---
+
+## Results I/O
+
+### Saving Results
+
+=== "v4.x (Old)"
+ ```python
+ optimization.results.to_file(folder='results', name='my_model')
+ ```
+
+=== "v5.0.0 (New)"
+ ```python
+ # Save entire FlowSystem with solution
+ flow_system.to_netcdf('results/my_model.nc4')
+
+ # Or save just the solution Dataset
+ flow_system.solution.to_netcdf('results/solution.nc4')
+ ```
+
+### Loading Results
+
+=== "v4.x (Old)"
+ ```python
+ results = fx.results.Results.from_file('results', 'my_model')
+ ```
+
+=== "v5.0.0 (New)"
+ ```python
+ import xarray as xr
+
+ # Load FlowSystem with solution
+ flow_system = fx.FlowSystem.from_netcdf('results/my_model.nc4')
+
+ # Or load just the solution
+ solution = xr.open_dataset('results/solution.nc4')
+ ```
+
+### Migrating Old Result Files
+
+If you have result files saved with the old API (v4.x), you can migrate them to the new format using `FlowSystem.from_old_results()`. This method:
+
+- Loads the old multi-file format (`*--flow_system.nc4`, `*--solution.nc4`)
+- Renames deprecated parameters in the FlowSystem structure (e.g., `on_off_parameters` → `status_parameters`)
+- Attaches the solution data to the FlowSystem
+
+```python
+# Load old results
+flow_system = fx.FlowSystem.from_old_results('results_folder', 'my_model')
+
+# Access basic solution data (flow rates, sizes, charge states, etc.)
+flow_system.solution['Boiler(Q_th)|flow_rate'].plot()
+
+# Save in new single-file format
+flow_system.to_netcdf('results/my_model_migrated.nc4')
+```
+
+!!! warning "Limitations"
+ This is a best-effort migration for accessing old results:
+
+ - **Solution variable names are NOT renamed** - only basic variables work
+ (flow rates, sizes, charge states, effect totals)
+ - Advanced variable access may require using the original variable names
+ - Summary metadata (solver info, timing) is not loaded
+
+ For full compatibility, re-run optimizations with the new API.
+
+---
+
+## Working with xarray Dataset
+
+The `flow_system.solution` is an `xarray.Dataset`, giving you powerful data manipulation:
+
+```python
+# Access a single variable
+costs = flow_system.solution['costs']
+
+# Get values as numpy array
+values = flow_system.solution['Boiler(Q_th)|flow_rate'].values
+
+# Get scalar value
+total = flow_system.solution['costs'].item()
+
+# Sum over time dimension
+total_flow = flow_system.solution['Boiler(Q_th)|flow_rate'].sum(dim='time')
+
+# Select by time
+subset = flow_system.solution.sel(time=slice('2020-01-01', '2020-01-02'))
+
+# Convert to DataFrame
+df = flow_system.solution.to_dataframe()
+```
+
+---
+
+## Segmented & Clustered Optimization
+
+The new API also applies to advanced optimization modes:
+
+=== "v4.x (Old)"
+ ```python
+ calc = fx.SegmentedOptimization('model', flow_system,
+ timesteps_per_segment=96)
+ calc.do_modeling_and_solve(solver)
+ results = calc.results
+ ```
+
+=== "v5.0.0 (New)"
+ ```python
+ # Use transform accessor for segmented optimization
+ flow_system.transform.segment(timesteps_per_segment=96)
+ flow_system.optimize(solver)
+ # Results in flow_system.solution
+ ```
+
+---
+
+## Statistics Accessor
+
+The new `statistics` accessor provides convenient aggregated data:
+
+```python
+stats = flow_system.statistics
+
+# Flow data (clean labels, no |flow_rate suffix)
+stats.flow_rates['Boiler(Q_th)'] # Not 'Boiler(Q_th)|flow_rate'
+stats.flow_hours['Boiler(Q_th)']
+stats.sizes['Boiler(Q_th)']
+stats.charge_states['Battery']
+
+# Effect breakdown by contributor (replaces effects_per_component)
+stats.temporal_effects['costs'] # Per timestep, per contributor
+stats.periodic_effects['costs'] # Investment costs per contributor
+stats.total_effects['costs'] # Total per contributor
+
+# Group by component or component type
+stats.total_effects['costs'].groupby('component').sum()
+stats.total_effects['costs'].groupby('component_type').sum()
+```
+
+---
+
+## 🔧 Quick Reference
+
+### Common Conversions
+
+| Old Pattern | New Pattern |
+|-------------|-------------|
+| `optimization.results.model['costs'].solution.item()` | `flow_system.solution['costs'].item()` |
+| `comp.flow.submodel.flow_rate.solution.values` | `flow_system.solution['Comp(Flow)\|flow_rate'].values` |
+| `comp.flow.submodel.investment.size.solution.item()` | `flow_system.solution['Comp(Flow)\|size'].item()` |
+| `comp.flow.submodel.status.status.solution.values` | `flow_system.solution['Comp(Flow)\|status'].values` |
+| `storage.submodel.charge_state.solution.values` | `flow_system.solution['Storage\|charge_state'].values` |
+| `effects['CO2'].submodel.total.solution.item()` | `flow_system.solution['CO2'].item()` |
+
+---
+
+## ✅ Migration Checklist
+
+| Task | Description |
+|------|-------------|
+| **Replace Optimization class** | Use `flow_system.optimize(solver)` instead |
+| **Update results access** | Use `flow_system.solution['var_name']` pattern |
+| **Update I/O code** | Use `to_netcdf()` / `from_netcdf()` |
+| **Migrate old result files** | Use `FlowSystem.from_old_results(folder, name)` |
+| **Update transform methods** | Use `flow_system.transform.sel/isel/resample()` instead |
+| **Test thoroughly** | Verify results match v4.x outputs |
+| **Remove deprecated imports** | Remove `fx.Optimization`, `fx.Results` |
+
+---
+
+## Transform Methods Moved to Accessor
+
+The `sel()`, `isel()`, and `resample()` methods have been moved from `FlowSystem` to the `TransformAccessor`:
+
+=== "Old (deprecated)"
+ ```python
+ # These still work but emit deprecation warnings
+ fs_subset = flow_system.sel(time=slice('2023-01-01', '2023-06-30'))
+ fs_indexed = flow_system.isel(time=slice(0, 24))
+ fs_resampled = flow_system.resample(time='4h', method='mean')
+ ```
+
+=== "New (recommended)"
+ ```python
+ # Use the transform accessor
+ fs_subset = flow_system.transform.sel(time=slice('2023-01-01', '2023-06-30'))
+ fs_indexed = flow_system.transform.isel(time=slice(0, 24))
+ fs_resampled = flow_system.transform.resample(time='4h', method='mean')
+ ```
+
+!!! info "Solution is dropped"
+ All transform methods return a **new FlowSystem without a solution**. You must re-optimize the transformed system:
+ ```python
+ fs_subset = flow_system.transform.sel(time=slice('2023-01-01', '2023-01-31'))
+ fs_subset.optimize(solver) # Re-optimize the subset
+ ```
+
+---
+
+## Deprecation Timeline
+
+| Version | Status |
+|---------|--------|
+| v4.x | `Optimization` and `Results` classes available |
+| v5.0.0 | `Optimization` and `Results` deprecated, new API available |
+
+!!! warning "Update your code"
+ The `Optimization` and `Results` classes are deprecated and will be removed in a future version.
+ The `flow_system.sel()`, `flow_system.isel()`, and `flow_system.resample()` methods are deprecated
+ in favor of `flow_system.transform.sel/isel/resample()`.
+ Update your code to the new API to avoid breaking changes when upgrading.
+
+---
+
+:material-book: [Docs](https://flixopt.github.io/flixopt/) • :material-github: [Issues](https://github.com/flixOpt/flixopt/issues)
+
+!!! success "Welcome to the new flixopt API! 🎉"
diff --git a/docs/user-guide/optimization/index.md b/docs/user-guide/optimization/index.md
index 7010acfc5..1d36eb9ba 100644
--- a/docs/user-guide/optimization/index.md
+++ b/docs/user-guide/optimization/index.md
@@ -2,69 +2,76 @@
This section covers how to run optimizations in flixOpt, including different optimization modes and solver configuration.
-## Optimization Modes
+## Verifying Your Model
-flixOpt provides three optimization modes to handle different problem sizes and requirements:
+Before running an optimization, it's helpful to visualize your system structure:
-### Optimization (Full)
+```python
+# Generate an interactive network diagram
+flow_system.topology.plot(path='my_system.html')
+
+# Or get structure info programmatically
+nodes, edges = flow_system.topology.infos()
+print(f"Components: {[n for n, d in nodes.items() if d['class'] == 'Component']}")
+print(f"Buses: {[n for n, d in nodes.items() if d['class'] == 'Bus']}")
+print(f"Flows: {list(edges.keys())}")
+```
+
+## Standard Optimization
-[`Optimization`][flixopt.optimization.Optimization] solves the entire problem at once.
+The recommended way to run an optimization is directly on the `FlowSystem`:
```python
import flixopt as fx
-optimization = fx.Optimization('my_model', flow_system)
-optimization.solve(fx.solvers.HighsSolver())
+# Simple one-liner
+flow_system.optimize(fx.solvers.HighsSolver())
+
+# Access results directly
+print(flow_system.solution['Boiler(Q_th)|flow_rate'])
+print(flow_system.components['Boiler'].solution)
```
-**Best for:**
+For more control over the optimization process, you can split model building and solving:
-- Small to medium problems
-- When you need the globally optimal solution
-- Problems without time-coupling simplifications
-
-### SegmentedOptimization
+```python
+# Build the model first
+flow_system.build_model()
-[`SegmentedOptimization`][flixopt.optimization.SegmentedOptimization] splits the time horizon into segments and solves them sequentially.
+# Optionally inspect or modify the model
+print(flow_system.model.constraints)
-```python
-optimization = fx.SegmentedOptimization(
- 'segmented_model',
- flow_system,
- segment_length=24, # Hours per segment
- overlap_length=4 # Hours of overlap between segments
-)
-optimization.solve(fx.solvers.HighsSolver())
+# Then solve
+flow_system.solve(fx.solvers.HighsSolver())
```
**Best for:**
-- Large problems that don't fit in memory
-- Long time horizons (weeks, months)
-- Problems where decisions are mostly local in time
-
-**Trade-offs:**
-
-- Faster solve times
-- May miss globally optimal solutions
-- Overlap helps maintain solution quality at segment boundaries
+- Small to medium problems
+- When you need the globally optimal solution
+- Problems without time-coupling simplifications
-### ClusteredOptimization
+## Clustered Optimization
-[`ClusteredOptimization`][flixopt.optimization.ClusteredOptimization] uses time series aggregation to reduce problem size by identifying representative periods.
+For large problems, use time series clustering to reduce computational complexity:
```python
-clustering_params = fx.ClusteringParameters(
- n_periods=8, # Number of typical periods
- hours_per_period=24 # Hours per typical period
+# Define clustering parameters
+params = fx.ClusteringParameters(
+ hours_per_period=24, # Hours per typical period
+ nr_of_periods=8, # Number of typical periods
+ fix_storage_flows=True,
+ aggregate_data_and_fix_non_binary_vars=True,
)
-optimization = fx.ClusteredOptimization(
- 'clustered_model',
- flow_system,
- clustering_params
-)
-optimization.solve(fx.solvers.HighsSolver())
+# Create clustered FlowSystem
+clustered_fs = flow_system.transform.cluster(params)
+
+# Optimize the clustered system
+clustered_fs.optimize(fx.solvers.HighsSolver())
+
+# Access results - same structure as original
+print(clustered_fs.solution)
```
**Best for:**
@@ -83,9 +90,159 @@ optimization.solve(fx.solvers.HighsSolver())
| Mode | Problem Size | Solve Time | Solution Quality |
|------|-------------|------------|------------------|
-| `Optimization` | Small-Medium | Slow | Optimal |
-| `SegmentedOptimization` | Large | Medium | Near-optimal |
-| `ClusteredOptimization` | Very Large | Fast | Approximate |
+| Standard | Small-Medium | Slow | Optimal |
+| Clustered | Very Large | Fast | Approximate |
+
+## Transform Accessor
+
+The `transform` accessor provides methods to create modified copies of your FlowSystem. All transform methods return a **new FlowSystem without a solution** — you must re-optimize the transformed system.
+
+### Selecting Subsets
+
+Select a subset of your data by label or index:
+
+```python
+# Select by label (like xarray.sel)
+fs_january = flow_system.transform.sel(time=slice('2024-01-01', '2024-01-31'))
+fs_scenario = flow_system.transform.sel(scenario='base')
+
+# Select by integer index (like xarray.isel)
+fs_first_week = flow_system.transform.isel(time=slice(0, 168))
+fs_first_scenario = flow_system.transform.isel(scenario=0)
+
+# Re-optimize the subset
+fs_january.optimize(fx.solvers.HighsSolver())
+```
+
+### Resampling Time Series
+
+Change the temporal resolution of your FlowSystem:
+
+```python
+# Resample to 4-hour intervals
+fs_4h = flow_system.transform.resample(time='4h', method='mean')
+
+# Resample to daily
+fs_daily = flow_system.transform.resample(time='1D', method='mean')
+
+# Re-optimize with new resolution
+fs_4h.optimize(fx.solvers.HighsSolver())
+```
+
+**Available resampling methods:** `'mean'`, `'sum'`, `'max'`, `'min'`, `'first'`, `'last'`
+
+### Clustering
+
+See [Clustered Optimization](#clustered-optimization) above.
+
+### Use Cases
+
+| Method | Use Case |
+|--------|----------|
+| `sel()` / `isel()` | Analyze specific time ranges, scenarios, or periods |
+| `resample()` | Reduce problem size, test at lower resolution |
+| `cluster()` | Investment planning with typical periods |
+
+## Custom Constraints
+
+flixOpt is built on [linopy](https://github.com/PyPSA/linopy), allowing you to add custom constraints beyond what's available through the standard API.
+
+### Adding Custom Constraints
+
+To add custom constraints, build the model first, then access the underlying linopy model:
+
+```python
+# Build the model (without solving)
+flow_system.build_model()
+
+# Access the linopy model
+model = flow_system.model
+
+# Access variables from the solution namespace
+# Variables are named: "ElementLabel|variable_name"
+boiler_flow = model.variables['Boiler(Q_th)|flow_rate']
+chp_flow = model.variables['CHP(Q_th)|flow_rate']
+
+# Add a custom constraint: Boiler must produce at least as much as CHP
+model.add_constraints(
+ boiler_flow >= chp_flow,
+ name='boiler_min_chp'
+)
+
+# Solve with the custom constraint
+flow_system.solve(fx.solvers.HighsSolver())
+```
+
+### Common Use Cases
+
+**Minimum runtime constraint:**
+```python
+# Require component to run at least 100 hours total
+on_var = model.variables['CHP|status'] # Binary status variable (on/off)
+hours = flow_system.hours_per_timestep
+model.add_constraints(
+ (on_var * hours).sum() >= 100,
+ name='chp_min_runtime'
+)
+```
+
+**Linking flows across components:**
+```python
+# Heat pump and boiler combined must meet minimum base load
+hp_flow = model.variables['HeatPump(Q_th)|flow_rate']
+boiler_flow = model.variables['Boiler(Q_th)|flow_rate']
+model.add_constraints(
+ hp_flow + boiler_flow >= 50, # At least 50 kW combined
+ name='min_heat_supply'
+)
+```
+
+**Seasonal constraints:**
+```python
+import pandas as pd
+
+# Different constraints for summer vs winter
+summer_mask = flow_system.timesteps.month.isin([6, 7, 8])
+winter_mask = flow_system.timesteps.month.isin([12, 1, 2])
+
+flow_var = model.variables['Boiler(Q_th)|flow_rate']
+
+# Lower capacity in summer
+model.add_constraints(
+ flow_var.sel(time=flow_system.timesteps[summer_mask]) <= 100,
+ name='summer_limit'
+)
+```
+
+### Inspecting the Model
+
+Before adding constraints, inspect available variables and existing constraints:
+
+```python
+flow_system.build_model()
+model = flow_system.model
+
+# List all variables
+print(model.variables)
+
+# List all constraints
+print(model.constraints)
+
+# Get details about a specific variable
+print(model.variables['Boiler(Q_th)|flow_rate'])
+```
+
+### Variable Naming Convention
+
+Variables follow this naming pattern:
+
+| Element Type | Pattern | Example |
+|--------------|---------|---------|
+| Flow rate | `Component(FlowLabel)\|flow_rate` | `Boiler(Q_th)\|flow_rate` |
+| Flow size | `Component(FlowLabel)\|size` | `Boiler(Q_th)\|size` |
+| On/off status | `Component\|status` | `CHP\|status` |
+| Charge state | `Storage\|charge_state` | `Battery\|charge_state` |
+| Effect totals | `effect_name` | `costs` |
## Solver Configuration
@@ -104,10 +261,10 @@ optimization.solve(fx.solvers.HighsSolver())
```python
# Basic usage with defaults
-optimization.solve(fx.solvers.HighsSolver())
+flow_system.optimize(fx.solvers.HighsSolver())
# With custom options
-optimization.solve(
+flow_system.optimize(
fx.solvers.GurobiSolver(
time_limit_seconds=3600,
mip_gap=0.01,
@@ -166,7 +323,7 @@ If your model has no feasible solution:
2. **Use Gurobi for infeasibility analysis** - When using GurobiSolver and the model is infeasible, flixOpt automatically extracts and logs the Irreducible Inconsistent Subsystem (IIS):
```python
# Gurobi provides detailed infeasibility analysis
- optimization.solve(fx.solvers.GurobiSolver())
+ flow_system.optimize(fx.solvers.GurobiSolver())
# If infeasible, check the model documentation file for IIS details
```
The infeasible constraints are saved to the model documentation file in the results folder.
@@ -190,6 +347,6 @@ If solutions don't match expectations:
## Next Steps
-- See [Examples](../../examples/03-Optimization Modes.md) for working code
+- See [Examples](../../notebooks/index.md) for working code
- Learn about [Mathematical Notation](../mathematical-notation/index.md)
- Explore [Recipes](../recipes/index.md) for common patterns
diff --git a/docs/user-guide/recipes/index.md b/docs/user-guide/recipes/index.md
index 0317b2c70..38c7fa001 100644
--- a/docs/user-guide/recipes/index.md
+++ b/docs/user-guide/recipes/index.md
@@ -1,22 +1,10 @@
# Recipes
-**Coming Soon!** 🚧
+Short, focused code snippets showing **how to do specific things** in FlixOpt. Unlike full examples, recipes focus on a single concept.
-This section will contain quick, copy-paste ready code snippets for common FlixOpt patterns.
+## Available Recipes
----
-
-## What Will Be Here?
-
-Short, focused code snippets showing **how to do specific things** in FlixOpt:
-
-- Common modeling patterns
-- Integration with other tools
-- Performance optimizations
-- Domain-specific solutions
-- Data analysis shortcuts
-
-Unlike full examples, recipes will be focused snippets showing a single concept.
+- [Plotting Custom Data](plotting-custom-data.md) - Create faceted plots with your own xarray data using Plotly Express
---
@@ -37,9 +25,10 @@ Unlike full examples, recipes will be focused snippets showing a single concept.
## Want to Contribute?
-**We need your help!** If you have recurring modeling patterns or clever solutions to share, please contribute via [GitHub issues](https://github.com/flixopt/flixopt/issues) or pull requests.
+If you have recurring modeling patterns or clever solutions to share, please contribute via [GitHub issues](https://github.com/flixopt/flixopt/issues) or pull requests.
Guidelines:
+
1. Keep it short (< 100 lines of code)
2. Focus on one specific technique
3. Add brief explanation and when to use it
diff --git a/docs/user-guide/recipes/plotting-custom-data.md b/docs/user-guide/recipes/plotting-custom-data.md
new file mode 100644
index 000000000..3c539e6ce
--- /dev/null
+++ b/docs/user-guide/recipes/plotting-custom-data.md
@@ -0,0 +1,125 @@
+# Plotting Custom Data
+
+The plot accessor (`flow_system.statistics.plot`) is designed for visualizing optimization results using element labels. If you want to create faceted plots with your own custom data (not from a FlowSystem), you can use Plotly Express directly with xarray data.
+
+## Faceted Plots with Custom xarray Data
+
+The key is converting your xarray Dataset to a long-form DataFrame that Plotly Express expects:
+
+```python
+import xarray as xr
+import pandas as pd
+import plotly.express as px
+
+# Your custom xarray Dataset
+my_data = xr.Dataset({
+ 'Solar': (['time', 'scenario'], solar_values),
+ 'Wind': (['time', 'scenario'], wind_values),
+ 'Demand': (['time', 'scenario'], demand_values),
+}, coords={
+ 'time': timestamps,
+ 'scenario': ['Base', 'High RE', 'Low Demand']
+})
+
+# Convert to long-form DataFrame for Plotly Express
+df = (
+ my_data
+ .to_dataframe()
+ .reset_index()
+ .melt(
+ id_vars=['time', 'scenario'], # Keep as columns
+ var_name='variable',
+ value_name='value'
+ )
+)
+
+# Faceted stacked bar chart
+fig = px.bar(
+ df,
+ x='time',
+ y='value',
+ color='variable',
+ facet_col='scenario',
+ barmode='relative',
+ title='Energy Balance by Scenario'
+)
+fig.show()
+
+# Faceted line plot
+fig = px.line(
+ df,
+ x='time',
+ y='value',
+ color='variable',
+ facet_col='scenario'
+)
+fig.show()
+
+# Faceted area chart
+fig = px.area(
+ df,
+ x='time',
+ y='value',
+ color='variable',
+ facet_col='scenario'
+)
+fig.show()
+```
+
+## Common Plotly Express Faceting Options
+
+| Parameter | Description |
+|-----------|-------------|
+| `facet_col` | Dimension for column subplots |
+| `facet_row` | Dimension for row subplots |
+| `animation_frame` | Dimension for animation slider |
+| `facet_col_wrap` | Number of columns before wrapping |
+
+```python
+# Row and column facets
+fig = px.line(df, x='time', y='value', color='variable',
+ facet_col='scenario', facet_row='region')
+
+# Animation over time periods
+fig = px.bar(df, x='variable', y='value', color='variable',
+ animation_frame='period', barmode='group')
+
+# Wrap columns
+fig = px.line(df, x='time', y='value', color='variable',
+ facet_col='scenario', facet_col_wrap=2)
+```
+
+## Heatmaps with Custom Data
+
+For heatmaps, you can pass 2D arrays directly to `px.imshow`:
+
+```python
+import plotly.express as px
+
+# 2D data (e.g., days × hours)
+heatmap_data = my_data['Solar'].sel(scenario='Base').values.reshape(365, 24)
+
+fig = px.imshow(
+ heatmap_data,
+ labels={'x': 'Hour', 'y': 'Day', 'color': 'Power [kW]'},
+ aspect='auto',
+ color_continuous_scale='portland'
+)
+fig.show()
+
+# Faceted heatmaps using subplots
+from plotly.subplots import make_subplots
+import plotly.graph_objects as go
+
+scenarios = ['Base', 'High RE']
+fig = make_subplots(rows=1, cols=len(scenarios), subplot_titles=scenarios)
+
+for i, scenario in enumerate(scenarios, 1):
+ data = my_data['Solar'].sel(scenario=scenario).values.reshape(365, 24)
+ fig.add_trace(go.Heatmap(z=data, colorscale='portland'), row=1, col=i)
+
+fig.update_layout(title='Solar Output by Scenario')
+fig.show()
+```
+
+This approach gives you full control over your visualizations while leveraging Plotly's powerful faceting capabilities.
diff --git a/docs/user-guide/results-plotting.md b/docs/user-guide/results-plotting.md
new file mode 100644
index 000000000..1ecd26aa1
--- /dev/null
+++ b/docs/user-guide/results-plotting.md
@@ -0,0 +1,545 @@
+# Plotting Results
+
+After solving an optimization, flixOpt provides a powerful plotting API to visualize and analyze your results. The API is designed to be intuitive and chainable, giving you quick access to common plots while still allowing deep customization.
+
+## The Plot Accessor
+
+All plotting is accessed through the `statistics.plot` accessor on your FlowSystem:
+
+```python
+# Run optimization
+flow_system.optimize(fx.solvers.HighsSolver())
+
+# Access plotting via statistics
+flow_system.statistics.plot.balance('ElectricityBus')
+flow_system.statistics.plot.sankey.flows()
+flow_system.statistics.plot.heatmap('Boiler(Q_th)|flow_rate')
+```
+
+## PlotResult: Data + Figure
+
+Every plot method returns a [`PlotResult`][flixopt.plot_result.PlotResult] object containing both:
+
+- **`data`**: An xarray Dataset with the prepared data
+- **`figure`**: A Plotly Figure object
+
+This gives you full access to export data, customize the figure, or use the data for your own visualizations:
+
+```python
+result = flow_system.statistics.plot.balance('Bus')
+
+# Access the xarray data
+print(result.data)
+result.data.to_dataframe() # Convert to pandas DataFrame
+result.data.to_netcdf('balance_data.nc') # Export as netCDF
+
+# Access and modify the figure
+result.figure.update_layout(title='Custom Title')
+result.figure.show()
+```
+
+### Method Chaining
+
+All `PlotResult` methods return `self`, enabling fluent chaining:
+
+```python
+flow_system.statistics.plot.balance('Bus') \
+ .update(title='Custom Title', height=600) \
+ .update_traces(opacity=0.8) \
+ .to_csv('data.csv') \
+ .to_html('plot.html') \
+ .show()
+```
+
+Available methods:
+
+| Method | Description |
+|--------|-------------|
+| `.show()` | Display the figure |
+| `.update(**kwargs)` | Update figure layout (passes to `fig.update_layout()`) |
+| `.update_traces(**kwargs)` | Update traces (passes to `fig.update_traces()`) |
+| `.to_html(path)` | Save as interactive HTML |
+| `.to_image(path)` | Save as static image (png, svg, pdf) |
+| `.to_csv(path)` | Export data to CSV (converts xarray to DataFrame) |
+| `.to_netcdf(path)` | Export data to netCDF (native xarray format) |
+
+## Available Plot Methods
+
+### Balance Plot
+
+Plot the energy/material balance at a node (Bus or Component), showing inputs and outputs:
+
+```python
+flow_system.statistics.plot.balance('ElectricityBus')
+flow_system.statistics.plot.balance('Boiler', mode='area')
+```
+
+**Key parameters:**
+
+| Parameter | Type | Description |
+|-----------|------|-------------|
+| `node` | str | Label of the Bus or Component |
+| `mode` | `'bar'`, `'line'`, `'area'` | Visual style (default: `'bar'`) |
+| `unit` | `'flow_rate'`, `'flow_hours'` | Power (kW) or energy (kWh) |
+| `include` | str or list | Only include flows containing these substrings |
+| `exclude` | str or list | Exclude flows containing these substrings |
+| `aggregate` | `'sum'`, `'mean'`, `'max'`, `'min'` | Aggregate over time |
+| `select` | dict | xarray-style data selection |
+
+### Storage Plot
+
+Visualize storage components with charge state and flow balance:
+
+```python
+flow_system.statistics.plot.storage('Battery')
+flow_system.statistics.plot.storage('ThermalStorage', mode='line')
+```
+
+**Key parameters:**
+
+| Parameter | Type | Description |
+|-----------|------|-------------|
+| `component` | str | Storage component label |
+| `mode` | `'bar'`, `'line'`, `'area'` | Visual style |
+
+### Heatmap
+
+Create heatmaps of time series data, with automatic time reshaping:
+
+```python
+flow_system.statistics.plot.heatmap('Boiler(Q_th)|flow_rate')
+flow_system.statistics.plot.heatmap(['CHP|on', 'Boiler|on'], facet_col='variable')
+```
+
+**Key parameters:**
+
+| Parameter | Type | Description |
+|-----------|------|-------------|
+| `variables` | str or list | Variable name(s) to plot |
+| `reshape` | tuple | Time reshaping pattern, e.g., `('D', 'h')` for days × hours |
+| `colorscale` | str | Plotly colorscale name |
+
+Common reshape patterns:
+
+- `('D', 'h')`: Days × Hours (default)
+- `('W', 'D')`: Weeks × Days
+- `('MS', 'D')`: Months × Days
+
+### Flows Plot
+
+Plot flow rates filtered by nodes or components:
+
+```python
+flow_system.statistics.plot.flows(component='Boiler')
+flow_system.statistics.plot.flows(start='ElectricityBus')
+flow_system.statistics.plot.flows(unit='flow_hours', aggregate='sum')
+```
+
+**Key parameters:**
+
+| Parameter | Type | Description |
+|-----------|------|-------------|
+| `start` | str or list | Filter by source node(s) |
+| `end` | str or list | Filter by destination node(s) |
+| `component` | str or list | Filter by parent component(s) |
+| `unit` | `'flow_rate'`, `'flow_hours'` | Power or energy |
+| `aggregate` | str | Time aggregation |
+
+### Compare Plot
+
+Compare multiple elements side-by-side:
+
+```python
+flow_system.statistics.plot.compare(['Boiler', 'CHP', 'HeatPump'], variable='flow_rate')
+flow_system.statistics.plot.compare(['Battery1', 'Battery2'], variable='charge_state')
+```
+
+**Key parameters:**
+
+| Parameter | Type | Description |
+|-----------|------|-------------|
+| `elements` | list | Element labels to compare |
+| `variable` | str | Variable suffix to compare |
+| `mode` | `'overlay'`, `'facet'` | Same axes or subplots |
+
+### Sankey Diagram
+
+Visualize energy/material flows as a Sankey diagram. Access via the `sankey` accessor:
+
+```python
+# Energy flow amounts (default)
+flow_system.statistics.plot.sankey.flows()
+flow_system.statistics.plot.sankey.flows(select={'time': '2023-01-01 12:00'}) # specific time
+flow_system.statistics.plot.sankey.flows(aggregate='mean') # mean instead of sum
+
+# Investment sizes/capacities
+flow_system.statistics.plot.sankey.sizes()
+
+# Peak flow rates
+flow_system.statistics.plot.sankey.peak_flow()
+
+# Effect contributions (costs, CO2, etc.)
+flow_system.statistics.plot.sankey.effects()
+flow_system.statistics.plot.sankey.effects(select={'effect': 'costs'})
+```
+
+**Available methods:**
+
+| Method | Description |
+|--------|-------------|
+| `sankey.flows()` | Energy/material flow amounts |
+| `sankey.sizes()` | Investment sizes/capacities |
+| `sankey.peak_flow()` | Maximum flow rates |
+| `sankey.effects()` | Component contributions to effects |
+
+**Select options for filtering:**
+
+```python
+# Filter by bus or component
+flow_system.statistics.plot.sankey.flows(select={'bus': 'HeatBus'})
+flow_system.statistics.plot.sankey.flows(select={'component': ['Boiler', 'CHP']})
+
+# Filter effects by name
+flow_system.statistics.plot.sankey.effects(select={'effect': 'costs'})
+flow_system.statistics.plot.sankey.effects(select={'effect': ['costs', 'CO2']})
+```
+
+### Effects Plot
+
+Plot cost, emissions, or other effect breakdowns. Effects can be grouped by component, individual contributor (flows), or time.
+
+```python
+flow_system.statistics.plot.effects() # Total of all effects by component
+flow_system.statistics.plot.effects(effect='costs') # Just costs
+flow_system.statistics.plot.effects(by='contributor') # By individual flows/components
+flow_system.statistics.plot.effects(aspect='temporal', by='time') # Over time
+```
+
+**Key parameters:**
+
+| Parameter | Type | Description |
+|-----------|------|-------------|
+| `aspect` | `'total'`, `'temporal'`, `'periodic'` | Which aspect to plot (default: `'total'`) |
+| `effect` | str or None | Specific effect to plot (e.g., `'costs'`, `'CO2'`). If None, plots all. |
+| `by` | `'component'`, `'contributor'`, `'time'` | Grouping dimension (default: `'component'`) |
+| `select` | dict | xarray-style data selection |
+| `colors` | dict | Color overrides for categories |
+| `facet_col` | str | Dimension for column facets (default: `'scenario'`) |
+| `facet_row` | str | Dimension for row facets (default: `'period'`) |
+
+**Grouping options:**
+
+- **`by='component'`**: Groups effects by parent component (e.g., all flows from a Boiler are summed together)
+- **`by='contributor'`**: Shows individual contributors - flows and components that directly contribute to effects
+- **`by='time'`**: Shows effects over time (only valid for `aspect='temporal'`)
+
+!!! note "Contributors vs Components"
+ Contributors include not just flows, but also components that directly contribute to effects (e.g., via `effects_per_active_hour`). The system automatically detects all contributors from the optimization solution.
+
+### Variable Plot
+
+Plot the same variable type across multiple elements for comparison:
+
+```python
+flow_system.statistics.plot.variable('on') # All binary operation states
+flow_system.statistics.plot.variable('flow_rate', include='Boiler')
+flow_system.statistics.plot.variable('charge_state') # All storage charge states
+```
+
+**Key parameters:**
+
+| Parameter | Type | Description |
+|-----------|------|-------------|
+| `pattern` | str | Variable suffix to match (e.g., `'on'`, `'flow_rate'`) |
+| `include` | str or list | Only include elements containing these substrings |
+| `exclude` | str or list | Exclude elements containing these substrings |
+| `aggregate` | str | Time aggregation method |
+| `mode` | `'line'`, `'bar'`, `'area'` | Visual style |
+
+### Duration Curve
+
+Plot load duration curves (sorted time series) to understand utilization patterns:
+
+```python
+flow_system.statistics.plot.duration_curve('Boiler(Q_th)')
+flow_system.statistics.plot.duration_curve(['CHP(Q_th)', 'HeatPump(Q_th)'])
+flow_system.statistics.plot.duration_curve('Demand(in)', normalize=True)
+```
+
+**Key parameters:**
+
+| Parameter | Type | Description |
+|-----------|------|-------------|
+| `variables` | str or list | Variable name(s) to plot |
+| `normalize` | bool | Normalize x-axis to 0-100% (default: False) |
+| `mode` | `'line'`, `'area'` | Visual style |
+
+## Common Parameters
+
+Most plot methods share these parameters:
+
+### Data Selection
+
+Use xarray-style selection to filter data before plotting:
+
+```python
+# Single value
+flow_system.statistics.plot.balance('Bus', select={'scenario': 'base'})
+
+# Multiple values
+flow_system.statistics.plot.balance('Bus', select={'scenario': ['base', 'high_demand']})
+
+# Time slices
+flow_system.statistics.plot.balance('Bus', select={'time': slice('2024-01', '2024-06')})
+
+# Combined
+flow_system.statistics.plot.balance('Bus', select={
+ 'scenario': 'base',
+ 'time': slice('2024-01-01', '2024-01-07')
+})
+```
+
+### Faceting and Animation
+
+Control how multi-dimensional data is displayed:
+
+```python
+# Facet by scenario
+flow_system.statistics.plot.balance('Bus', facet_col='scenario')
+
+# Animate by period
+flow_system.statistics.plot.balance('Bus', animate_by='period')
+
+# Both
+flow_system.statistics.plot.balance('Bus', facet_col='scenario', animate_by='period')
+```
+
+!!! note
+ Facet and animation dimensions are automatically ignored if not present in the data. Defaults are `facet_col='scenario'` and `animate_by='period'` for balance plots.
+
+### Include/Exclude Filtering
+
+Filter flows using simple substring matching:
+
+```python
+# Only show flows containing 'Q_th'
+flow_system.statistics.plot.balance('Bus', include='Q_th')
+
+# Exclude flows containing 'Gas' or 'Grid'
+flow_system.statistics.plot.balance('Bus', exclude=['Gas', 'Grid'])
+
+# Combine include and exclude
+flow_system.statistics.plot.balance('Bus', include='Boiler', exclude='auxiliary')
+```
+
+### Colors
+
+Override colors using a dictionary:
+
+```python
+flow_system.statistics.plot.balance('Bus', colors={
+ 'Boiler(Q_th)': '#ff6b6b',
+ 'CHP(Q_th)': '#4ecdc4',
+})
+```
+
+## Color Management
+
+flixOpt provides centralized color management through the `flow_system.colors` accessor and carriers. This ensures consistent colors across all visualizations.
+
+### Carriers
+
+[`Carriers`][flixopt.carrier.Carrier] define energy or material types with associated colors. Built-in carriers are available in `CONFIG.Carriers`:
+
+| Carrier | Color | Description |
+|---------|-------|-------------|
+| `electricity` | `#FECB52` | Yellow - lightning/energy |
+| `heat` | `#D62728` | Red - warmth/fire |
+| `gas` | `#1F77B4` | Blue - natural gas |
+| `hydrogen` | `#9467BD` | Purple - clean/future |
+| `fuel` | `#8C564B` | Brown - fossil/oil |
+| `biomass` | `#2CA02C` | Green - organic/renewable |
+
+Colors are from the D3/Plotly palettes for professional consistency.
+
+Assign carriers to buses for automatic coloring:
+
+```python
+# Buses use carrier colors automatically
+heat_bus = fx.Bus('HeatNetwork', carrier='heat')
+elec_bus = fx.Bus('Grid', carrier='electricity')
+
+# Plots automatically use carrier colors for bus-related elements
+flow_system.statistics.plot.sankey.flows() # Buses colored by carrier
+```
+
+### Custom Carriers
+
+Register custom carriers on your FlowSystem:
+
+```python
+# Create a custom carrier
+biogas = fx.Carrier('biogas', color='#228B22', unit='kW', description='Biogas fuel')
+hydrogen = fx.Carrier('hydrogen', color='#00CED1', unit='kg/h')
+
+# Register with FlowSystem (overrides CONFIG.Carriers defaults)
+flow_system.add_carrier(biogas)
+flow_system.add_carrier(hydrogen)
+
+# Access registered carriers
+flow_system.carriers # CarrierContainer with locally registered carriers
+flow_system.get_carrier('biogas') # Returns Carrier object
+```
+
+### Color Accessor
+
+The `flow_system.colors` accessor provides centralized color configuration:
+
+```python
+# Configure colors for components
+flow_system.colors.setup({
+ 'Boiler': '#D35400',
+ 'CHP': '#8E44AD',
+ 'HeatPump': '#27AE60',
+})
+
+# Or set individual colors
+flow_system.colors.set_component_color('Boiler', '#D35400')
+flow_system.colors.set_carrier_color('biogas', '#228B22')
+
+# Load from file
+flow_system.colors.setup('colors.json') # or .yaml
+```
+
+### Context-Aware Coloring
+
+Plot colors are automatically resolved based on context:
+
+- **Bus balance plots**: Colors based on the connected component
+- **Component balance plots**: Colors based on the connected bus/carrier
+- **Sankey diagrams**: Buses use carrier colors, components use configured colors
+
+```python
+# Plotting a bus balance → flows colored by their parent component
+flow_system.statistics.plot.balance('ElectricityBus')
+
+# Plotting a component balance → flows colored by their connected bus/carrier
+flow_system.statistics.plot.balance('CHP')
+```
+
+### Color Resolution Priority
+
+Colors are resolved in this order:
+
+1. **Explicit colors** passed to plot methods (always override)
+2. **Component/bus colors** set via `flow_system.colors.setup()`
+3. **Element `meta_data['color']`** if present
+4. **Carrier colors** from FlowSystem or CONFIG.Carriers
+5. **Default colorscale** (controlled by `CONFIG.Plotting.default_qualitative_colorscale`)
+
+### Persistence
+
+Color configurations are automatically saved with the FlowSystem:
+
+```python
+# Colors are persisted
+flow_system.to_netcdf('my_system.nc')
+
+# And restored
+loaded = fx.FlowSystem.from_netcdf('my_system.nc')
+loaded.colors # Configuration restored
+```
+
+### Display Control
+
+Control whether plots are shown automatically:
+
+```python
+# Don't show (useful in scripts)
+result = flow_system.statistics.plot.balance('Bus', show=False)
+
+# Show later
+result.show()
+```
+
+The default behavior is controlled by `CONFIG.Plotting.default_show`.
+
+## Complete Examples
+
+### Analyzing a Bus Balance
+
+```python
+# Quick overview
+flow_system.statistics.plot.balance('ElectricityBus')
+
+# Detailed analysis with exports
+result = flow_system.statistics.plot.balance(
+ 'ElectricityBus',
+ mode='area',
+ unit='flow_hours',
+ select={'time': slice('2024-06-01', '2024-06-07')},
+ show=False
+)
+
+# Access xarray data for further analysis
+print(result.data) # xarray Dataset
+df = result.data.to_dataframe() # Convert to pandas
+
+# Export data
+result.to_netcdf('electricity_balance.nc') # Native xarray format
+result.to_csv('electricity_balance.csv') # As CSV
+
+# Customize and display
+result.update(
+ title='Electricity Balance - First Week of June',
+ yaxis_title='Energy [kWh]'
+).show()
+```
+
+### Comparing Storage Units
+
+```python
+# Compare charge states
+flow_system.statistics.plot.compare(
+ ['Battery1', 'Battery2', 'ThermalStorage'],
+ variable='charge_state',
+ mode='overlay'
+).update(title='Storage Comparison')
+```
+
+### Creating a Report
+
+```python
+# Generate multiple plots for a report
+plots = {
+ 'balance': flow_system.statistics.plot.balance('HeatBus', show=False),
+ 'storage': flow_system.statistics.plot.storage('ThermalStorage', show=False),
+ 'sankey': flow_system.statistics.plot.sankey.flows(show=False),
+ 'costs': flow_system.statistics.plot.effects(effect='costs', show=False),
+}
+
+# Export all
+for name, plot in plots.items():
+ plot.to_html(f'report_{name}.html')
+ plot.to_netcdf(f'report_{name}.nc') # xarray native format
+```
+
+### Working with xarray Data
+
+The `.data` attribute returns xarray objects, giving you full access to xarray's powerful data manipulation capabilities:
+
+```python
+result = flow_system.statistics.plot.balance('Bus', show=False)
+
+# Access the xarray Dataset
+ds = result.data
+
+# Use xarray operations
+ds.mean(dim='time') # Average over time
+ds.sel(time='2024-06') # Select specific time
+ds.to_dataframe() # Convert to pandas
+
+# Export options
+ds.to_netcdf('data.nc') # Native xarray format
+ds.to_zarr('data.zarr') # Zarr format for large datasets
+```
diff --git a/docs/user-guide/results/index.md b/docs/user-guide/results/index.md
index 92656010d..a9b40f7f9 100644
--- a/docs/user-guide/results/index.md
+++ b/docs/user-guide/results/index.md
@@ -1,18 +1,283 @@
# Analyzing Results
-!!! note "Under Development"
- This section is being expanded with detailed tutorials.
+After running an optimization, flixOpt provides powerful tools to access, analyze, and visualize your results.
-Learn how to work with optimization results:
+## Accessing Solution Data
-- Accessing solution data
-- Plotting flows and states
-- Exporting to various formats
-- Comparing scenarios and periods
+### Raw Solution
-## Getting Started
+The `solution` property contains all optimization variables as an xarray Dataset:
-For now, see:
+```python
+# Run optimization
+flow_system.optimize(fx.solvers.HighsSolver())
-- **[Examples](../../examples/index.md)** - Result analysis patterns in working code
-- **[API Reference](../../api-reference/results.md)** - Results class documentation
+# Access the full solution dataset
+solution = flow_system.solution
+print(solution)
+
+# Access specific variables
+print(solution['Boiler(Q_th)|flow_rate'])
+print(solution['Battery|charge_state'])
+```
+
+### Element-Specific Solutions
+
+Access solution data for individual elements:
+
+```python
+# Component solutions
+boiler = flow_system.components['Boiler']
+print(boiler.solution) # All variables for this component
+
+# Flow solutions
+flow = flow_system.flows['Boiler(Q_th)']
+print(flow.solution)
+
+# Bus solutions (if imbalance is allowed)
+bus = flow_system.buses['Heat']
+print(bus.solution)
+```
+
+## Statistics Accessor
+
+The `statistics` accessor provides pre-computed aggregations for common analysis tasks:
+
+```python
+# Access via the statistics property
+stats = flow_system.statistics
+```
+
+### Available Data Properties
+
+| Property | Description |
+|----------|-------------|
+| `flow_rates` | All flow rate variables as xarray Dataset |
+| `flow_hours` | Flow hours (flow_rate × hours_per_timestep) |
+| `sizes` | All size variables (fixed and optimized) |
+| `charge_states` | Storage charge state variables |
+| `temporal_effects` | Temporal effects per contributor per timestep |
+| `periodic_effects` | Periodic (investment) effects per contributor |
+| `total_effects` | Total effects (temporal + periodic) per contributor |
+| `effect_share_factors` | Conversion factors between effects |
+
+### Examples
+
+```python
+# Get all flow rates
+flow_rates = flow_system.statistics.flow_rates
+print(flow_rates)
+
+# Get flow hours (energy)
+flow_hours = flow_system.statistics.flow_hours
+total_heat = flow_hours['Boiler(Q_th)'].sum()
+
+# Get sizes (capacities)
+sizes = flow_system.statistics.sizes
+print(f"Boiler size: {sizes['Boiler(Q_th)'].values}")
+
+# Get storage charge states
+charge_states = flow_system.statistics.charge_states
+
+# Get effect breakdown by contributor
+temporal = flow_system.statistics.temporal_effects
+print(temporal['costs']) # Costs per contributor per timestep
+
+# Group by component
+temporal['costs'].groupby('component').sum()
+```
+
+### Effect Analysis
+
+Analyze how effects (costs, emissions, etc.) are distributed:
+
+```python
+# Access effects via the new properties
+stats = flow_system.statistics
+
+# Temporal effects per timestep (costs, CO2, etc. per contributor)
+stats.temporal_effects['costs'] # DataArray with dims [time, contributor]
+stats.temporal_effects['costs'].sum('contributor') # Total per timestep
+
+# Periodic effects (investment costs, etc.)
+stats.periodic_effects['costs'] # DataArray with dim [contributor]
+
+# Total effects (temporal + periodic combined)
+stats.total_effects['costs'].sum('contributor') # Grand total
+
+# Group by component or component type
+stats.total_effects['costs'].groupby('component').sum()
+stats.total_effects['costs'].groupby('component_type').sum()
+```
+
+!!! tip "Contributors"
+ Contributors are automatically detected from the optimization solution and include:
+
+ - **Flows**: Individual flows with `effects_per_flow_hour`
+ - **Components**: Components with `effects_per_active_hour` or similar direct effects
+
+ Each contributor has associated metadata (`component` and `component_type` coordinates) for flexible groupby operations.
+
+## Plotting Results
+
+The `statistics.plot` accessor provides visualization methods:
+
+```python
+# Balance plots
+flow_system.statistics.plot.balance('HeatBus')
+flow_system.statistics.plot.balance('Boiler')
+
+# Heatmaps
+flow_system.statistics.plot.heatmap('Boiler(Q_th)|flow_rate')
+
+# Duration curves
+flow_system.statistics.plot.duration_curve('Boiler(Q_th)')
+
+# Sankey diagrams
+flow_system.statistics.plot.sankey.flows()
+
+# Effects breakdown
+flow_system.statistics.plot.effects() # Total costs by component
+flow_system.statistics.plot.effects(effect='costs', by='contributor') # By individual flows
+flow_system.statistics.plot.effects(aspect='temporal', by='time') # Over time
+```
+
+See [Plotting Results](../results-plotting.md) for comprehensive plotting documentation.
+
+## Network Visualization
+
+The `topology` accessor lets you visualize and inspect your system structure:
+
+### Static HTML Visualization
+
+Generate an interactive network diagram using PyVis:
+
+```python
+# Default: saves to 'flow_system.html' and opens in browser
+flow_system.topology.plot()
+
+# Custom options
+flow_system.topology.plot(
+ path='output/my_network.html',
+ controls=['nodes', 'layout', 'physics'],
+ show=True
+)
+```
+
+**Parameters:**
+
+| Parameter | Type | Default | Description |
+|-----------|------|---------|-------------|
+| `path` | str, Path, or False | `'flow_system.html'` | Where to save the HTML file |
+| `controls` | bool or list | `True` | UI controls to show |
+| `show` | bool | `None` | Whether to open in browser |
+
+### Interactive App
+
+Launch a Dash/Cytoscape application for exploring the network:
+
+```python
+# Start the visualization server
+flow_system.topology.start_app()
+
+# ... interact with the visualization in your browser ...
+
+# Stop when done
+flow_system.topology.stop_app()
+```
+
+!!! note "Optional Dependencies"
+ The interactive app requires additional packages:
+ ```bash
+ pip install flixopt[network_viz]
+ ```
+
+### Network Structure Info
+
+Get node and edge information programmatically:
+
+```python
+nodes, edges = flow_system.topology.infos()
+
+# nodes: dict mapping labels to properties
+# {'Boiler': {'label': 'Boiler', 'class': 'Component', 'infos': '...'}, ...}
+
+# edges: dict mapping flow labels to properties
+# {'Boiler(Q_th)': {'label': 'Q_th', 'start': 'Boiler', 'end': 'Heat', ...}, ...}
+
+print(f"Components and buses: {list(nodes.keys())}")
+print(f"Flows: {list(edges.keys())}")
+```
+
+## Saving and Loading
+
+Save the FlowSystem (including solution) for later analysis:
+
+```python
+# Save to NetCDF (recommended for large datasets)
+flow_system.to_netcdf('results/my_system.nc')
+
+# Load later
+loaded_fs = fx.FlowSystem.from_netcdf('results/my_system.nc')
+print(loaded_fs.solution)
+
+# Save to JSON (human-readable, smaller datasets)
+flow_system.to_json('results/my_system.json')
+loaded_fs = fx.FlowSystem.from_json('results/my_system.json')
+```
+
+## Working with xarray
+
+All result data uses [xarray](https://docs.xarray.dev/), giving you powerful data manipulation:
+
+```python
+solution = flow_system.solution
+
+# Select specific times
+summer = solution.sel(time=slice('2024-06-01', '2024-08-31'))
+
+# Aggregate over dimensions
+daily_avg = solution.resample(time='D').mean()
+
+# Convert to pandas
+df = solution['Boiler(Q_th)|flow_rate'].to_dataframe()
+
+# Export to various formats
+solution.to_netcdf('full_solution.nc')
+df.to_csv('boiler_flow.csv')
+```
+
+## Complete Example
+
+```python
+import flixopt as fx
+import pandas as pd
+
+# Build and optimize
+timesteps = pd.date_range('2024-01-01', periods=168, freq='h')
+flow_system = fx.FlowSystem(timesteps)
+# ... add elements ...
+flow_system.optimize(fx.solvers.HighsSolver())
+
+# Visualize network structure
+flow_system.topology.plot(path='system_network.html')
+
+# Analyze results
+print("=== Flow Statistics ===")
+print(flow_system.statistics.flow_hours)
+
+print("\n=== Effect Breakdown ===")
+print(flow_system.statistics.total_effects)
+
+# Create plots
+flow_system.statistics.plot.balance('HeatBus')
+flow_system.statistics.plot.heatmap('Boiler(Q_th)|flow_rate')
+
+# Save for later
+flow_system.to_netcdf('results/optimized_system.nc')
+```
+
+## Next Steps
+
+- [Plotting Results](../results-plotting.md) - Detailed plotting documentation
+- [Examples](../../notebooks/index.md) - Working code examples
diff --git a/docs/user-guide/support.md b/docs/user-guide/support.md
index 5f26cdd24..517a353a1 100644
--- a/docs/user-guide/support.md
+++ b/docs/user-guide/support.md
@@ -15,7 +15,7 @@ When opening an issue, include:
- [FAQ](faq.md) — Common questions
- [Troubleshooting](troubleshooting.md) — Common issues
-- [Examples](../examples/index.md) — Working code
+- [Examples](../notebooks/index.md) — Working code
- [API Reference](../api-reference/index.md) — Technical docs
## Contributing
diff --git a/examples/00_Minmal/minimal_example.py b/examples/00_Minmal/minimal_example.py
index 7a94b2222..207faa9a9 100644
--- a/examples/00_Minmal/minimal_example.py
+++ b/examples/00_Minmal/minimal_example.py
@@ -32,5 +32,5 @@
),
)
- optimization = fx.Optimization('Simulation1', flow_system).solve(fx.solvers.HighsSolver(0.01, 60))
- optimization.results['Heat'].plot_node_balance()
+ flow_system.optimize(fx.solvers.HighsSolver(0.01, 60))
+ flow_system.statistics.plot.balance('Heat')
diff --git a/examples/01_Simple/simple_example.py b/examples/01_Simple/simple_example.py
index c2d6d88e1..b63260ece 100644
--- a/examples/01_Simple/simple_example.py
+++ b/examples/01_Simple/simple_example.py
@@ -21,7 +21,12 @@
# --- Define Energy Buses ---
# These represent nodes, where the used medias are balanced (electricity, heat, and gas)
- flow_system.add_elements(fx.Bus(label='Strom'), fx.Bus(label='Fernwärme'), fx.Bus(label='Gas'))
+ # Carriers provide automatic color assignment in plots (yellow for electricity, red for heat, etc.)
+ flow_system.add_elements(
+ fx.Bus(label='Strom', carrier='electricity'),
+ fx.Bus(label='Fernwärme', carrier='heat'),
+ fx.Bus(label='Gas', carrier='gas'),
+ )
# --- Define Effects (Objective and CO2 Emissions) ---
# Cost effect: used as the optimization objective --> minimizing costs
@@ -100,28 +105,22 @@
flow_system.add_elements(costs, CO2, boiler, storage, chp, heat_sink, gas_source, power_sink)
# Visualize the flow system for validation purposes
- flow_system.plot_network()
-
- # --- Define and Run Calculation ---
- # Create a calculation object to model the Flow System
- optimization = fx.Optimization(name='Sim1', flow_system=flow_system)
- optimization.do_modeling() # Translate the model to a solvable form, creating equations and Variables
+ flow_system.topology.plot()
- # --- Solve the Calculation and Save Results ---
- optimization.solve(fx.solvers.HighsSolver(mip_gap=0, time_limit_seconds=30))
+ # --- Define and Solve Optimization ---
+ flow_system.optimize(fx.solvers.HighsSolver(mip_gap=0, time_limit_seconds=30))
# --- Analyze Results ---
- # Colors are automatically assigned using default colormap
- # Optional: Configure custom colors with
- optimization.results.setup_colors()
- optimization.results['Fernwärme'].plot_node_balance_pie()
- optimization.results['Fernwärme'].plot_node_balance()
- optimization.results['Storage'].plot_charge_state()
- optimization.results.plot_heatmap('CHP(Q_th)|flow_rate')
-
- # Convert the results for the storage component to a dataframe and display
- df = optimization.results['Storage'].node_balance_with_charge_state()
- print(df)
-
- # Save results to file for later usage
- optimization.results.to_file()
+ # Plotting through statistics accessor - returns PlotResult with .data and .figure
+ flow_system.statistics.plot.balance('Fernwärme')
+ flow_system.statistics.plot.balance('Storage')
+ flow_system.statistics.plot.heatmap('CHP(Q_th)')
+ flow_system.statistics.plot.heatmap('Storage')
+
+ # Access data as xarray Datasets
+ print(flow_system.statistics.flow_rates)
+ print(flow_system.statistics.charge_states)
+
+ # Duration curve and effects analysis
+ flow_system.statistics.plot.duration_curve('Boiler(Q_th)')
+ print(flow_system.statistics.temporal_effects)
diff --git a/examples/02_Complex/complex_example.py b/examples/02_Complex/complex_example.py
index 3806fde40..3f38ff954 100644
--- a/examples/02_Complex/complex_example.py
+++ b/examples/02_Complex/complex_example.py
@@ -15,7 +15,6 @@
check_penalty = False
imbalance_penalty = 1e5
use_chp_with_piecewise_conversion = True
- time_indices = None # Define specific time steps for custom optimizations, or use the entire series
# --- Define Demand and Price Profiles ---
# Input data for electricity and heat demands, as well as electricity price
@@ -33,10 +32,11 @@
# --- Define Energy Buses ---
# Represent node balances (inputs=outputs) for the different energy carriers (electricity, heat, gas) in the system
+ # Carriers provide automatic color assignment in plots (yellow for electricity, red for heat, blue for gas)
flow_system.add_elements(
- fx.Bus('Strom', imbalance_penalty_per_flow_hour=imbalance_penalty),
- fx.Bus('Fernwärme', imbalance_penalty_per_flow_hour=imbalance_penalty),
- fx.Bus('Gas', imbalance_penalty_per_flow_hour=imbalance_penalty),
+ fx.Bus('Strom', carrier='electricity', imbalance_penalty_per_flow_hour=imbalance_penalty),
+ fx.Bus('Fernwärme', carrier='heat', imbalance_penalty_per_flow_hour=imbalance_penalty),
+ fx.Bus('Gas', carrier='gas', imbalance_penalty_per_flow_hour=imbalance_penalty),
)
# --- Define Effects ---
@@ -189,22 +189,19 @@
print(flow_system) # Get a string representation of the FlowSystem
try:
- flow_system.start_network_app() # Start the network app
+ flow_system.topology.start_app() # Start the network app
except ImportError as e:
print(f'Network app requires extra dependencies: {e}')
# --- Solve FlowSystem ---
- optimization = fx.Optimization('complex example', flow_system, time_indices)
- optimization.do_modeling()
-
- optimization.solve(fx.solvers.HighsSolver(0.01, 60))
+ flow_system.optimize(fx.solvers.HighsSolver(0.01, 60))
# --- Results ---
- # You can analyze results directly or save them to file and reload them later.
- optimization.results.to_file()
-
- # But let's plot some results anyway
- optimization.results.plot_heatmap('BHKW2(Q_th)|flow_rate')
- optimization.results['BHKW2'].plot_node_balance()
- optimization.results['Speicher'].plot_charge_state()
- optimization.results['Fernwärme'].plot_node_balance_pie()
+ # Save the flow system with solution to file for later analysis
+ flow_system.to_netcdf('results/complex_example.nc')
+
+ # Plot results using the statistics accessor
+ flow_system.statistics.plot.heatmap('BHKW2(Q_th)') # Flow label - auto-resolves to flow_rate
+ flow_system.statistics.plot.balance('BHKW2')
+ flow_system.statistics.plot.heatmap('Speicher') # Storage label - auto-resolves to charge_state
+ flow_system.statistics.plot.balance('Fernwärme')
diff --git a/examples/02_Complex/complex_example_results.py b/examples/02_Complex/complex_example_results.py
index c4e9bb4f2..6978caff1 100644
--- a/examples/02_Complex/complex_example_results.py
+++ b/examples/02_Complex/complex_example_results.py
@@ -1,5 +1,5 @@
"""
-This script shows how load results of a prior calcualtion and how to analyze them.
+This script shows how to load results of a prior optimization and how to analyze them.
"""
import flixopt as fx
@@ -7,31 +7,32 @@
if __name__ == '__main__':
fx.CONFIG.exploring()
- # --- Load Results ---
+ # --- Load FlowSystem with Solution ---
try:
- results = fx.results.Results.from_file('results', 'complex example')
+ flow_system = fx.FlowSystem.from_netcdf('results/complex_example.nc')
except FileNotFoundError as e:
raise FileNotFoundError(
- f"Results file not found in the specified directory ('results'). "
+ f"Results file not found ('results/complex_example.nc'). "
f"Please ensure that the file is generated by running 'complex_example.py'. "
f'Original error: {e}'
) from e
# --- Basic overview ---
- results.plot_network()
- results['Fernwärme'].plot_node_balance()
+ flow_system.topology.plot()
+ flow_system.statistics.plot.balance('Fernwärme')
# --- Detailed Plots ---
- # In depth plot for individual flow rates ('__' is used as the delimiter between Component and Flow
- results.plot_heatmap('Wärmelast(Q_th_Last)|flow_rate')
- for bus in results.buses.values():
- bus.plot_node_balance_pie(show=False, save=f'results/{bus.label}--pie.html')
- bus.plot_node_balance(show=False, save=f'results/{bus.label}--balance.html')
+ # In-depth plot for individual flow rates
+ flow_system.statistics.plot.heatmap('Wärmelast(Q_th_Last)|flow_rate')
+
+ # Plot balances for all buses
+ for bus in flow_system.buses.values():
+ flow_system.statistics.plot.balance(bus.label).to_html(f'results/{bus.label}--balance.html')
# --- Plotting internal variables manually ---
- results.plot_heatmap('BHKW2(Q_th)|status')
- results.plot_heatmap('Kessel(Q_th)|status')
+ flow_system.statistics.plot.heatmap('BHKW2(Q_th)|status')
+ flow_system.statistics.plot.heatmap('Kessel(Q_th)|status')
- # Dataframes from results:
- fw_bus = results['Fernwärme'].node_balance().to_dataframe()
- all = results.solution.to_dataframe()
+ # Access data as DataFrames:
+ print(flow_system.statistics.flow_rates.to_dataframe())
+ print(flow_system.solution.to_dataframe())
diff --git a/examples/03_Optimization_modes/example_optimization_modes.py b/examples/03_Optimization_modes/example_optimization_modes.py
index 8f26d84b4..1f9968357 100644
--- a/examples/03_Optimization_modes/example_optimization_modes.py
+++ b/examples/03_Optimization_modes/example_optimization_modes.py
@@ -16,9 +16,11 @@ def get_solutions(optimizations: list, variable: str) -> xr.Dataset:
dataarrays = []
for optimization in optimizations:
if optimization.name == 'Segmented':
+ # SegmentedOptimization requires special handling to remove overlaps
dataarrays.append(optimization.results.solution_without_overlap(variable).rename(optimization.name))
else:
- dataarrays.append(optimization.results.solution[variable].rename(optimization.name))
+ # For Full and Clustered, access solution from the flow_system
+ dataarrays.append(optimization.flow_system.solution[variable].rename(optimization.name))
return xr.merge(dataarrays, join='outer')
@@ -67,10 +69,10 @@ def get_solutions(optimizations: list, variable: str) -> xr.Dataset:
flow_system = fx.FlowSystem(timesteps)
flow_system.add_elements(
- fx.Bus('Strom', imbalance_penalty_per_flow_hour=imbalance_penalty),
- fx.Bus('Fernwärme', imbalance_penalty_per_flow_hour=imbalance_penalty),
- fx.Bus('Gas', imbalance_penalty_per_flow_hour=imbalance_penalty),
- fx.Bus('Kohle', imbalance_penalty_per_flow_hour=imbalance_penalty),
+ fx.Bus('Strom', carrier='electricity', imbalance_penalty_per_flow_hour=imbalance_penalty),
+ fx.Bus('Fernwärme', carrier='heat', imbalance_penalty_per_flow_hour=imbalance_penalty),
+ fx.Bus('Gas', carrier='gas', imbalance_penalty_per_flow_hour=imbalance_penalty),
+ fx.Bus('Kohle', carrier='fuel', imbalance_penalty_per_flow_hour=imbalance_penalty),
)
# Effects
@@ -176,7 +178,7 @@ def get_solutions(optimizations: list, variable: str) -> xr.Dataset:
a_kwk,
a_speicher,
)
- flow_system.plot_network()
+ flow_system.topology.plot()
# Optimizations
optimizations: list[fx.Optimization | fx.ClusteredOptimization | fx.SegmentedOptimization] = []
diff --git a/examples/04_Scenarios/scenario_example.py b/examples/04_Scenarios/scenario_example.py
index 672df5c7f..820336e93 100644
--- a/examples/04_Scenarios/scenario_example.py
+++ b/examples/04_Scenarios/scenario_example.py
@@ -89,7 +89,12 @@
# --- Define Energy Buses ---
# These represent nodes, where the used medias are balanced (electricity, heat, and gas)
- flow_system.add_elements(fx.Bus(label='Strom'), fx.Bus(label='Fernwärme'), fx.Bus(label='Gas'))
+ # Carriers provide automatic color assignment in plots (yellow for electricity, red for heat, blue for gas)
+ flow_system.add_elements(
+ fx.Bus(label='Strom', carrier='electricity'),
+ fx.Bus(label='Fernwärme', carrier='heat'),
+ fx.Bus(label='Gas', carrier='gas'),
+ )
# --- Define Effects (Objective and CO2 Emissions) ---
# Cost effect: used as the optimization objective --> minimizing costs
@@ -120,7 +125,7 @@
thermal_flow=fx.Flow(
label='Q_th',
bus='Fernwärme',
- size=50,
+ size=100,
relative_minimum=0.1,
relative_maximum=1,
status_parameters=fx.StatusParameters(),
@@ -135,7 +140,7 @@
thermal_efficiency=0.48, # Realistic thermal efficiency (48%)
electrical_efficiency=0.40, # Realistic electrical efficiency (40%)
electrical_flow=fx.Flow(
- 'P_el', bus='Strom', size=60, relative_minimum=5 / 60, status_parameters=fx.StatusParameters()
+ 'P_el', bus='Strom', size=80, relative_minimum=5 / 80, status_parameters=fx.StatusParameters()
),
thermal_flow=fx.Flow('Q_th', bus='Fernwärme'),
fuel_flow=fx.Flow('Q_fu', bus='Gas'),
@@ -192,35 +197,18 @@
flow_system.add_elements(costs, CO2, boiler, storage, chp, heat_sink, gas_source, power_sink)
# Visualize the flow system for validation purposes
- flow_system.plot_network()
-
- # --- Define and Run Calculation ---
- # Create a calculation object to model the Flow System
- optimization = fx.Optimization(name='Sim1', flow_system=flow_system)
- optimization.do_modeling() # Translate the model to a solvable form, creating equations and Variables
-
- # --- Solve the Calculation and Save Results ---
- optimization.solve(fx.solvers.HighsSolver(mip_gap=0, time_limit_seconds=30))
-
- optimization.results.setup_colors(
- {
- 'CHP': 'red',
- 'Greys': ['Gastarif', 'Einspeisung', 'Heat Demand'],
- 'Storage': 'blue',
- 'Boiler': 'orange',
- }
- )
+ flow_system.topology.plot()
- optimization.results.plot_heatmap('CHP(Q_th)|flow_rate')
+ # --- Define and Solve Optimization ---
+ flow_system.optimize(fx.solvers.HighsSolver(mip_gap=0, time_limit_seconds=30))
# --- Analyze Results ---
- optimization.results['Fernwärme'].plot_node_balance(mode='stacked_bar')
- optimization.results.plot_heatmap('CHP(Q_th)|flow_rate')
- optimization.results['Storage'].plot_charge_state()
- optimization.results['Fernwärme'].plot_node_balance_pie(select={'period': 2020, 'scenario': 'Base Case'})
-
- # Convert the results for the storage component to a dataframe and display
- df = optimization.results['Storage'].node_balance_with_charge_state()
-
- # Save results to file for later usage
- optimization.results.to_file()
+ # Plotting through statistics accessor - returns PlotResult with .data and .figure
+ flow_system.statistics.plot.heatmap('CHP(Q_th)') # Flow label - auto-resolves to flow_rate
+ flow_system.statistics.plot.balance('Fernwärme')
+ flow_system.statistics.plot.balance('Storage')
+ flow_system.statistics.plot.heatmap('Storage') # Storage label - auto-resolves to charge_state
+
+ # Access data as xarray Datasets
+ print(flow_system.statistics.flow_rates)
+ print(flow_system.statistics.charge_states)
diff --git a/examples/05_Two-stage-optimization/two_stage_optimization.py b/examples/05_Two-stage-optimization/two_stage_optimization.py
index 9e102c44f..3f3278477 100644
--- a/examples/05_Two-stage-optimization/two_stage_optimization.py
+++ b/examples/05_Two-stage-optimization/two_stage_optimization.py
@@ -11,6 +11,7 @@
import pathlib
import timeit
+import numpy as np
import pandas as pd
import xarray as xr
@@ -37,11 +38,12 @@
gas_price = filtered_data['Gaspr.€/MWh'].to_numpy()
flow_system = fx.FlowSystem(timesteps)
+ # Carriers provide automatic color assignment in plots
flow_system.add_elements(
- fx.Bus('Strom'),
- fx.Bus('Fernwärme'),
- fx.Bus('Gas'),
- fx.Bus('Kohle'),
+ fx.Bus('Strom', carrier='electricity'),
+ fx.Bus('Fernwärme', carrier='heat'),
+ fx.Bus('Gas', carrier='gas'),
+ fx.Bus('Kohle', carrier='fuel'),
fx.Effect('costs', '€', 'Kosten', is_standard=True, is_objective=True),
fx.Effect('CO2', 'kg', 'CO2_e-Emissionen'),
fx.Effect('PE', 'kWh_PE', 'Primärenergie'),
@@ -53,7 +55,7 @@
label='Q_fu',
bus='Gas',
size=fx.InvestParameters(
- effects_of_investment_per_size={'costs': 1_000}, minimum_size=10, maximum_size=500
+ effects_of_investment_per_size={'costs': 1_000}, minimum_size=10, maximum_size=600
),
relative_minimum=0.2,
previous_flow_rate=20,
@@ -87,8 +89,8 @@
eta_discharge=1,
relative_loss_per_hour=0.001,
prevent_simultaneous_charge_and_discharge=True,
- charging=fx.Flow('Q_th_load', size=137, bus='Fernwärme'),
- discharging=fx.Flow('Q_th_unload', size=158, bus='Fernwärme'),
+ charging=fx.Flow('Q_th_load', size=200, bus='Fernwärme'),
+ discharging=fx.Flow('Q_th_unload', size=200, bus='Fernwärme'),
),
fx.Sink(
'Wärmelast', inputs=[fx.Flow('Q_th_Last', bus='Fernwärme', size=1, fixed_relative_profile=heat_demand)]
@@ -122,34 +124,39 @@
)
# Separate optimization of flow sizes and dispatch
+ # Stage 1: Optimize sizes using downsampled (2h) data
start = timeit.default_timer()
calculation_sizing = fx.Optimization('Sizing', flow_system.resample('2h'))
calculation_sizing.do_modeling()
calculation_sizing.solve(fx.solvers.HighsSolver(0.1 / 100, 60))
timer_sizing = timeit.default_timer() - start
+ # Stage 2: Optimize dispatch with fixed sizes from Stage 1
start = timeit.default_timer()
calculation_dispatch = fx.Optimization('Dispatch', flow_system)
calculation_dispatch.do_modeling()
- calculation_dispatch.fix_sizes(calculation_sizing.results.solution)
+ calculation_dispatch.fix_sizes(calculation_sizing.flow_system.solution)
calculation_dispatch.solve(fx.solvers.HighsSolver(0.1 / 100, 60))
timer_dispatch = timeit.default_timer() - start
- if (calculation_dispatch.results.sizes().round(5) == calculation_sizing.results.sizes().round(5)).all().item():
+ # Verify sizes were correctly fixed
+ dispatch_sizes = calculation_dispatch.flow_system.statistics.sizes
+ sizing_sizes = calculation_sizing.flow_system.statistics.sizes
+ if np.allclose(dispatch_sizes.to_dataarray(), sizing_sizes.to_dataarray(), rtol=1e-5):
logger.info('Sizes were correctly equalized')
else:
raise RuntimeError('Sizes were not correctly equalized')
- # Optimization of both flow sizes and dispatch together
+ # Combined optimization: optimize both sizes and dispatch together
start = timeit.default_timer()
calculation_combined = fx.Optimization('Combined', flow_system)
calculation_combined.do_modeling()
calculation_combined.solve(fx.solvers.HighsSolver(0.1 / 100, 600))
timer_combined = timeit.default_timer() - start
- # Comparison of results
+ # Comparison of results - access solutions from flow_system
comparison = xr.concat(
- [calculation_combined.results.solution, calculation_dispatch.results.solution], dim='mode'
+ [calculation_combined.flow_system.solution, calculation_dispatch.flow_system.solution], dim='mode'
).assign_coords(mode=['Combined', 'Two-stage'])
comparison['Duration [s]'] = xr.DataArray([timer_combined, timer_sizing + timer_dispatch], dims='mode')
diff --git a/flixopt/__init__.py b/flixopt/__init__.py
index 8874811b3..1e3fee5bd 100644
--- a/flixopt/__init__.py
+++ b/flixopt/__init__.py
@@ -14,6 +14,7 @@
# Import commonly used classes and functions
from . import linear_converters, plotting, results, solvers
+from .carrier import Carrier, CarrierContainer
from .clustering import ClusteringParameters
from .components import (
LinearConverter,
@@ -30,10 +31,13 @@
from .flow_system import FlowSystem
from .interface import InvestParameters, Piece, Piecewise, PiecewiseConversion, PiecewiseEffects, StatusParameters
from .optimization import ClusteredOptimization, Optimization, SegmentedOptimization
+from .plot_result import PlotResult
__all__ = [
'TimeSeriesData',
'CONFIG',
+ 'Carrier',
+ 'CarrierContainer',
'Flow',
'Bus',
'Effect',
@@ -55,6 +59,7 @@
'PiecewiseConversion',
'PiecewiseEffects',
'ClusteringParameters',
+ 'PlotResult',
'plotting',
'results',
'linear_converters',
diff --git a/flixopt/carrier.py b/flixopt/carrier.py
new file mode 100644
index 000000000..8a663eca9
--- /dev/null
+++ b/flixopt/carrier.py
@@ -0,0 +1,159 @@
+"""Carrier class for energy/material type definitions.
+
+Carriers represent types of energy or materials that flow through buses,
+such as electricity, heat, gas, or water. They provide consistent styling
+and metadata across visualizations.
+"""
+
+from __future__ import annotations
+
+from .structure import ContainerMixin, Interface, register_class_for_io
+
+
+@register_class_for_io
+class Carrier(Interface):
+ """Definition of an energy or material carrier type.
+
+ Carriers represent the type of energy or material flowing through a Bus.
+ They provide consistent color, unit, and description across all visualizations
+ and can be shared between multiple buses of the same type.
+
+ Inherits from Interface to provide serialization capabilities.
+
+ Args:
+ name: Identifier for the carrier (e.g., 'electricity', 'heat', 'gas').
+ color: Hex color string for visualizations (e.g., '#FFD700').
+ unit: Unit string for display (e.g., 'kW', 'kW_th', 'm³/h').
+ description: Optional human-readable description.
+
+ Examples:
+ Creating custom carriers:
+
+ ```python
+ import flixopt as fx
+
+ # Define custom carriers
+ electricity = fx.Carrier('electricity', '#FFD700', 'kW', 'Electrical power')
+ district_heat = fx.Carrier('district_heat', '#FF6B6B', 'kW_th', 'District heating')
+ hydrogen = fx.Carrier('hydrogen', '#00CED1', 'kg/h', 'Hydrogen fuel')
+
+ # Register with FlowSystem
+ flow_system.add_carrier(electricity)
+ flow_system.add_carrier(district_heat)
+
+ # Use with buses (just reference by name)
+ elec_bus = fx.Bus('MainGrid', carrier='electricity')
+ heat_bus = fx.Bus('HeatingNetwork', carrier='district_heat')
+ ```
+
+ Using predefined carriers from CONFIG:
+
+ ```python
+ # Access built-in carriers
+ elec = fx.CONFIG.Carriers.electricity
+ heat = fx.CONFIG.Carriers.heat
+
+ # Use directly
+ bus = fx.Bus('Grid', carrier='electricity')
+ ```
+
+ Adding custom carriers to CONFIG:
+
+ ```python
+ # Add a new carrier globally
+ fx.CONFIG.Carriers.add(fx.Carrier('biogas', '#228B22', 'kW', 'Biogas'))
+
+ # Now available as
+ fx.CONFIG.Carriers.biogas
+ ```
+
+ Note:
+ Carriers are compared by name for equality, allowing flexible usage
+ patterns where the same carrier type can be referenced by name string
+ or Carrier object interchangeably.
+ """
+
+ def __init__(
+ self,
+ name: str,
+ color: str = '',
+ unit: str = '',
+ description: str = '',
+ ) -> None:
+ """Initialize a Carrier.
+
+ Args:
+ name: Identifier for the carrier (normalized to lowercase).
+ color: Hex color string for visualizations.
+ unit: Unit string for display.
+ description: Optional human-readable description.
+ """
+ self.name = name.lower()
+ self.color = color
+ self.unit = unit
+ self.description = description
+
+ def transform_data(self, name_prefix: str = '') -> None:
+ """Transform data to match FlowSystem dimensions.
+
+ Carriers don't have time-series data, so this is a no-op.
+
+ Args:
+ name_prefix: Ignored for Carrier.
+ """
+ pass # Carriers have no data to transform
+
+ @property
+ def label(self) -> str:
+ """Label for container keying (alias for name)."""
+ return self.name
+
+ def __hash__(self):
+ return hash(self.name)
+
+ def __eq__(self, other):
+ if isinstance(other, Carrier):
+ return self.name == other.name
+ if isinstance(other, str):
+ return self.name == other.lower()
+ return False
+
+ def __repr__(self):
+ return f"Carrier('{self.name}', color='{self.color}', unit='{self.unit}')"
+
+ def __str__(self):
+ return self.name
+
+
+class CarrierContainer(ContainerMixin['Carrier']):
+ """Container for Carrier objects.
+
+ Uses carrier.name for keying. Provides dict-like access to carriers
+ registered with a FlowSystem.
+
+ Examples:
+ ```python
+ # Access via FlowSystem
+ carriers = flow_system.carriers
+
+ # Dict-like access
+ elec = carriers['electricity']
+ 'heat' in carriers # True/False
+
+ # Iteration
+ for name in carriers:
+ print(name)
+ ```
+ """
+
+ def __init__(self, carriers: list[Carrier] | dict[str, Carrier] | None = None):
+ """Initialize a CarrierContainer.
+
+ Args:
+ carriers: Initial carriers to add.
+ """
+ super().__init__(elements=carriers, element_type_name='carriers')
+
+ def _get_label(self, carrier: Carrier) -> str:
+ """Extract name from Carrier for keying."""
+ return carrier.name
diff --git a/flixopt/clustering.py b/flixopt/clustering.py
index 1c6f7511b..d392167a1 100644
--- a/flixopt/clustering.py
+++ b/flixopt/clustering.py
@@ -7,7 +7,6 @@
import copy
import logging
-import pathlib
import timeit
from typing import TYPE_CHECKING
@@ -23,6 +22,7 @@
from .color_processing import process_colors
from .components import Storage
from .config import CONFIG
+from .plot_result import PlotResult
from .structure import (
FlowSystemModel,
Submodel,
@@ -31,7 +31,6 @@
if TYPE_CHECKING:
import linopy
import pandas as pd
- import plotly.graph_objects as go
from .core import Scalar, TimeSeriesData
from .elements import Component
@@ -144,8 +143,28 @@ def describe_clusters(self) -> str:
def use_extreme_periods(self):
return self.time_series_for_high_peaks or self.time_series_for_low_peaks
- def plot(self, colormap: str | None = None, show: bool = True, save: pathlib.Path | None = None) -> go.Figure:
- from . import plotting
+ def plot(self, colormap: str | None = None, show: bool | None = None) -> PlotResult:
+ """Plot original vs aggregated data comparison.
+
+ Visualizes the original time series (dashed lines) overlaid with
+ the aggregated/clustered time series (solid lines) for comparison.
+
+ Args:
+ colormap: Colorscale name for the time series colors.
+ Defaults to CONFIG.Plotting.default_qualitative_colorscale.
+ show: Whether to display the figure.
+ Defaults to CONFIG.Plotting.default_show.
+
+ Returns:
+ PlotResult containing the comparison figure and underlying data.
+
+ Examples:
+ >>> clustering.cluster()
+ >>> clustering.plot()
+ >>> clustering.plot(colormap='Set2', show=False).to_html('clustering.html')
+ """
+ import plotly.express as px
+ import xarray as xr
df_org = self.original_data.copy().rename(
columns={col: f'Original - {col}' for col in self.original_data.columns}
@@ -156,10 +175,17 @@ def plot(self, colormap: str | None = None, show: bool = True, save: pathlib.Pat
colors = list(
process_colors(colormap or CONFIG.Plotting.default_qualitative_colorscale, list(df_org.columns)).values()
)
- fig = plotting.with_plotly(df_org.to_xarray(), 'line', colors=colors, xlabel='Time in h')
+
+ # Create line plot for original data (dashed)
+ index_name = df_org.index.name or 'index'
+ df_org_long = df_org.reset_index().melt(id_vars=index_name, var_name='variable', value_name='value')
+ fig = px.line(df_org_long, x=index_name, y='value', color='variable', color_discrete_sequence=colors)
for trace in fig.data:
- trace.update(dict(line=dict(dash='dash')))
- fig2 = plotting.with_plotly(df_agg.to_xarray(), 'line', colors=colors, xlabel='Time in h')
+ trace.update(line=dict(dash='dash'))
+
+ # Add aggregated data (solid lines)
+ df_agg_long = df_agg.reset_index().melt(id_vars=index_name, var_name='variable', value_name='value')
+ fig2 = px.line(df_agg_long, x=index_name, y='value', color='variable', color_discrete_sequence=colors)
for trace in fig2.data:
fig.add_trace(trace)
@@ -169,16 +195,21 @@ def plot(self, colormap: str | None = None, show: bool = True, save: pathlib.Pat
yaxis_title='Value',
)
- plotting.export_figure(
- figure_like=fig,
- default_path=pathlib.Path('aggregated data.html'),
- default_filetype='.html',
- user_path=save,
- show=show,
- save=save is not None,
+ # Build xarray Dataset with both original and aggregated data
+ data = xr.Dataset(
+ {
+ 'original': self.original_data.to_xarray().to_array(dim='variable'),
+ 'aggregated': self.aggregated_data.to_xarray().to_array(dim='variable'),
+ }
)
+ result = PlotResult(data=data, figure=fig)
+
+ if show is None:
+ show = CONFIG.Plotting.default_show
+ if show:
+ result.show()
- return fig
+ return result
def get_cluster_indices(self) -> dict[str, list[np.ndarray]]:
"""
diff --git a/flixopt/color_processing.py b/flixopt/color_processing.py
index 2959acc82..62d8a9542 100644
--- a/flixopt/color_processing.py
+++ b/flixopt/color_processing.py
@@ -15,6 +15,57 @@
logger = logging.getLogger('flixopt')
+# Type alias for flexible color input
+ColorType = str | list[str] | dict[str, str]
+"""Flexible color specification type supporting multiple input formats for visualization.
+
+Color specifications can take several forms to accommodate different use cases:
+
+**Named colorscales** (str):
+ - Standard colorscales: 'turbo', 'plasma', 'cividis', 'tab10', 'Set1'
+ - Energy-focused: 'portland' (custom flixopt colorscale for energy systems)
+ - Backend-specific maps available in Plotly and Matplotlib
+
+**Color Lists** (list[str]):
+ - Explicit color sequences: ['red', 'blue', 'green', 'orange']
+ - HEX codes: ['#FF0000', '#0000FF', '#00FF00', '#FFA500']
+ - Mixed formats: ['red', '#0000FF', 'green', 'orange']
+
+**Label-to-Color Mapping** (dict[str, str]):
+ - Explicit associations: {'Wind': 'skyblue', 'Solar': 'gold', 'Gas': 'brown'}
+ - Ensures consistent colors across different plots and datasets
+ - Ideal for energy system components with semantic meaning
+
+Examples:
+ ```python
+ # Named colorscale
+ colors = 'turbo' # Automatic color generation
+
+ # Explicit color list
+ colors = ['red', 'blue', 'green', '#FFD700']
+
+ # Component-specific mapping
+ colors = {
+ 'Wind_Turbine': 'skyblue',
+ 'Solar_Panel': 'gold',
+ 'Natural_Gas': 'brown',
+ 'Battery': 'green',
+ 'Electric_Load': 'darkred'
+ }
+ ```
+
+Color Format Support:
+ - **Named Colors**: 'red', 'blue', 'forestgreen', 'darkorange'
+ - **HEX Codes**: '#FF0000', '#0000FF', '#228B22', '#FF8C00'
+ - **RGB Tuples**: (255, 0, 0), (0, 0, 255) [Matplotlib only]
+ - **RGBA**: 'rgba(255,0,0,0.8)' [Plotly only]
+
+References:
+ - HTML Color Names: https://htmlcolorcodes.com/color-names/
+ - Matplotlib colorscales: https://matplotlib.org/stable/tutorials/colors/colormaps.html
+ - Plotly Built-in Colorscales: https://plotly.com/python/builtin-colorscales/
+"""
+
def _rgb_string_to_hex(color: str) -> str:
"""Convert Plotly RGB/RGBA string format to hex.
@@ -58,6 +109,59 @@ def _rgb_string_to_hex(color: str) -> str:
return color
+def color_to_rgba(color: str | None, alpha: float = 1.0) -> str:
+ """Convert any valid color to RGBA string format.
+
+ Handles hex colors (with or without #), named colors, and rgb/rgba strings.
+
+ Args:
+ color: Color in any valid format (hex '#FF0000' or 'FF0000',
+ named 'red', rgb 'rgb(255,0,0)', rgba 'rgba(255,0,0,1)').
+ alpha: Alpha/opacity value between 0.0 and 1.0.
+
+ Returns:
+ Color in RGBA format 'rgba(R, G, B, A)'.
+
+ Examples:
+ >>> color_to_rgba('#FF0000')
+ 'rgba(255, 0, 0, 1.0)'
+ >>> color_to_rgba('FF0000')
+ 'rgba(255, 0, 0, 1.0)'
+ >>> color_to_rgba('red', 0.5)
+ 'rgba(255, 0, 0, 0.5)'
+ >>> color_to_rgba('forestgreen', 0.4)
+ 'rgba(34, 139, 34, 0.4)'
+ >>> color_to_rgba(None)
+ 'rgba(200, 200, 200, 1.0)'
+ """
+ if not color:
+ return f'rgba(200, 200, 200, {alpha})'
+
+ try:
+ # Use matplotlib's robust color conversion (handles hex, named, etc.)
+ rgba = mcolors.to_rgba(color)
+ except ValueError:
+ # Try adding # prefix for bare hex colors (e.g., 'FF0000' -> '#FF0000')
+ if len(color) == 6 and all(c in '0123456789ABCDEFabcdef' for c in color):
+ try:
+ rgba = mcolors.to_rgba(f'#{color}')
+ except ValueError:
+ return f'rgba(200, 200, 200, {alpha})'
+ else:
+ return f'rgba(200, 200, 200, {alpha})'
+ except TypeError:
+ return f'rgba(200, 200, 200, {alpha})'
+
+ r = int(round(rgba[0] * 255))
+ g = int(round(rgba[1] * 255))
+ b = int(round(rgba[2] * 255))
+ return f'rgba({r}, {g}, {b}, {alpha})'
+
+
+# Alias for backwards compatibility
+hex_to_rgba = color_to_rgba
+
+
def process_colors(
colors: None | str | list[str] | dict[str, str],
labels: list[str],
diff --git a/flixopt/components.py b/flixopt/components.py
index 0cfed39eb..267c144af 100644
--- a/flixopt/components.py
+++ b/flixopt/components.py
@@ -180,11 +180,11 @@ def create_model(self, model: FlowSystemModel) -> LinearConverterModel:
self.submodel = LinearConverterModel(model, self)
return self.submodel
- def _set_flow_system(self, flow_system) -> None:
+ def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
"""Propagate flow_system reference to parent Component and piecewise_conversion."""
- super()._set_flow_system(flow_system)
+ super().link_to_flow_system(flow_system, prefix)
if self.piecewise_conversion is not None:
- self.piecewise_conversion._set_flow_system(flow_system)
+ self.piecewise_conversion.link_to_flow_system(flow_system, self._sub_prefix('PiecewiseConversion'))
def _plausibility_checks(self) -> None:
super()._plausibility_checks()
@@ -216,14 +216,13 @@ def _plausibility_checks(self) -> None:
f'({flow.label_full}).'
)
- def transform_data(self, name_prefix: str = '') -> None:
- prefix = '|'.join(filter(None, [name_prefix, self.label_full]))
- super().transform_data(prefix)
+ def transform_data(self) -> None:
+ super().transform_data()
if self.conversion_factors:
self.conversion_factors = self._transform_conversion_factors()
if self.piecewise_conversion:
self.piecewise_conversion.has_time_dim = True
- self.piecewise_conversion.transform_data(f'{prefix}|PiecewiseConversion')
+ self.piecewise_conversion.transform_data()
def _transform_conversion_factors(self) -> list[dict[str, xr.DataArray]]:
"""Converts all conversion factors to internal datatypes"""
@@ -266,7 +265,9 @@ class Storage(Component):
charging: Incoming flow for loading the storage.
discharging: Outgoing flow for unloading the storage.
capacity_in_flow_hours: Storage capacity in flow-hours (kWh, m³, kg).
- Scalar for fixed size or InvestParameters for optimization.
+ Scalar for fixed size, InvestParameters for optimization, or None (unbounded).
+ Default: None (unbounded capacity). When using InvestParameters,
+ maximum_size (or fixed_size) must be explicitly set for proper model scaling.
relative_minimum_charge_state: Minimum charge state (0-1). Default: 0.
relative_maximum_charge_state: Maximum charge state (0-1). Default: 1.
initial_charge_state: Charge at start. Numeric or 'equals_final'. Default: 0.
@@ -367,6 +368,11 @@ class Storage(Component):
variables enforce mutual exclusivity, increasing solution time but preventing unrealistic
simultaneous charging and discharging.
+ **Unbounded capacity**: When capacity_in_flow_hours is None (default), the storage has
+ unlimited capacity. Note that prevent_simultaneous_charge_and_discharge requires the
+ charging and discharging flows to have explicit sizes. Use prevent_simultaneous_charge_and_discharge=False
+ with unbounded storages, or set flow sizes explicitly.
+
**Units**: Flow rates and charge states are related by the concept of 'flow hours' (=flow_rate * time).
With flow rates in kW, the charge state is therefore (usually) kWh.
With flow rates in m3/h, the charge state is therefore in m3.
@@ -379,7 +385,7 @@ def __init__(
label: str,
charging: Flow,
discharging: Flow,
- capacity_in_flow_hours: Numeric_PS | InvestParameters,
+ capacity_in_flow_hours: Numeric_PS | InvestParameters | None = None,
relative_minimum_charge_state: Numeric_TPS = 0,
relative_maximum_charge_state: Numeric_TPS = 1,
initial_charge_state: Numeric_PS | Literal['equals_final'] = 0,
@@ -427,49 +433,50 @@ def create_model(self, model: FlowSystemModel) -> StorageModel:
self.submodel = StorageModel(model, self)
return self.submodel
- def _set_flow_system(self, flow_system) -> None:
+ def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
"""Propagate flow_system reference to parent Component and capacity_in_flow_hours if it's InvestParameters."""
- super()._set_flow_system(flow_system)
+ super().link_to_flow_system(flow_system, prefix)
if isinstance(self.capacity_in_flow_hours, InvestParameters):
- self.capacity_in_flow_hours._set_flow_system(flow_system)
+ self.capacity_in_flow_hours.link_to_flow_system(flow_system, self._sub_prefix('InvestParameters'))
- def transform_data(self, name_prefix: str = '') -> None:
- prefix = '|'.join(filter(None, [name_prefix, self.label_full]))
- super().transform_data(prefix)
+ def transform_data(self) -> None:
+ super().transform_data()
self.relative_minimum_charge_state = self._fit_coords(
- f'{prefix}|relative_minimum_charge_state', self.relative_minimum_charge_state
+ f'{self.prefix}|relative_minimum_charge_state', self.relative_minimum_charge_state
)
self.relative_maximum_charge_state = self._fit_coords(
- f'{prefix}|relative_maximum_charge_state', self.relative_maximum_charge_state
+ f'{self.prefix}|relative_maximum_charge_state', self.relative_maximum_charge_state
+ )
+ self.eta_charge = self._fit_coords(f'{self.prefix}|eta_charge', self.eta_charge)
+ self.eta_discharge = self._fit_coords(f'{self.prefix}|eta_discharge', self.eta_discharge)
+ self.relative_loss_per_hour = self._fit_coords(
+ f'{self.prefix}|relative_loss_per_hour', self.relative_loss_per_hour
)
- self.eta_charge = self._fit_coords(f'{prefix}|eta_charge', self.eta_charge)
- self.eta_discharge = self._fit_coords(f'{prefix}|eta_discharge', self.eta_discharge)
- self.relative_loss_per_hour = self._fit_coords(f'{prefix}|relative_loss_per_hour', self.relative_loss_per_hour)
if not isinstance(self.initial_charge_state, str):
self.initial_charge_state = self._fit_coords(
- f'{prefix}|initial_charge_state', self.initial_charge_state, dims=['period', 'scenario']
+ f'{self.prefix}|initial_charge_state', self.initial_charge_state, dims=['period', 'scenario']
)
self.minimal_final_charge_state = self._fit_coords(
- f'{prefix}|minimal_final_charge_state', self.minimal_final_charge_state, dims=['period', 'scenario']
+ f'{self.prefix}|minimal_final_charge_state', self.minimal_final_charge_state, dims=['period', 'scenario']
)
self.maximal_final_charge_state = self._fit_coords(
- f'{prefix}|maximal_final_charge_state', self.maximal_final_charge_state, dims=['period', 'scenario']
+ f'{self.prefix}|maximal_final_charge_state', self.maximal_final_charge_state, dims=['period', 'scenario']
)
self.relative_minimum_final_charge_state = self._fit_coords(
- f'{prefix}|relative_minimum_final_charge_state',
+ f'{self.prefix}|relative_minimum_final_charge_state',
self.relative_minimum_final_charge_state,
dims=['period', 'scenario'],
)
self.relative_maximum_final_charge_state = self._fit_coords(
- f'{prefix}|relative_maximum_final_charge_state',
+ f'{self.prefix}|relative_maximum_final_charge_state',
self.relative_maximum_final_charge_state,
dims=['period', 'scenario'],
)
if isinstance(self.capacity_in_flow_hours, InvestParameters):
- self.capacity_in_flow_hours.transform_data(f'{prefix}|InvestParameters')
+ self.capacity_in_flow_hours.transform_data()
else:
self.capacity_in_flow_hours = self._fit_coords(
- f'{prefix}|capacity_in_flow_hours', self.capacity_in_flow_hours, dims=['period', 'scenario']
+ f'{self.prefix}|capacity_in_flow_hours', self.capacity_in_flow_hours, dims=['period', 'scenario']
)
def _plausibility_checks(self) -> None:
@@ -485,31 +492,58 @@ def _plausibility_checks(self) -> None:
raise PlausibilityError(f'initial_charge_state has undefined value: {self.initial_charge_state}')
initial_equals_final = True
- # Use new InvestParameters methods to get capacity bounds
- if isinstance(self.capacity_in_flow_hours, InvestParameters):
- minimum_capacity = self.capacity_in_flow_hours.minimum_or_fixed_size
- maximum_capacity = self.capacity_in_flow_hours.maximum_or_fixed_size
- else:
- maximum_capacity = self.capacity_in_flow_hours
- minimum_capacity = self.capacity_in_flow_hours
-
- # Initial capacity should not constraint investment decision
- minimum_initial_capacity = maximum_capacity * self.relative_minimum_charge_state.isel(time=0)
- maximum_initial_capacity = minimum_capacity * self.relative_maximum_charge_state.isel(time=0)
-
- # Only perform numeric comparisons if not using 'equals_final'
- if not initial_equals_final:
- if (self.initial_charge_state > maximum_initial_capacity).any():
+ # Capacity is required when using non-default relative bounds
+ if self.capacity_in_flow_hours is None:
+ if np.any(self.relative_minimum_charge_state > 0):
+ raise PlausibilityError(
+ f'Storage "{self.label_full}" has relative_minimum_charge_state > 0 but no capacity_in_flow_hours. '
+ f'A capacity is required because the lower bound is capacity * relative_minimum_charge_state.'
+ )
+ if np.any(self.relative_maximum_charge_state < 1):
+ raise PlausibilityError(
+ f'Storage "{self.label_full}" has relative_maximum_charge_state < 1 but no capacity_in_flow_hours. '
+ f'A capacity is required because the upper bound is capacity * relative_maximum_charge_state.'
+ )
+ if self.relative_minimum_final_charge_state is not None:
raise PlausibilityError(
- f'{self.label_full}: {self.initial_charge_state=} '
- f'is constraining the investment decision. Chosse a value above {maximum_initial_capacity}'
+ f'Storage "{self.label_full}" has relative_minimum_final_charge_state but no capacity_in_flow_hours. '
+ f'A capacity is required for relative final charge state constraints.'
)
- if (self.initial_charge_state < minimum_initial_capacity).any():
+ if self.relative_maximum_final_charge_state is not None:
raise PlausibilityError(
- f'{self.label_full}: {self.initial_charge_state=} '
- f'is constraining the investment decision. Chosse a value below {minimum_initial_capacity}'
+ f'Storage "{self.label_full}" has relative_maximum_final_charge_state but no capacity_in_flow_hours. '
+ f'A capacity is required for relative final charge state constraints.'
)
+ # Skip capacity-related checks if capacity is None (unbounded)
+ if self.capacity_in_flow_hours is not None:
+ # Use new InvestParameters methods to get capacity bounds
+ if isinstance(self.capacity_in_flow_hours, InvestParameters):
+ minimum_capacity = self.capacity_in_flow_hours.minimum_or_fixed_size
+ maximum_capacity = self.capacity_in_flow_hours.maximum_or_fixed_size
+ else:
+ maximum_capacity = self.capacity_in_flow_hours
+ minimum_capacity = self.capacity_in_flow_hours
+
+ # Initial charge state should not constrain investment decision
+ # If initial > (min_cap * rel_max), investment is forced to increase capacity
+ # If initial < (max_cap * rel_min), investment is forced to decrease capacity
+ min_initial_at_max_capacity = maximum_capacity * self.relative_minimum_charge_state.isel(time=0)
+ max_initial_at_min_capacity = minimum_capacity * self.relative_maximum_charge_state.isel(time=0)
+
+ # Only perform numeric comparisons if not using 'equals_final'
+ if not initial_equals_final:
+ if (self.initial_charge_state > max_initial_at_min_capacity).any():
+ raise PlausibilityError(
+ f'{self.label_full}: {self.initial_charge_state=} '
+ f'is constraining the investment decision. Choose a value <= {max_initial_at_min_capacity}.'
+ )
+ if (self.initial_charge_state < min_initial_at_max_capacity).any():
+ raise PlausibilityError(
+ f'{self.label_full}: {self.initial_charge_state=} '
+ f'is constraining the investment decision. Choose a value >= {min_initial_at_max_capacity}.'
+ )
+
if self.balanced:
if not isinstance(self.charging.size, InvestParameters) or not isinstance(
self.discharging.size, InvestParameters
@@ -518,13 +552,13 @@ def _plausibility_checks(self) -> None:
f'Balancing charging and discharging Flows in {self.label_full} is only possible with Investments.'
)
- if (self.charging.size.minimum_size > self.discharging.size.maximum_size).any() or (
- self.charging.size.maximum_size < self.discharging.size.minimum_size
+ if (self.charging.size.minimum_or_fixed_size > self.discharging.size.maximum_or_fixed_size).any() or (
+ self.charging.size.maximum_or_fixed_size < self.discharging.size.minimum_or_fixed_size
).any():
raise PlausibilityError(
f'Balancing charging and discharging Flows in {self.label_full} need compatible minimum and maximum sizes.'
- f'Got: {self.charging.size.minimum_size=}, {self.charging.size.maximum_size=} and '
- f'{self.discharging.size.minimum_size=}, {self.discharging.size.maximum_size=}.'
+ f' Got: {self.charging.size.minimum_or_fixed_size=}, {self.charging.size.maximum_or_fixed_size=} and '
+ f'{self.discharging.size.minimum_or_fixed_size=}, {self.discharging.size.maximum_or_fixed_size=}.'
)
def __repr__(self) -> str:
@@ -705,8 +739,8 @@ def _plausibility_checks(self):
).any():
raise ValueError(
f'Balanced Transmission needs compatible minimum and maximum sizes.'
- f'Got: {self.in1.size.minimum_size=}, {self.in1.size.maximum_size=}, {self.in1.size.fixed_size=} and '
- f'{self.in2.size.minimum_size=}, {self.in2.size.maximum_size=}, {self.in2.size.fixed_size=}.'
+ f' Got: {self.in1.size.minimum_or_fixed_size=}, {self.in1.size.maximum_or_fixed_size=} and '
+ f'{self.in2.size.minimum_or_fixed_size=}, {self.in2.size.maximum_or_fixed_size=}.'
)
def create_model(self, model) -> TransmissionModel:
@@ -714,11 +748,10 @@ def create_model(self, model) -> TransmissionModel:
self.submodel = TransmissionModel(model, self)
return self.submodel
- def transform_data(self, name_prefix: str = '') -> None:
- prefix = '|'.join(filter(None, [name_prefix, self.label_full]))
- super().transform_data(prefix)
- self.relative_losses = self._fit_coords(f'{prefix}|relative_losses', self.relative_losses)
- self.absolute_losses = self._fit_coords(f'{prefix}|absolute_losses', self.absolute_losses)
+ def transform_data(self) -> None:
+ super().transform_data()
+ self.relative_losses = self._fit_coords(f'{self.prefix}|relative_losses', self.relative_losses)
+ self.absolute_losses = self._fit_coords(f'{self.prefix}|absolute_losses', self.absolute_losses)
class TransmissionModel(ComponentModel):
@@ -729,6 +762,9 @@ def __init__(self, model: FlowSystemModel, element: Transmission):
for flow in element.inputs + element.outputs:
if flow.status_parameters is None:
flow.status_parameters = StatusParameters()
+ flow.status_parameters.link_to_flow_system(
+ model.flow_system, f'{flow.label_full}|status_parameters'
+ )
super().__init__(model, element)
@@ -936,15 +972,18 @@ def _initial_and_final_charge_state(self):
@property
def _absolute_charge_state_bounds(self) -> tuple[xr.DataArray, xr.DataArray]:
relative_lower_bound, relative_upper_bound = self._relative_charge_state_bounds
- if not isinstance(self.element.capacity_in_flow_hours, InvestParameters):
+ if self.element.capacity_in_flow_hours is None:
+ # Unbounded storage: lower bound is 0, upper bound is infinite
+ return (0, np.inf)
+ elif isinstance(self.element.capacity_in_flow_hours, InvestParameters):
return (
- relative_lower_bound * self.element.capacity_in_flow_hours,
- relative_upper_bound * self.element.capacity_in_flow_hours,
+ relative_lower_bound * self.element.capacity_in_flow_hours.minimum_or_fixed_size,
+ relative_upper_bound * self.element.capacity_in_flow_hours.maximum_or_fixed_size,
)
else:
return (
- relative_lower_bound * self.element.capacity_in_flow_hours.minimum_size,
- relative_upper_bound * self.element.capacity_in_flow_hours.maximum_size,
+ relative_lower_bound * self.element.capacity_in_flow_hours,
+ relative_upper_bound * self.element.capacity_in_flow_hours,
)
@property
diff --git a/flixopt/config.py b/flixopt/config.py
index f090430b0..9560042e3 100644
--- a/flixopt/config.py
+++ b/flixopt/config.py
@@ -20,7 +20,7 @@
COLORLOG_AVAILABLE = False
escape_codes = None
-__all__ = ['CONFIG', 'MultilineFormatter', 'SUCCESS_LEVEL']
+__all__ = ['CONFIG', 'MultilineFormatter', 'SUCCESS_LEVEL', 'DEPRECATION_REMOVAL_VERSION']
if COLORLOG_AVAILABLE:
__all__.append('ColoredMultilineFormatter')
@@ -171,6 +171,7 @@ def format(self, record):
'time_limit_seconds': 300,
'log_to_console': True,
'log_main_results': True,
+ 'compute_infeasibilities': True,
}
),
}
@@ -526,6 +527,7 @@ class Solving:
time_limit_seconds: Default time limit in seconds for solver runs.
log_to_console: Whether solver should output to console.
log_main_results: Whether to log main results after solving.
+ compute_infeasibilities: Whether to compute infeasibility analysis when the model is infeasible.
Examples:
```python
@@ -540,6 +542,7 @@ class Solving:
time_limit_seconds: int = _DEFAULTS['solving']['time_limit_seconds']
log_to_console: bool = _DEFAULTS['solving']['log_to_console']
log_main_results: bool = _DEFAULTS['solving']['log_main_results']
+ compute_infeasibilities: bool = _DEFAULTS['solving']['compute_infeasibilities']
class Plotting:
"""Plotting configuration.
@@ -572,6 +575,36 @@ class Plotting:
default_sequential_colorscale: str = _DEFAULTS['plotting']['default_sequential_colorscale']
default_qualitative_colorscale: str = _DEFAULTS['plotting']['default_qualitative_colorscale']
+ class Carriers:
+ """Default carrier definitions for common energy types.
+
+ Provides convenient defaults for carriers. Colors are from D3/Plotly palettes.
+
+ Predefined: electricity, heat, gas, hydrogen, fuel, biomass
+
+ Examples:
+ ```python
+ import flixopt as fx
+
+ # Access predefined carriers
+ fx.CONFIG.Carriers.electricity # Carrier with color '#FECB52'
+ fx.CONFIG.Carriers.heat.color # '#D62728'
+
+ # Use with buses
+ bus = fx.Bus('Grid', carrier='electricity')
+ ```
+ """
+
+ from .carrier import Carrier
+
+ # Default carriers - colors from D3/Plotly palettes
+ electricity: Carrier = Carrier('electricity', '#FECB52') # Yellow
+ heat: Carrier = Carrier('heat', '#D62728') # Red
+ gas: Carrier = Carrier('gas', '#1F77B4') # Blue
+ hydrogen: Carrier = Carrier('hydrogen', '#9467BD') # Purple
+ fuel: Carrier = Carrier('fuel', '#8C564B') # Brown
+ biomass: Carrier = Carrier('biomass', '#2CA02C') # Green
+
config_name: str = _DEFAULTS['config_name']
@classmethod
@@ -598,6 +631,16 @@ def reset(cls) -> None:
for key, value in _DEFAULTS['plotting'].items():
setattr(cls.Plotting, key, value)
+ # Reset Carriers to defaults
+ from .carrier import Carrier
+
+ cls.Carriers.electricity = Carrier('electricity', '#FECB52')
+ cls.Carriers.heat = Carrier('heat', '#D62728')
+ cls.Carriers.gas = Carrier('gas', '#1F77B4')
+ cls.Carriers.hydrogen = Carrier('hydrogen', '#9467BD')
+ cls.Carriers.fuel = Carrier('fuel', '#8C564B')
+ cls.Carriers.biomass = Carrier('biomass', '#2CA02C')
+
cls.config_name = _DEFAULTS['config_name']
# Reset logging to default (silent)
@@ -622,6 +665,7 @@ def to_dict(cls) -> dict:
'time_limit_seconds': cls.Solving.time_limit_seconds,
'log_to_console': cls.Solving.log_to_console,
'log_main_results': cls.Solving.log_main_results,
+ 'compute_infeasibilities': cls.Solving.compute_infeasibilities,
},
'plotting': {
'default_show': cls.Plotting.default_show,
@@ -741,6 +785,45 @@ def browser_plotting(cls) -> type[CONFIG]:
return cls
+ @classmethod
+ def notebook(cls) -> type[CONFIG]:
+ """Configure for Jupyter notebook environments.
+
+ Optimizes settings for notebook usage:
+ - Sets plotly renderer to 'notebook' for inline display
+ - Disables automatic plot.show() calls (notebooks display via _repr_html_)
+ - Enables SUCCESS-level console logging
+ - Disables solver console output (too verbose for notebooks)
+
+ Examples:
+ ```python
+ # At the start of your notebook
+ import flixopt as fx
+
+ fx.CONFIG.notebook()
+
+ # Now plots display inline automatically
+ flow_system.statistics.plot.balance('Heat') # Displays inline
+ ```
+ """
+ import plotly.io as pio
+
+ # Set plotly to render inline in notebooks
+ pio.renderers.default = 'notebook'
+ pio.templates.default = 'plotly_white'
+
+ # Disable default show since notebooks render via _repr_html_
+ cls.Plotting.default_show = False
+
+ # Light logging - SUCCESS level without too much noise
+ cls.Logging.enable_console('SUCCESS')
+
+ # Disable solver console output (too verbose for notebooks)
+ cls.Solving.log_to_console = False
+ cls.Solving.log_main_results = True
+
+ return cls
+
@classmethod
def load_from_file(cls, config_file: str | Path) -> type[CONFIG]:
"""Load configuration from YAML file and apply it.
diff --git a/flixopt/effects.py b/flixopt/effects.py
index 5dd53258f..cdac7ca7d 100644
--- a/flixopt/effects.py
+++ b/flixopt/effects.py
@@ -187,7 +187,7 @@ def __init__(
self,
label: str,
unit: str,
- description: str,
+ description: str = '',
meta_data: dict | None = None,
is_standard: bool = False,
is_objective: bool = False,
@@ -237,50 +237,56 @@ def __init__(
self.minimum_over_periods = minimum_over_periods
self.maximum_over_periods = maximum_over_periods
- def transform_data(self, name_prefix: str = '') -> None:
- prefix = '|'.join(filter(None, [name_prefix, self.label_full]))
- self.minimum_per_hour = self._fit_coords(f'{prefix}|minimum_per_hour', self.minimum_per_hour)
- self.maximum_per_hour = self._fit_coords(f'{prefix}|maximum_per_hour', self.maximum_per_hour)
+ def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
+ """Link this effect to a FlowSystem.
+
+ Elements use their label_full as prefix by default, ignoring the passed prefix.
+ """
+ super().link_to_flow_system(flow_system, self.label_full)
+
+ def transform_data(self) -> None:
+ self.minimum_per_hour = self._fit_coords(f'{self.prefix}|minimum_per_hour', self.minimum_per_hour)
+ self.maximum_per_hour = self._fit_coords(f'{self.prefix}|maximum_per_hour', self.maximum_per_hour)
self.share_from_temporal = self._fit_effect_coords(
prefix=None,
effect_values=self.share_from_temporal,
- suffix=f'(temporal)->{prefix}(temporal)',
+ suffix=f'(temporal)->{self.prefix}(temporal)',
dims=['time', 'period', 'scenario'],
)
self.share_from_periodic = self._fit_effect_coords(
prefix=None,
effect_values=self.share_from_periodic,
- suffix=f'(periodic)->{prefix}(periodic)',
+ suffix=f'(periodic)->{self.prefix}(periodic)',
dims=['period', 'scenario'],
)
self.minimum_temporal = self._fit_coords(
- f'{prefix}|minimum_temporal', self.minimum_temporal, dims=['period', 'scenario']
+ f'{self.prefix}|minimum_temporal', self.minimum_temporal, dims=['period', 'scenario']
)
self.maximum_temporal = self._fit_coords(
- f'{prefix}|maximum_temporal', self.maximum_temporal, dims=['period', 'scenario']
+ f'{self.prefix}|maximum_temporal', self.maximum_temporal, dims=['period', 'scenario']
)
self.minimum_periodic = self._fit_coords(
- f'{prefix}|minimum_periodic', self.minimum_periodic, dims=['period', 'scenario']
+ f'{self.prefix}|minimum_periodic', self.minimum_periodic, dims=['period', 'scenario']
)
self.maximum_periodic = self._fit_coords(
- f'{prefix}|maximum_periodic', self.maximum_periodic, dims=['period', 'scenario']
+ f'{self.prefix}|maximum_periodic', self.maximum_periodic, dims=['period', 'scenario']
)
self.minimum_total = self._fit_coords(
- f'{prefix}|minimum_total', self.minimum_total, dims=['period', 'scenario']
+ f'{self.prefix}|minimum_total', self.minimum_total, dims=['period', 'scenario']
)
self.maximum_total = self._fit_coords(
- f'{prefix}|maximum_total', self.maximum_total, dims=['period', 'scenario']
+ f'{self.prefix}|maximum_total', self.maximum_total, dims=['period', 'scenario']
)
self.minimum_over_periods = self._fit_coords(
- f'{prefix}|minimum_over_periods', self.minimum_over_periods, dims=['scenario']
+ f'{self.prefix}|minimum_over_periods', self.minimum_over_periods, dims=['scenario']
)
self.maximum_over_periods = self._fit_coords(
- f'{prefix}|maximum_over_periods', self.maximum_over_periods, dims=['scenario']
+ f'{self.prefix}|maximum_over_periods', self.maximum_over_periods, dims=['scenario']
)
self.period_weights = self._fit_coords(
- f'{prefix}|period_weights', self.period_weights, dims=['period', 'scenario']
+ f'{self.prefix}|period_weights', self.period_weights, dims=['period', 'scenario']
)
def create_model(self, model: FlowSystemModel) -> EffectModel:
@@ -670,7 +676,7 @@ def _do_modeling(self):
penalty_effect = self.effects._create_penalty_effect()
# Link to FlowSystem (should already be linked, but ensure it)
if penalty_effect._flow_system is None:
- penalty_effect._set_flow_system(self._model.flow_system)
+ penalty_effect.link_to_flow_system(self._model.flow_system)
# Create EffectModel for each effect
for effect in self.effects.values():
diff --git a/flixopt/elements.py b/flixopt/elements.py
index 74ed7bde4..2933eb95a 100644
--- a/flixopt/elements.py
+++ b/flixopt/elements.py
@@ -20,7 +20,6 @@
Element,
ElementModel,
FlowSystemModel,
- Interface,
register_class_for_io,
)
@@ -93,8 +92,9 @@ def __init__(
status_parameters: StatusParameters | None = None,
prevent_simultaneous_flows: list[Flow] | None = None,
meta_data: dict | None = None,
+ color: str | None = None,
):
- super().__init__(label, meta_data=meta_data)
+ super().__init__(label, meta_data=meta_data, color=color)
self.inputs: list[Flow] = inputs or []
self.outputs: list[Flow] = outputs or []
self.status_parameters = status_parameters
@@ -110,21 +110,23 @@ def create_model(self, model: FlowSystemModel) -> ComponentModel:
self.submodel = ComponentModel(model, self)
return self.submodel
- def _set_flow_system(self, flow_system) -> None:
- """Propagate flow_system reference to nested Interface objects and flows."""
- super()._set_flow_system(flow_system)
+ def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
+ """Propagate flow_system reference to nested Interface objects and flows.
+
+ Elements use their label_full as prefix by default, ignoring the passed prefix.
+ """
+ super().link_to_flow_system(flow_system, self.label_full)
if self.status_parameters is not None:
- self.status_parameters._set_flow_system(flow_system)
+ self.status_parameters.link_to_flow_system(flow_system, self._sub_prefix('status_parameters'))
for flow in self.inputs + self.outputs:
- flow._set_flow_system(flow_system)
+ flow.link_to_flow_system(flow_system)
- def transform_data(self, name_prefix: str = '') -> None:
- prefix = '|'.join(filter(None, [name_prefix, self.label_full]))
+ def transform_data(self) -> None:
if self.status_parameters is not None:
- self.status_parameters.transform_data(prefix)
+ self.status_parameters.transform_data()
for flow in self.inputs + self.outputs:
- flow.transform_data() # Flow doesnt need the name_prefix
+ flow.transform_data()
def _check_unique_flow_labels(self):
all_flow_labels = [flow.label for flow in self.inputs + self.outputs]
@@ -136,6 +138,17 @@ def _check_unique_flow_labels(self):
def _plausibility_checks(self) -> None:
self._check_unique_flow_labels()
+ # Component with status_parameters requires all flows to have sizes set
+ # (status_parameters are propagated to flows in _do_modeling, which need sizes for big-M constraints)
+ if self.status_parameters is not None:
+ flows_without_size = [flow.label for flow in self.inputs + self.outputs if flow.size is None]
+ if flows_without_size:
+ raise PlausibilityError(
+ f'Component "{self.label_full}" has status_parameters, but the following flows have no size: '
+ f'{flows_without_size}. All flows need explicit sizes when the component uses status_parameters '
+ f'(required for big-M constraints).'
+ )
+
def _connect_flows(self):
# Inputs
for flow in self.inputs:
@@ -194,6 +207,9 @@ class Bus(Element):
Args:
label: The label of the Element. Used to identify it in the FlowSystem.
+ carrier: Name of the energy/material carrier type (e.g., 'electricity', 'heat', 'gas').
+ Carriers are registered via ``flow_system.add_carrier()`` or available as
+ predefined defaults in CONFIG.Carriers. Used for automatic color assignment in plots.
imbalance_penalty_per_flow_hour: Penalty costs for bus balance violations.
When None (default), no imbalance is allowed (hard constraint). When set to a
value > 0, allows bus imbalances at penalty cost.
@@ -201,30 +217,30 @@ class Bus(Element):
in results. Only use Python native types.
Examples:
- Electrical bus with strict balance:
+ Using predefined carrier names:
```python
- electricity_bus = Bus(
- label='main_electrical_bus',
- imbalance_penalty_per_flow_hour=None, # No imbalance allowed
- )
+ electricity_bus = Bus(label='main_grid', carrier='electricity')
+ heat_bus = Bus(label='district_heating', carrier='heat')
```
- Heat network with penalty for imbalances:
+ Registering custom carriers on FlowSystem:
```python
- heat_network = Bus(
- label='district_heating_network',
- imbalance_penalty_per_flow_hour=1000, # €1000/MWh penalty for imbalance
- )
+ import flixopt as fx
+
+ fs = fx.FlowSystem(timesteps)
+ fs.add_carrier(fx.Carrier('biogas', '#228B22', 'kW'))
+ biogas_bus = fx.Bus(label='biogas_network', carrier='biogas')
```
- Material flow with time-varying penalties:
+ Heat network with penalty for imbalances:
```python
- material_hub = Bus(
- label='material_processing_hub',
- imbalance_penalty_per_flow_hour=waste_disposal_costs, # Time series
+ heat_bus = Bus(
+ label='district_heating',
+ carrier='heat',
+ imbalance_penalty_per_flow_hour=1000,
)
```
@@ -245,6 +261,7 @@ class Bus(Element):
def __init__(
self,
label: str,
+ carrier: str | None = None,
imbalance_penalty_per_flow_hour: Numeric_TPS | None = None,
meta_data: dict | None = None,
**kwargs,
@@ -254,6 +271,7 @@ def __init__(
kwargs, 'excess_penalty_per_flow_hour', 'imbalance_penalty_per_flow_hour', imbalance_penalty_per_flow_hour
)
self._validate_kwargs(kwargs)
+ self.carrier = carrier.lower() if carrier else None # Store as lowercase string
self.imbalance_penalty_per_flow_hour = imbalance_penalty_per_flow_hour
self.inputs: list[Flow] = []
self.outputs: list[Flow] = []
@@ -263,16 +281,18 @@ def create_model(self, model: FlowSystemModel) -> BusModel:
self.submodel = BusModel(model, self)
return self.submodel
- def _set_flow_system(self, flow_system) -> None:
- """Propagate flow_system reference to nested flows."""
- super()._set_flow_system(flow_system)
+ def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
+ """Propagate flow_system reference to nested flows.
+
+ Elements use their label_full as prefix by default, ignoring the passed prefix.
+ """
+ super().link_to_flow_system(flow_system, self.label_full)
for flow in self.inputs + self.outputs:
- flow._set_flow_system(flow_system)
+ flow.link_to_flow_system(flow_system)
- def transform_data(self, name_prefix: str = '') -> None:
- prefix = '|'.join(filter(None, [name_prefix, self.label_full]))
+ def transform_data(self) -> None:
self.imbalance_penalty_per_flow_hour = self._fit_coords(
- f'{prefix}|imbalance_penalty_per_flow_hour', self.imbalance_penalty_per_flow_hour
+ f'{self.prefix}|imbalance_penalty_per_flow_hour', self.imbalance_penalty_per_flow_hour
)
def _plausibility_checks(self) -> None:
@@ -335,7 +355,7 @@ class Flow(Element):
Args:
label: Unique flow identifier within its component.
bus: Bus label this flow connects to.
- size: Flow capacity. Scalar, InvestParameters, or None (uses CONFIG.Modeling.big).
+ size: Flow capacity. Scalar, InvestParameters, or None (unbounded).
relative_minimum: Minimum flow rate as fraction of size (0-1). Default: 0.
relative_maximum: Maximum flow rate as fraction of size. Default: 1.
load_factor_min: Minimum average utilization (0-1). Default: 0.
@@ -436,7 +456,8 @@ class Flow(Element):
`relative_maximum` for upper bounds on optimization variables.
Notes:
- - Default size (CONFIG.Modeling.big) is used when size=None
+ - size=None means unbounded (no capacity constraint)
+ - size must be set when using status_parameters or fixed_relative_profile
- list inputs for previous_flow_rate are converted to NumPy arrays
- Flow direction is determined by component input/output designation
@@ -451,7 +472,7 @@ def __init__(
self,
label: str,
bus: str,
- size: Numeric_PS | InvestParameters = None,
+ size: Numeric_PS | InvestParameters | None = None,
fixed_relative_profile: Numeric_TPS | None = None,
relative_minimum: Numeric_TPS = 0,
relative_maximum: Numeric_TPS = 1,
@@ -467,7 +488,7 @@ def __init__(
meta_data: dict | None = None,
):
super().__init__(label, meta_data=meta_data)
- self.size = CONFIG.Modeling.big if size is None else size
+ self.size = size
self.relative_minimum = relative_minimum
self.relative_maximum = relative_maximum
self.fixed_relative_profile = fixed_relative_profile
@@ -499,58 +520,92 @@ def create_model(self, model: FlowSystemModel) -> FlowModel:
self.submodel = FlowModel(model, self)
return self.submodel
- def _set_flow_system(self, flow_system) -> None:
- """Propagate flow_system reference to nested Interface objects."""
- super()._set_flow_system(flow_system)
+ def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
+ """Propagate flow_system reference to nested Interface objects.
+
+ Elements use their label_full as prefix by default, ignoring the passed prefix.
+ """
+ super().link_to_flow_system(flow_system, self.label_full)
if self.status_parameters is not None:
- self.status_parameters._set_flow_system(flow_system)
- if isinstance(self.size, Interface):
- self.size._set_flow_system(flow_system)
-
- def transform_data(self, name_prefix: str = '') -> None:
- prefix = '|'.join(filter(None, [name_prefix, self.label_full]))
- self.relative_minimum = self._fit_coords(f'{prefix}|relative_minimum', self.relative_minimum)
- self.relative_maximum = self._fit_coords(f'{prefix}|relative_maximum', self.relative_maximum)
- self.fixed_relative_profile = self._fit_coords(f'{prefix}|fixed_relative_profile', self.fixed_relative_profile)
- self.effects_per_flow_hour = self._fit_effect_coords(prefix, self.effects_per_flow_hour, 'per_flow_hour')
+ self.status_parameters.link_to_flow_system(flow_system, self._sub_prefix('status_parameters'))
+ if isinstance(self.size, InvestParameters):
+ self.size.link_to_flow_system(flow_system, self._sub_prefix('InvestParameters'))
+
+ def transform_data(self) -> None:
+ self.relative_minimum = self._fit_coords(f'{self.prefix}|relative_minimum', self.relative_minimum)
+ self.relative_maximum = self._fit_coords(f'{self.prefix}|relative_maximum', self.relative_maximum)
+ self.fixed_relative_profile = self._fit_coords(
+ f'{self.prefix}|fixed_relative_profile', self.fixed_relative_profile
+ )
+ self.effects_per_flow_hour = self._fit_effect_coords(self.prefix, self.effects_per_flow_hour, 'per_flow_hour')
self.flow_hours_max = self._fit_coords(
- f'{prefix}|flow_hours_max', self.flow_hours_max, dims=['period', 'scenario']
+ f'{self.prefix}|flow_hours_max', self.flow_hours_max, dims=['period', 'scenario']
)
self.flow_hours_min = self._fit_coords(
- f'{prefix}|flow_hours_min', self.flow_hours_min, dims=['period', 'scenario']
+ f'{self.prefix}|flow_hours_min', self.flow_hours_min, dims=['period', 'scenario']
)
self.flow_hours_max_over_periods = self._fit_coords(
- f'{prefix}|flow_hours_max_over_periods', self.flow_hours_max_over_periods, dims=['scenario']
+ f'{self.prefix}|flow_hours_max_over_periods', self.flow_hours_max_over_periods, dims=['scenario']
)
self.flow_hours_min_over_periods = self._fit_coords(
- f'{prefix}|flow_hours_min_over_periods', self.flow_hours_min_over_periods, dims=['scenario']
+ f'{self.prefix}|flow_hours_min_over_periods', self.flow_hours_min_over_periods, dims=['scenario']
)
self.load_factor_max = self._fit_coords(
- f'{prefix}|load_factor_max', self.load_factor_max, dims=['period', 'scenario']
+ f'{self.prefix}|load_factor_max', self.load_factor_max, dims=['period', 'scenario']
)
self.load_factor_min = self._fit_coords(
- f'{prefix}|load_factor_min', self.load_factor_min, dims=['period', 'scenario']
+ f'{self.prefix}|load_factor_min', self.load_factor_min, dims=['period', 'scenario']
)
if self.status_parameters is not None:
- self.status_parameters.transform_data(prefix)
+ self.status_parameters.transform_data()
if isinstance(self.size, InvestParameters):
- self.size.transform_data(prefix)
- else:
- self.size = self._fit_coords(f'{prefix}|size', self.size, dims=['period', 'scenario'])
+ self.size.transform_data()
+ elif self.size is not None:
+ self.size = self._fit_coords(f'{self.prefix}|size', self.size, dims=['period', 'scenario'])
def _plausibility_checks(self) -> None:
# TODO: Incorporate into Variable? (Lower_bound can not be greater than upper bound
if (self.relative_minimum > self.relative_maximum).any():
raise PlausibilityError(self.label_full + ': Take care, that relative_minimum <= relative_maximum!')
- if not isinstance(self.size, InvestParameters) and (
- np.any(self.size == CONFIG.Modeling.big) and self.fixed_relative_profile is not None
- ): # Default Size --> Most likely by accident
- logger.warning(
- f'Flow "{self.label_full}" has no size assigned, but a "fixed_relative_profile". '
- f'The default size is {CONFIG.Modeling.big}. As "flow_rate = size * fixed_relative_profile", '
- f'the resulting flow_rate will be very high. To fix this, assign a size to the Flow {self}.'
+ # Size is required when using StatusParameters (for big-M constraints)
+ if self.status_parameters is not None and self.size is None:
+ raise PlausibilityError(
+ f'Flow "{self.label_full}" has status_parameters but no size defined. '
+ f'A size is required when using status_parameters to bound the flow rate.'
+ )
+
+ if self.size is None and self.fixed_relative_profile is not None:
+ raise PlausibilityError(
+ f'Flow "{self.label_full}" has a fixed_relative_profile but no size defined. '
+ f'A size is required because flow_rate = size * fixed_relative_profile.'
+ )
+
+ # Size is required when using non-default relative bounds (flow_rate = size * relative_bound)
+ if self.size is None and np.any(self.relative_minimum > 0):
+ raise PlausibilityError(
+ f'Flow "{self.label_full}" has relative_minimum > 0 but no size defined. '
+ f'A size is required because the lower bound is size * relative_minimum.'
+ )
+
+ if self.size is None and np.any(self.relative_maximum < 1):
+ raise PlausibilityError(
+ f'Flow "{self.label_full}" has relative_maximum < 1 but no size defined. '
+ f'A size is required because the upper bound is size * relative_maximum.'
+ )
+
+ # Size is required for load factor constraints (total_flow_hours / size)
+ if self.size is None and self.load_factor_min is not None:
+ raise PlausibilityError(
+ f'Flow "{self.label_full}" has load_factor_min but no size defined. '
+ f'A size is required because the constraint is total_flow_hours >= size * load_factor_min * hours.'
+ )
+
+ if self.size is None and self.load_factor_max is not None:
+ raise PlausibilityError(
+ f'Flow "{self.label_full}" has load_factor_max but no size defined. '
+ f'A size is required because the constraint is total_flow_hours <= size * load_factor_max * hours.'
)
if self.fixed_relative_profile is not None and self.status_parameters is not None:
@@ -816,15 +871,18 @@ def absolute_flow_rate_bounds(self) -> tuple[xr.DataArray, xr.DataArray]:
if not self.with_status:
if not self.with_investment:
# Basic case without investment and without Status
- lb = lb_relative * self.element.size
+ if self.element.size is not None:
+ lb = lb_relative * self.element.size
elif self.with_investment and self.element.size.mandatory:
# With mandatory Investment
lb = lb_relative * self.element.size.minimum_or_fixed_size
if self.with_investment:
ub = ub_relative * self.element.size.maximum_or_fixed_size
- else:
+ elif self.element.size is not None:
ub = ub_relative * self.element.size
+ else:
+ ub = np.inf # Unbounded when size is None
return lb, ub
@@ -949,11 +1007,17 @@ def _do_modeling(self):
for flow in all_flows:
if flow.status_parameters is None:
flow.status_parameters = StatusParameters()
+ flow.status_parameters.link_to_flow_system(
+ self._model.flow_system, f'{flow.label_full}|status_parameters'
+ )
if self.element.prevent_simultaneous_flows:
for flow in self.element.prevent_simultaneous_flows:
if flow.status_parameters is None:
flow.status_parameters = StatusParameters()
+ flow.status_parameters.link_to_flow_system(
+ self._model.flow_system, f'{flow.label_full}|status_parameters'
+ )
# Create FlowModels (which creates their variables and constraints)
for flow in all_flows:
diff --git a/flixopt/features.py b/flixopt/features.py
index cd9e07151..4dfe48964 100644
--- a/flixopt/features.py
+++ b/flixopt/features.py
@@ -156,7 +156,7 @@ class StatusModel(Submodel):
state transitions, duration tracking, and operational effects.
Mathematical Formulation:
- See
+ See
"""
def __init__(
@@ -345,7 +345,7 @@ def _get_previous_downtime(self):
if self._previous_status is None:
return hours_per_step
else:
- return ModelingUtilities.compute_consecutive_hours_in_state(self._previous_status * -1 + 1, hours_per_step)
+ return ModelingUtilities.compute_consecutive_hours_in_state(1 - self._previous_status, hours_per_step)
class PieceModel(Submodel):
diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py
index 9015de3e4..1e0503e52 100644
--- a/flixopt/flow_system.py
+++ b/flixopt/flow_system.py
@@ -4,9 +4,10 @@
from __future__ import annotations
+import json
import logging
+import pathlib
import warnings
-from collections import defaultdict
from itertools import chain
from typing import TYPE_CHECKING, Any, Literal
@@ -15,7 +16,8 @@
import xarray as xr
from . import io as fx_io
-from .config import CONFIG
+from .components import Storage
+from .config import CONFIG, DEPRECATION_REMOVAL_VERSION
from .core import (
ConversionError,
DataConverter,
@@ -24,16 +26,22 @@
)
from .effects import Effect, EffectCollection
from .elements import Bus, Component, Flow
+from .optimize_accessor import OptimizeAccessor
+from .statistics_accessor import StatisticsAccessor
from .structure import CompositeContainerMixin, Element, ElementContainer, FlowSystemModel, Interface
+from .topology_accessor import TopologyAccessor
+from .transform_accessor import TransformAccessor
if TYPE_CHECKING:
- import pathlib
from collections.abc import Collection
import pyvis
+ from .solvers import _Solver
from .types import Effect_TPS, Numeric_S, Numeric_TPS, NumericOrBool
+from .carrier import Carrier, CarrierContainer
+
logger = logging.getLogger('flixopt')
@@ -163,6 +171,7 @@ def __init__(
scenario_weights: Numeric_S | None = None,
scenario_independent_sizes: bool | list[str] = True,
scenario_independent_flow_rates: bool | list[str] = False,
+ name: str | None = None,
):
self.timesteps = self._validate_timesteps(timesteps)
@@ -202,10 +211,31 @@ def __init__(
self._network_app = None
self._flows_cache: ElementContainer[Flow] | None = None
+ # Solution dataset - populated after optimization or loaded from file
+ self._solution: xr.Dataset | None = None
+
+ # Clustering info - populated by transform.cluster()
+ self._clustering_info: dict | None = None
+
+ # Statistics accessor cache - lazily initialized, invalidated on new solution
+ self._statistics: StatisticsAccessor | None = None
+
+ # Topology accessor cache - lazily initialized, invalidated on structure change
+ self._topology: TopologyAccessor | None = None
+
+ # Carrier container - local carriers override CONFIG.Carriers
+ self._carriers: CarrierContainer = CarrierContainer()
+
+ # Cached flow→carrier mapping (built lazily after connect_and_transform)
+ self._flow_carriers: dict[str, str] | None = None
+
# Use properties to validate and store scenario dimension settings
self.scenario_independent_sizes = scenario_independent_sizes
self.scenario_independent_flow_rates = scenario_independent_flow_rates
+ # Optional name for identification (derived from filename on load)
+ self.name = name
+
@staticmethod
def _validate_timesteps(timesteps: pd.DatetimeIndex) -> pd.DatetimeIndex:
"""Validate timesteps format and rename if needed."""
@@ -484,6 +514,28 @@ def _update_period_metadata(
return dataset
+ @classmethod
+ def _update_scenario_metadata(cls, dataset: xr.Dataset) -> xr.Dataset:
+ """
+ Update scenario-related attributes and data variables in dataset based on its scenario index.
+
+ Removes scenario weights when they no longer apply. This ensures scenario metadata stays synchronized
+ with the actual scenarios after operations like selection.
+
+ This is analogous to _update_period_metadata() for time-related metadata.
+
+ Args:
+ dataset: Dataset to update (will be modified in place)
+
+ Returns:
+ The same dataset with updated scenario-related attributes and data variables
+ """
+ new_scenario_index = dataset.indexes.get('scenario')
+ if new_scenario_index is None or len(new_scenario_index) <= 1:
+ dataset.attrs.pop('scenario_weights', None)
+
+ return dataset
+
def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]:
"""
Override Interface method to handle FlowSystem-specific serialization.
@@ -529,6 +581,11 @@ def to_dataset(self) -> xr.Dataset:
Convert the FlowSystem to an xarray Dataset.
Ensures FlowSystem is connected before serialization.
+ If a solution is present, it will be included in the dataset with variable names
+ prefixed by 'solution|' to avoid conflicts with FlowSystem configuration variables.
+ Solution time coordinates are renamed to 'solution_time' to preserve them
+ independently of the FlowSystem's time coordinates.
+
Returns:
xr.Dataset: Dataset containing all DataArrays with structure in attributes
"""
@@ -536,7 +593,34 @@ def to_dataset(self) -> xr.Dataset:
logger.warning('FlowSystem is not connected_and_transformed. Connecting and transforming data now.')
self.connect_and_transform()
- return super().to_dataset()
+ ds = super().to_dataset()
+
+ # Include solution data if present
+ if self.solution is not None:
+ # Rename 'time' to 'solution_time' in solution variables to preserve full solution
+ # (linopy solution may have extra timesteps, e.g., for final charge states)
+ solution_renamed = (
+ self.solution.rename({'time': 'solution_time'}) if 'time' in self.solution.dims else self.solution
+ )
+ # Add solution variables with 'solution|' prefix to avoid conflicts
+ solution_vars = {f'solution|{name}': var for name, var in solution_renamed.data_vars.items()}
+ ds = ds.assign(solution_vars)
+ # Also add the solution_time coordinate if it exists
+ if 'solution_time' in solution_renamed.coords:
+ ds = ds.assign_coords(solution_time=solution_renamed.coords['solution_time'])
+ ds.attrs['has_solution'] = True
+ else:
+ ds.attrs['has_solution'] = False
+
+ # Include carriers if any are registered
+ if self._carriers:
+ carriers_structure = {}
+ for name, carrier in self._carriers.items():
+ carrier_ref, _ = carrier._create_reference_structure()
+ carriers_structure[name] = carrier_ref
+ ds.attrs['carriers'] = json.dumps(carriers_structure)
+
+ return ds
@classmethod
def from_dataset(cls, ds: xr.Dataset) -> FlowSystem:
@@ -544,6 +628,10 @@ def from_dataset(cls, ds: xr.Dataset) -> FlowSystem:
Create a FlowSystem from an xarray Dataset.
Handles FlowSystem-specific reconstruction logic.
+ If the dataset contains solution data (variables prefixed with 'solution|'),
+ the solution will be restored to the FlowSystem. Solution time coordinates
+ are renamed back from 'solution_time' to 'time'.
+
Args:
ds: Dataset containing the FlowSystem data
@@ -553,8 +641,20 @@ def from_dataset(cls, ds: xr.Dataset) -> FlowSystem:
# Get the reference structure from attrs
reference_structure = dict(ds.attrs)
- # Create arrays dictionary from dataset variables
- arrays_dict = {name: array for name, array in ds.data_vars.items()}
+ # Separate solution variables from config variables
+ solution_prefix = 'solution|'
+ solution_vars = {}
+ config_vars = {}
+ for name, array in ds.data_vars.items():
+ if name.startswith(solution_prefix):
+ # Remove prefix for solution dataset
+ original_name = name[len(solution_prefix) :]
+ solution_vars[original_name] = array
+ else:
+ config_vars[name] = array
+
+ # Create arrays dictionary from config variables only
+ arrays_dict = config_vars
# Create FlowSystem instance with constructor parameters
flow_system = cls(
@@ -569,6 +669,7 @@ def from_dataset(cls, ds: xr.Dataset) -> FlowSystem:
else None,
scenario_independent_sizes=reference_structure.get('scenario_independent_sizes', True),
scenario_independent_flow_rates=reference_structure.get('scenario_independent_flow_rates', False),
+ name=reference_structure.get('name'),
)
# Restore components
@@ -595,24 +696,199 @@ def from_dataset(cls, ds: xr.Dataset) -> FlowSystem:
logger.critical(f'Restoring effect {effect_label} failed.')
flow_system._add_effects(effect)
+ # Restore solution if present
+ if reference_structure.get('has_solution', False) and solution_vars:
+ solution_ds = xr.Dataset(solution_vars)
+ # Rename 'solution_time' back to 'time' if present
+ if 'solution_time' in solution_ds.dims:
+ solution_ds = solution_ds.rename({'solution_time': 'time'})
+ flow_system.solution = solution_ds
+
+ # Restore carriers if present
+ if 'carriers' in reference_structure:
+ carriers_structure = json.loads(reference_structure['carriers'])
+ for carrier_data in carriers_structure.values():
+ carrier = cls._resolve_reference_structure(carrier_data, {})
+ flow_system._carriers.add(carrier)
+
+ # Reconnect network to populate bus inputs/outputs (not stored in NetCDF).
+ flow_system.connect_and_transform()
+
return flow_system
- def to_netcdf(self, path: str | pathlib.Path, compression: int = 0):
+ def to_netcdf(self, path: str | pathlib.Path, compression: int = 5, overwrite: bool = False):
"""
Save the FlowSystem to a NetCDF file.
Ensures FlowSystem is connected before saving.
+ The FlowSystem's name is automatically set from the filename
+ (without extension) when saving.
+
Args:
- path: The path to the netCDF file.
- compression: The compression level to use when saving the file.
+ path: The path to the netCDF file. Parent directories are created if they don't exist.
+ compression: The compression level to use when saving the file (0-9).
+ overwrite: If True, overwrite existing file. If False, raise error if file exists.
+
+ Raises:
+ FileExistsError: If overwrite=False and file already exists.
"""
if not self.connected_and_transformed:
logger.warning('FlowSystem is not connected. Calling connect_and_transform() now.')
self.connect_and_transform()
- super().to_netcdf(path, compression)
+ path = pathlib.Path(path)
+ # Set name from filename (without extension)
+ self.name = path.stem
+
+ super().to_netcdf(path, compression, overwrite)
logger.info(f'Saved FlowSystem to {path}')
+ @classmethod
+ def from_netcdf(cls, path: str | pathlib.Path) -> FlowSystem:
+ """
+ Load a FlowSystem from a NetCDF file.
+
+ The FlowSystem's name is automatically derived from the filename
+ (without extension), overriding any name that may have been stored.
+
+ Args:
+ path: Path to the NetCDF file
+
+ Returns:
+ FlowSystem instance with name set from filename
+ """
+ path = pathlib.Path(path)
+ flow_system = super().from_netcdf(path)
+ # Derive name from filename (without extension)
+ flow_system.name = path.stem
+ return flow_system
+
+ @classmethod
+ def from_old_results(cls, folder: str | pathlib.Path, name: str) -> FlowSystem:
+ """
+ Load a FlowSystem from old-format Results files (pre-v5 API).
+
+ This method loads results saved with the deprecated Results API
+ (which used multiple files: ``*--flow_system.nc4``, ``*--solution.nc4``)
+ and converts them to a FlowSystem with the solution attached.
+
+ The method performs the following:
+
+ - Loads the old multi-file format
+ - Renames deprecated parameters in the FlowSystem structure
+ (e.g., ``on_off_parameters`` → ``status_parameters``)
+ - Attaches the solution data to the FlowSystem
+
+ Args:
+ folder: Directory containing the saved result files
+ name: Base name of the saved files (without extensions)
+
+ Returns:
+ FlowSystem instance with solution attached
+
+ Warning:
+ This is a best-effort migration for accessing old results:
+
+ - **Solution variable names are NOT renamed** - only basic variables
+ work (flow rates, sizes, charge states, effect totals)
+ - Advanced variable access may require using the original names
+ - Summary metadata (solver info, timing) is not loaded
+
+ For full compatibility, re-run optimizations with the new API.
+
+ Examples:
+ ```python
+ # Load old results
+ fs = FlowSystem.from_old_results('results_folder', 'my_optimization')
+
+ # Access basic solution data
+ fs.solution['Boiler(Q_th)|flow_rate'].plot()
+
+ # Save in new single-file format
+ fs.to_netcdf('my_optimization.nc')
+ ```
+
+ Deprecated:
+ This method will be removed in v6.
+ """
+ warnings.warn(
+ f'from_old_results() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
+ 'This utility is only for migrating results from flixopt versions before v5.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ import json
+
+ from flixopt.io import convert_old_dataset, load_dataset_from_netcdf
+
+ folder = pathlib.Path(folder)
+
+ # Load datasets directly (old format used --flow_system.nc4 and --solution.nc4)
+ flow_system_path = folder / f'{name}--flow_system.nc4'
+ solution_path = folder / f'{name}--solution.nc4'
+
+ flow_system_data = load_dataset_from_netcdf(flow_system_path)
+ solution = load_dataset_from_netcdf(solution_path)
+
+ # Convert flow_system_data to new parameter names
+ convert_old_dataset(flow_system_data)
+
+ # Reconstruct FlowSystem
+ flow_system = cls.from_dataset(flow_system_data)
+ flow_system.name = name
+
+ # Attach solution (convert attrs from dicts to JSON strings for consistency)
+ for key in ['Components', 'Buses', 'Effects', 'Flows']:
+ if key in solution.attrs and isinstance(solution.attrs[key], dict):
+ solution.attrs[key] = json.dumps(solution.attrs[key])
+ flow_system.solution = solution
+
+ return flow_system
+
+ def copy(self) -> FlowSystem:
+ """Create a copy of the FlowSystem without optimization state.
+
+ Creates a new FlowSystem with copies of all elements, but without:
+ - The solution dataset
+ - The optimization model
+ - Element submodels and variable/constraint names
+
+ This is useful for creating variations of a FlowSystem for different
+ optimization scenarios without affecting the original.
+
+ Returns:
+ A new FlowSystem instance that can be modified and optimized independently.
+
+ Examples:
+ >>> original = FlowSystem(timesteps)
+ >>> original.add_elements(boiler, bus)
+ >>> original.optimize(solver) # Original now has solution
+ >>>
+ >>> # Create a copy to try different parameters
+ >>> variant = original.copy() # No solution, can be modified
+ >>> variant.add_elements(new_component)
+ >>> variant.optimize(solver)
+ """
+ # Temporarily clear solution to use standard serialization without solution data
+ original_solution = self._solution
+ self._solution = None
+ try:
+ ds = self.to_dataset()
+ finally:
+ self._solution = original_solution
+
+ # Create new FlowSystem from dataset (without solution)
+ new_fs = FlowSystem.from_dataset(ds.copy(deep=True))
+ return new_fs
+
+ def __copy__(self):
+ """Support for copy.copy()."""
+ return self.copy()
+
+ def __deepcopy__(self, memo):
+ """Support for copy.deepcopy()."""
+ return self.copy()
+
def get_structure(self, clean: bool = False, stats: bool = False) -> dict:
"""
Get FlowSystem structure.
@@ -710,12 +986,38 @@ def fit_effects_to_model_coords(
}
def connect_and_transform(self):
- """Transform data for all elements using the new simplified approach."""
+ """Connect the network and transform all element data to model coordinates.
+
+ This method performs the following steps:
+
+ 1. Connects flows to buses (establishing the network topology)
+ 2. Registers any missing carriers from CONFIG defaults
+ 3. Assigns colors to elements without explicit colors
+ 4. Transforms all element data to xarray DataArrays aligned with
+ FlowSystem coordinates (time, period, scenario)
+ 5. Validates system integrity
+
+ This is called automatically by :meth:`build_model` and :meth:`optimize`.
+
+ Warning:
+ After this method runs, element attributes (e.g., ``flow.size``,
+ ``flow.relative_minimum``) contain transformed xarray DataArrays,
+ not the original input values. If you modify element attributes after
+ transformation, call :meth:`invalidate` to ensure the changes take
+ effect on the next optimization.
+
+ Note:
+ This method is idempotent within a single model lifecycle - calling
+ it multiple times has no effect once ``connected_and_transformed``
+ is True. Use :meth:`invalidate` to reset this flag.
+ """
if self.connected_and_transformed:
logger.debug('FlowSystem already connected and transformed')
return
self._connect_network()
+ self._register_missing_carriers()
+ self._assign_element_colors()
for element in chain(self.components.values(), self.effects.values(), self.buses.values()):
element.transform_data()
@@ -724,6 +1026,40 @@ def connect_and_transform(self):
self._connected_and_transformed = True
+ def _register_missing_carriers(self) -> None:
+ """Auto-register carriers from CONFIG for buses that reference unregistered carriers."""
+ for bus in self.buses.values():
+ if bus.carrier and bus.carrier not in self._carriers:
+ # Try to get from CONFIG defaults
+ default_carrier = getattr(CONFIG.Carriers, bus.carrier, None)
+ if default_carrier is not None:
+ self._carriers[bus.carrier] = default_carrier
+ logger.debug(f"Auto-registered carrier '{bus.carrier}' from CONFIG")
+
+ def _assign_element_colors(self) -> None:
+ """Auto-assign colors to elements that don't have explicit colors set.
+
+ Components and buses without explicit colors are assigned colors from the
+ default qualitative colorscale. This ensures zero-config color support
+ while still allowing users to override with explicit colors.
+ """
+ from .color_processing import process_colors
+
+ # Collect elements without colors (components only - buses use carrier colors)
+ elements_without_colors = [comp.label for comp in self.components.values() if comp.color is None]
+
+ if not elements_without_colors:
+ return
+
+ # Generate colors from the default colorscale
+ colorscale = CONFIG.Plotting.default_qualitative_colorscale
+ color_mapping = process_colors(colorscale, elements_without_colors)
+
+ # Assign colors to elements
+ for label, color in color_mapping.items():
+ self.components[label].color = color
+ logger.debug(f"Auto-assigned color '{color}' to component '{label}'")
+
def add_elements(self, *elements: Element) -> None:
"""
Add Components(Storages, Boilers, Heatpumps, ...), Buses or Effects to the FlowSystem
@@ -731,13 +1067,25 @@ def add_elements(self, *elements: Element) -> None:
Args:
*elements: childs of Element like Boiler, HeatPump, Bus,...
modeling Elements
+
+ Raises:
+ RuntimeError: If the FlowSystem is locked (has a solution).
+ Call `reset()` to unlock it first.
"""
- if self.connected_and_transformed:
+ if self.is_locked:
+ raise RuntimeError(
+ 'Cannot add elements to a FlowSystem that has a solution. '
+ 'Call `reset()` first to clear the solution and allow modifications.'
+ )
+
+ if self.model is not None:
warnings.warn(
- 'You are adding elements to an already connected FlowSystem. This is not recommended (But it works).',
+ 'Adding elements to a FlowSystem with an existing model. The model will be invalidated.',
stacklevel=2,
)
- self._connected_and_transformed = False
+ # Always invalidate when adding elements to ensure new elements get transformed
+ if self.model is not None or self._connected_and_transformed:
+ self._invalidate_model()
for new_element in list(elements):
# Validate element type first
@@ -762,6 +1110,121 @@ def add_elements(self, *elements: Element) -> None:
element_type = type(new_element).__name__
logger.info(f'Registered new {element_type}: {new_element.label_full}')
+ def add_carriers(self, *carriers: Carrier) -> None:
+ """Register a custom carrier for this FlowSystem.
+
+ Custom carriers registered on the FlowSystem take precedence over
+ CONFIG.Carriers defaults when resolving colors and units for buses.
+
+ Args:
+ carriers: Carrier objects defining the carrier properties.
+
+ Raises:
+ RuntimeError: If the FlowSystem is locked (has a solution).
+ Call `reset()` to unlock it first.
+
+ Examples:
+ ```python
+ import flixopt as fx
+
+ fs = fx.FlowSystem(timesteps)
+
+ # Define and register custom carriers
+ biogas = fx.Carrier('biogas', '#228B22', 'kW', 'Biogas fuel')
+ fs.add_carriers(biogas)
+
+ # Now buses can reference this carrier by name
+ bus = fx.Bus('BioGasNetwork', carrier='biogas')
+ fs.add_elements(bus)
+
+ # The carrier color will be used in plots automatically
+ ```
+ """
+ if self.is_locked:
+ raise RuntimeError(
+ 'Cannot add carriers to a FlowSystem that has a solution. '
+ 'Call `reset()` first to clear the solution and allow modifications.'
+ )
+
+ if self.model is not None:
+ warnings.warn(
+ 'Adding carriers to a FlowSystem with an existing model. The model will be invalidated.',
+ stacklevel=2,
+ )
+ # Always invalidate when adding carriers to ensure proper re-transformation
+ if self.model is not None or self._connected_and_transformed:
+ self._invalidate_model()
+
+ for carrier in list(carriers):
+ if not isinstance(carrier, Carrier):
+ raise TypeError(f'Expected Carrier object, got {type(carrier)}')
+ self._carriers.add(carrier)
+ logger.debug(f'Adding carrier {carrier} to FlowSystem')
+
+ def get_carrier(self, label: str) -> Carrier | None:
+ """Get the carrier for a bus or flow.
+
+ Args:
+ label: Bus label (e.g., 'Fernwärme') or flow label (e.g., 'Boiler(Q_th)').
+
+ Returns:
+ Carrier or None if not found.
+
+ Note:
+ To access a carrier directly by name, use ``flow_system.carriers['electricity']``.
+
+ Raises:
+ RuntimeError: If FlowSystem is not connected_and_transformed.
+ """
+ if not self.connected_and_transformed:
+ raise RuntimeError(
+ 'FlowSystem is not connected_and_transformed. Call FlowSystem.connect_and_transform() first.'
+ )
+
+ # Try as bus label
+ bus = self.buses.get(label)
+ if bus and bus.carrier:
+ return self._carriers.get(bus.carrier.lower())
+
+ # Try as flow label
+ flow = self.flows.get(label)
+ if flow and flow.bus:
+ bus = self.buses.get(flow.bus)
+ if bus and bus.carrier:
+ return self._carriers.get(bus.carrier.lower())
+
+ return None
+
+ @property
+ def carriers(self) -> CarrierContainer:
+ """Carriers registered on this FlowSystem."""
+ return self._carriers
+
+ @property
+ def flow_carriers(self) -> dict[str, str]:
+ """Cached mapping of flow labels to carrier names.
+
+ Returns:
+ Dict mapping flow label to carrier name (lowercase).
+ Flows without a carrier are not included.
+
+ Raises:
+ RuntimeError: If FlowSystem is not connected_and_transformed.
+ """
+ if not self.connected_and_transformed:
+ raise RuntimeError(
+ 'FlowSystem is not connected_and_transformed. Call FlowSystem.connect_and_transform() first.'
+ )
+
+ if self._flow_carriers is None:
+ self._flow_carriers = {}
+ for flow_label, flow in self.flows.items():
+ bus = self.buses.get(flow.bus)
+ if bus and bus.carrier:
+ self._flow_carriers[flow_label] = bus.carrier.lower()
+
+ return self._flow_carriers
+
def create_model(self, normalize_weights: bool = True) -> FlowSystemModel:
"""
Create a linopy model from the FlowSystem.
@@ -777,124 +1240,399 @@ def create_model(self, normalize_weights: bool = True) -> FlowSystemModel:
self.model = FlowSystemModel(self, normalize_weights)
return self.model
- def plot_network(
- self,
- path: bool | str | pathlib.Path = 'flow_system.html',
- controls: bool
- | list[
- Literal['nodes', 'edges', 'layout', 'interaction', 'manipulation', 'physics', 'selection', 'renderer']
- ] = True,
- show: bool | None = None,
- ) -> pyvis.network.Network | None:
+ def build_model(self, normalize_weights: bool = True) -> FlowSystem:
"""
- Visualizes the network structure of a FlowSystem using PyVis, saving it as an interactive HTML file.
+ Build the optimization model for this FlowSystem.
+
+ This method prepares the FlowSystem for optimization by:
+ 1. Connecting and transforming all elements (if not already done)
+ 2. Creating the FlowSystemModel with all variables and constraints
+ 3. Adding clustering constraints (if this is a clustered FlowSystem)
+
+ After calling this method, `self.model` will be available for inspection
+ before solving.
Args:
- path: Path to save the HTML visualization.
- - `False`: Visualization is created but not saved.
- - `str` or `Path`: Specifies file path (default: 'flow_system.html').
- controls: UI controls to add to the visualization.
- - `True`: Enables all available controls.
- - `List`: Specify controls, e.g., ['nodes', 'layout'].
- - Options: 'nodes', 'edges', 'layout', 'interaction', 'manipulation', 'physics', 'selection', 'renderer'.
- show: Whether to open the visualization in the web browser.
+ normalize_weights: Whether to normalize scenario/period weights to sum to 1.
Returns:
- - 'pyvis.network.Network' | None: The `Network` instance representing the visualization, or `None` if `pyvis` is not installed.
+ Self, for method chaining.
Examples:
- >>> flow_system.plot_network()
- >>> flow_system.plot_network(show=False)
- >>> flow_system.plot_network(path='output/custom_network.html', controls=['nodes', 'layout'])
+ >>> flow_system.build_model()
+ >>> print(flow_system.model.variables) # Inspect variables before solving
+ >>> flow_system.solve(solver)
+ """
+ self.connect_and_transform()
+ self.create_model(normalize_weights)
+ self.model.do_modeling()
+
+ # Add clustering constraints if this is a clustered FlowSystem
+ if self._clustering_info is not None:
+ self._add_clustering_constraints()
+
+ return self
+
+ def _add_clustering_constraints(self) -> None:
+ """Add clustering constraints to the model."""
+ from .clustering import ClusteringModel
+
+ info = self._clustering_info
+ clustering_model = ClusteringModel(
+ model=self.model,
+ clustering_parameters=info['parameters'],
+ flow_system=self,
+ clustering_data=info['clustering'],
+ components_to_clusterize=info['components_to_clusterize'],
+ )
+ clustering_model.do_modeling()
- Notes:
- - This function requires `pyvis`. If not installed, the function prints a warning and returns `None`.
- - Nodes are styled based on type (e.g., circles for buses, boxes for components) and annotated with node information.
+ def solve(self, solver: _Solver) -> FlowSystem:
"""
- from . import plotting
+ Solve the optimization model and populate the solution.
- node_infos, edge_infos = self.network_infos()
- return plotting.plot_network(
- node_infos, edge_infos, path, controls, show if show is not None else CONFIG.Plotting.default_show
- )
+ This method solves the previously built model using the specified solver.
+ After solving, `self.solution` will contain the optimization results,
+ and each element's `.solution` property will provide access to its
+ specific variables.
+
+ Args:
+ solver: The solver to use (e.g., HighsSolver, GurobiSolver).
- def start_network_app(self):
- """Visualizes the network structure of a FlowSystem using Dash, Cytoscape, and networkx.
- Requires optional dependencies: dash, dash-cytoscape, dash-daq, networkx, flask, werkzeug.
+ Returns:
+ Self, for method chaining.
+
+ Raises:
+ RuntimeError: If the model has not been built yet (call build_model first).
+ RuntimeError: If the model is infeasible.
+
+ Examples:
+ >>> flow_system.build_model()
+ >>> flow_system.solve(HighsSolver())
+ >>> print(flow_system.solution)
"""
- from .network_app import DASH_CYTOSCAPE_AVAILABLE, VISUALIZATION_ERROR, flow_graph, shownetwork
+ if self.model is None:
+ raise RuntimeError('Model has not been built. Call build_model() first.')
- warnings.warn(
- 'The network visualization is still experimental and might change in the future.',
- stacklevel=2,
- category=UserWarning,
+ self.model.solve(
+ solver_name=solver.name,
+ **solver.options,
)
- if not DASH_CYTOSCAPE_AVAILABLE:
- raise ImportError(
- f'Network visualization requires optional dependencies. '
- f'Install with: `pip install flixopt[network_viz]`, `pip install flixopt[full]` '
- f'or: `pip install dash dash-cytoscape dash-daq networkx werkzeug`. '
- f'Original error: {VISUALIZATION_ERROR}'
- )
+ if 'infeasible' in self.model.termination_condition:
+ if CONFIG.Solving.compute_infeasibilities:
+ import io
+ from contextlib import redirect_stdout
+
+ f = io.StringIO()
+
+ # Redirect stdout to our buffer
+ with redirect_stdout(f):
+ self.model.print_infeasibilities()
+
+ infeasibilities = f.getvalue()
+ logger.error('Successfully extracted infeasibilities: \n%s', infeasibilities)
+ raise RuntimeError(f'Model was infeasible. Status: {self.model.status}. Check your constraints and bounds.')
+
+ # Store solution on FlowSystem for direct Element access
+ self.solution = self.model.solution
+
+ logger.info(f'Optimization solved successfully. Objective: {self.model.objective.value:.4f}')
+
+ return self
+
+ @property
+ def solution(self) -> xr.Dataset | None:
+ """
+ Access the optimization solution as an xarray Dataset.
+
+ The solution is indexed by ``timesteps_extra`` (the original timesteps plus
+ one additional timestep at the end). Variables that do not have data for the
+ extra timestep (most variables except storage charge states) will contain
+ NaN values at the final timestep.
+
+ Returns:
+ xr.Dataset: The solution dataset with all optimization variable results,
+ or None if the model hasn't been solved yet.
+
+ Example:
+ >>> flow_system.optimize(solver)
+ >>> flow_system.solution.isel(time=slice(None, -1)) # Exclude trailing NaN (and final charge states)
+ """
+ return self._solution
+
+ @solution.setter
+ def solution(self, value: xr.Dataset | None) -> None:
+ """Set the solution dataset and invalidate statistics cache."""
+ self._solution = value
+ self._statistics = None # Invalidate cached statistics
+
+ @property
+ def is_locked(self) -> bool:
+ """Check if the FlowSystem is locked (has a solution).
+
+ A locked FlowSystem cannot be modified. Use `reset()` to unlock it.
+ """
+ return self._solution is not None
+
+ def _invalidate_model(self) -> None:
+ """Invalidate the model and element submodels when structure changes.
+
+ This clears the model, resets the ``connected_and_transformed`` flag,
+ clears all element submodels and variable/constraint names, and invalidates
+ the topology accessor cache.
+
+ Called internally by :meth:`add_elements`, :meth:`add_carriers`,
+ :meth:`reset`, and :meth:`invalidate`.
+
+ See Also:
+ :meth:`invalidate`: Public method for manual invalidation.
+ :meth:`reset`: Clears solution and invalidates (for locked FlowSystems).
+ """
+ self.model = None
+ self._connected_and_transformed = False
+ self._topology = None # Invalidate topology accessor (and its cached colors)
+ for element in self.values():
+ element.submodel = None
+ element._variable_names = []
+ element._constraint_names = []
+
+ def reset(self) -> FlowSystem:
+ """Clear optimization state to allow modifications.
+
+ This method unlocks the FlowSystem by clearing:
+ - The solution dataset
+ - The optimization model
+ - All element submodels and variable/constraint names
+ - The connected_and_transformed flag
+
+ After calling reset(), the FlowSystem can be modified again
+ (e.g., adding elements or carriers).
+
+ Returns:
+ Self, for method chaining.
+
+ Examples:
+ >>> flow_system.optimize(solver) # FlowSystem is now locked
+ >>> flow_system.add_elements(new_bus) # Raises RuntimeError
+ >>> flow_system.reset() # Unlock the FlowSystem
+ >>> flow_system.add_elements(new_bus) # Now works
+ """
+ self.solution = None # Also clears _statistics via setter
+ self._invalidate_model()
+ return self
+
+ def invalidate(self) -> FlowSystem:
+ """Invalidate the model to allow re-transformation after modifying elements.
+
+ Call this after modifying existing element attributes (e.g., ``flow.size``,
+ ``flow.relative_minimum``) to ensure changes take effect on the next
+ optimization. The next call to :meth:`optimize` or :meth:`build_model`
+ will re-run :meth:`connect_and_transform`.
+
+ Note:
+ Adding new elements via :meth:`add_elements` automatically invalidates
+ the model. This method is only needed when modifying attributes of
+ elements that are already part of the FlowSystem.
- if not self._connected_and_transformed:
- self._connect_network()
+ Returns:
+ Self, for method chaining.
+
+ Raises:
+ RuntimeError: If the FlowSystem has a solution. Call :meth:`reset`
+ first to clear the solution.
- if self._network_app is not None:
- logger.warning('The network app is already running. Restarting it.')
- self.stop_network_app()
+ Examples:
+ Modify a flow's size and re-optimize:
- self._network_app = shownetwork(flow_graph(self))
+ >>> flow_system.optimize(solver)
+ >>> flow_system.reset() # Clear solution first
+ >>> flow_system.components['Boiler'].inputs[0].size = 200
+ >>> flow_system.invalidate()
+ >>> flow_system.optimize(solver) # Re-runs connect_and_transform
- def stop_network_app(self):
- """Stop the network visualization server."""
- from .network_app import DASH_CYTOSCAPE_AVAILABLE, VISUALIZATION_ERROR
+ Modify before first optimization:
- if not DASH_CYTOSCAPE_AVAILABLE:
- raise ImportError(
- f'Network visualization requires optional dependencies. '
- f'Install with: `pip install flixopt[network_viz]`, `pip install flixopt[full]` '
- f'or: `pip install dash dash-cytoscape dash-daq networkx werkzeug`. '
- f'Original error: {VISUALIZATION_ERROR}'
+ >>> flow_system.connect_and_transform()
+ >>> # Oops, need to change something
+ >>> flow_system.components['Boiler'].inputs[0].size = 200
+ >>> flow_system.invalidate()
+ >>> flow_system.optimize(solver) # Changes take effect
+ """
+ if self.is_locked:
+ raise RuntimeError(
+ 'Cannot invalidate a FlowSystem with a solution. Call `reset()` first to clear the solution.'
)
+ self._invalidate_model()
+ return self
- if self._network_app is None:
- logger.warning("No network app is currently running. Can't stop it")
- return
+ @property
+ def optimize(self) -> OptimizeAccessor:
+ """
+ Access optimization methods for this FlowSystem.
- try:
- logger.info('Stopping network visualization server...')
- self._network_app.server_instance.shutdown()
- logger.info('Network visualization stopped.')
- except Exception as e:
- logger.error(f'Failed to stop the network visualization app: {e}')
- finally:
- self._network_app = None
+ This property returns an OptimizeAccessor that can be called directly
+ for standard optimization, or used to access specialized optimization modes.
- def network_infos(self) -> tuple[dict[str, dict[str, str]], dict[str, dict[str, str]]]:
- if not self.connected_and_transformed:
- self.connect_and_transform()
- nodes = {
- node.label_full: {
- 'label': node.label,
- 'class': 'Bus' if isinstance(node, Bus) else 'Component',
- 'infos': node.__str__(),
- }
- for node in chain(self.components.values(), self.buses.values())
- }
+ Returns:
+ An OptimizeAccessor instance.
- edges = {
- flow.label_full: {
- 'label': flow.label,
- 'start': flow.bus if flow.is_input_in_component else flow.component,
- 'end': flow.component if flow.is_input_in_component else flow.bus,
- 'infos': flow.__str__(),
- }
- for flow in self.flows.values()
- }
+ Examples:
+ Standard optimization (call directly):
+
+ >>> flow_system.optimize(HighsSolver())
+ >>> print(flow_system.solution['Boiler(Q_th)|flow_rate'])
+
+ Access element solutions directly:
+
+ >>> flow_system.optimize(solver)
+ >>> boiler = flow_system.components['Boiler']
+ >>> print(boiler.solution)
+
+ Future specialized modes:
+
+ >>> flow_system.optimize.clustered(solver, aggregation=params)
+ >>> flow_system.optimize.mga(solver, alternatives=5)
+ """
+ return OptimizeAccessor(self)
+
+ @property
+ def transform(self) -> TransformAccessor:
+ """
+ Access transformation methods for this FlowSystem.
+
+ This property returns a TransformAccessor that provides methods to create
+ transformed versions of this FlowSystem (e.g., clustered for time aggregation).
+
+ Returns:
+ A TransformAccessor instance.
+
+ Examples:
+ Clustered optimization:
+
+ >>> params = ClusteringParameters(hours_per_period=24, nr_of_periods=8)
+ >>> clustered_fs = flow_system.transform.cluster(params)
+ >>> clustered_fs.optimize(solver)
+ >>> print(clustered_fs.solution)
+ """
+ return TransformAccessor(self)
+
+ @property
+ def statistics(self) -> StatisticsAccessor:
+ """
+ Access statistics and plotting methods for optimization results.
+
+ This property returns a StatisticsAccessor that provides methods to analyze
+ and visualize optimization results stored in this FlowSystem's solution.
+
+ Note:
+ The FlowSystem must have a solution (from optimize() or solve()) before
+ most statistics methods can be used.
- return nodes, edges
+ Returns:
+ A cached StatisticsAccessor instance.
+
+ Examples:
+ After optimization:
+
+ >>> flow_system.optimize(solver)
+ >>> flow_system.statistics.plot.balance('ElectricityBus')
+ >>> flow_system.statistics.plot.heatmap('Boiler|on')
+ >>> ds = flow_system.statistics.flow_rates # Get data for analysis
+ """
+ if self._statistics is None:
+ self._statistics = StatisticsAccessor(self)
+ return self._statistics
+
+ @property
+ def topology(self) -> TopologyAccessor:
+ """
+ Access network topology inspection and visualization methods.
+
+ This property returns a cached TopologyAccessor that provides methods to inspect
+ the network structure and visualize it. The accessor is invalidated when the
+ FlowSystem structure changes (via reset() or invalidate()).
+
+ Returns:
+ A cached TopologyAccessor instance.
+
+ Examples:
+ Visualize the network:
+
+ >>> flow_system.topology.plot()
+ >>> flow_system.topology.plot(path='my_network.html', show=True)
+
+ Interactive visualization:
+
+ >>> flow_system.topology.start_app()
+ >>> # ... interact with the visualization ...
+ >>> flow_system.topology.stop_app()
+
+ Get network structure info:
+
+ >>> nodes, edges = flow_system.topology.infos()
+ """
+ if self._topology is None:
+ self._topology = TopologyAccessor(self)
+ return self._topology
+
+ def plot_network(
+ self,
+ path: bool | str | pathlib.Path = 'flow_system.html',
+ controls: bool
+ | list[
+ Literal['nodes', 'edges', 'layout', 'interaction', 'manipulation', 'physics', 'selection', 'renderer']
+ ] = True,
+ show: bool | None = None,
+ ) -> pyvis.network.Network | None:
+ """
+ Deprecated: Use `flow_system.topology.plot()` instead.
+
+ Visualizes the network structure of a FlowSystem using PyVis.
+ """
+ return self.topology.plot_legacy(path=path, controls=controls, show=show)
+
+ def start_network_app(self) -> None:
+ """
+ Deprecated: Use `flow_system.topology.start_app()` instead.
+
+ Visualizes the network structure using Dash and Cytoscape.
+ """
+ warnings.warn(
+ f'start_network_app() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
+ 'Use flow_system.topology.start_app() instead.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self.topology.start_app()
+
+ def stop_network_app(self) -> None:
+ """
+ Deprecated: Use `flow_system.topology.stop_app()` instead.
+
+ Stop the network visualization server.
+ """
+ warnings.warn(
+ f'stop_network_app() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
+ 'Use flow_system.topology.stop_app() instead.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self.topology.stop_app()
+
+ def network_infos(self) -> tuple[dict[str, dict[str, str]], dict[str, dict[str, str]]]:
+ """
+ Deprecated: Use `flow_system.topology.infos()` instead.
+
+ Get network topology information as dictionaries.
+ """
+ warnings.warn(
+ f'network_infos() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
+ 'Use flow_system.topology.infos() instead.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self.topology.infos()
def _check_if_element_is_unique(self, element: Element) -> None:
"""
@@ -950,12 +1688,12 @@ def _validate_system_integrity(self) -> None:
def _add_effects(self, *args: Effect) -> None:
for effect in args:
- effect._set_flow_system(self) # Link element to FlowSystem
+ effect.link_to_flow_system(self) # Link element to FlowSystem
self.effects.add_effects(*args)
def _add_components(self, *components: Component) -> None:
for new_component in list(components):
- new_component._set_flow_system(self) # Link element to FlowSystem
+ new_component.link_to_flow_system(self) # Link element to FlowSystem
self.components.add(new_component) # Add to existing components
# Invalidate cache once after all additions
if components:
@@ -963,7 +1701,7 @@ def _add_components(self, *components: Component) -> None:
def _add_buses(self, *buses: Bus):
for new_bus in list(buses):
- new_bus._set_flow_system(self) # Link element to FlowSystem
+ new_bus.link_to_flow_system(self) # Link element to FlowSystem
self.buses.add(new_bus) # Add to existing buses
# Invalidate cache once after all additions
if buses:
@@ -1067,6 +1805,18 @@ def flows(self) -> ElementContainer[Flow]:
self._flows_cache = ElementContainer(flows, element_type_name='flows', truncate_repr=10)
return self._flows_cache
+ @property
+ def storages(self) -> ElementContainer[Storage]:
+ """All storage components as an ElementContainer.
+
+ Returns:
+ ElementContainer containing all Storage components in the FlowSystem,
+ sorted by label for reproducibility.
+ """
+ storages = [c for c in self.components.values() if isinstance(c, Storage)]
+ storages = sorted(storages, key=lambda s: s.label_full.lower())
+ return ElementContainer(storages, element_type_name='storages', truncate_repr=10)
+
@property
def coords(self) -> dict[FlowSystemDimensions, pd.Index]:
active_coords = {'time': self.timesteps}
@@ -1223,29 +1973,22 @@ def _dataset_sel(
Returns:
xr.Dataset: Selected dataset
"""
- indexers = {}
- if time is not None:
- indexers['time'] = time
- if period is not None:
- indexers['period'] = period
- if scenario is not None:
- indexers['scenario'] = scenario
-
- if not indexers:
- return dataset
-
- result = dataset.sel(**indexers)
-
- # Update time-related attributes if time was selected
- if 'time' in indexers:
- result = cls._update_time_metadata(result, hours_of_last_timestep, hours_of_previous_timesteps)
-
- # Update period-related attributes if period was selected
- # This recalculates period_weights and weights from the new period index
- if 'period' in indexers:
- result = cls._update_period_metadata(result)
+ warnings.warn(
+ f'\n_dataset_sel() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
+ 'Use TransformAccessor._dataset_sel() instead.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ from .transform_accessor import TransformAccessor
- return result
+ return TransformAccessor._dataset_sel(
+ dataset,
+ time=time,
+ period=period,
+ scenario=scenario,
+ hours_of_last_timestep=hours_of_last_timestep,
+ hours_of_previous_timesteps=hours_of_previous_timesteps,
+ )
def sel(
self,
@@ -1256,8 +1999,8 @@ def sel(
"""
Select a subset of the flowsystem by label.
- For power users: Use FlowSystem._dataset_sel() to chain operations on datasets
- without conversion overhead. See _dataset_sel() documentation.
+ .. deprecated::
+ Use ``flow_system.transform.sel()`` instead. Will be removed in v6.0.0.
Args:
time: Time selection (e.g., slice('2023-01-01', '2023-12-31'), '2023-06-15')
@@ -1265,17 +2008,15 @@ def sel(
scenario: Scenario selection (e.g., 'scenario1', or list of scenarios)
Returns:
- FlowSystem: New FlowSystem with selected data
+ FlowSystem: New FlowSystem with selected data (no solution).
"""
- if time is None and period is None and scenario is None:
- return self.copy()
-
- if not self.connected_and_transformed:
- self.connect_and_transform()
-
- ds = self.to_dataset()
- ds = self._dataset_sel(ds, time=time, period=period, scenario=scenario)
- return self.__class__.from_dataset(ds)
+ warnings.warn(
+ f'\nsel() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
+ 'Use flow_system.transform.sel() instead.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self.transform.sel(time=time, period=period, scenario=scenario)
@classmethod
def _dataset_isel(
@@ -1304,29 +2045,22 @@ def _dataset_isel(
Returns:
xr.Dataset: Selected dataset
"""
- indexers = {}
- if time is not None:
- indexers['time'] = time
- if period is not None:
- indexers['period'] = period
- if scenario is not None:
- indexers['scenario'] = scenario
-
- if not indexers:
- return dataset
-
- result = dataset.isel(**indexers)
-
- # Update time-related attributes if time was selected
- if 'time' in indexers:
- result = cls._update_time_metadata(result, hours_of_last_timestep, hours_of_previous_timesteps)
-
- # Update period-related attributes if period was selected
- # This recalculates period_weights and weights from the new period index
- if 'period' in indexers:
- result = cls._update_period_metadata(result)
+ warnings.warn(
+ f'\n_dataset_isel() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
+ 'Use TransformAccessor._dataset_isel() instead.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ from .transform_accessor import TransformAccessor
- return result
+ return TransformAccessor._dataset_isel(
+ dataset,
+ time=time,
+ period=period,
+ scenario=scenario,
+ hours_of_last_timestep=hours_of_last_timestep,
+ hours_of_previous_timesteps=hours_of_previous_timesteps,
+ )
def isel(
self,
@@ -1337,109 +2071,24 @@ def isel(
"""
Select a subset of the flowsystem by integer indices.
- For power users: Use FlowSystem._dataset_isel() to chain operations on datasets
- without conversion overhead. See _dataset_sel() documentation.
+ .. deprecated::
+ Use ``flow_system.transform.isel()`` instead. Will be removed in v6.0.0.
Args:
time: Time selection by integer index (e.g., slice(0, 100), 50, or [0, 5, 10])
- period: Period selection by integer index (e.g., slice(0, 100), 50, or [0, 5, 10])
- scenario: Scenario selection by integer index (e.g., slice(0, 3), 50, or [0, 5, 10])
+ period: Period selection by integer index
+ scenario: Scenario selection by integer index
Returns:
- FlowSystem: New FlowSystem with selected data
- """
- if time is None and period is None and scenario is None:
- return self.copy()
-
- if not self.connected_and_transformed:
- self.connect_and_transform()
-
- ds = self.to_dataset()
- ds = self._dataset_isel(ds, time=time, period=period, scenario=scenario)
- return self.__class__.from_dataset(ds)
-
- @classmethod
- def _resample_by_dimension_groups(
- cls,
- time_dataset: xr.Dataset,
- time: str,
- method: str,
- **kwargs: Any,
- ) -> xr.Dataset:
+ FlowSystem: New FlowSystem with selected data (no solution).
"""
- Resample variables grouped by their dimension structure to avoid broadcasting.
-
- This method groups variables by their non-time dimensions before resampling,
- which provides two key benefits:
-
- 1. **Performance**: Resampling many variables with the same dimensions together
- is significantly faster than resampling each variable individually.
-
- 2. **Safety**: Prevents xarray from broadcasting variables with different
- dimensions into a larger dimensional space filled with NaNs, which would
- cause memory bloat and computational inefficiency.
-
- Example:
- Without grouping (problematic):
- var1: (time, location, tech) shape (8000, 10, 2)
- var2: (time, region) shape (8000, 5)
- concat → (variable, time, location, tech, region) ← Unwanted broadcasting!
-
- With grouping (safe and fast):
- Group 1: [var1, var3, ...] with dims (time, location, tech)
- Group 2: [var2, var4, ...] with dims (time, region)
- Each group resampled separately → No broadcasting, optimal performance!
-
- Args:
- time_dataset: Dataset containing only variables with time dimension
- time: Resampling frequency (e.g., '2h', '1D', '1M')
- method: Resampling method name (e.g., 'mean', 'sum', 'first')
- **kwargs: Additional arguments passed to xarray.resample()
-
- Returns:
- Resampled dataset with original dimension structure preserved
- """
- # Group variables by dimensions (excluding time)
- dim_groups = defaultdict(list)
- for var_name, var in time_dataset.data_vars.items():
- dims_key = tuple(sorted(d for d in var.dims if d != 'time'))
- dim_groups[dims_key].append(var_name)
-
- # Handle empty case: no time-dependent variables
- if not dim_groups:
- return getattr(time_dataset.resample(time=time, **kwargs), method)()
-
- # Resample each group separately using DataArray concat (faster)
- resampled_groups = []
- for var_names in dim_groups.values():
- # Skip empty groups
- if not var_names:
- continue
-
- # Concat variables into a single DataArray with 'variable' dimension
- # Use combine_attrs='drop_conflicts' to handle attribute conflicts
- stacked = xr.concat(
- [time_dataset[name] for name in var_names],
- dim=pd.Index(var_names, name='variable'),
- combine_attrs='drop_conflicts',
- )
-
- # Resample the DataArray (faster than resampling Dataset)
- resampled = getattr(stacked.resample(time=time, **kwargs), method)()
-
- # Convert back to Dataset using the 'variable' dimension
- resampled_dataset = resampled.to_dataset(dim='variable')
- resampled_groups.append(resampled_dataset)
-
- # Merge all resampled groups, handling empty list case
- if not resampled_groups:
- return time_dataset # Return empty dataset as-is
-
- if len(resampled_groups) == 1:
- return resampled_groups[0]
-
- # Merge multiple groups with combine_attrs to avoid conflicts
- return xr.merge(resampled_groups, combine_attrs='drop_conflicts')
+ warnings.warn(
+ f'\nisel() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
+ 'Use flow_system.transform.isel() instead.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self.transform.isel(time=time, period=period, scenario=scenario)
@classmethod
def _dataset_resample(
@@ -1470,36 +2119,47 @@ def _dataset_resample(
Returns:
xr.Dataset: Resampled dataset
"""
- # Validate method
- available_methods = ['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count']
- if method not in available_methods:
- raise ValueError(f'Unsupported resampling method: {method}. Available: {available_methods}')
-
- # Preserve original dataset attributes (especially the reference structure)
- original_attrs = dict(dataset.attrs)
-
- # Separate time and non-time variables
- time_var_names = [v for v in dataset.data_vars if 'time' in dataset[v].dims]
- non_time_var_names = [v for v in dataset.data_vars if v not in time_var_names]
-
- # Only resample variables that have time dimension
- time_dataset = dataset[time_var_names]
+ warnings.warn(
+ f'\n_dataset_resample() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
+ 'Use TransformAccessor._dataset_resample() instead.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ from .transform_accessor import TransformAccessor
- # Resample with dimension grouping to avoid broadcasting
- resampled_time_dataset = cls._resample_by_dimension_groups(time_dataset, freq, method, **kwargs)
+ return TransformAccessor._dataset_resample(
+ dataset,
+ freq=freq,
+ method=method,
+ hours_of_last_timestep=hours_of_last_timestep,
+ hours_of_previous_timesteps=hours_of_previous_timesteps,
+ **kwargs,
+ )
- # Combine resampled time variables with non-time variables
- if non_time_var_names:
- non_time_dataset = dataset[non_time_var_names]
- result = xr.merge([resampled_time_dataset, non_time_dataset])
- else:
- result = resampled_time_dataset
+ @classmethod
+ def _resample_by_dimension_groups(
+ cls,
+ time_dataset: xr.Dataset,
+ time: str,
+ method: str,
+ **kwargs: Any,
+ ) -> xr.Dataset:
+ """
+ Resample variables grouped by their dimension structure to avoid broadcasting.
- # Restore original attributes (xr.merge can drop them)
- result.attrs.update(original_attrs)
+ .. deprecated::
+ Use ``TransformAccessor._resample_by_dimension_groups()`` instead.
+ Will be removed in v6.0.0.
+ """
+ warnings.warn(
+ f'\n_resample_by_dimension_groups() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
+ 'Use TransformAccessor._resample_by_dimension_groups() instead.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ from .transform_accessor import TransformAccessor
- # Update time-related attributes based on new time index
- return cls._update_time_metadata(result, hours_of_last_timestep, hours_of_previous_timesteps)
+ return TransformAccessor._resample_by_dimension_groups(time_dataset, time, method, **kwargs)
def resample(
self,
@@ -1510,36 +2170,34 @@ def resample(
**kwargs: Any,
) -> FlowSystem:
"""
- Create a resampled FlowSystem by resampling data along the time dimension (like xr.Dataset.resample()).
- Only resamples data variables that have a time dimension.
+ Create a resampled FlowSystem by resampling data along the time dimension.
- For power users: Use FlowSystem._dataset_resample() to chain operations on datasets
- without conversion overhead. See _dataset_sel() documentation.
+ .. deprecated::
+ Use ``flow_system.transform.resample()`` instead. Will be removed in v6.0.0.
Args:
time: Resampling frequency (e.g., '3h', '2D', '1M')
method: Resampling method. Recommended: 'mean', 'first', 'last', 'max', 'min'
- hours_of_last_timestep: Duration of the last timestep after resampling. If None, computed from the last time interval.
- hours_of_previous_timesteps: Duration of previous timesteps after resampling. If None, computed from the first time interval.
- Can be a scalar or array.
+ hours_of_last_timestep: Duration of the last timestep after resampling.
+ hours_of_previous_timesteps: Duration of previous timesteps after resampling.
**kwargs: Additional arguments passed to xarray.resample()
Returns:
- FlowSystem: New resampled FlowSystem
+ FlowSystem: New resampled FlowSystem (no solution).
"""
- if not self.connected_and_transformed:
- self.connect_and_transform()
-
- ds = self.to_dataset()
- ds = self._dataset_resample(
- ds,
- freq=time,
+ warnings.warn(
+ f'\nresample() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. '
+ 'Use flow_system.transform.resample() instead.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self.transform.resample(
+ time=time,
method=method,
hours_of_last_timestep=hours_of_last_timestep,
hours_of_previous_timesteps=hours_of_previous_timesteps,
**kwargs,
)
- return self.__class__.from_dataset(ds)
@property
def connected_and_transformed(self) -> bool:
diff --git a/flixopt/interface.py b/flixopt/interface.py
index 7995d5e78..13a9255da 100644
--- a/flixopt/interface.py
+++ b/flixopt/interface.py
@@ -6,13 +6,15 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
import numpy as np
import pandas as pd
+import plotly.express as px
import xarray as xr
from .config import CONFIG
+from .plot_result import PlotResult
from .structure import Interface, register_class_for_io
if TYPE_CHECKING: # for type checking and preventing circular imports
@@ -74,10 +76,10 @@ def __init__(self, start: Numeric_TPS, end: Numeric_TPS):
self.end = end
self.has_time_dim = False
- def transform_data(self, name_prefix: str = '') -> None:
+ def transform_data(self) -> None:
dims = None if self.has_time_dim else ['period', 'scenario']
- self.start = self._fit_coords(f'{name_prefix}|start', self.start, dims=dims)
- self.end = self._fit_coords(f'{name_prefix}|end', self.end, dims=dims)
+ self.start = self._fit_coords(f'{self.prefix}|start', self.start, dims=dims)
+ self.end = self._fit_coords(f'{self.prefix}|end', self.end, dims=dims)
@register_class_for_io
@@ -226,15 +228,15 @@ def __getitem__(self, index) -> Piece:
def __iter__(self) -> Iterator[Piece]:
return iter(self.pieces) # Enables iteration like for piece in piecewise: ...
- def _set_flow_system(self, flow_system) -> None:
+ def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
"""Propagate flow_system reference to nested Piece objects."""
- super()._set_flow_system(flow_system)
- for piece in self.pieces:
- piece._set_flow_system(flow_system)
-
- def transform_data(self, name_prefix: str = '') -> None:
+ super().link_to_flow_system(flow_system, prefix)
for i, piece in enumerate(self.pieces):
- piece.transform_data(f'{name_prefix}|Piece{i}')
+ piece.link_to_flow_system(flow_system, self._sub_prefix(f'Piece{i}'))
+
+ def transform_data(self) -> None:
+ for piece in self.pieces:
+ piece.transform_data()
@register_class_for_io
@@ -458,15 +460,151 @@ def items(self):
"""
return self.piecewises.items()
- def _set_flow_system(self, flow_system) -> None:
+ def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
"""Propagate flow_system reference to nested Piecewise objects."""
- super()._set_flow_system(flow_system)
+ super().link_to_flow_system(flow_system, prefix)
+ for name, piecewise in self.piecewises.items():
+ piecewise.link_to_flow_system(flow_system, self._sub_prefix(name))
+
+ def transform_data(self) -> None:
for piecewise in self.piecewises.values():
- piecewise._set_flow_system(flow_system)
+ piecewise.transform_data()
- def transform_data(self, name_prefix: str = '') -> None:
- for name, piecewise in self.piecewises.items():
- piecewise.transform_data(f'{name_prefix}|{name}')
+ def plot(
+ self,
+ x_flow: str | None = None,
+ title: str = '',
+ select: dict[str, Any] | None = None,
+ colorscale: str | None = None,
+ show: bool | None = None,
+ ) -> PlotResult:
+ """Plot multi-flow piecewise conversion with time variation visualization.
+
+ Visualizes the piecewise linear relationships between flows. Each flow
+ is shown in a separate subplot (faceted by flow). Pieces are distinguished
+ by line dash style. If boundaries vary over time, color shows time progression.
+
+ Note:
+ Requires FlowSystem to be connected and transformed (call
+ flow_system.connect_and_transform() first).
+
+ Args:
+ x_flow: Flow label to use for X-axis. Defaults to first flow in dict.
+ title: Plot title.
+ select: xarray-style selection dict to filter data,
+ e.g. {'time': slice('2024-01-01', '2024-01-02')}.
+ colorscale: Colorscale name for time coloring (e.g., 'RdYlBu_r', 'viridis').
+ Defaults to CONFIG.Plotting.default_sequential_colorscale.
+ show: Whether to display the figure.
+ Defaults to CONFIG.Plotting.default_show.
+
+ Returns:
+ PlotResult containing the figure and underlying piecewise data.
+
+ Examples:
+ >>> flow_system.connect_and_transform()
+ >>> chp.piecewise_conversion.plot(x_flow='Gas', title='CHP Curves')
+ >>> # Select specific time range
+ >>> chp.piecewise_conversion.plot(select={'time': slice(0, 12)})
+ """
+ if not self.flow_system.connected_and_transformed:
+ logger.debug('Connecting flow_system for plotting PiecewiseConversion')
+ self.flow_system.connect_and_transform()
+
+ colorscale = colorscale or CONFIG.Plotting.default_sequential_colorscale
+
+ flow_labels = list(self.piecewises.keys())
+ x_label = x_flow if x_flow is not None else flow_labels[0]
+ if x_label not in flow_labels:
+ raise ValueError(f"x_flow '{x_label}' not found. Available: {flow_labels}")
+
+ y_flows = [label for label in flow_labels if label != x_label]
+ if not y_flows:
+ raise ValueError('Need at least two flows to plot')
+
+ x_piecewise = self.piecewises[x_label]
+
+ # Build Dataset with all piece data
+ datasets = []
+ for y_label in y_flows:
+ y_piecewise = self.piecewises[y_label]
+ for i, (x_piece, y_piece) in enumerate(zip(x_piecewise, y_piecewise, strict=False)):
+ ds = xr.Dataset(
+ {
+ x_label: xr.concat([x_piece.start, x_piece.end], dim='point'),
+ 'output': xr.concat([y_piece.start, y_piece.end], dim='point'),
+ }
+ )
+ ds = ds.assign_coords(point=['start', 'end'])
+ ds['flow'] = y_label
+ ds['piece'] = f'Piece {i}'
+ datasets.append(ds)
+
+ combined = xr.concat(datasets, dim='trace')
+
+ # Apply selection if provided
+ if select:
+ valid_select = {k: v for k, v in select.items() if k in combined.dims or k in combined.coords}
+ if valid_select:
+ combined = combined.sel(valid_select)
+
+ df = combined.to_dataframe().reset_index()
+
+ # Check if values vary over time
+ has_time = 'time' in df.columns
+ varies_over_time = False
+ if has_time:
+ varies_over_time = df.groupby(['trace', 'point'])[[x_label, 'output']].nunique().max().max() > 1
+
+ if varies_over_time:
+ # Time-varying: color by time, dash by piece
+ df['time_idx'] = df.groupby('time').ngroup()
+ df['line_id'] = df['trace'].astype(str) + '_' + df['time_idx'].astype(str)
+ n_times = df['time_idx'].nunique()
+ colors = px.colors.sample_colorscale(colorscale, n_times)
+
+ fig = px.line(
+ df,
+ x=x_label,
+ y='output',
+ color='time_idx',
+ line_dash='piece',
+ line_group='line_id',
+ facet_col='flow' if len(y_flows) > 1 else None,
+ title=title or 'Piecewise Conversion',
+ markers=True,
+ color_discrete_sequence=colors,
+ )
+ else:
+ # Static: dash by piece
+ if has_time:
+ df = df.groupby(['trace', 'point', 'flow', 'piece']).first().reset_index()
+ df['line_id'] = df['trace'].astype(str)
+
+ fig = px.line(
+ df,
+ x=x_label,
+ y='output',
+ line_dash='piece',
+ line_group='line_id',
+ facet_col='flow' if len(y_flows) > 1 else None,
+ title=title or 'Piecewise Conversion',
+ markers=True,
+ )
+
+ # Clean up facet titles and axis labels
+ fig.for_each_annotation(lambda a: a.update(text=a.text.replace('flow=', '')))
+ fig.update_yaxes(title_text='')
+ fig.update_xaxes(title_text=x_label)
+
+ result = PlotResult(data=combined, figure=fig)
+
+ if show is None:
+ show = CONFIG.Plotting.default_show
+ if show:
+ result.show()
+
+ return result
@register_class_for_io
@@ -676,17 +814,142 @@ def has_time_dim(self, value):
for piecewise in self.piecewise_shares.values():
piecewise.has_time_dim = value
- def _set_flow_system(self, flow_system) -> None:
+ def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
"""Propagate flow_system reference to nested Piecewise objects."""
- super()._set_flow_system(flow_system)
- self.piecewise_origin._set_flow_system(flow_system)
+ super().link_to_flow_system(flow_system, prefix)
+ self.piecewise_origin.link_to_flow_system(flow_system, self._sub_prefix('origin'))
+ for effect, piecewise in self.piecewise_shares.items():
+ piecewise.link_to_flow_system(flow_system, self._sub_prefix(effect))
+
+ def transform_data(self) -> None:
+ self.piecewise_origin.transform_data()
for piecewise in self.piecewise_shares.values():
- piecewise._set_flow_system(flow_system)
+ piecewise.transform_data()
- def transform_data(self, name_prefix: str = '') -> None:
- self.piecewise_origin.transform_data(f'{name_prefix}|PiecewiseEffects|origin')
- for effect, piecewise in self.piecewise_shares.items():
- piecewise.transform_data(f'{name_prefix}|PiecewiseEffects|{effect}')
+ def plot(
+ self,
+ title: str = '',
+ select: dict[str, Any] | None = None,
+ colorscale: str | None = None,
+ show: bool | None = None,
+ ) -> PlotResult:
+ """Plot origin vs effect shares with time variation visualization.
+
+ Visualizes the piecewise linear relationships between the origin variable
+ and its effect shares. Each effect is shown in a separate subplot (faceted
+ by effect). Pieces are distinguished by line dash style.
+
+ Note:
+            If the FlowSystem is not yet connected and transformed, this
+            method calls flow_system.connect_and_transform() automatically.
+
+ Args:
+ title: Plot title.
+ select: xarray-style selection dict to filter data,
+ e.g. {'time': slice('2024-01-01', '2024-01-02')}.
+ colorscale: Colorscale name for time coloring (e.g., 'RdYlBu_r', 'viridis').
+ Defaults to CONFIG.Plotting.default_sequential_colorscale.
+ show: Whether to display the figure.
+ Defaults to CONFIG.Plotting.default_show.
+
+ Returns:
+ PlotResult containing the figure and underlying piecewise data.
+
+ Examples:
+ >>> flow_system.connect_and_transform()
+ >>> invest_params.piecewise_effects_of_investment.plot(title='Investment Effects')
+ """
+ if not self.flow_system.connected_and_transformed:
+ logger.debug('Connecting flow_system for plotting PiecewiseEffects')
+ self.flow_system.connect_and_transform()
+
+ colorscale = colorscale or CONFIG.Plotting.default_sequential_colorscale
+
+ effect_labels = list(self.piecewise_shares.keys())
+ if not effect_labels:
+ raise ValueError('Need at least one effect share to plot')
+
+ # Build Dataset with all piece data
+ datasets = []
+ for effect_label in effect_labels:
+ y_piecewise = self.piecewise_shares[effect_label]
+ for i, (x_piece, y_piece) in enumerate(zip(self.piecewise_origin, y_piecewise, strict=False)):
+ ds = xr.Dataset(
+ {
+ 'origin': xr.concat([x_piece.start, x_piece.end], dim='point'),
+ 'share': xr.concat([y_piece.start, y_piece.end], dim='point'),
+ }
+ )
+ ds = ds.assign_coords(point=['start', 'end'])
+ ds['effect'] = effect_label
+ ds['piece'] = f'Piece {i}'
+ datasets.append(ds)
+
+ combined = xr.concat(datasets, dim='trace')
+
+ # Apply selection if provided
+ if select:
+ valid_select = {k: v for k, v in select.items() if k in combined.dims or k in combined.coords}
+ if valid_select:
+ combined = combined.sel(valid_select)
+
+ df = combined.to_dataframe().reset_index()
+
+ # Check if values vary over time
+ has_time = 'time' in df.columns
+ varies_over_time = False
+ if has_time:
+ varies_over_time = df.groupby(['trace', 'point'])[['origin', 'share']].nunique().max().max() > 1
+
+ if varies_over_time:
+ # Time-varying: color by time, dash by piece
+ df['time_idx'] = df.groupby('time').ngroup()
+ df['line_id'] = df['trace'].astype(str) + '_' + df['time_idx'].astype(str)
+ n_times = df['time_idx'].nunique()
+ colors = px.colors.sample_colorscale(colorscale, n_times)
+
+ fig = px.line(
+ df,
+ x='origin',
+ y='share',
+ color='time_idx',
+ line_dash='piece',
+ line_group='line_id',
+ facet_col='effect' if len(effect_labels) > 1 else None,
+ title=title or 'Piecewise Effects',
+ markers=True,
+ color_discrete_sequence=colors,
+ )
+ else:
+ # Static: dash by piece
+ if has_time:
+ df = df.groupby(['trace', 'point', 'effect', 'piece']).first().reset_index()
+ df['line_id'] = df['trace'].astype(str)
+
+ fig = px.line(
+ df,
+ x='origin',
+ y='share',
+ line_dash='piece',
+ line_group='line_id',
+ facet_col='effect' if len(effect_labels) > 1 else None,
+ title=title or 'Piecewise Effects',
+ markers=True,
+ )
+
+ # Clean up facet titles and axis labels
+ fig.for_each_annotation(lambda a: a.update(text=a.text.replace('effect=', '')))
+ fig.update_yaxes(title_text='')
+ fig.update_xaxes(title_text='Origin')
+
+ result = PlotResult(data=combined, figure=fig)
+
+ if show is None:
+ show = CONFIG.Plotting.default_show
+ if show:
+ result.show()
+
+ return result
@register_class_for_io
@@ -718,7 +981,7 @@ class InvestParameters(Interface):
fixed_size: Creates binary decision at this exact size. None allows continuous sizing.
minimum_size: Lower bound for continuous sizing. Default: CONFIG.Modeling.epsilon.
Ignored if fixed_size is specified.
- maximum_size: Upper bound for continuous sizing. Default: CONFIG.Modeling.big.
+ maximum_size: Upper bound for continuous sizing. Required if fixed_size is not set.
Ignored if fixed_size is specified.
mandatory: Controls whether investment is required. When True, forces investment
to occur (useful for mandatory upgrades or replacement decisions).
@@ -901,30 +1164,36 @@ def __init__(
)
self.piecewise_effects_of_investment = piecewise_effects_of_investment
self.minimum_size = minimum_size if minimum_size is not None else CONFIG.Modeling.epsilon
- self.maximum_size = maximum_size if maximum_size is not None else CONFIG.Modeling.big # default maximum
+ self.maximum_size = maximum_size
self.linked_periods = linked_periods
- def _set_flow_system(self, flow_system) -> None:
+ def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
"""Propagate flow_system reference to nested PiecewiseEffects object if present."""
- super()._set_flow_system(flow_system)
+ super().link_to_flow_system(flow_system, prefix)
if self.piecewise_effects_of_investment is not None:
- self.piecewise_effects_of_investment._set_flow_system(flow_system)
-
- def transform_data(self, name_prefix: str = '') -> None:
+ self.piecewise_effects_of_investment.link_to_flow_system(flow_system, self._sub_prefix('PiecewiseEffects'))
+
+ def transform_data(self) -> None:
+ # Validate that either fixed_size or maximum_size is set
+ if self.fixed_size is None and self.maximum_size is None:
+ raise ValueError(
+ f'InvestParameters in "{self.prefix}" requires either fixed_size or maximum_size to be set. '
+ f'An upper bound is needed to properly scale the optimization model.'
+ )
self.effects_of_investment = self._fit_effect_coords(
- prefix=name_prefix,
+ prefix=self.prefix,
effect_values=self.effects_of_investment,
suffix='effects_of_investment',
dims=['period', 'scenario'],
)
self.effects_of_retirement = self._fit_effect_coords(
- prefix=name_prefix,
+ prefix=self.prefix,
effect_values=self.effects_of_retirement,
suffix='effects_of_retirement',
dims=['period', 'scenario'],
)
self.effects_of_investment_per_size = self._fit_effect_coords(
- prefix=name_prefix,
+ prefix=self.prefix,
effect_values=self.effects_of_investment_per_size,
suffix='effects_of_investment_per_size',
dims=['period', 'scenario'],
@@ -932,13 +1201,13 @@ def transform_data(self, name_prefix: str = '') -> None:
if self.piecewise_effects_of_investment is not None:
self.piecewise_effects_of_investment.has_time_dim = False
- self.piecewise_effects_of_investment.transform_data(f'{name_prefix}|PiecewiseEffects')
+ self.piecewise_effects_of_investment.transform_data()
self.minimum_size = self._fit_coords(
- f'{name_prefix}|minimum_size', self.minimum_size, dims=['period', 'scenario']
+ f'{self.prefix}|minimum_size', self.minimum_size, dims=['period', 'scenario']
)
self.maximum_size = self._fit_coords(
- f'{name_prefix}|maximum_size', self.maximum_size, dims=['period', 'scenario']
+ f'{self.prefix}|maximum_size', self.maximum_size, dims=['period', 'scenario']
)
# Convert tuple (first_period, last_period) to DataArray if needed
if isinstance(self.linked_periods, (tuple, list)):
@@ -965,9 +1234,9 @@ def transform_data(self, name_prefix: str = '') -> None:
logger.debug(f'Computed {self.linked_periods=}')
self.linked_periods = self._fit_coords(
- f'{name_prefix}|linked_periods', self.linked_periods, dims=['period', 'scenario']
+ f'{self.prefix}|linked_periods', self.linked_periods, dims=['period', 'scenario']
)
- self.fixed_size = self._fit_coords(f'{name_prefix}|fixed_size', self.fixed_size, dims=['period', 'scenario'])
+ self.fixed_size = self._fit_coords(f'{self.prefix}|fixed_size', self.fixed_size, dims=['period', 'scenario'])
@property
def minimum_or_fixed_size(self) -> Numeric_PS:
@@ -1215,29 +1484,29 @@ def __init__(
self.startup_limit = startup_limit
self.force_startup_tracking: bool = force_startup_tracking
- def transform_data(self, name_prefix: str = '') -> None:
+ def transform_data(self) -> None:
self.effects_per_startup = self._fit_effect_coords(
- prefix=name_prefix,
+ prefix=self.prefix,
effect_values=self.effects_per_startup,
suffix='per_startup',
)
self.effects_per_active_hour = self._fit_effect_coords(
- prefix=name_prefix,
+ prefix=self.prefix,
effect_values=self.effects_per_active_hour,
suffix='per_active_hour',
)
- self.min_uptime = self._fit_coords(f'{name_prefix}|min_uptime', self.min_uptime)
- self.max_uptime = self._fit_coords(f'{name_prefix}|max_uptime', self.max_uptime)
- self.min_downtime = self._fit_coords(f'{name_prefix}|min_downtime', self.min_downtime)
- self.max_downtime = self._fit_coords(f'{name_prefix}|max_downtime', self.max_downtime)
+ self.min_uptime = self._fit_coords(f'{self.prefix}|min_uptime', self.min_uptime)
+ self.max_uptime = self._fit_coords(f'{self.prefix}|max_uptime', self.max_uptime)
+ self.min_downtime = self._fit_coords(f'{self.prefix}|min_downtime', self.min_downtime)
+ self.max_downtime = self._fit_coords(f'{self.prefix}|max_downtime', self.max_downtime)
self.active_hours_max = self._fit_coords(
- f'{name_prefix}|active_hours_max', self.active_hours_max, dims=['period', 'scenario']
+ f'{self.prefix}|active_hours_max', self.active_hours_max, dims=['period', 'scenario']
)
self.active_hours_min = self._fit_coords(
- f'{name_prefix}|active_hours_min', self.active_hours_min, dims=['period', 'scenario']
+ f'{self.prefix}|active_hours_min', self.active_hours_min, dims=['period', 'scenario']
)
self.startup_limit = self._fit_coords(
- f'{name_prefix}|startup_limit', self.startup_limit, dims=['period', 'scenario']
+ f'{self.prefix}|startup_limit', self.startup_limit, dims=['period', 'scenario']
)
@property
diff --git a/flixopt/io.py b/flixopt/io.py
index 27bc242ff..f46cd8723 100644
--- a/flixopt/io.py
+++ b/flixopt/io.py
@@ -597,6 +597,236 @@ def load_dataset_from_netcdf(path: str | pathlib.Path) -> xr.Dataset:
return ds
+# Parameter rename mappings for backwards compatibility conversion
+# Format: {old_name: new_name}
+PARAMETER_RENAMES = {
+ # Effect parameters
+ 'minimum_operation': 'minimum_temporal',
+ 'maximum_operation': 'maximum_temporal',
+ 'minimum_invest': 'minimum_periodic',
+ 'maximum_invest': 'maximum_periodic',
+ 'minimum_investment': 'minimum_periodic',
+ 'maximum_investment': 'maximum_periodic',
+ 'minimum_operation_per_hour': 'minimum_per_hour',
+ 'maximum_operation_per_hour': 'maximum_per_hour',
+ # InvestParameters
+ 'fix_effects': 'effects_of_investment',
+ 'specific_effects': 'effects_of_investment_per_size',
+ 'divest_effects': 'effects_of_retirement',
+ 'piecewise_effects': 'piecewise_effects_of_investment',
+ # Flow/OnOffParameters
+ 'flow_hours_total_max': 'flow_hours_max',
+ 'flow_hours_total_min': 'flow_hours_min',
+ 'on_hours_total_max': 'active_hours_max',
+ 'on_hours_total_min': 'active_hours_min',
+ 'switch_on_total_max': 'startup_limit',
+ # Bus
+ 'excess_penalty_per_flow_hour': 'imbalance_penalty_per_flow_hour',
+ # Component parameters (Source/Sink)
+ 'source': 'outputs',
+ 'sink': 'inputs',
+ 'prevent_simultaneous_sink_and_source': 'prevent_simultaneous_flow_rates',
+ # LinearConverter flow/efficiency parameters (pre-v4 files)
+ # These are needed for very old files that use short flow names
+ 'Q_fu': 'fuel_flow',
+ 'P_el': 'electrical_flow',
+ 'Q_th': 'thermal_flow',
+ 'Q_ab': 'heat_source_flow',
+ 'eta': 'thermal_efficiency',
+ 'eta_th': 'thermal_efficiency',
+ 'eta_el': 'electrical_efficiency',
+ 'COP': 'cop',
+ # Storage
+ # Note: 'lastValueOfSim' → 'equals_final' is a value change, not a key change
+ # Class renames (v4.2.0)
+ 'FullCalculation': 'Optimization',
+ 'AggregatedCalculation': 'ClusteredOptimization',
+ 'SegmentedCalculation': 'SegmentedOptimization',
+ 'CalculationResults': 'Results',
+ 'SegmentedCalculationResults': 'SegmentedResults',
+ 'Aggregation': 'Clustering',
+ 'AggregationParameters': 'ClusteringParameters',
+ 'AggregationModel': 'ClusteringModel',
+ # OnOffParameters → StatusParameters (class and attribute names)
+ 'OnOffParameters': 'StatusParameters',
+ 'on_off_parameters': 'status_parameters',
+ # StatusParameters attribute renames (applies to both Flow-level and Component-level)
+ 'effects_per_switch_on': 'effects_per_startup',
+ 'effects_per_running_hour': 'effects_per_active_hour',
+ 'consecutive_on_hours_min': 'min_uptime',
+ 'consecutive_on_hours_max': 'max_uptime',
+ 'consecutive_off_hours_min': 'min_downtime',
+ 'consecutive_off_hours_max': 'max_downtime',
+ 'force_switch_on': 'force_startup_tracking',
+ 'on_hours_min': 'active_hours_min',
+ 'on_hours_max': 'active_hours_max',
+ 'switch_on_max': 'startup_limit',
+ # TimeSeriesData
+ 'agg_group': 'aggregation_group',
+ 'agg_weight': 'aggregation_weight',
+}
+
+# Value renames (for specific parameter values that changed)
+VALUE_RENAMES = {
+ 'initial_charge_state': {'lastValueOfSim': 'equals_final'},
+}
+
+
+# Keys that should NOT have their child keys renamed (they reference flow labels)
+_FLOW_LABEL_REFERENCE_KEYS = {'piecewises', 'conversion_factors'}
+
+# Keys that ARE flow parameters on components (should be renamed)
+_FLOW_PARAMETER_KEYS = {'Q_fu', 'P_el', 'Q_th', 'Q_ab', 'eta', 'eta_th', 'eta_el', 'COP'}
+
+
+def _rename_keys_recursive(
+ obj: Any,
+ key_renames: dict[str, str],
+ value_renames: dict[str, dict],
+ skip_flow_renames: bool = False,
+) -> Any:
+ """Recursively rename keys and values in nested data structures.
+
+ Args:
+ obj: The object to process (dict, list, or scalar)
+ key_renames: Mapping of old key names to new key names
+ value_renames: Mapping of key names to {old_value: new_value} dicts
+ skip_flow_renames: If True, skip renaming flow parameter keys (for inside piecewises)
+
+ Returns:
+ The processed object with renamed keys and values
+ """
+ if isinstance(obj, dict):
+ new_dict = {}
+ for key, value in obj.items():
+ # Determine if we should skip flow renames for children
+ child_skip_flow_renames = skip_flow_renames or key in _FLOW_LABEL_REFERENCE_KEYS
+
+ # Rename the key if needed (skip flow params if in reference context)
+ if skip_flow_renames and key in _FLOW_PARAMETER_KEYS:
+ new_key = key # Don't rename flow labels inside piecewises etc.
+ else:
+ new_key = key_renames.get(key, key)
+
+ # Process the value recursively
+ new_value = _rename_keys_recursive(value, key_renames, value_renames, child_skip_flow_renames)
+
+ # Check if this key has value renames
+ if key in value_renames and isinstance(new_value, str):
+ new_value = value_renames[key].get(new_value, new_value)
+
+ # Handle __class__ values - rename class names
+ if key == '__class__' and isinstance(new_value, str):
+ new_value = key_renames.get(new_value, new_value)
+
+ new_dict[new_key] = new_value
+ return new_dict
+
+ elif isinstance(obj, list):
+ return [_rename_keys_recursive(item, key_renames, value_renames, skip_flow_renames) for item in obj]
+
+ else:
+ return obj
+
+
+def convert_old_dataset(
+ ds: xr.Dataset,
+ key_renames: dict[str, str] | None = None,
+ value_renames: dict[str, dict] | None = None,
+) -> xr.Dataset:
+ """Convert an old FlowSystem dataset to use new parameter names.
+
+ This function updates the reference structure in a dataset's attrs to use
+ the current parameter naming conventions. This is useful for loading
+ FlowSystem files saved with older versions of flixopt.
+
+ Args:
+ ds: The dataset to convert (will be modified in place)
+ key_renames: Custom key renames to apply. If None, uses PARAMETER_RENAMES.
+ value_renames: Custom value renames to apply. If None, uses VALUE_RENAMES.
+
+ Returns:
+ The converted dataset (same object, modified in place)
+
+ Examples:
+ Convert an old netCDF file to new format:
+
+ ```python
+ from flixopt import io
+
+ # Load old file
+ ds = io.load_dataset_from_netcdf('old_flow_system.nc4')
+
+ # Convert parameter names
+ ds = io.convert_old_dataset(ds)
+
+ # Now load as FlowSystem
+ from flixopt import FlowSystem
+
+ fs = FlowSystem.from_dataset(ds)
+ ```
+ """
+ if key_renames is None:
+ key_renames = PARAMETER_RENAMES
+ if value_renames is None:
+ value_renames = VALUE_RENAMES
+
+ # Convert the attrs (reference_structure)
+ ds.attrs = _rename_keys_recursive(ds.attrs, key_renames, value_renames)
+
+ return ds
+
+
+def convert_old_netcdf(
+ input_path: str | pathlib.Path,
+ output_path: str | pathlib.Path | None = None,
+ compression: int = 0,
+) -> xr.Dataset:
+ """Load an old FlowSystem netCDF file and convert to new parameter names.
+
+ This is a convenience function that combines loading, conversion, and
+ optionally saving the converted dataset.
+
+ Args:
+ input_path: Path to the old netCDF file
+ output_path: If provided, save the converted dataset to this path.
+ If None, only returns the converted dataset without saving.
+ compression: Compression level (0-9) for saving. Only used if output_path is provided.
+
+ Returns:
+ The converted dataset
+
+ Examples:
+ Convert and save to new file:
+
+ ```python
+ from flixopt import io
+
+ # Convert old file to new format
+ ds = io.convert_old_netcdf('old_system.nc4', 'new_system.nc')
+ ```
+
+ Convert and load as FlowSystem:
+
+ ```python
+ from flixopt import FlowSystem, io
+
+ ds = io.convert_old_netcdf('old_system.nc4')
+ fs = FlowSystem.from_dataset(ds)
+ ```
+ """
+ # Load and convert
+ ds = load_dataset_from_netcdf(input_path)
+ ds = convert_old_dataset(ds)
+
+ # Optionally save
+ if output_path is not None:
+ save_dataset_to_netcdf(ds, output_path, compression=compression)
+ logger.info(f'Converted {input_path} -> {output_path}')
+
+ return ds
+
+
@dataclass
class ResultsPaths:
"""Container for all paths related to saving Results."""
@@ -801,7 +1031,7 @@ def build_repr_from_init(
excluded_params: Set of parameter names to exclude (e.g., {'self', 'inputs', 'outputs'})
Default excludes 'self', 'label', and 'kwargs'
label_as_positional: If True and 'label' param exists, show it as first positional arg
- skip_default_size: If True, skip 'size' parameter when it equals CONFIG.Modeling.big
+ skip_default_size: Deprecated. Previously skipped size=CONFIG.Modeling.big, now size=None is default.
Returns:
Formatted repr string like: ClassName("label", param=value)
diff --git a/flixopt/optimization.py b/flixopt/optimization.py
index 529975df7..48a9f5e19 100644
--- a/flixopt/optimization.py
+++ b/flixopt/optimization.py
@@ -15,6 +15,7 @@
import pathlib
import sys
import timeit
+import warnings
from collections import Counter
from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable
@@ -24,7 +25,7 @@
from . import io as fx_io
from .clustering import Clustering, ClusteringModel, ClusteringParameters
from .components import Storage
-from .config import CONFIG, SUCCESS_LEVEL
+from .config import CONFIG, DEPRECATION_REMOVAL_VERSION, SUCCESS_LEVEL
from .core import DataConverter, TimeSeriesData, drop_constant_arrays
from .effects import PENALTY_EFFECT_LABEL
from .features import InvestmentModel
@@ -170,6 +171,13 @@ def __init__(
folder: pathlib.Path | None = None,
normalize_weights: bool = True,
):
+ warnings.warn(
+ f'Optimization is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
+ 'Use FlowSystem.optimize(solver) or FlowSystem.build_model() + FlowSystem.solve(solver) instead. '
+ 'Access results via FlowSystem.solution.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
_initialize_optimization_common(
self,
name=name,
@@ -260,6 +268,9 @@ def solve(
f'{" Main Results ":#^80}\n' + fx_io.format_yaml_string(self.main_results, compact_numeric_lists=True),
)
+ # Store solution on FlowSystem for direct Element access
+ self.flow_system.solution = self.model.solution
+
self.results = Results.from_optimization(self)
return self
@@ -380,11 +391,20 @@ def __init__(
folder: pathlib.Path | None = None,
normalize_weights: bool = True,
):
+ warnings.warn(
+ f'ClusteredOptimization is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
+ 'Use FlowSystem.transform.cluster(params) followed by FlowSystem.optimize(solver) instead. '
+ 'Example: clustered_fs = flow_system.transform.cluster(params); clustered_fs.optimize(solver)',
+ DeprecationWarning,
+ stacklevel=2,
+ )
if flow_system.scenarios is not None:
raise ValueError('Clustering is not supported for scenarios yet. Please use Optimization instead.')
if flow_system.periods is not None:
raise ValueError('Clustering is not supported for periods yet. Please use Optimization instead.')
- super().__init__(
+ # Skip parent deprecation warning by calling common init directly
+ _initialize_optimization_common(
+ self,
name=name,
flow_system=flow_system,
folder=folder,
@@ -449,7 +469,8 @@ def _perform_clustering(self):
)
self.clustering.cluster()
- self.clustering.plot(show=CONFIG.Plotting.default_show, save=self.folder / 'clustering.html')
+ result = self.clustering.plot(show=CONFIG.Plotting.default_show)
+ result.to_html(self.folder / 'clustering.html')
if self.clustering_parameters.aggregate_data_and_fix_non_binary_vars:
ds = self.flow_system.to_dataset()
for name, series in self.clustering.aggregated_data.items():
@@ -618,6 +639,12 @@ def __init__(
nr_of_previous_values: int = 1,
folder: pathlib.Path | None = None,
):
+ warnings.warn(
+ f'SegmentedOptimization is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
+ 'A replacement API for segmented optimization will be provided in a future release.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
_initialize_optimization_common(
self,
name=name,
diff --git a/flixopt/optimize_accessor.py b/flixopt/optimize_accessor.py
new file mode 100644
index 000000000..5428cd855
--- /dev/null
+++ b/flixopt/optimize_accessor.py
@@ -0,0 +1,91 @@
+"""
+Optimization accessor for FlowSystem.
+
+This module provides the OptimizeAccessor class that enables the
+`flow_system.optimize(...)` pattern with extensible optimization methods.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from .flow_system import FlowSystem
+ from .solvers import _Solver
+
+
+class OptimizeAccessor:
+ """
+ Accessor for optimization methods on FlowSystem.
+
+ This class provides the optimization API for FlowSystem, accessible via
+ `flow_system.optimize`. It supports both direct calling (standard optimization)
+ and method access for specialized optimization modes.
+
+ Examples:
+ Standard optimization (via __call__):
+
+ >>> flow_system.optimize(solver)
+ >>> print(flow_system.solution)
+
+ Future specialized modes:
+
+ >>> flow_system.optimize.clustered(solver, aggregation=params)
+ >>> flow_system.optimize.mga(solver, alternatives=5)
+ """
+
+ def __init__(self, flow_system: FlowSystem) -> None:
+ """
+ Initialize the accessor with a reference to the FlowSystem.
+
+ Args:
+ flow_system: The FlowSystem to optimize.
+ """
+ self._fs = flow_system
+
+ def __call__(self, solver: _Solver, normalize_weights: bool = True) -> FlowSystem:
+ """
+ Build and solve the optimization model in one step.
+
+ This is a convenience method that combines `build_model()` and `solve()`.
+ Use this for simple optimization workflows. For more control (e.g., inspecting
+ the model before solving, or adding custom constraints), use `build_model()`
+ and `solve()` separately.
+
+ Args:
+ solver: The solver to use (e.g., HighsSolver, GurobiSolver).
+ normalize_weights: Whether to normalize scenario/period weights to sum to 1.
+
+ Returns:
+ The FlowSystem, for method chaining.
+
+ Examples:
+ Simple optimization:
+
+ >>> flow_system.optimize(HighsSolver())
+ >>> print(flow_system.solution['Boiler(Q_th)|flow_rate'])
+
+ Access element solutions directly:
+
+ >>> flow_system.optimize(solver)
+ >>> boiler = flow_system.components['Boiler']
+ >>> print(boiler.solution)
+
+ Method chaining:
+
+ >>> solution = flow_system.optimize(solver).solution
+ """
+ self._fs.build_model(normalize_weights)
+ self._fs.solve(solver)
+ return self._fs
+
+ # Future methods can be added here:
+ #
+    # def clustered(self, solver: _Solver, aggregation: ClusteringParameters,
+ # normalize_weights: bool = True) -> FlowSystem:
+ # """Clustered optimization with time aggregation."""
+ # ...
+ #
+ # def mga(self, solver: _Solver, alternatives: int = 5) -> FlowSystem:
+ # """Modeling to Generate Alternatives."""
+ # ...
diff --git a/flixopt/plot_result.py b/flixopt/plot_result.py
new file mode 100644
index 000000000..683fbcf3e
--- /dev/null
+++ b/flixopt/plot_result.py
@@ -0,0 +1,143 @@
+"""Plot result container for unified plotting API.
+
+This module provides the PlotResult class that wraps plotting outputs
+across the entire flixopt package, ensuring a consistent interface.
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+ from pathlib import Path
+
+ import plotly.graph_objects as go
+ import xarray as xr
+
+
+@dataclass
+class PlotResult:
+ """Container returned by all plot methods. Holds both data and figure.
+
+ This class provides a unified interface for all plotting methods across
+ the flixopt package, enabling consistent method chaining and export options.
+
+ Attributes:
+ data: Prepared xarray Dataset used for the plot.
+ figure: Plotly figure object.
+
+ Examples:
+ Basic usage with chaining:
+
+ >>> result = flow_system.statistics.plot.balance('Bus')
+ >>> result.show().to_html('plot.html')
+
+ Accessing underlying data:
+
+ >>> result = flow_system.statistics.plot.flows()
+ >>> df = result.data.to_dataframe()
+ >>> result.to_csv('data.csv')
+
+ Customizing the figure:
+
+ >>> result = clustering.plot()
+ >>> result.update(title='My Custom Title').show()
+ """
+
+ data: xr.Dataset
+ figure: go.Figure
+
+ def _repr_html_(self) -> str:
+ """Return HTML representation for Jupyter notebook display."""
+ return self.figure.to_html(full_html=False, include_plotlyjs='cdn')
+
+ def show(self) -> PlotResult:
+ """Display the figure. Returns self for chaining."""
+ self.figure.show()
+ return self
+
+ def update(self, **layout_kwargs: Any) -> PlotResult:
+ """Update figure layout. Returns self for chaining.
+
+ Args:
+ **layout_kwargs: Arguments passed to plotly's update_layout().
+
+ Returns:
+ Self for method chaining.
+
+ Examples:
+ >>> result.update(title='New Title', height=600)
+ """
+ self.figure.update_layout(**layout_kwargs)
+ return self
+
+ def update_traces(self, **trace_kwargs: Any) -> PlotResult:
+ """Update figure traces. Returns self for chaining.
+
+ Args:
+ **trace_kwargs: Arguments passed to plotly's update_traces().
+
+ Returns:
+ Self for method chaining.
+
+ Examples:
+ >>> result.update_traces(line_width=2, marker_size=8)
+ """
+ self.figure.update_traces(**trace_kwargs)
+ return self
+
+ def to_html(self, path: str | Path) -> PlotResult:
+ """Save figure as interactive HTML. Returns self for chaining.
+
+ Args:
+ path: File path for the HTML output.
+
+ Returns:
+ Self for method chaining.
+ """
+ self.figure.write_html(str(path))
+ return self
+
+ def to_image(self, path: str | Path, **kwargs: Any) -> PlotResult:
+ """Save figure as static image. Returns self for chaining.
+
+ Args:
+ path: File path for the image (format inferred from extension).
+ **kwargs: Additional arguments passed to write_image().
+
+ Returns:
+ Self for method chaining.
+
+ Examples:
+ >>> result.to_image('plot.png', scale=2)
+ >>> result.to_image('plot.svg')
+ """
+ self.figure.write_image(str(path), **kwargs)
+ return self
+
+ def to_csv(self, path: str | Path, **kwargs: Any) -> PlotResult:
+ """Export the underlying data to CSV. Returns self for chaining.
+
+ Args:
+ path: File path for the CSV output.
+ **kwargs: Additional arguments passed to to_csv().
+
+ Returns:
+ Self for method chaining.
+ """
+ self.data.to_dataframe().to_csv(path, **kwargs)
+ return self
+
+ def to_netcdf(self, path: str | Path, **kwargs: Any) -> PlotResult:
+ """Export the underlying data to netCDF. Returns self for chaining.
+
+ Args:
+ path: File path for the netCDF output.
+ **kwargs: Additional arguments passed to to_netcdf().
+
+ Returns:
+ Self for method chaining.
+ """
+ self.data.to_netcdf(path, **kwargs)
+ return self
diff --git a/flixopt/plotting.py b/flixopt/plotting.py
index 0a8dfbc9b..db5a3eb5c 100644
--- a/flixopt/plotting.py
+++ b/flixopt/plotting.py
@@ -39,7 +39,7 @@
import plotly.offline
import xarray as xr
-from .color_processing import process_colors
+from .color_processing import ColorType, process_colors
from .config import CONFIG
if TYPE_CHECKING:
@@ -66,56 +66,6 @@
plt.register_cmap(name='portland', cmap=mcolors.LinearSegmentedColormap.from_list('portland', _portland_colors))
-ColorType = str | list[str] | dict[str, str]
-"""Flexible color specification type supporting multiple input formats for visualization.
-
-Color specifications can take several forms to accommodate different use cases:
-
-**Named colorscales** (str):
- - Standard colorscales: 'turbo', 'plasma', 'cividis', 'tab10', 'Set1'
- - Energy-focused: 'portland' (custom flixopt colorscale for energy systems)
- - Backend-specific maps available in Plotly and Matplotlib
-
-**Color Lists** (list[str]):
- - Explicit color sequences: ['red', 'blue', 'green', 'orange']
- - HEX codes: ['#FF0000', '#0000FF', '#00FF00', '#FFA500']
- - Mixed formats: ['red', '#0000FF', 'green', 'orange']
-
-**Label-to-Color Mapping** (dict[str, str]):
- - Explicit associations: {'Wind': 'skyblue', 'Solar': 'gold', 'Gas': 'brown'}
- - Ensures consistent colors across different plots and datasets
- - Ideal for energy system components with semantic meaning
-
-Examples:
- ```python
- # Named colorscale
- colors = 'turbo' # Automatic color generation
-
- # Explicit color list
- colors = ['red', 'blue', 'green', '#FFD700']
-
- # Component-specific mapping
- colors = {
- 'Wind_Turbine': 'skyblue',
- 'Solar_Panel': 'gold',
- 'Natural_Gas': 'brown',
- 'Battery': 'green',
- 'Electric_Load': 'darkred'
- }
- ```
-
-Color Format Support:
- - **Named Colors**: 'red', 'blue', 'forestgreen', 'darkorange'
- - **HEX Codes**: '#FF0000', '#0000FF', '#228B22', '#FF8C00'
- - **RGB Tuples**: (255, 0, 0), (0, 0, 255) [Matplotlib only]
- - **RGBA**: 'rgba(255,0,0,0.8)' [Plotly only]
-
-References:
- - HTML Color Names: https://htmlcolorcodes.com/color-names/
- - Matplotlib colorscales: https://matplotlib.org/stable/tutorials/colors/colorscales.html
- - Plotly Built-in Colorscales: https://plotly.com/python/builtin-colorscales/
-"""
-
PlottingEngine = Literal['plotly', 'matplotlib']
"""Identifier for the plotting engine to use."""
@@ -1192,6 +1142,57 @@ def draw_pie(ax, labels, values, subtitle):
return fig, axes
+def heatmap_with_plotly_v2(
+ data: xr.DataArray,
+ colors: ColorType | None = None,
+ title: str = '',
+ facet_col: str | None = None,
+ animation_frame: str | None = None,
+ facet_col_wrap: int | None = None,
+ **imshow_kwargs: Any,
+) -> go.Figure:
+ """
+ Plot a heatmap using Plotly's imshow.
+
+ Data should be prepared with dims in order: (y_axis, x_axis, [facet_col], [animation_frame]).
+ Use reshape_data_for_heatmap() to prepare time-series data before calling this.
+
+ Args:
+ data: DataArray with 2-4 dimensions. First two are heatmap axes.
+ colors: Colorscale name ('viridis', 'plasma', etc.).
+ title: Plot title.
+ facet_col: Dimension name for subplot columns (3rd dim).
+ animation_frame: Dimension name for animation (4th dim).
+ facet_col_wrap: Max columns before wrapping (only if < n_facets).
+ **imshow_kwargs: Additional args for px.imshow.
+
+ Returns:
+ Plotly Figure object.
+ """
+ if data.size == 0:
+ return go.Figure()
+
+ colors = colors or CONFIG.Plotting.default_sequential_colorscale
+ facet_col_wrap = facet_col_wrap or CONFIG.Plotting.default_facet_cols
+
+ imshow_args: dict[str, Any] = {
+ 'img': data,
+ 'color_continuous_scale': colors,
+ 'title': title,
+ **imshow_kwargs,
+ }
+
+ if facet_col and facet_col in data.dims:
+ imshow_args['facet_col'] = facet_col
+ if facet_col_wrap < data.sizes[facet_col]:
+ imshow_args['facet_col_wrap'] = facet_col_wrap
+
+ if animation_frame and animation_frame in data.dims:
+ imshow_args['animation_frame'] = animation_frame
+
+ return px.imshow(**imshow_args)
+
+
def heatmap_with_plotly(
data: xr.DataArray,
colors: ColorType | None = None,
diff --git a/flixopt/results.py b/flixopt/results.py
index f3d0c19a9..16d88743a 100644
--- a/flixopt/results.py
+++ b/flixopt/results.py
@@ -2,6 +2,7 @@
import copy
import datetime
+import json
import logging
import pathlib
import warnings
@@ -15,7 +16,7 @@
from . import io as fx_io
from . import plotting
from .color_processing import process_colors
-from .config import CONFIG, SUCCESS_LEVEL
+from .config import CONFIG, DEPRECATION_REMOVAL_VERSION, SUCCESS_LEVEL
from .flow_system import FlowSystem
from .structure import CompositeContainerMixin, ResultsContainer
@@ -47,6 +48,18 @@ def load_mapping_from_file(path: pathlib.Path) -> dict[str, str | list[str]]:
return fx_io.load_config_file(path)
+def _get_solution_attr(solution: xr.Dataset, key: str) -> dict:
+ """Get an attribute from solution, decoding JSON if necessary.
+
+ Solution attrs are stored as JSON strings for netCDF compatibility.
+ This helper handles both JSON strings and dicts (for backward compatibility).
+ """
+ value = solution.attrs.get(key, {})
+ if isinstance(value, str):
+ return json.loads(value)
+ return value
+
+
class _FlowSystemRestorationError(Exception):
"""Exception raised when a FlowSystem cannot be restored from dataset."""
@@ -222,6 +235,14 @@ def __init__(
folder: Results storage folder.
model: Linopy optimization model.
"""
+ warnings.warn(
+ f'Results is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
+ 'Access results directly via FlowSystem.solution after optimization, or use the '
+ '.plot accessor on FlowSystem and its components (e.g., flow_system.plot.heatmap(...)). '
+ 'To load old result files, use FlowSystem.from_old_results(folder, name).',
+ DeprecationWarning,
+ stacklevel=2,
+ )
self.solution = solution
self.flow_system_data = flow_system_data
@@ -232,19 +253,25 @@ def __init__(
# Create ResultsContainers for better access patterns
components_dict = {
- label: ComponentResults(self, **infos) for label, infos in self.solution.attrs['Components'].items()
+ label: ComponentResults(self, **infos)
+ for label, infos in _get_solution_attr(self.solution, 'Components').items()
}
self.components = ResultsContainer(
elements=components_dict, element_type_name='component results', truncate_repr=10
)
- buses_dict = {label: BusResults(self, **infos) for label, infos in self.solution.attrs['Buses'].items()}
+ buses_dict = {
+ label: BusResults(self, **infos) for label, infos in _get_solution_attr(self.solution, 'Buses').items()
+ }
self.buses = ResultsContainer(elements=buses_dict, element_type_name='bus results', truncate_repr=10)
- effects_dict = {label: EffectResults(self, **infos) for label, infos in self.solution.attrs['Effects'].items()}
+ effects_dict = {
+ label: EffectResults(self, **infos) for label, infos in _get_solution_attr(self.solution, 'Effects').items()
+ }
self.effects = ResultsContainer(elements=effects_dict, element_type_name='effect results', truncate_repr=10)
- if 'Flows' not in self.solution.attrs:
+ flows_attr = _get_solution_attr(self.solution, 'Flows')
+ if not flows_attr:
warnings.warn(
'No Data about flows found in the results. This data is only included since v2.2.0. Some functionality '
'is not availlable. We recommend to evaluate your results with a version <2.2.0.',
@@ -253,9 +280,7 @@ def __init__(
flows_dict = {}
self._has_flow_data = False
else:
- flows_dict = {
- label: FlowResults(self, **infos) for label, infos in self.solution.attrs.get('Flows', {}).items()
- }
+ flows_dict = {label: FlowResults(self, **infos) for label, infos in flows_attr.items()}
self._has_flow_data = True
self.flows = ResultsContainer(elements=flows_dict, element_type_name='flow results', truncate_repr=10)
@@ -387,7 +412,7 @@ def setup_colors(
def get_all_variable_names(comp: str) -> list[str]:
"""Collect all variables from the component, including flows and flow_hours."""
comp_object = self.components[comp]
- var_names = [comp] + list(comp_object._variable_names)
+ var_names = [comp] + list(comp_object.variable_names)
for flow in comp_object.flows:
var_names.extend([flow, f'{flow}|flow_hours'])
return var_names
@@ -542,21 +567,40 @@ def flow_rates(
) -> xr.DataArray:
"""Returns a DataArray containing the flow rates of each Flow.
- Args:
- start: Optional source node(s) to filter by. Can be a single node name or a list of names.
- end: Optional destination node(s) to filter by. Can be a single node name or a list of names.
- component: Optional component(s) to filter by. Can be a single component name or a list of names.
+ .. deprecated::
+ Use `results.plot.all_flow_rates` (Dataset) or
+ `results.flows['FlowLabel'].flow_rate` (DataArray) instead.
- Further usage:
- Convert the dataarray to a dataframe:
- >>>results.flow_rates().to_pandas()
- Get the max or min over time:
- >>>results.flow_rates().max('time')
- Sum up the flow rates of flows with the same start and end:
- >>>results.flow_rates(end='Fernwärme').groupby('start').sum(dim='flow')
- To recombine filtered dataarrays, use `xr.concat` with dim 'flow':
- >>>xr.concat([results.flow_rates(start='Fernwärme'), results.flow_rates(end='Fernwärme')], dim='flow')
+ **Note**: The new API differs from this method:
+
+ - Returns ``xr.Dataset`` (not ``DataArray``) with flow labels as variable names
+ - No ``'flow'`` dimension - each flow is a separate variable
+ - No filtering parameters - filter using these alternatives::
+
+ # Select specific flows by label
+ ds = results.plot.all_flow_rates
+ ds[['Boiler(Q_th)', 'CHP(Q_th)']]
+
+ # Filter by substring in label
+ ds[[v for v in ds.data_vars if 'Boiler' in v]]
+
+ # Filter by bus (start/end) - get flows connected to a bus
+ results['Fernwärme'].inputs # list of input flow labels
+ results['Fernwärme'].outputs # list of output flow labels
+ ds[results['Fernwärme'].inputs] # Dataset with only inputs to bus
+
+ # Filter by component - get flows of a component
+ results['Boiler'].inputs # list of input flow labels
+ results['Boiler'].outputs # list of output flow labels
"""
+ warnings.warn(
+ 'results.flow_rates() is deprecated. '
+ 'Use results.plot.all_flow_rates instead (returns Dataset, not DataArray). '
+ 'Note: The new API has no filtering parameters and uses flow labels as variable names. '
+ f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
if not self._has_flow_data:
raise ValueError('Flow data is not available in this results object (pre-v2.2.0).')
if self._flow_rates is None:
@@ -577,6 +621,32 @@ def flow_hours(
) -> xr.DataArray:
"""Returns a DataArray containing the flow hours of each Flow.
+ .. deprecated::
+ Use `results.plot.all_flow_hours` (Dataset) or
+ `results.flows['FlowLabel'].flow_rate * results.hours_per_timestep` instead.
+
+ **Note**: The new API differs from this method:
+
+ - Returns ``xr.Dataset`` (not ``DataArray``) with flow labels as variable names
+ - No ``'flow'`` dimension - each flow is a separate variable
+ - No filtering parameters - filter using these alternatives::
+
+ # Select specific flows by label
+ ds = results.plot.all_flow_hours
+ ds[['Boiler(Q_th)', 'CHP(Q_th)']]
+
+ # Filter by substring in label
+ ds[[v for v in ds.data_vars if 'Boiler' in v]]
+
+ # Filter by bus (start/end) - get flows connected to a bus
+ results['Fernwärme'].inputs # list of input flow labels
+ results['Fernwärme'].outputs # list of output flow labels
+ ds[results['Fernwärme'].inputs] # Dataset with only inputs to bus
+
+ # Filter by component - get flows of a component
+ results['Boiler'].inputs # list of input flow labels
+ results['Boiler'].outputs # list of output flow labels
+
Flow hours represent the total energy/material transferred over time,
calculated by multiplying flow rates by the duration of each timestep.
@@ -596,6 +666,14 @@ def flow_hours(
>>>xr.concat([results.flow_hours(start='Fernwärme'), results.flow_hours(end='Fernwärme')], dim='flow')
"""
+ warnings.warn(
+ 'results.flow_hours() is deprecated. '
+ 'Use results.plot.all_flow_hours instead (returns Dataset, not DataArray). '
+ 'Note: The new API has no filtering parameters and uses flow labels as variable names. '
+ f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
if self._flow_hours is None:
self._flow_hours = (self.flow_rates() * self.hours_per_timestep).rename('flow_hours')
filters = {k: v for k, v in {'start': start, 'end': end, 'component': component}.items() if v is not None}
@@ -608,18 +686,41 @@ def sizes(
component: str | list[str] | None = None,
) -> xr.DataArray:
"""Returns a dataset with the sizes of the Flows.
- Args:
- start: Optional source node(s) to filter by. Can be a single node name or a list of names.
- end: Optional destination node(s) to filter by. Can be a single node name or a list of names.
- component: Optional component(s) to filter by. Can be a single component name or a list of names.
- Further usage:
- Convert the dataarray to a dataframe:
- >>>results.sizes().to_pandas()
- To recombine filtered dataarrays, use `xr.concat` with dim 'flow':
- >>>xr.concat([results.sizes(start='Fernwärme'), results.sizes(end='Fernwärme')], dim='flow')
+ .. deprecated::
+ Use `results.plot.all_sizes` (Dataset) or
+ `results.flows['FlowLabel'].size` (DataArray) instead.
+
+ **Note**: The new API differs from this method:
+
+ - Returns ``xr.Dataset`` (not ``DataArray``) with flow labels as variable names
+ - No ``'flow'`` dimension - each flow is a separate variable
+ - No filtering parameters - filter using these alternatives::
+
+ # Select specific flows by label
+ ds = results.plot.all_sizes
+ ds[['Boiler(Q_th)', 'CHP(Q_th)']]
+
+ # Filter by substring in label
+ ds[[v for v in ds.data_vars if 'Boiler' in v]]
+ # Filter by bus (start/end) - get flows connected to a bus
+ results['Fernwärme'].inputs # list of input flow labels
+ results['Fernwärme'].outputs # list of output flow labels
+ ds[results['Fernwärme'].inputs] # Dataset with only inputs to bus
+
+ # Filter by component - get flows of a component
+ results['Boiler'].inputs # list of input flow labels
+ results['Boiler'].outputs # list of output flow labels
"""
+ warnings.warn(
+ 'results.sizes() is deprecated. '
+ 'Use results.plot.all_sizes instead (returns Dataset, not DataArray). '
+ 'Note: The new API has no filtering parameters and uses flow labels as variable names. '
+ f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
if not self._has_flow_data:
raise ValueError('Flow data is not available in this results object (pre-v2.2.0).')
if self._sizes is None:
@@ -1028,6 +1129,61 @@ def plot_network(
path = self.folder / f'{self.name}--network.html'
return self.flow_system.plot_network(controls=controls, path=path, show=show)
+ def to_flow_system(self) -> FlowSystem:
+ """Convert Results to a FlowSystem with solution attached.
+
+ This method migrates results from the deprecated Results format to the
+ new FlowSystem-based format, enabling use of the modern API.
+
+ Note:
+ For loading old results files directly, consider using
+ ``FlowSystem.from_old_results(folder, name)`` instead.
+
+ Returns:
+ FlowSystem: A FlowSystem instance with the solution data attached.
+
+ Caveats:
+ - The linopy model is NOT attached (only the solution data)
+ - Element submodels are NOT recreated (no re-optimization without
+ calling build_model() first)
+ - Variable/constraint names on elements are NOT restored
+
+ Examples:
+ Convert loaded Results to FlowSystem:
+
+ ```python
+ # Load old results
+ results = Results.from_file('results', 'my_optimization')
+
+ # Convert to FlowSystem
+ flow_system = results.to_flow_system()
+
+ # Use new API
+ flow_system.plot.heatmap()
+ flow_system.solution.to_netcdf('solution.nc')
+
+ # Save in new single-file format
+ flow_system.to_netcdf('my_optimization.nc')
+ ```
+ """
+ from flixopt.io import convert_old_dataset
+
+ # Convert flow_system_data to new parameter names
+ convert_old_dataset(self.flow_system_data)
+
+ # Reconstruct FlowSystem from stored data
+ flow_system = FlowSystem.from_dataset(self.flow_system_data)
+
+ # Convert solution attrs from dicts to JSON strings for consistency with new format
+ # The _get_solution_attr helper handles both formats, but we normalize here
+ solution = self.solution.copy()
+ for key in ['Components', 'Buses', 'Effects', 'Flows']:
+ if key in solution.attrs and isinstance(solution.attrs[key], dict):
+ solution.attrs[key] = json.dumps(solution.attrs[key])
+
+ flow_system.solution = solution
+ return flow_system
+
def to_file(
self,
folder: str | pathlib.Path | None = None,
@@ -1095,10 +1251,10 @@ class _ElementResults:
def __init__(self, results: Results, label: str, variables: list[str], constraints: list[str]):
self._results = results
self.label = label
- self._variable_names = variables
+ self.variable_names = variables
self._constraint_names = constraints
- self.solution = self._results.solution[self._variable_names]
+ self.solution = self._results.solution[self.variable_names]
@property
def variables(self) -> linopy.Variables:
@@ -1109,7 +1265,7 @@ def variables(self) -> linopy.Variables:
"""
if self._results.model is None:
raise ValueError('The linopy model is not available.')
- return self._results.model.variables[self._variable_names]
+ return self._results.model.variables[self.variable_names]
@property
def constraints(self) -> linopy.Constraints:
@@ -1574,7 +1730,7 @@ class ComponentResults(_NodeResults):
@property
def is_storage(self) -> bool:
- return self._charge_state in self._variable_names
+ return self._charge_state in self.variable_names
@property
def _charge_state(self) -> str:
@@ -1835,7 +1991,7 @@ def get_shares_from(self, element: str) -> xr.Dataset:
Returns:
xr.Dataset: Element shares to this effect.
"""
- return self.solution[[name for name in self._variable_names if name.startswith(f'{element}->')]]
+ return self.solution[[name for name in self.variable_names if name.startswith(f'{element}->')]]
class FlowResults(_ElementResults):
@@ -2033,6 +2189,12 @@ def __init__(
name: str,
folder: pathlib.Path | None = None,
):
+ warnings.warn(
+ f'SegmentedResults is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
+ 'A replacement API for segmented optimization will be provided in a future release.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
self.segment_results = segment_results
self.all_timesteps = all_timesteps
self.timesteps_per_segment = timesteps_per_segment
diff --git a/flixopt/statistics_accessor.py b/flixopt/statistics_accessor.py
new file mode 100644
index 000000000..020435c5c
--- /dev/null
+++ b/flixopt/statistics_accessor.py
@@ -0,0 +1,2282 @@
+"""Statistics accessor for FlowSystem.
+
+This module provides a user-friendly API for analyzing optimization results
+directly from a FlowSystem.
+
+Structure:
+ - `.statistics` - Data/metrics access (cached xarray Datasets)
+ - `.statistics.plot` - Plotting methods using the statistics data
+
+Example:
+ >>> flow_system.optimize(solver)
+ >>> # Data access
+ >>> flow_system.statistics.flow_rates
+ >>> flow_system.statistics.flow_hours
+ >>> # Plotting
+ >>> flow_system.statistics.plot.balance('ElectricityBus')
+ >>> flow_system.statistics.plot.heatmap('Boiler|on')
+"""
+
+from __future__ import annotations
+
+import logging
+import re
+from typing import TYPE_CHECKING, Any, Literal
+
+import numpy as np
+import pandas as pd
+import plotly.express as px
+import plotly.graph_objects as go
+import xarray as xr
+
+from .color_processing import ColorType, hex_to_rgba, process_colors
+from .config import CONFIG
+from .plot_result import PlotResult
+
+if TYPE_CHECKING:
+ from .flow_system import FlowSystem
+
+logger = logging.getLogger('flixopt')
+
+# Type aliases
+SelectType = dict[str, Any]
+"""xarray-style selection dict: {'time': slice(...), 'scenario': 'base'}"""
+
+FilterType = str | list[str]
+"""For include/exclude filtering: 'Boiler' or ['Boiler', 'CHP']"""
+
+
+# Sankey select types with Literal keys for IDE autocomplete
+FlowSankeySelect = dict[Literal['flow', 'bus', 'component', 'carrier', 'time', 'period', 'scenario'], Any]
+"""Select options for flow-based sankey: flow, bus, component, carrier, time, period, scenario."""
+
+EffectsSankeySelect = dict[Literal['effect', 'component', 'contributor', 'period', 'scenario'], Any]
+"""Select options for effects sankey: effect, component, contributor, period, scenario."""
+
+
+def _reshape_time_for_heatmap(
+ data: xr.DataArray,
+ reshape: tuple[str, str],
+ fill: Literal['ffill', 'bfill'] | None = 'ffill',
+) -> xr.DataArray:
+ """Reshape time dimension into 2D (timeframe × timestep) for heatmap display.
+
+ Args:
+ data: DataArray with 'time' dimension.
+ reshape: Tuple of (outer_freq, inner_freq), e.g. ('D', 'h') for days × hours.
+ fill: Method to fill missing values after resampling.
+
+ Returns:
+ DataArray with 'time' replaced by 'timestep' and 'timeframe' dimensions.
+ """
+ if 'time' not in data.dims:
+ return data
+
+ timeframes, timesteps_per_frame = reshape
+
+ # Define formats for different combinations
+ formats = {
+ ('YS', 'W'): ('%Y', '%W'),
+ ('YS', 'D'): ('%Y', '%j'),
+ ('YS', 'h'): ('%Y', '%j %H:00'),
+ ('MS', 'D'): ('%Y-%m', '%d'),
+ ('MS', 'h'): ('%Y-%m', '%d %H:00'),
+ ('W', 'D'): ('%Y-w%W', '%w_%A'),
+ ('W', 'h'): ('%Y-w%W', '%w_%A %H:00'),
+ ('D', 'h'): ('%Y-%m-%d', '%H:00'),
+ ('D', '15min'): ('%Y-%m-%d', '%H:%M'),
+ ('h', '15min'): ('%Y-%m-%d %H:00', '%M'),
+ ('h', 'min'): ('%Y-%m-%d %H:00', '%M'),
+ }
+
+ format_pair = (timeframes, timesteps_per_frame)
+ if format_pair not in formats:
+ raise ValueError(f'{format_pair} is not a valid format. Choose from {list(formats.keys())}')
+ period_format, step_format = formats[format_pair]
+
+ # Resample along time dimension
+ resampled = data.resample(time=timesteps_per_frame).mean()
+
+ # Apply fill if specified
+ if fill == 'ffill':
+ resampled = resampled.ffill(dim='time')
+ elif fill == 'bfill':
+ resampled = resampled.bfill(dim='time')
+
+ # Create period and step labels
+ time_values = pd.to_datetime(resampled.coords['time'].values)
+ period_labels = time_values.strftime(period_format)
+ step_labels = time_values.strftime(step_format)
+
+ # Handle special case for weekly day format
+ if '%w_%A' in step_format:
+ step_labels = pd.Series(step_labels).replace('0_Sunday', '7_Sunday').values
+
+ # Add period and step as coordinates
+ resampled = resampled.assign_coords({'timeframe': ('time', period_labels), 'timestep': ('time', step_labels)})
+
+ # Convert to multi-index and unstack
+ resampled = resampled.set_index(time=['timeframe', 'timestep'])
+ result = resampled.unstack('time')
+
+ # Reorder: timestep, timeframe, then other dimensions
+ other_dims = [d for d in result.dims if d not in ['timestep', 'timeframe']]
+ return result.transpose('timestep', 'timeframe', *other_dims)
+
+
+def _heatmap_figure(
+ data: xr.DataArray,
+ colors: str | list[str] | None = None,
+ title: str = '',
+ facet_col: str | None = None,
+ animation_frame: str | None = None,
+ facet_col_wrap: int | None = None,
+ **imshow_kwargs: Any,
+) -> go.Figure:
+ """Create heatmap figure using px.imshow.
+
+ Args:
+ data: DataArray with 2-4 dimensions. First two are heatmap axes.
+ colors: Colorscale name (str) or list of colors. Dicts are not supported
+ for heatmaps as color_continuous_scale requires a colorscale specification.
+ title: Plot title.
+ facet_col: Dimension for subplot columns.
+ animation_frame: Dimension for animation slider.
+ facet_col_wrap: Max columns before wrapping.
+ **imshow_kwargs: Additional args for px.imshow.
+
+ Returns:
+ Plotly Figure.
+ """
+ if data.size == 0:
+ return go.Figure()
+
+ colors = colors or CONFIG.Plotting.default_sequential_colorscale
+ facet_col_wrap = facet_col_wrap or CONFIG.Plotting.default_facet_cols
+
+ imshow_args: dict[str, Any] = {
+ 'img': data,
+ 'color_continuous_scale': colors,
+ 'title': title,
+ **imshow_kwargs,
+ }
+
+ if facet_col and facet_col in data.dims:
+ imshow_args['facet_col'] = facet_col
+ if facet_col_wrap < data.sizes[facet_col]:
+ imshow_args['facet_col_wrap'] = facet_col_wrap
+
+ if animation_frame and animation_frame in data.dims:
+ imshow_args['animation_frame'] = animation_frame
+
+ return px.imshow(**imshow_args)
+
+
+# --- Helper functions ---
+
+
+def _filter_by_pattern(
+ names: list[str],
+ include: FilterType | None,
+ exclude: FilterType | None,
+) -> list[str]:
+ """Filter names using substring matching."""
+ result = names.copy()
+ if include is not None:
+ patterns = [include] if isinstance(include, str) else include
+ result = [n for n in result if any(p in n for p in patterns)]
+ if exclude is not None:
+ patterns = [exclude] if isinstance(exclude, str) else exclude
+ result = [n for n in result if not any(p in n for p in patterns)]
+ return result
+
+
+def _apply_selection(ds: xr.Dataset, select: SelectType | None, drop: bool = True) -> xr.Dataset:
+ """Apply xarray-style selection to dataset.
+
+ Args:
+ ds: Dataset to select from.
+ select: xarray-style selection dict.
+ drop: If True (default), drop dimensions that become scalar after selection.
+ This prevents auto-faceting when selecting a single value.
+ """
+ if select is None:
+ return ds
+ valid_select = {k: v for k, v in select.items() if k in ds.dims or k in ds.coords}
+ if valid_select:
+ ds = ds.sel(valid_select, drop=drop)
+ return ds
+
+
+def _filter_by_carrier(ds: xr.Dataset, carrier: str | list[str] | None) -> xr.Dataset:
+ """Filter dataset variables by carrier attribute.
+
+ Args:
+ ds: Dataset with variables that have 'carrier' attributes.
+ carrier: Carrier name(s) to keep. None means no filtering.
+
+ Returns:
+ Dataset containing only variables matching the carrier(s).
+ """
+ if carrier is None:
+ return ds
+
+ carriers = [carrier] if isinstance(carrier, str) else carrier
+ carriers = [c.lower() for c in carriers]
+
+    matching_vars = [var for var in ds.data_vars if (ds[var].attrs.get('carrier') or '').lower() in carriers]
+ return ds[matching_vars] if matching_vars else xr.Dataset()
+
+
+def _resolve_facets(
+ ds: xr.Dataset,
+ facet_col: str | None,
+ facet_row: str | None,
+) -> tuple[str | None, str | None]:
+ """Resolve facet dimensions, returning None if not present in data."""
+ actual_facet_col = facet_col if facet_col and facet_col in ds.dims else None
+ actual_facet_row = facet_row if facet_row and facet_row in ds.dims else None
+ return actual_facet_col, actual_facet_row
+
+
+def _dataset_to_long_df(ds: xr.Dataset, value_name: str = 'value', var_name: str = 'variable') -> pd.DataFrame:
+ """Convert xarray Dataset to long-form DataFrame for plotly express."""
+ if not ds.data_vars:
+ return pd.DataFrame()
+ if all(ds[var].ndim == 0 for var in ds.data_vars):
+ rows = [{var_name: var, value_name: float(ds[var].values)} for var in ds.data_vars]
+ return pd.DataFrame(rows)
+ df = ds.to_dataframe().reset_index()
+ # Only use coordinates that are actually present as columns after reset_index
+ coord_cols = [c for c in ds.coords.keys() if c in df.columns]
+ return df.melt(id_vars=coord_cols, var_name=var_name, value_name=value_name)
+
+
+def _create_stacked_bar(
+ ds: xr.Dataset,
+ colors: ColorType,
+ title: str,
+ facet_col: str | None,
+ facet_row: str | None,
+ **plotly_kwargs: Any,
+) -> go.Figure:
+ """Create a stacked bar chart from xarray Dataset."""
+ df = _dataset_to_long_df(ds)
+ if df.empty:
+ return go.Figure()
+ x_col = 'time' if 'time' in df.columns else df.columns[0]
+ variables = df['variable'].unique().tolist()
+ color_map = process_colors(colors, variables, default_colorscale=CONFIG.Plotting.default_qualitative_colorscale)
+ fig = px.bar(
+ df,
+ x=x_col,
+ y='value',
+ color='variable',
+ facet_col=facet_col,
+ facet_row=facet_row,
+ color_discrete_map=color_map,
+ title=title,
+ **plotly_kwargs,
+ )
+ fig.update_layout(barmode='relative', bargap=0, bargroupgap=0)
+ fig.update_traces(marker_line_width=0)
+ return fig
+
+
+def _create_line(
+ ds: xr.Dataset,
+ colors: ColorType,
+ title: str,
+ facet_col: str | None,
+ facet_row: str | None,
+ **plotly_kwargs: Any,
+) -> go.Figure:
+ """Create a line chart from xarray Dataset."""
+ df = _dataset_to_long_df(ds)
+ if df.empty:
+ return go.Figure()
+ x_col = 'time' if 'time' in df.columns else df.columns[0]
+ variables = df['variable'].unique().tolist()
+ color_map = process_colors(colors, variables, default_colorscale=CONFIG.Plotting.default_qualitative_colorscale)
+ return px.line(
+ df,
+ x=x_col,
+ y='value',
+ color='variable',
+ facet_col=facet_col,
+ facet_row=facet_row,
+ color_discrete_map=color_map,
+ title=title,
+ **plotly_kwargs,
+ )
+
+
+# --- Statistics Accessor (data only) ---
+
+
+class StatisticsAccessor:
+ """Statistics accessor for FlowSystem. Access via ``flow_system.statistics``.
+
+ This accessor provides cached data properties for optimization results.
+ Use ``.plot`` for visualization methods.
+
+ Data Properties:
+ ``flow_rates`` : xr.Dataset
+ Flow rates for all flows.
+ ``flow_hours`` : xr.Dataset
+ Flow hours (energy) for all flows.
+ ``sizes`` : xr.Dataset
+ Sizes for all flows.
+ ``charge_states`` : xr.Dataset
+ Charge states for all storage components.
+ ``temporal_effects`` : xr.Dataset
+ Temporal effects per contributor per timestep.
+ ``periodic_effects`` : xr.Dataset
+ Periodic (investment) effects per contributor.
+ ``total_effects`` : xr.Dataset
+ Total effects (temporal + periodic) per contributor.
+ ``effect_share_factors`` : dict
+ Conversion factors between effects.
+
+ Examples:
+ >>> flow_system.optimize(solver)
+ >>> flow_system.statistics.flow_rates # Get data
+ >>> flow_system.statistics.plot.balance('Bus') # Plot
+ """
+
+ def __init__(self, flow_system: FlowSystem) -> None:
+ self._fs = flow_system
+ # Cached data
+ self._flow_rates: xr.Dataset | None = None
+ self._flow_hours: xr.Dataset | None = None
+ self._flow_sizes: xr.Dataset | None = None
+ self._storage_sizes: xr.Dataset | None = None
+ self._sizes: xr.Dataset | None = None
+ self._charge_states: xr.Dataset | None = None
+ self._effect_share_factors: dict[str, dict] | None = None
+ self._temporal_effects: xr.Dataset | None = None
+ self._periodic_effects: xr.Dataset | None = None
+ self._total_effects: xr.Dataset | None = None
+ # Plotting accessor (lazy)
+ self._plot: StatisticsPlotAccessor | None = None
+
+ def _require_solution(self) -> xr.Dataset:
+ """Get solution, raising if not available."""
+ if self._fs.solution is None:
+ raise RuntimeError('FlowSystem has no solution. Run optimize() or solve() first.')
+ return self._fs.solution
+
+ @property
+ def carrier_colors(self) -> dict[str, str]:
+ """Cached mapping of carrier name to color.
+
+ Delegates to topology accessor for centralized color caching.
+
+ Returns:
+ Dict mapping carrier names (lowercase) to hex color strings.
+ """
+ return self._fs.topology.carrier_colors
+
+ @property
+ def component_colors(self) -> dict[str, str]:
+ """Cached mapping of component label to color.
+
+ Delegates to topology accessor for centralized color caching.
+
+ Returns:
+ Dict mapping component labels to hex color strings.
+ """
+ return self._fs.topology.component_colors
+
+ @property
+ def bus_colors(self) -> dict[str, str]:
+ """Cached mapping of bus label to color (from carrier).
+
+ Delegates to topology accessor for centralized color caching.
+
+ Returns:
+ Dict mapping bus labels to hex color strings.
+ """
+ return self._fs.topology.bus_colors
+
+ @property
+ def carrier_units(self) -> dict[str, str]:
+ """Cached mapping of carrier name to unit string.
+
+ Delegates to topology accessor for centralized unit caching.
+
+ Returns:
+ Dict mapping carrier names (lowercase) to unit strings.
+ """
+ return self._fs.topology.carrier_units
+
+ @property
+ def effect_units(self) -> dict[str, str]:
+ """Cached mapping of effect label to unit string.
+
+ Delegates to topology accessor for centralized unit caching.
+
+ Returns:
+ Dict mapping effect labels to unit strings.
+ """
+ return self._fs.topology.effect_units
+
+ @property
+ def plot(self) -> StatisticsPlotAccessor:
+ """Access plotting methods for statistics.
+
+ Returns:
+ A StatisticsPlotAccessor instance.
+
+ Examples:
+ >>> flow_system.statistics.plot.balance('ElectricityBus')
+ >>> flow_system.statistics.plot.heatmap('Boiler|on')
+ """
+ if self._plot is None:
+ self._plot = StatisticsPlotAccessor(self)
+ return self._plot
+
+ @property
+ def flow_rates(self) -> xr.Dataset:
+ """All flow rates as a Dataset with flow labels as variable names.
+
+ Each variable has attributes:
+ - 'carrier': carrier type (e.g., 'heat', 'electricity', 'gas')
+ - 'unit': carrier unit (e.g., 'kW')
+ """
+ self._require_solution()
+ if self._flow_rates is None:
+ flow_rate_vars = [v for v in self._fs.solution.data_vars if v.endswith('|flow_rate')]
+ flow_carriers = self._fs.flow_carriers # Cached lookup
+ carrier_units = self.carrier_units # Cached lookup
+ data_vars = {}
+ for v in flow_rate_vars:
+ flow_label = v.replace('|flow_rate', '')
+ da = self._fs.solution[v].copy()
+ # Add carrier and unit as attributes
+ carrier = flow_carriers.get(flow_label)
+ da.attrs['carrier'] = carrier
+ da.attrs['unit'] = carrier_units.get(carrier, '') if carrier else ''
+ data_vars[flow_label] = da
+ self._flow_rates = xr.Dataset(data_vars)
+ return self._flow_rates
+
+ @property
+ def flow_hours(self) -> xr.Dataset:
+ """All flow hours (energy) as a Dataset with flow labels as variable names.
+
+ Each variable has attributes:
+ - 'carrier': carrier type (e.g., 'heat', 'electricity', 'gas')
+ - 'unit': energy unit (e.g., 'kWh', 'm3/s*h')
+ """
+ self._require_solution()
+ if self._flow_hours is None:
+ hours = self._fs.hours_per_timestep
+ flow_rates = self.flow_rates
+ # Multiply and preserve/transform attributes
+ data_vars = {}
+ for var in flow_rates.data_vars:
+ da = flow_rates[var] * hours
+ da.attrs['carrier'] = flow_rates[var].attrs.get('carrier')
+ # Convert power unit to energy unit (e.g., 'kW' -> 'kWh', 'm3/s' -> 'm3/s*h')
+ power_unit = flow_rates[var].attrs.get('unit', '')
+ da.attrs['unit'] = f'{power_unit}*h' if power_unit else ''
+ data_vars[var] = da
+ self._flow_hours = xr.Dataset(data_vars)
+ return self._flow_hours
+
+ @property
+ def flow_sizes(self) -> xr.Dataset:
+ """Flow sizes as a Dataset with flow labels as variable names."""
+ self._require_solution()
+ if self._flow_sizes is None:
+ flow_labels = set(self._fs.flows.keys())
+ size_vars = [
+ v for v in self._fs.solution.data_vars if v.endswith('|size') and v.replace('|size', '') in flow_labels
+ ]
+ self._flow_sizes = xr.Dataset({v.replace('|size', ''): self._fs.solution[v] for v in size_vars})
+ return self._flow_sizes
+
+ @property
+ def storage_sizes(self) -> xr.Dataset:
+ """Storage capacity sizes as a Dataset with storage labels as variable names."""
+ self._require_solution()
+ if self._storage_sizes is None:
+ storage_labels = set(self._fs.storages.keys())
+ size_vars = [
+ v
+ for v in self._fs.solution.data_vars
+ if v.endswith('|size') and v.replace('|size', '') in storage_labels
+ ]
+ self._storage_sizes = xr.Dataset({v.replace('|size', ''): self._fs.solution[v] for v in size_vars})
+ return self._storage_sizes
+
+ @property
+ def sizes(self) -> xr.Dataset:
+ """All investment sizes (flows and storage capacities) as a Dataset."""
+ if self._sizes is None:
+ self._sizes = xr.merge([self.flow_sizes, self.storage_sizes])
+ return self._sizes
+
+ @property
+ def charge_states(self) -> xr.Dataset:
+ """All storage charge states as a Dataset with storage labels as variable names."""
+ self._require_solution()
+ if self._charge_states is None:
+ charge_vars = [v for v in self._fs.solution.data_vars if v.endswith('|charge_state')]
+ self._charge_states = xr.Dataset(
+ {v.replace('|charge_state', ''): self._fs.solution[v] for v in charge_vars}
+ )
+ return self._charge_states
+
+ @property
+ def effect_share_factors(self) -> dict[str, dict]:
+ """Effect share factors for temporal and periodic modes.
+
+ Returns:
+ Dict with 'temporal' and 'periodic' keys, each containing
+ conversion factors between effects.
+ """
+ self._require_solution()
+ if self._effect_share_factors is None:
+ factors = self._fs.effects.calculate_effect_share_factors()
+ self._effect_share_factors = {'temporal': factors[0], 'periodic': factors[1]}
+ return self._effect_share_factors
+
+ @property
+ def temporal_effects(self) -> xr.Dataset:
+ """Temporal effects per contributor per timestep.
+
+ Returns a Dataset where each effect is a data variable with dimensions
+ [time, contributor] (plus period/scenario if present).
+
+ Coordinates:
+ - contributor: Individual contributor labels
+ - component: Parent component label for groupby operations
+ - component_type: Component type (e.g., 'Boiler', 'Source', 'Sink')
+
+ Examples:
+ >>> # Get costs per contributor per timestep
+ >>> statistics.temporal_effects['costs']
+ >>> # Sum over all contributors to get total costs per timestep
+ >>> statistics.temporal_effects['costs'].sum('contributor')
+ >>> # Group by component
+ >>> statistics.temporal_effects['costs'].groupby('component').sum()
+
+ Returns:
+ xr.Dataset with effects as variables and contributor dimension.
+ """
+ self._require_solution()
+ if self._temporal_effects is None:
+ ds = self._create_effects_dataset('temporal')
+ dim_order = ['time', 'period', 'scenario', 'contributor']
+ self._temporal_effects = ds.transpose(*dim_order, missing_dims='ignore')
+ return self._temporal_effects
+
+ @property
+ def periodic_effects(self) -> xr.Dataset:
+ """Periodic (investment) effects per contributor.
+
+ Returns a Dataset where each effect is a data variable with dimensions
+ [contributor] (plus period/scenario if present).
+
+ Coordinates:
+ - contributor: Individual contributor labels
+ - component: Parent component label for groupby operations
+ - component_type: Component type (e.g., 'Boiler', 'Source', 'Sink')
+
+ Examples:
+ >>> # Get investment costs per contributor
+ >>> statistics.periodic_effects['costs']
+ >>> # Sum over all contributors to get total investment costs
+ >>> statistics.periodic_effects['costs'].sum('contributor')
+ >>> # Group by component
+ >>> statistics.periodic_effects['costs'].groupby('component').sum()
+
+ Returns:
+ xr.Dataset with effects as variables and contributor dimension.
+ """
+ self._require_solution()
+ if self._periodic_effects is None:
+ ds = self._create_effects_dataset('periodic')
+ dim_order = ['period', 'scenario', 'contributor']
+ self._periodic_effects = ds.transpose(*dim_order, missing_dims='ignore')
+ return self._periodic_effects
+
+ @property
+ def total_effects(self) -> xr.Dataset:
+ """Total effects (temporal + periodic) per contributor.
+
+ Returns a Dataset where each effect is a data variable with dimensions
+ [contributor] (plus period/scenario if present).
+
+ Coordinates:
+ - contributor: Individual contributor labels
+ - component: Parent component label for groupby operations
+ - component_type: Component type (e.g., 'Boiler', 'Source', 'Sink')
+
+ Examples:
+ >>> # Get total costs per contributor
+ >>> statistics.total_effects['costs']
+ >>> # Sum over all contributors to get total system costs
+ >>> statistics.total_effects['costs'].sum('contributor')
+ >>> # Group by component
+ >>> statistics.total_effects['costs'].groupby('component').sum()
+ >>> # Group by component type
+ >>> statistics.total_effects['costs'].groupby('component_type').sum()
+
+ Returns:
+ xr.Dataset with effects as variables and contributor dimension.
+ """
+ self._require_solution()
+ if self._total_effects is None:
+ ds = self._create_effects_dataset('total')
+ dim_order = ['period', 'scenario', 'contributor']
+ self._total_effects = ds.transpose(*dim_order, missing_dims='ignore')
+ return self._total_effects
+
+    def get_effect_shares(
+        self,
+        element: str,
+        effect: str,
+        mode: Literal['temporal', 'periodic'] | None = None,
+        include_flows: bool = False,
+    ) -> xr.Dataset:
+        """Retrieve individual effect shares for a specific element and effect.
+
+        Args:
+            element: The element identifier (component or flow label).
+            effect: The effect identifier.
+            mode: 'temporal', 'periodic', or None for both.
+            include_flows: Whether to include effects from flows connected to this element.
+                Only valid when ``element`` is a component.
+
+        Returns:
+            xr.Dataset containing the requested effect shares.
+
+        Raises:
+            ValueError: If the effect is not available or mode is invalid.
+        """
+        self._require_solution()
+
+        if effect not in self._fs.effects:
+            raise ValueError(f'Effect {effect} is not available.')
+
+        if mode is None:
+            # Recurse once per mode and merge both result datasets
+            return xr.merge(
+                [
+                    self.get_effect_shares(
+                        element=element, effect=effect, mode='temporal', include_flows=include_flows
+                    ),
+                    self.get_effect_shares(
+                        element=element, effect=effect, mode='periodic', include_flows=include_flows
+                    ),
+                ]
+            )
+
+        if mode not in ['temporal', 'periodic']:
+            raise ValueError(f'Mode {mode} is not available. Choose between "temporal" and "periodic".')
+
+        # The share variable exists only if this element contributes to the effect in this mode
+        ds = xr.Dataset()
+        label = f'{element}->{effect}({mode})'
+        if label in self._fs.solution:
+            ds = xr.Dataset({label: self._fs.solution[label]})
+
+        if include_flows:
+            if element not in self._fs.components:
+                raise ValueError(f'Only use Components when retrieving Effects including flows. Got {element}')
+            comp = self._fs.components[element]
+            # NOTE(review): takes the text before '|' of each flow's label_full -- confirm
+            # this yields the contributor labels used in the solution dataset.
+            flows = [f.label_full.split('|')[0] for f in comp.inputs + comp.outputs]
+            return xr.merge(
+                [ds]
+                + [
+                    self.get_effect_shares(element=flow, effect=effect, mode=mode, include_flows=False)
+                    for flow in flows
+                ]
+            )
+
+        return ds
+
+ def _create_template_for_mode(self, mode: Literal['temporal', 'periodic', 'total']) -> xr.DataArray:
+ """Create a template DataArray with the correct dimensions for a given mode."""
+ coords = {}
+ if mode == 'temporal':
+ coords['time'] = self._fs.timesteps
+ if self._fs.periods is not None:
+ coords['period'] = self._fs.periods
+ if self._fs.scenarios is not None:
+ coords['scenario'] = self._fs.scenarios
+
+ if coords:
+ shape = tuple(len(coords[dim]) for dim in coords)
+ return xr.DataArray(np.full(shape, np.nan, dtype=float), coords=coords, dims=list(coords.keys()))
+ else:
+ return xr.DataArray(np.nan)
+
+    def _create_effects_dataset(self, mode: Literal['temporal', 'periodic', 'total']) -> xr.Dataset:
+        """Create dataset containing effect totals for all contributors.
+
+        Detects contributors (flows, components, etc.) from solution data variables.
+        Excludes effect-to-effect shares which are intermediate conversions.
+        Provides component and component_type coordinates for flexible groupby operations.
+
+        Args:
+            mode: 'temporal', 'periodic', or 'total' (temporal summed over time
+                plus periodic).
+
+        Returns:
+            xr.Dataset with one variable per effect, indexed by a 'contributor'
+            dimension; contributors with no share are filled with NaN.
+        """
+        solution = self._fs.solution
+        template = self._create_template_for_mode(mode)
+
+        # Detect contributors from solution data variables
+        # Pattern: {contributor}->{effect}(temporal) or {contributor}->{effect}(periodic)
+        contributor_pattern = re.compile(r'^(.+)->(.+)\((temporal|periodic)\)$')
+        effect_labels = set(self._fs.effects.keys())
+
+        detected_contributors: set[str] = set()
+        for var in solution.data_vars:
+            match = contributor_pattern.match(str(var))
+            if match:
+                contributor = match.group(1)
+                # Exclude effect-to-effect shares (e.g., costs(temporal) -> Effect1(temporal))
+                base_name = contributor.split('(')[0] if '(' in contributor else contributor
+                if base_name not in effect_labels:
+                    detected_contributors.add(contributor)
+
+        # Sorted for deterministic contributor ordering in the output
+        contributors = sorted(detected_contributors)
+
+        # Build metadata for each contributor
+        def get_parent_component(contributor: str) -> str:
+            # Flows map to their owning component; components map to themselves
+            if contributor in self._fs.flows:
+                return self._fs.flows[contributor].component
+            elif contributor in self._fs.components:
+                return contributor
+            return contributor
+
+        def get_contributor_type(contributor: str) -> str:
+            # Class name of the owning component/bus, e.g. 'Boiler', 'Source'
+            if contributor in self._fs.flows:
+                parent = self._fs.flows[contributor].component
+                return type(self._fs.components[parent]).__name__
+            elif contributor in self._fs.components:
+                return type(self._fs.components[contributor]).__name__
+            elif contributor in self._fs.buses:
+                return type(self._fs.buses[contributor]).__name__
+            return 'Unknown'
+
+        parents = [get_parent_component(c) for c in contributors]
+        contributor_types = [get_contributor_type(c) for c in contributors]
+
+        # Determine modes to process: 'total' aggregates both underlying modes
+        modes_to_process = ['temporal', 'periodic'] if mode == 'total' else [mode]
+
+        ds = xr.Dataset()
+
+        for effect in self._fs.effects:
+            contributor_arrays = []
+
+            for contributor in contributors:
+                share_total: xr.DataArray | None = None
+
+                for current_mode in modes_to_process:
+                    # Get conversion factors: which source effects contribute to this target effect
+                    conversion_factors = {
+                        key[0]: value
+                        for key, value in self.effect_share_factors[current_mode].items()
+                        if key[1] == effect
+                    }
+                    conversion_factors[effect] = 1  # Direct contribution
+
+                    for source_effect, factor in conversion_factors.items():
+                        label = f'{contributor}->{source_effect}({current_mode})'
+                        if label in solution:
+                            da = solution[label] * factor
+                            # For total mode, sum temporal over time
+                            if mode == 'total' and current_mode == 'temporal' and 'time' in da.dims:
+                                da = da.sum('time')
+                            if share_total is None:
+                                share_total = da
+                            else:
+                                share_total = share_total + da
+
+                # If no share found, use NaN template
+                if share_total is None:
+                    share_total = xr.full_like(template, np.nan, dtype=float)
+
+                contributor_arrays.append(share_total.expand_dims(contributor=[contributor]))
+
+            # Concatenate all contributors for this effect
+            da = xr.concat(contributor_arrays, dim='contributor', coords='minimal', join='outer').rename(effect)
+            # Add unit attribute from effect definition
+            da.attrs['unit'] = self.effect_units.get(effect, '')
+            ds[effect] = da
+
+        # Add groupby coordinates for contributor dimension
+        ds = ds.assign_coords(
+            component=('contributor', parents),
+            component_type=('contributor', contributor_types),
+        )
+
+        # Validation: check totals match solution
+        suffix_map = {'temporal': '(temporal)|per_timestep', 'periodic': '(periodic)', 'total': ''}
+        for effect in self._fs.effects:
+            label = f'{effect}{suffix_map[mode]}'
+            if label in solution:
+                computed = ds[effect].sum('contributor')
+                found = solution[label]
+                # Mismatch indicates a bookkeeping error; logged rather than raised
+                if not np.allclose(computed.fillna(0).values, found.fillna(0).values, equal_nan=True):
+                    logger.critical(
+                        f'Results for {effect}({mode}) in effects_dataset doesnt match {label}\n{computed=}\n, {found=}'
+                    )
+
+        return ds
+
+
+# --- Sankey Plot Accessor ---
+
+
+class SankeyPlotAccessor:
+    """Sankey diagram accessor. Access via ``flow_system.statistics.plot.sankey``.
+
+    Provides typed methods for different sankey diagram types.
+
+    Examples:
+        >>> fs.statistics.plot.sankey.flows(select={'bus': 'HeatBus'})
+        >>> fs.statistics.plot.sankey.effects(select={'effect': 'costs'})
+        >>> fs.statistics.plot.sankey.sizes(select={'component': 'Boiler'})
+    """
+
+    def __init__(self, plot_accessor: StatisticsPlotAccessor) -> None:
+        # Back-references to the plot accessor, the statistics accessor and the FlowSystem
+        self._plot = plot_accessor
+        self._stats = plot_accessor._stats
+        self._fs = plot_accessor._fs
+
+    def _extract_flow_filters(
+        self, select: FlowSankeySelect | None
+    ) -> tuple[SelectType | None, list[str] | None, list[str] | None, list[str] | None, list[str] | None]:
+        """Extract special filters from select dict.
+
+        Splits the sankey-specific keys ('flow', 'bus', 'component', 'carrier')
+        out of ``select``; whatever remains is passed through as an
+        xarray-style selection.
+
+        Returns:
+            Tuple of (xarray_select, flow_filter, bus_filter, component_filter, carrier_filter).
+        """
+        if select is None:
+            return None, None, None, None, None
+
+        select = dict(select)  # Copy to avoid mutating original
+        flow_filter = select.pop('flow', None)
+        bus_filter = select.pop('bus', None)
+        component_filter = select.pop('component', None)
+        carrier_filter = select.pop('carrier', None)
+
+        # Normalize to lists
+        if isinstance(flow_filter, str):
+            flow_filter = [flow_filter]
+        if isinstance(bus_filter, str):
+            bus_filter = [bus_filter]
+        if isinstance(component_filter, str):
+            component_filter = [component_filter]
+        if isinstance(carrier_filter, str):
+            carrier_filter = [carrier_filter]
+
+        # An emptied select dict collapses to None (no xarray selection)
+        return select if select else None, flow_filter, bus_filter, component_filter, carrier_filter
+
+    def _build_flow_links(
+        self,
+        ds: xr.Dataset,
+        flow_filter: list[str] | None = None,
+        bus_filter: list[str] | None = None,
+        component_filter: list[str] | None = None,
+        carrier_filter: list[str] | None = None,
+        min_value: float = 1e-6,
+    ) -> tuple[set[str], dict[str, list]]:
+        """Build Sankey nodes and links from flow data.
+
+        Args:
+            ds: Dataset of scalar values per flow label (already aggregated).
+            flow_filter: Keep only these flow labels.
+            bus_filter: Keep only flows attached to these buses.
+            component_filter: Keep only flows attached to these components.
+            carrier_filter: Keep only flows of these carriers (case-insensitive).
+            min_value: Links with |value| below this threshold are dropped.
+
+        Returns:
+            Tuple of (node labels, link dict with source/target/value/label/carrier lists).
+        """
+        nodes: set[str] = set()
+        links: dict[str, list] = {'source': [], 'target': [], 'value': [], 'label': [], 'carrier': []}
+
+        # Normalize carrier filter to lowercase
+        if carrier_filter is not None:
+            carrier_filter = [c.lower() for c in carrier_filter]
+
+        # Use flow_rates to get carrier names from xarray attributes (already computed)
+        flow_rates = self._stats.flow_rates
+
+        for flow in self._fs.flows.values():
+            label = flow.label_full
+            if label not in ds:
+                continue
+
+            # Apply filters
+            if flow_filter is not None and label not in flow_filter:
+                continue
+            bus_label = flow.bus
+            comp_label = flow.component
+            if bus_filter is not None and bus_label not in bus_filter:
+                continue
+
+            # Get carrier name from flow_rates xarray attribute (efficient lookup)
+            carrier_name = flow_rates[label].attrs.get('carrier') if label in flow_rates else None
+
+            if carrier_filter is not None:
+                if carrier_name is None or carrier_name.lower() not in carrier_filter:
+                    continue
+            if component_filter is not None and comp_label not in component_filter:
+                continue
+
+            value = float(ds[label].values)
+            if abs(value) < min_value:
+                continue
+
+            # Link direction follows the physical flow: bus -> component for inputs,
+            # component -> bus for outputs
+            if flow.is_input_in_component:
+                source, target = bus_label, comp_label
+            else:
+                source, target = comp_label, bus_label
+
+            nodes.add(source)
+            nodes.add(target)
+            links['source'].append(source)
+            links['target'].append(target)
+            links['value'].append(abs(value))
+            links['label'].append(label)
+            links['carrier'].append(carrier_name)
+
+        return nodes, links
+
+    def _create_figure(
+        self,
+        nodes: set[str],
+        links: dict[str, list],
+        colors: ColorType | None,
+        title: str,
+        **plotly_kwargs: Any,
+    ) -> go.Figure:
+        """Create Plotly Sankey figure.
+
+        Args:
+            nodes: Node labels.
+            links: Link dict as produced by ``_build_flow_links``.
+            colors: Color specification for nodes.
+            title: Figure title.
+            **plotly_kwargs: Forwarded to ``fig.update_layout``.
+        """
+        node_list = list(nodes)
+        # Plotly Sankey addresses nodes by integer index
+        node_indices = {n: i for i, n in enumerate(node_list)}
+
+        # Build node colors: buses use carrier colors, components use process_colors
+        node_colors = self._get_node_colors(node_list, colors)
+
+        # Build link colors from carrier colors (subtle/semi-transparent)
+        link_colors = self._get_link_colors(links.get('carrier', []))
+
+        link_dict: dict[str, Any] = dict(
+            source=[node_indices[s] for s in links['source']],
+            target=[node_indices[t] for t in links['target']],
+            value=links['value'],
+            label=links['label'],
+        )
+        if link_colors:
+            link_dict['color'] = link_colors
+
+        fig = go.Figure(
+            data=[
+                go.Sankey(
+                    node=dict(
+                        pad=15, thickness=20, line=dict(color='black', width=0.5), label=node_list, color=node_colors
+                    ),
+                    link=link_dict,
+                )
+            ]
+        )
+        fig.update_layout(title=title, **plotly_kwargs)
+        return fig
+
+    def _get_node_colors(self, node_list: list[str], colors: ColorType | None) -> list[str]:
+        """Get colors for nodes: buses use cached bus_colors, components use process_colors."""
+        # Get fallback colors from process_colors
+        fallback_colors = process_colors(colors, node_list)
+
+        # Use cached bus colors for efficiency
+        bus_colors = self._stats.bus_colors
+
+        node_colors = []
+        for node in node_list:
+            # Check if node is a bus with a cached color
+            if node in bus_colors:
+                node_colors.append(bus_colors[node])
+            else:
+                # Fall back to process_colors
+                node_colors.append(fallback_colors[node])
+
+        return node_colors
+
+    def _get_link_colors(self, carriers: list[str | None]) -> list[str]:
+        """Get subtle/semi-transparent colors for links based on their carriers."""
+        if not carriers:
+            return []
+
+        # Use cached carrier colors for efficiency
+        carrier_colors = self._stats.carrier_colors
+
+        link_colors = []
+        for carrier_name in carriers:
+            hex_color = carrier_colors.get(carrier_name.lower()) if carrier_name else None
+            # NOTE(review): the fallback passes an empty string to hex_to_rgba --
+            # presumably that helper returns a neutral default; confirm.
+            link_colors.append(hex_to_rgba(hex_color, alpha=0.4) if hex_color else hex_to_rgba('', alpha=0.4))
+
+        return link_colors
+
+    def _finalize(self, fig: go.Figure, links: dict[str, list], show: bool | None) -> PlotResult:
+        """Create PlotResult and optionally show figure.
+
+        Packs the link lists into a Dataset indexed by a synthetic 'link'
+        dimension so the plotted data can be inspected programmatically.
+        """
+        coords: dict[str, Any] = {
+            'link': range(len(links['value'])),
+            'source': ('link', links['source']),
+            'target': ('link', links['target']),
+            'label': ('link', links['label']),
+        }
+        # Add carrier if present
+        if 'carrier' in links:
+            coords['carrier'] = ('link', links['carrier'])
+
+        sankey_ds = xr.Dataset({'value': ('link', links['value'])}, coords=coords)
+
+        if show is None:
+            show = CONFIG.Plotting.default_show
+        if show:
+            fig.show()
+
+        return PlotResult(data=sankey_ds, figure=fig)
+
+    def flows(
+        self,
+        *,
+        aggregate: Literal['sum', 'mean'] = 'sum',
+        select: FlowSankeySelect | None = None,
+        colors: ColorType | None = None,
+        show: bool | None = None,
+        **plotly_kwargs: Any,
+    ) -> PlotResult:
+        """Plot Sankey diagram of energy/material flow amounts.
+
+        Args:
+            aggregate: How to aggregate over time ('sum' or 'mean').
+            select: Filter options:
+                - flow: filter by flow label (e.g., 'Boiler|Q_th')
+                - bus: filter by bus label (e.g., 'HeatBus')
+                - component: filter by component label (e.g., 'Boiler')
+                - time: select specific time (e.g., 100 or '2023-01-01')
+                - period, scenario: xarray dimension selection
+            colors: Color specification for nodes.
+            show: Whether to display the figure.
+            **plotly_kwargs: Additional arguments passed to Plotly layout.
+
+        Returns:
+            PlotResult with Sankey flow data and figure.
+        """
+        self._stats._require_solution()
+        xr_select, flow_filter, bus_filter, component_filter, carrier_filter = self._extract_flow_filters(select)
+
+        ds = self._stats.flow_hours.copy()
+
+        # Apply period/scenario weights
+        if 'period' in ds.dims and self._fs.period_weights is not None:
+            ds = ds * self._fs.period_weights
+        if 'scenario' in ds.dims and self._fs.scenario_weights is not None:
+            # Normalize so scenario weights sum to 1
+            weights = self._fs.scenario_weights / self._fs.scenario_weights.sum()
+            ds = ds * weights
+
+        ds = _apply_selection(ds, xr_select)
+
+        # Aggregate remaining dimensions
+        if 'time' in ds.dims:
+            ds = getattr(ds, aggregate)(dim='time')
+        for dim in ['period', 'scenario']:
+            if dim in ds.dims:
+                ds = ds.sum(dim=dim)
+
+        nodes, links = self._build_flow_links(ds, flow_filter, bus_filter, component_filter, carrier_filter)
+        fig = self._create_figure(nodes, links, colors, 'Energy Flow', **plotly_kwargs)
+        return self._finalize(fig, links, show)
+
+    def sizes(
+        self,
+        *,
+        select: FlowSankeySelect | None = None,
+        max_size: float | None = None,
+        colors: ColorType | None = None,
+        show: bool | None = None,
+        **plotly_kwargs: Any,
+    ) -> PlotResult:
+        """Plot Sankey diagram of investment sizes/capacities.
+
+        Args:
+            select: Filter options:
+                - flow: filter by flow label (e.g., 'Boiler|Q_th')
+                - bus: filter by bus label (e.g., 'HeatBus')
+                - component: filter by component label (e.g., 'Boiler')
+                - period, scenario: xarray dimension selection
+            max_size: Only keep flows whose maximum size is strictly below this value.
+            colors: Color specification for nodes.
+            show: Whether to display the figure.
+            **plotly_kwargs: Additional arguments passed to Plotly layout.
+
+        Returns:
+            PlotResult with Sankey size data and figure.
+        """
+        self._stats._require_solution()
+        xr_select, flow_filter, bus_filter, component_filter, carrier_filter = self._extract_flow_filters(select)
+
+        ds = self._stats.sizes.copy()
+        ds = _apply_selection(ds, xr_select)
+
+        # Collapse remaining dimensions
+        for dim in ['period', 'scenario']:
+            if dim in ds.dims:
+                ds = ds.max(dim=dim)
+
+        # Apply max_size filter
+        if max_size is not None and ds.data_vars:
+            valid_labels = [lbl for lbl in ds.data_vars if float(ds[lbl].max()) < max_size]
+            ds = ds[valid_labels]
+
+        nodes, links = self._build_flow_links(ds, flow_filter, bus_filter, component_filter, carrier_filter)
+        fig = self._create_figure(nodes, links, colors, 'Investment Sizes (Capacities)', **plotly_kwargs)
+        return self._finalize(fig, links, show)
+
+    def peak_flow(
+        self,
+        *,
+        select: FlowSankeySelect | None = None,
+        colors: ColorType | None = None,
+        show: bool | None = None,
+        **plotly_kwargs: Any,
+    ) -> PlotResult:
+        """Plot Sankey diagram of peak (maximum) flow rates.
+
+        Args:
+            select: Filter options:
+                - flow: filter by flow label (e.g., 'Boiler|Q_th')
+                - bus: filter by bus label (e.g., 'HeatBus')
+                - component: filter by component label (e.g., 'Boiler')
+                - time, period, scenario: xarray dimension selection
+            colors: Color specification for nodes.
+            show: Whether to display the figure.
+            **plotly_kwargs: Additional arguments passed to Plotly layout.
+
+        Returns:
+            PlotResult with Sankey peak flow data and figure.
+        """
+        self._stats._require_solution()
+        xr_select, flow_filter, bus_filter, component_filter, carrier_filter = self._extract_flow_filters(select)
+
+        ds = self._stats.flow_rates.copy()
+        ds = _apply_selection(ds, xr_select)
+
+        # Take max over all dimensions
+        for dim in ['time', 'period', 'scenario']:
+            if dim in ds.dims:
+                ds = ds.max(dim=dim)
+
+        nodes, links = self._build_flow_links(ds, flow_filter, bus_filter, component_filter, carrier_filter)
+        fig = self._create_figure(nodes, links, colors, 'Peak Flow Rates', **plotly_kwargs)
+        return self._finalize(fig, links, show)
+
+    def effects(
+        self,
+        *,
+        select: EffectsSankeySelect | None = None,
+        colors: ColorType | None = None,
+        show: bool | None = None,
+        **plotly_kwargs: Any,
+    ) -> PlotResult:
+        """Plot Sankey diagram of component contributions to effects.
+
+        Shows how each component contributes to costs, CO2, and other effects.
+
+        Args:
+            select: Filter options:
+                - effect: filter which effects are shown (e.g., 'costs', ['costs', 'CO2'])
+                - component: filter by component label (e.g., 'Boiler')
+                - contributor: filter by contributor label (e.g., 'Boiler|Q_th')
+                - period, scenario: xarray dimension selection
+            colors: Color specification for nodes.
+            show: Whether to display the figure.
+            **plotly_kwargs: Additional arguments passed to Plotly layout.
+
+        Returns:
+            PlotResult with Sankey effects data and figure.
+        """
+        self._stats._require_solution()
+        total_effects = self._stats.total_effects
+
+        # Extract special filters from select
+        effect_filter: list[str] | None = None
+        component_filter: list[str] | None = None
+        contributor_filter: list[str] | None = None
+        xr_select: SelectType | None = None
+
+        if select is not None:
+            select = dict(select)  # Copy to avoid mutating
+            effect_filter = select.pop('effect', None)
+            component_filter = select.pop('component', None)
+            contributor_filter = select.pop('contributor', None)
+            xr_select = select if select else None
+
+        # Normalize to lists
+        if isinstance(effect_filter, str):
+            effect_filter = [effect_filter]
+        if isinstance(component_filter, str):
+            component_filter = [component_filter]
+        if isinstance(contributor_filter, str):
+            contributor_filter = [contributor_filter]
+
+        # Determine which effects to include
+        effect_names = list(total_effects.data_vars)
+        if effect_filter is not None:
+            effect_names = [e for e in effect_names if e in effect_filter]
+
+        # Collect all links: component -> effect
+        nodes: set[str] = set()
+        links: dict[str, list] = {'source': [], 'target': [], 'value': [], 'label': []}
+
+        for effect_name in effect_names:
+            effect_data = total_effects[effect_name]
+            effect_data = _apply_selection(effect_data, xr_select)
+
+            # Sum over remaining dimensions
+            for dim in ['period', 'scenario']:
+                if dim in effect_data.dims:
+                    effect_data = effect_data.sum(dim=dim)
+
+            contributors = effect_data.coords['contributor'].values
+            components = effect_data.coords['component'].values
+
+            for contributor, component in zip(contributors, components, strict=False):
+                if component_filter is not None and component not in component_filter:
+                    continue
+                if contributor_filter is not None and contributor not in contributor_filter:
+                    continue
+
+                # Skip NaN (no share) and negligible values
+                value = float(effect_data.sel(contributor=contributor).values)
+                if not np.isfinite(value) or abs(value) < 1e-6:
+                    continue
+
+                # Effects are bracketed so their node labels cannot collide with components
+                source = str(component)
+                target = f'[{effect_name}]'
+
+                nodes.add(source)
+                nodes.add(target)
+                links['source'].append(source)
+                links['target'].append(target)
+                links['value'].append(abs(value))
+                links['label'].append(f'{contributor} → {effect_name}: {value:.2f}')
+
+        fig = self._create_figure(nodes, links, colors, 'Effect Contributions by Component', **plotly_kwargs)
+        return self._finalize(fig, links, show)
+
+
+# --- Statistics Plot Accessor ---
+
+
+class StatisticsPlotAccessor:
+ """Plot accessor for statistics. Access via ``flow_system.statistics.plot``.
+
+ All methods return PlotResult with both data and figure.
+ """
+
+    def __init__(self, statistics: StatisticsAccessor) -> None:
+        # Back-references to the owning statistics accessor and its FlowSystem
+        self._stats = statistics
+        self._fs = statistics._fs
+        # Sankey sub-accessor is created lazily on first access (see `sankey` property)
+        self._sankey: SankeyPlotAccessor | None = None
+
+ @property
+ def sankey(self) -> SankeyPlotAccessor:
+ """Access sankey diagram methods with typed select options.
+
+ Returns:
+ SankeyPlotAccessor with methods: flows(), sizes(), peak_flow(), effects()
+
+ Examples:
+ >>> fs.statistics.plot.sankey.flows(select={'bus': 'HeatBus'})
+ >>> fs.statistics.plot.sankey.effects(select={'effect': 'costs'})
+ """
+ if self._sankey is None:
+ self._sankey = SankeyPlotAccessor(self)
+ return self._sankey
+
+ def _get_color_map_for_balance(self, node: str, flow_labels: list[str]) -> dict[str, str]:
+ """Build color map for balance plot.
+
+ - Bus balance: colors from component.color (using cached component_colors)
+ - Component balance: colors from flow's carrier (using cached carrier_colors)
+
+ Raises:
+ RuntimeError: If FlowSystem is not connected_and_transformed.
+ """
+ if not self._fs.connected_and_transformed:
+ raise RuntimeError(
+ 'FlowSystem is not connected_and_transformed. Call FlowSystem.connect_and_transform() first.'
+ )
+
+ is_bus = node in self._fs.buses
+ color_map = {}
+ uncolored = []
+
+ # Get cached colors for efficient lookup
+ carrier_colors = self._stats.carrier_colors
+ component_colors = self._stats.component_colors
+ flow_rates = self._stats.flow_rates
+
+ for label in flow_labels:
+ if is_bus:
+ # Use cached component colors
+ comp_label = self._fs.flows[label].component
+ color = component_colors.get(comp_label)
+ else:
+ # Use carrier name from xarray attribute (already computed) + cached colors
+ carrier_name = flow_rates[label].attrs.get('carrier') if label in flow_rates else None
+ color = carrier_colors.get(carrier_name) if carrier_name else None
+
+ if color:
+ color_map[label] = color
+ else:
+ uncolored.append(label)
+
+ if uncolored:
+ color_map.update(process_colors(CONFIG.Plotting.default_qualitative_colorscale, uncolored))
+
+ return color_map
+
+ def _resolve_variable_names(self, variables: list[str], solution: xr.Dataset) -> list[str]:
+ """Resolve flow labels to variable names with fallback.
+
+ For each variable:
+ 1. First check if it exists in the dataset as-is
+ 2. If not found and doesn't contain '|', try adding '|flow_rate' suffix
+ 3. If still not found, try '|charge_state' suffix (for storages)
+
+ Args:
+ variables: List of flow labels or variable names.
+ solution: The solution dataset to check variable existence.
+
+ Returns:
+ List of resolved variable names.
+ """
+ resolved = []
+ for var in variables:
+ if var in solution:
+ # Variable exists as-is, use it directly
+ resolved.append(var)
+ elif '|' not in var:
+ # Not found and no '|', try common suffixes
+ flow_rate_var = f'{var}|flow_rate'
+ charge_state_var = f'{var}|charge_state'
+ if flow_rate_var in solution:
+ resolved.append(flow_rate_var)
+ elif charge_state_var in solution:
+ resolved.append(charge_state_var)
+ else:
+ # Let it fail with the original name for clear error message
+ resolved.append(var)
+ else:
+ # Contains '|' but not in solution - let it fail with original name
+ resolved.append(var)
+ return resolved
+
+    def balance(
+        self,
+        node: str,
+        *,
+        select: SelectType | None = None,
+        include: FilterType | None = None,
+        exclude: FilterType | None = None,
+        unit: Literal['flow_rate', 'flow_hours'] = 'flow_rate',
+        colors: ColorType | None = None,
+        facet_col: str | None = 'period',
+        facet_row: str | None = 'scenario',
+        show: bool | None = None,
+        **plotly_kwargs: Any,
+    ) -> PlotResult:
+        """Plot node balance (inputs vs outputs) for a Bus or Component.
+
+        Inputs are negated so they plot below zero, against positive outputs.
+
+        Args:
+            node: Label of the Bus or Component to plot.
+            select: xarray-style selection dict.
+            include: Only include flows containing these substrings.
+            exclude: Exclude flows containing these substrings.
+            unit: 'flow_rate' (power) or 'flow_hours' (energy).
+            colors: Color specification (colorscale name, color list, or label-to-color dict).
+            facet_col: Dimension for column facets.
+            facet_row: Dimension for row facets.
+            show: Whether to display the plot.
+            **plotly_kwargs: Additional arguments passed to the bar-chart builder.
+
+        Returns:
+            PlotResult with .data and .figure.
+
+        Raises:
+            KeyError: If ``node`` is neither a bus nor a component.
+        """
+        self._stats._require_solution()
+
+        # Get the element
+        if node in self._fs.buses:
+            element = self._fs.buses[node]
+        elif node in self._fs.components:
+            element = self._fs.components[node]
+        else:
+            raise KeyError(f"'{node}' not found in buses or components")
+
+        input_labels = [f.label_full for f in element.inputs]
+        output_labels = [f.label_full for f in element.outputs]
+        all_labels = input_labels + output_labels
+
+        filtered_labels = _filter_by_pattern(all_labels, include, exclude)
+        if not filtered_labels:
+            # Nothing to plot; return an empty result rather than raising
+            logger.warning(f'No flows remaining after filtering for node {node}')
+            return PlotResult(data=xr.Dataset(), figure=go.Figure())
+
+        # Get data from statistics
+        if unit == 'flow_rate':
+            ds = self._stats.flow_rates[[lbl for lbl in filtered_labels if lbl in self._stats.flow_rates]]
+        else:
+            ds = self._stats.flow_hours[[lbl for lbl in filtered_labels if lbl in self._stats.flow_hours]]
+
+        # Negate inputs so they appear below the axis
+        for label in input_labels:
+            if label in ds:
+                ds[label] = -ds[label]
+
+        ds = _apply_selection(ds, select)
+        actual_facet_col, actual_facet_row = _resolve_facets(ds, facet_col, facet_row)
+
+        # Build color map from Element.color attributes if no colors specified
+        if colors is None:
+            colors = self._get_color_map_for_balance(node, list(ds.data_vars))
+
+        # Get unit label from first data variable's attributes
+        unit_label = ''
+        if ds.data_vars:
+            first_var = next(iter(ds.data_vars))
+            unit_label = ds[first_var].attrs.get('unit', '')
+
+        fig = _create_stacked_bar(
+            ds,
+            colors=colors,
+            title=f'{node} [{unit_label}]' if unit_label else node,
+            facet_col=actual_facet_col,
+            facet_row=actual_facet_row,
+            **plotly_kwargs,
+        )
+
+        if show is None:
+            show = CONFIG.Plotting.default_show
+        if show:
+            fig.show()
+
+        return PlotResult(data=ds, figure=fig)
+
+ def carrier_balance(
+ self,
+ carrier: str,
+ *,
+ select: SelectType | None = None,
+ include: FilterType | None = None,
+ exclude: FilterType | None = None,
+ unit: Literal['flow_rate', 'flow_hours'] = 'flow_rate',
+ colors: ColorType | None = None,
+ facet_col: str | None = 'period',
+ facet_row: str | None = 'scenario',
+ show: bool | None = None,
+ **plotly_kwargs: Any,
+ ) -> PlotResult:
+ """Plot carrier-level balance showing all flows of a carrier type.
+
+ Shows production (positive) and consumption (negative) of a carrier
+ across all buses of that carrier type in the system.
+
+ Args:
+ carrier: Carrier name (e.g., 'heat', 'electricity', 'gas').
+ select: xarray-style selection dict.
+ include: Only include flows containing these substrings.
+ exclude: Exclude flows containing these substrings.
+ unit: 'flow_rate' (power) or 'flow_hours' (energy).
+ colors: Color specification (colorscale name, color list, or label-to-color dict).
+ facet_col: Dimension for column facets.
+ facet_row: Dimension for row facets.
+ show: Whether to display the plot.
+
+ Returns:
+ PlotResult with .data and .figure.
+
+ Examples:
+ >>> fs.statistics.plot.carrier_balance('heat')
+ >>> fs.statistics.plot.carrier_balance('electricity', unit='flow_hours')
+
+ Notes:
+ - Inputs to carrier buses (from sources/converters) are shown as positive
+ - Outputs from carrier buses (to sinks/converters) are shown as negative
+ - Internal transfers between buses of the same carrier appear on both sides
+ """
+ self._stats._require_solution()
+ carrier = carrier.lower()
+
+ # Find all buses with this carrier
+ carrier_buses = [bus for bus in self._fs.buses.values() if bus.carrier == carrier]
+ if not carrier_buses:
+ raise KeyError(f"No buses found with carrier '{carrier}'")
+
+ # Collect all flows connected to these buses
+ input_labels: list[str] = [] # Inputs to buses = production
+ output_labels: list[str] = [] # Outputs from buses = consumption
+
+ for bus in carrier_buses:
+ for flow in bus.inputs:
+ input_labels.append(flow.label_full)
+ for flow in bus.outputs:
+ output_labels.append(flow.label_full)
+
+ all_labels = input_labels + output_labels
+ filtered_labels = _filter_by_pattern(all_labels, include, exclude)
+ if not filtered_labels:
+ logger.warning(f'No flows remaining after filtering for carrier {carrier}')
+ return PlotResult(data=xr.Dataset(), figure=go.Figure())
+
+ # Get data from statistics
+ if unit == 'flow_rate':
+ ds = self._stats.flow_rates[[lbl for lbl in filtered_labels if lbl in self._stats.flow_rates]]
+ else:
+ ds = self._stats.flow_hours[[lbl for lbl in filtered_labels if lbl in self._stats.flow_hours]]
+
+ # Negate outputs (consumption) - opposite convention from bus balance
+ for label in output_labels:
+ if label in ds:
+ ds[label] = -ds[label]
+
+ ds = _apply_selection(ds, select)
+ actual_facet_col, actual_facet_row = _resolve_facets(ds, facet_col, facet_row)
+
+ # Use cached component colors for flows
+ if colors is None:
+ component_colors = self._stats.component_colors
+ color_map = {}
+ uncolored = []
+ for label in ds.data_vars:
+ flow = self._fs.flows.get(label)
+ if flow:
+ color = component_colors.get(flow.component)
+ if color:
+ color_map[label] = color
+ continue
+ uncolored.append(label)
+ if uncolored:
+ color_map.update(process_colors(CONFIG.Plotting.default_qualitative_colorscale, uncolored))
+ colors = color_map
+
+ # Get unit label from first data variable's attributes
+ unit_label = ''
+ if ds.data_vars:
+ first_var = next(iter(ds.data_vars))
+ unit_label = ds[first_var].attrs.get('unit', '')
+
+ fig = _create_stacked_bar(
+ ds,
+ colors=colors,
+ title=f'{carrier.capitalize()} Balance [{unit_label}]' if unit_label else f'{carrier.capitalize()} Balance',
+ facet_col=actual_facet_col,
+ facet_row=actual_facet_row,
+ **plotly_kwargs,
+ )
+
+ if show is None:
+ show = CONFIG.Plotting.default_show
+ if show:
+ fig.show()
+
+ return PlotResult(data=ds, figure=fig)
+
+ def heatmap(
+ self,
+ variables: str | list[str],
+ *,
+ select: SelectType | None = None,
+ reshape: tuple[str, str] | None = ('D', 'h'),
+ colors: str | list[str] | None = None,
+ facet_col: str | None = 'period',
+ animation_frame: str | None = 'scenario',
+ show: bool | None = None,
+ **plotly_kwargs: Any,
+ ) -> PlotResult:
+ """Plot heatmap of time series data.
+
+ Time is reshaped into 2D (e.g., days × hours) when possible. Multiple variables
+ are shown as facets. If too many dimensions exist to display without data loss,
+ reshaping is skipped and variables are shown on the y-axis with time on x-axis.
+
+ Args:
+ variables: Flow label(s) or variable name(s). Flow labels like 'Boiler(Q_th)'
+ are automatically resolved to 'Boiler(Q_th)|flow_rate'. Full variable
+ names like 'Storage|charge_state' are used as-is.
+ select: xarray-style selection, e.g. {'scenario': 'Base Case'}.
+ reshape: Time reshape frequencies as (outer, inner), e.g. ('D', 'h') for
+ days × hours. Set to None to disable reshaping.
+ colors: Colorscale name (str) or list of colors for heatmap coloring.
+ Dicts are not supported for heatmaps (use str or list[str]).
+ facet_col: Dimension for subplot columns (default: 'period').
+ With multiple variables, 'variable' is used instead.
+ animation_frame: Dimension for animation slider (default: 'scenario').
+ show: Whether to display the figure.
+ **plotly_kwargs: Additional arguments passed to px.imshow.
+
+ Returns:
+ PlotResult with processed data and figure.
+ """
+ solution = self._stats._require_solution()
+
+ if isinstance(variables, str):
+ variables = [variables]
+
+ # Resolve flow labels to variable names
+ resolved_variables = self._resolve_variable_names(variables, solution)
+
+ ds = solution[resolved_variables]
+ ds = _apply_selection(ds, select)
+
+ # Stack variables into single DataArray
+ variable_names = list(ds.data_vars)
+ dataarrays = [ds[var] for var in variable_names]
+ da = xr.concat(dataarrays, dim=pd.Index(variable_names, name='variable'))
+
+ # Determine facet and animation from available dims
+ has_multiple_vars = 'variable' in da.dims and da.sizes['variable'] > 1
+
+ if has_multiple_vars:
+ actual_facet = 'variable'
+ actual_animation = (
+ animation_frame
+ if animation_frame in da.dims
+ else (facet_col if facet_col in da.dims and da.sizes.get(facet_col, 1) > 1 else None)
+ )
+ else:
+ actual_facet = facet_col if facet_col in da.dims and da.sizes.get(facet_col, 0) > 1 else None
+ actual_animation = (
+ animation_frame if animation_frame in da.dims and da.sizes.get(animation_frame, 0) > 1 else None
+ )
+
+ # Count non-time dims with size > 1 (these need facet/animation slots)
+ extra_dims = [d for d in da.dims if d != 'time' and da.sizes[d] > 1]
+ used_slots = len([d for d in [actual_facet, actual_animation] if d])
+ would_drop = len(extra_dims) > used_slots
+
+ # Reshape time only if we wouldn't lose data (all extra dims fit in facet + animation)
+ if reshape and 'time' in da.dims and not would_drop:
+ da = _reshape_time_for_heatmap(da, reshape)
+ heatmap_dims = ['timestep', 'timeframe']
+ elif has_multiple_vars:
+ # Can't reshape but have multiple vars: use variable + time as heatmap axes
+ heatmap_dims = ['variable', 'time']
+ # variable is now a heatmap dim, use period/scenario for facet/animation
+ actual_facet = facet_col if facet_col in da.dims and da.sizes.get(facet_col, 0) > 1 else None
+ actual_animation = (
+ animation_frame if animation_frame in da.dims and da.sizes.get(animation_frame, 0) > 1 else None
+ )
+ else:
+ heatmap_dims = ['time'] if 'time' in da.dims else list(da.dims)[:1]
+
+ # Keep only dims we need
+ keep_dims = set(heatmap_dims) | {d for d in [actual_facet, actual_animation] if d is not None}
+ for dim in [d for d in da.dims if d not in keep_dims]:
+ da = da.isel({dim: 0}, drop=True) if da.sizes[dim] > 1 else da.squeeze(dim, drop=True)
+
+ # Transpose to expected order
+ dim_order = heatmap_dims + [d for d in [actual_facet, actual_animation] if d]
+ da = da.transpose(*dim_order)
+
+ # Clear name for multiple variables (colorbar would show first var's name)
+ if has_multiple_vars:
+ da = da.rename('')
+
+ fig = _heatmap_figure(
+ da,
+ colors=colors,
+ facet_col=actual_facet,
+ animation_frame=actual_animation,
+ **plotly_kwargs,
+ )
+
+ if show is None:
+ show = CONFIG.Plotting.default_show
+ if show:
+ fig.show()
+
+ reshaped_ds = da.to_dataset(name='value') if isinstance(da, xr.DataArray) else da
+ return PlotResult(data=reshaped_ds, figure=fig)
+
+ def flows(
+ self,
+ *,
+ start: str | list[str] | None = None,
+ end: str | list[str] | None = None,
+ component: str | list[str] | None = None,
+ select: SelectType | None = None,
+ unit: Literal['flow_rate', 'flow_hours'] = 'flow_rate',
+ colors: ColorType | None = None,
+ facet_col: str | None = 'period',
+ facet_row: str | None = 'scenario',
+ show: bool | None = None,
+ **plotly_kwargs: Any,
+ ) -> PlotResult:
+ """Plot flow rates filtered by start/end nodes or component.
+
+ Args:
+ start: Filter by source node(s).
+ end: Filter by destination node(s).
+ component: Filter by parent component(s).
+ select: xarray-style selection.
+ unit: 'flow_rate' or 'flow_hours'.
+ colors: Color specification (colorscale name, color list, or label-to-color dict).
+ facet_col: Dimension for column facets.
+ facet_row: Dimension for row facets.
+ show: Whether to display.
+
+ Returns:
+ PlotResult with flow data.
+ """
+ self._stats._require_solution()
+
+ ds = self._stats.flow_rates if unit == 'flow_rate' else self._stats.flow_hours
+
+ # Filter by connection
+ if start is not None or end is not None or component is not None:
+ matching_labels = []
+ starts = [start] if isinstance(start, str) else (start or [])
+ ends = [end] if isinstance(end, str) else (end or [])
+ components = [component] if isinstance(component, str) else (component or [])
+
+ for flow in self._fs.flows.values():
+ # Get bus label (could be string or Bus object)
+ bus_label = flow.bus
+ comp_label = flow.component
+
+ # start/end filtering based on flow direction
+ if flow.is_input_in_component:
+ # Flow goes: bus -> component, so start=bus, end=component
+ if starts and bus_label not in starts:
+ continue
+ if ends and comp_label not in ends:
+ continue
+ else:
+ # Flow goes: component -> bus, so start=component, end=bus
+ if starts and comp_label not in starts:
+ continue
+ if ends and bus_label not in ends:
+ continue
+
+ if components and comp_label not in components:
+ continue
+ matching_labels.append(flow.label_full)
+
+ ds = ds[[lbl for lbl in matching_labels if lbl in ds]]
+
+ ds = _apply_selection(ds, select)
+ actual_facet_col, actual_facet_row = _resolve_facets(ds, facet_col, facet_row)
+
+ # Get unit label from first data variable's attributes
+ unit_label = ''
+ if ds.data_vars:
+ first_var = next(iter(ds.data_vars))
+ unit_label = ds[first_var].attrs.get('unit', '')
+
+ fig = _create_line(
+ ds,
+ colors=colors,
+ title=f'Flows [{unit_label}]' if unit_label else 'Flows',
+ facet_col=actual_facet_col,
+ facet_row=actual_facet_row,
+ **plotly_kwargs,
+ )
+
+ if show is None:
+ show = CONFIG.Plotting.default_show
+ if show:
+ fig.show()
+
+ return PlotResult(data=ds, figure=fig)
+
+ def sizes(
+ self,
+ *,
+ max_size: float | None = 1e6,
+ select: SelectType | None = None,
+ colors: ColorType | None = None,
+ facet_col: str | None = 'period',
+ facet_row: str | None = 'scenario',
+ show: bool | None = None,
+ **plotly_kwargs: Any,
+ ) -> PlotResult:
+ """Plot investment sizes (capacities) of flows.
+
+ Args:
+ max_size: Exclusive upper bound; flows whose maximum size reaches this value are dropped (filters out placeholder defaults).
+ select: xarray-style selection.
+ colors: Color specification (colorscale name, color list, or label-to-color dict).
+ facet_col: Dimension for column facets.
+ facet_row: Dimension for row facets.
+ show: Whether to display.
+
+ Returns:
+ PlotResult with size data.
+ """
+ self._stats._require_solution()
+ ds = self._stats.sizes
+
+ ds = _apply_selection(ds, select)
+
+ if max_size is not None and ds.data_vars:
+ valid_labels = [lbl for lbl in ds.data_vars if float(ds[lbl].max()) < max_size]
+ ds = ds[valid_labels]
+
+ actual_facet_col, actual_facet_row = _resolve_facets(ds, facet_col, facet_row)
+
+ df = _dataset_to_long_df(ds)
+ if df.empty:
+ fig = go.Figure()
+ else:
+ variables = df['variable'].unique().tolist()
+ color_map = process_colors(colors, variables)
+ fig = px.bar(
+ df,
+ x='variable',
+ y='value',
+ color='variable',
+ facet_col=actual_facet_col,
+ facet_row=actual_facet_row,
+ color_discrete_map=color_map,
+ title='Investment Sizes',
+ labels={'variable': 'Flow', 'value': 'Size'},
+ **plotly_kwargs,
+ )
+
+ if show is None:
+ show = CONFIG.Plotting.default_show
+ if show:
+ fig.show()
+
+ return PlotResult(data=ds, figure=fig)
+
+ def duration_curve(
+ self,
+ variables: str | list[str],
+ *,
+ select: SelectType | None = None,
+ normalize: bool = False,
+ colors: ColorType | None = None,
+ facet_col: str | None = 'period',
+ facet_row: str | None = 'scenario',
+ show: bool | None = None,
+ **plotly_kwargs: Any,
+ ) -> PlotResult:
+ """Plot load duration curves (sorted time series).
+
+ Args:
+ variables: Flow label(s) or variable name(s). Flow labels like 'Boiler(Q_th)'
+ are looked up in flow_rates. Full variable names like 'Boiler(Q_th)|flow_rate'
+ are stripped to their flow label. Other variables (e.g., 'Storage|charge_state')
+ are looked up in the solution directly.
+ select: xarray-style selection.
+ normalize: If True, normalize x-axis to 0-100%.
+ colors: Color specification (colorscale name, color list, or label-to-color dict).
+ facet_col: Dimension for column facets.
+ facet_row: Dimension for row facets.
+ show: Whether to display.
+
+ Returns:
+ PlotResult with sorted duration curve data.
+ """
+ solution = self._stats._require_solution()
+
+ if isinstance(variables, str):
+ variables = [variables]
+
+ # Normalize variable names: strip |flow_rate suffix for flow_rates lookup
+ flow_rates = self._stats.flow_rates
+ normalized_vars = []
+ for var in variables:
+ # Strip |flow_rate suffix if present
+ if var.endswith('|flow_rate'):
+ var = var[: -len('|flow_rate')]
+ normalized_vars.append(var)
+
+ # Try to get from flow_rates first, fall back to solution for non-flow variables
+ ds_parts = []
+ for var in normalized_vars:
+ if var in flow_rates:
+ ds_parts.append(flow_rates[[var]])
+ elif var in solution:
+ ds_parts.append(solution[[var]])
+ else:
+ # Try with |flow_rate suffix as last resort
+ flow_rate_var = f'{var}|flow_rate'
+ if flow_rate_var in solution:
+ ds_parts.append(solution[[flow_rate_var]].rename({flow_rate_var: var}))
+ else:
+ raise KeyError(f"Variable '{var}' not found in flow_rates or solution")
+
+ ds = xr.merge(ds_parts)
+ ds = _apply_selection(ds, select)
+
+ if 'time' not in ds.dims:
+ raise ValueError('Duration curve requires time dimension')
+
+ def sort_descending(arr: np.ndarray) -> np.ndarray:
+ return np.sort(arr)[::-1]
+
+ result_ds = xr.apply_ufunc(
+ sort_descending,
+ ds,
+ input_core_dims=[['time']],
+ output_core_dims=[['time']],
+ vectorize=True,
+ )
+
+ duration_name = 'duration_pct' if normalize else 'duration'
+ result_ds = result_ds.rename({'time': duration_name})
+
+ n_timesteps = result_ds.sizes[duration_name]
+ duration_coord = np.linspace(0, 100, n_timesteps) if normalize else np.arange(n_timesteps)
+ result_ds = result_ds.assign_coords({duration_name: duration_coord})
+
+ actual_facet_col, actual_facet_row = _resolve_facets(result_ds, facet_col, facet_row)
+
+ # Get unit label from first data variable's attributes
+ unit_label = ''
+ if ds.data_vars:
+ first_var = next(iter(ds.data_vars))
+ unit_label = ds[first_var].attrs.get('unit', '')
+
+ fig = _create_line(
+ result_ds,
+ colors=colors,
+ title=f'Duration Curve [{unit_label}]' if unit_label else 'Duration Curve',
+ facet_col=actual_facet_col,
+ facet_row=actual_facet_row,
+ **plotly_kwargs,
+ )
+
+ x_label = 'Duration [%]' if normalize else 'Timesteps'
+ fig.update_xaxes(title_text=x_label)
+
+ if show is None:
+ show = CONFIG.Plotting.default_show
+ if show:
+ fig.show()
+
+ return PlotResult(data=result_ds, figure=fig)
+
+ def effects(
+ self,
+ aspect: Literal['total', 'temporal', 'periodic'] = 'total',
+ *,
+ effect: str | None = None,
+ by: Literal['component', 'contributor', 'time'] = 'component',
+ select: SelectType | None = None,
+ colors: ColorType | None = None,
+ facet_col: str | None = 'period',
+ facet_row: str | None = 'scenario',
+ show: bool | None = None,
+ **plotly_kwargs: Any,
+ ) -> PlotResult:
+ """Plot effect (cost, emissions, etc.) breakdown.
+
+ Args:
+ aspect: Which aspect to plot - 'total', 'temporal', or 'periodic'.
+ effect: Specific effect name to plot (e.g., 'costs', 'CO2').
+ If None, plots all effects.
+ by: Group by 'component', 'contributor' (individual flows), or 'time'.
+ select: xarray-style selection.
+ colors: Color specification (colorscale name, color list, or label-to-color dict).
+ facet_col: Dimension for column facets (ignored if not in data).
+ facet_row: Dimension for row facets (ignored if not in data).
+ show: Whether to display.
+
+ Returns:
+ PlotResult with effect breakdown data.
+
+ Examples:
+ >>> flow_system.statistics.plot.effects() # Total of all effects by component
+ >>> flow_system.statistics.plot.effects(effect='costs') # Just costs
+ >>> flow_system.statistics.plot.effects(by='contributor') # By individual flows
+ >>> flow_system.statistics.plot.effects(aspect='temporal', by='time') # Over time
+ """
+ self._stats._require_solution()
+
+ # Get the appropriate effects dataset based on aspect
+ if aspect == 'total':
+ effects_ds = self._stats.total_effects
+ elif aspect == 'temporal':
+ effects_ds = self._stats.temporal_effects
+ elif aspect == 'periodic':
+ effects_ds = self._stats.periodic_effects
+ else:
+ raise ValueError(f"Aspect '{aspect}' not valid. Choose from 'total', 'temporal', 'periodic'.")
+
+ # Get available effects (data variables in the dataset)
+ available_effects = list(effects_ds.data_vars)
+
+ # Filter to specific effect if requested
+ if effect is not None:
+ if effect not in available_effects:
+ raise ValueError(f"Effect '{effect}' not found. Available: {available_effects}")
+ effects_to_plot = [effect]
+ else:
+ effects_to_plot = available_effects
+
+ # Build a combined DataArray with effect dimension
+ effect_arrays = []
+ for eff in effects_to_plot:
+ da = effects_ds[eff]
+ if by == 'contributor':
+ # Keep individual contributors (flows) - no groupby
+ effect_arrays.append(da.expand_dims(effect=[eff]))
+ else:
+ # Group by component (sum over contributor within each component)
+ da_grouped = da.groupby('component').sum()
+ effect_arrays.append(da_grouped.expand_dims(effect=[eff]))
+
+ combined = xr.concat(effect_arrays, dim='effect')
+
+ # Apply selection
+ combined = _apply_selection(combined.to_dataset(name='value'), select)['value']
+
+ # Group by the specified dimension
+ if by == 'component':
+ # Sum over time if present
+ if 'time' in combined.dims:
+ combined = combined.sum(dim='time')
+ x_col = 'component'
+ color_col = 'effect' if len(effects_to_plot) > 1 else 'component'
+ elif by == 'contributor':
+ # Sum over time if present
+ if 'time' in combined.dims:
+ combined = combined.sum(dim='time')
+ x_col = 'contributor'
+ color_col = 'effect' if len(effects_to_plot) > 1 else 'contributor'
+ elif by == 'time':
+ if 'time' not in combined.dims:
+ raise ValueError(f"Cannot plot by 'time' for aspect '{aspect}' - no time dimension.")
+ # Sum over components or contributors
+ if 'component' in combined.dims:
+ combined = combined.sum(dim='component')
+ if 'contributor' in combined.dims:
+ combined = combined.sum(dim='contributor')
+ x_col = 'time'
+ color_col = 'effect' if len(effects_to_plot) > 1 else None
+ else:
+ raise ValueError(f"'by' must be one of 'component', 'contributor', 'time', got {by!r}")
+
+ # Resolve facets
+ actual_facet_col, actual_facet_row = _resolve_facets(combined.to_dataset(name='value'), facet_col, facet_row)
+
+ # Convert to DataFrame for plotly express
+ df = combined.to_dataframe(name='value').reset_index()
+
+ # Build color map
+ if color_col and color_col in df.columns:
+ color_items = df[color_col].unique().tolist()
+ color_map = process_colors(colors, color_items)
+ else:
+ color_map = None
+
+ # Build title with unit if single effect
+ effect_label = effect if effect else 'Effects'
+ if effect and effect in effects_ds:
+ unit_label = effects_ds[effect].attrs.get('unit', '')
+ title = f'{effect_label} [{unit_label}]' if unit_label else effect_label
+ else:
+ title = effect_label
+ title = f'{title} ({aspect}) by {by}'
+
+ fig = px.bar(
+ df,
+ x=x_col,
+ y='value',
+ color=color_col,
+ color_discrete_map=color_map,
+ facet_col=actual_facet_col,
+ facet_row=actual_facet_row,
+ title=title,
+ **plotly_kwargs,
+ )
+ fig.update_layout(bargap=0, bargroupgap=0)
+ fig.update_traces(marker_line_width=0)
+
+ if show is None:
+ show = CONFIG.Plotting.default_show
+ if show:
+ fig.show()
+
+ return PlotResult(data=combined.to_dataset(name=aspect), figure=fig)
+
+ def charge_states(
+ self,
+ storages: str | list[str] | None = None,
+ *,
+ select: SelectType | None = None,
+ colors: ColorType | None = None,
+ facet_col: str | None = 'period',
+ facet_row: str | None = 'scenario',
+ show: bool | None = None,
+ **plotly_kwargs: Any,
+ ) -> PlotResult:
+ """Plot storage charge states over time.
+
+ Args:
+ storages: Storage label(s) to plot. If None, plots all storages.
+ select: xarray-style selection.
+ colors: Color specification (colorscale name, color list, or label-to-color dict).
+ facet_col: Dimension for column facets.
+ facet_row: Dimension for row facets.
+ show: Whether to display.
+
+ Returns:
+ PlotResult with charge state data.
+ """
+ self._stats._require_solution()
+ ds = self._stats.charge_states
+
+ if storages is not None:
+ if isinstance(storages, str):
+ storages = [storages]
+ ds = ds[[s for s in storages if s in ds]]
+
+ ds = _apply_selection(ds, select)
+ actual_facet_col, actual_facet_row = _resolve_facets(ds, facet_col, facet_row)
+
+ fig = _create_line(
+ ds,
+ colors=colors,
+ title='Storage Charge States',
+ facet_col=actual_facet_col,
+ facet_row=actual_facet_row,
+ **plotly_kwargs,
+ )
+ fig.update_yaxes(title_text='Charge State')
+
+ if show is None:
+ show = CONFIG.Plotting.default_show
+ if show:
+ fig.show()
+
+ return PlotResult(data=ds, figure=fig)
+
+ def storage(
+ self,
+ storage: str,
+ *,
+ select: SelectType | None = None,
+ unit: Literal['flow_rate', 'flow_hours'] = 'flow_rate',
+ colors: ColorType | None = None,
+ charge_state_color: str = 'black',
+ facet_col: str | None = 'period',
+ facet_row: str | None = 'scenario',
+ show: bool | None = None,
+ **plotly_kwargs: Any,
+ ) -> PlotResult:
+ """Plot storage operation: balance and charge state in vertically stacked subplots.
+
+ Creates two subplots sharing the x-axis:
+ - Top: Charging/discharging flows as stacked bars (charging positive, discharging negative)
+ - Bottom: Charge state over time as a line
+
+ Args:
+ storage: Storage component label.
+ select: xarray-style selection.
+ unit: 'flow_rate' (power) or 'flow_hours' (energy).
+ colors: Color specification for flow bars.
+ charge_state_color: Color for the charge state line overlay.
+ facet_col: Dimension for column facets.
+ facet_row: Dimension for row facets.
+ show: Whether to display.
+
+ Returns:
+ PlotResult with combined balance and charge state data.
+
+ Raises:
+ KeyError: If storage component not found.
+ ValueError: If component is not a storage.
+ """
+ self._stats._require_solution()
+
+ # Get the storage component
+ if storage not in self._fs.components:
+ raise KeyError(f"'{storage}' not found in components")
+
+ component = self._fs.components[storage]
+
+ # Check if it's a storage by looking for charge_state variable
+ charge_state_var = f'{storage}|charge_state'
+ if charge_state_var not in self._fs.solution:
+ raise ValueError(f"'{storage}' is not a storage (no charge_state variable found)")
+
+ # Get flow data
+ input_labels = [f.label_full for f in component.inputs]
+ output_labels = [f.label_full for f in component.outputs]
+ all_labels = input_labels + output_labels
+
+ if unit == 'flow_rate':
+ ds = self._stats.flow_rates[[lbl for lbl in all_labels if lbl in self._stats.flow_rates]]
+ else:
+ ds = self._stats.flow_hours[[lbl for lbl in all_labels if lbl in self._stats.flow_hours]]
+
+ # Negate outputs for balance view (discharging shown as negative)
+ for label in output_labels:
+ if label in ds:
+ ds[label] = -ds[label]
+
+ # Get charge state and add to dataset
+ charge_state = self._fs.solution[charge_state_var].rename(storage)
+ ds['charge_state'] = charge_state
+
+ # Apply selection
+ ds = _apply_selection(ds, select)
+ actual_facet_col, actual_facet_row = _resolve_facets(ds, facet_col, facet_row)
+
+ # Build color map
+ flow_labels = [lbl for lbl in ds.data_vars if lbl != 'charge_state']
+ if colors is None:
+ colors = self._get_color_map_for_balance(storage, flow_labels)
+ color_map = process_colors(colors, flow_labels)
+ color_map['charge_state'] = charge_state_color
+
+ # Convert to long-form DataFrame
+ df = _dataset_to_long_df(ds)
+
+ # Create figure with facets using px.bar for flows, then add charge_state line
+ flow_df = df[df['variable'] != 'charge_state']
+ charge_df = df[df['variable'] == 'charge_state']
+
+ fig = px.bar(
+ flow_df,
+ x='time',
+ y='value',
+ color='variable',
+ facet_col=actual_facet_col,
+ facet_row=actual_facet_row,
+ color_discrete_map=color_map,
+ title=f'{storage} Operation ({unit})',
+ **plotly_kwargs,
+ )
+ fig.update_layout(bargap=0, bargroupgap=0)
+ fig.update_traces(marker_line_width=0)
+
+ # Add charge state as line on secondary y-axis using px.line, then merge traces
+ if not charge_df.empty:
+ line_fig = px.line(
+ charge_df,
+ x='time',
+ y='value',
+ facet_col=actual_facet_col,
+ facet_row=actual_facet_row,
+ )
+ # Update line traces and add to main figure
+ for trace in line_fig.data:
+ trace.name = 'charge_state'
+ trace.line = dict(color=charge_state_color, width=2)
+ trace.yaxis = 'y2'
+ trace.showlegend = True
+ fig.add_trace(trace)
+
+ # Add secondary y-axis
+ fig.update_layout(
+ yaxis2=dict(
+ title='Charge State',
+ overlaying='y',
+ side='right',
+ showgrid=False,
+ )
+ )
+
+ if show is None:
+ show = CONFIG.Plotting.default_show
+ if show:
+ fig.show()
+
+ return PlotResult(data=ds, figure=fig)
diff --git a/flixopt/structure.py b/flixopt/structure.py
index 62067e2ba..88fd9ce31 100644
--- a/flixopt/structure.py
+++ b/flixopt/structure.py
@@ -6,13 +6,16 @@
from __future__ import annotations
import inspect
+import json
import logging
+import pathlib
import re
from dataclasses import dataclass
from difflib import get_close_matches
from typing import (
TYPE_CHECKING,
Any,
+ ClassVar,
Generic,
Literal,
TypeVar,
@@ -28,7 +31,6 @@
from .core import FlowSystemDimensions, TimeSeriesData, get_dataarray_stats
if TYPE_CHECKING: # for type checking and preventing circular imports
- import pathlib
from collections.abc import Collection, ItemsView, Iterator
from .effects import EffectCollectionModel
@@ -108,6 +110,16 @@ def do_modeling(self):
# Add scenario equality constraints after all elements are modeled
self._add_scenario_equality_constraints()
+ # Populate _variable_names and _constraint_names on each Element
+ self._populate_element_variable_names()
+
+ def _populate_element_variable_names(self):
+ """Populate _variable_names and _constraint_names on each Element from its submodel."""
+ for element in self.flow_system.values():
+ if element.submodel is not None:
+ element._variable_names = list(element.submodel.variables)
+ element._constraint_names = list(element.submodel.constraints)
+
def _add_scenario_equality_for_parameter_type(
self,
parameter_type: Literal['flow_rate', 'size'],
@@ -156,29 +168,45 @@ def _add_scenario_equality_constraints(self):
@property
def solution(self):
+ """Build solution dataset, reindexing to timesteps_extra for consistency."""
solution = super().solution
solution['objective'] = self.objective.value
+ # Store attrs as JSON strings for netCDF compatibility
solution.attrs = {
- 'Components': {
- comp.label_full: comp.submodel.results_structure()
- for comp in sorted(
- self.flow_system.components.values(), key=lambda component: component.label_full.upper()
- )
- },
- 'Buses': {
- bus.label_full: bus.submodel.results_structure()
- for bus in sorted(self.flow_system.buses.values(), key=lambda bus: bus.label_full.upper())
- },
- 'Effects': {
- effect.label_full: effect.submodel.results_structure()
- for effect in sorted(self.flow_system.effects.values(), key=lambda effect: effect.label_full.upper())
- },
- 'Flows': {
- flow.label_full: flow.submodel.results_structure()
- for flow in sorted(self.flow_system.flows.values(), key=lambda flow: flow.label_full.upper())
- },
+ 'Components': json.dumps(
+ {
+ comp.label_full: comp.submodel.results_structure()
+ for comp in sorted(
+ self.flow_system.components.values(), key=lambda component: component.label_full.upper()
+ )
+ }
+ ),
+ 'Buses': json.dumps(
+ {
+ bus.label_full: bus.submodel.results_structure()
+ for bus in sorted(self.flow_system.buses.values(), key=lambda bus: bus.label_full.upper())
+ }
+ ),
+ 'Effects': json.dumps(
+ {
+ effect.label_full: effect.submodel.results_structure()
+ for effect in sorted(
+ self.flow_system.effects.values(), key=lambda effect: effect.label_full.upper()
+ )
+ }
+ ),
+ 'Flows': json.dumps(
+ {
+ flow.label_full: flow.submodel.results_structure()
+ for flow in sorted(self.flow_system.flows.values(), key=lambda flow: flow.label_full.upper())
+ }
+ ),
}
- return solution.reindex(time=self.flow_system.timesteps_extra)
+ # Ensure solution is always indexed by timesteps_extra for consistency.
+ # Variables without extra timestep data will have NaN at the final timestep.
+ if 'time' in solution.coords and not solution.indexes['time'].equals(self.flow_system.timesteps_extra):
+ solution = solution.reindex(time=self.flow_system.timesteps_extra)
+ return solution
@property
def hours_per_step(self):
@@ -291,14 +319,18 @@ class Interface:
- Recursive handling of complex nested structures
Subclasses must implement:
- transform_data(name_prefix=''): Transform data to match FlowSystem dimensions
+ transform_data(): Transform data to match FlowSystem dimensions
"""
- def transform_data(self, name_prefix: str = '') -> None:
+ # Class-level defaults for attributes set by link_to_flow_system()
+ # These provide type hints and default values without requiring __init__ in subclasses
+ _flow_system: FlowSystem | None = None
+ _prefix: str = ''
+
+ def transform_data(self) -> None:
"""Transform the data of the interface to match the FlowSystem's dimensions.
- Args:
- name_prefix: The prefix to use for the names of the variables. Defaults to '', which results in no prefix.
+ Uses `self._prefix` (set during `link_to_flow_system()`) to name transformed data.
Raises:
NotImplementedError: Must be implemented by subclasses
@@ -310,20 +342,53 @@ def transform_data(self, name_prefix: str = '') -> None:
"""
raise NotImplementedError('Every Interface subclass needs a transform_data() method')
- def _set_flow_system(self, flow_system: FlowSystem) -> None:
- """Store flow_system reference and propagate to nested Interface objects.
+ @property
+ def prefix(self) -> str:
+ """The prefix used for naming transformed data (e.g., 'Boiler(Q_th)|status_parameters')."""
+ return self._prefix
+
+ def _sub_prefix(self, name: str) -> str:
+ """Build a prefix for a nested interface by appending name to current prefix."""
+ return f'{self._prefix}|{name}' if self._prefix else name
+
+ def link_to_flow_system(self, flow_system: FlowSystem, prefix: str = '') -> None:
+ """Link this interface and all nested interfaces to a FlowSystem.
This method is called automatically during element registration to enable
elements to access FlowSystem properties without passing the reference
- through every method call.
+ through every method call. It also sets the prefix used for naming
+ transformed data.
Subclasses with nested Interface objects should override this method
- to explicitly propagate the reference to their nested interfaces.
+ to propagate the link to their nested interfaces by calling
+ `super().link_to_flow_system(flow_system, prefix)` first, then linking
+ nested objects with appropriate prefixes.
Args:
- flow_system: The FlowSystem that this interface belongs to
+ flow_system: The FlowSystem to link to
+ prefix: The prefix for naming transformed data (e.g., 'Boiler(Q_th)')
+
+ Examples:
+ Override in a subclass with nested interfaces:
+
+ ```python
+ def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
+ super().link_to_flow_system(flow_system, prefix)
+ if self.nested_interface is not None:
+ self.nested_interface.link_to_flow_system(flow_system, f'{prefix}|nested' if prefix else 'nested')
+ ```
+
+ Creating an Interface dynamically during modeling:
+
+ ```python
+ # In a Model class
+ if flow.status_parameters is None:
+ flow.status_parameters = StatusParameters()
+ flow.status_parameters.link_to_flow_system(self._model.flow_system, f'{flow.label_full}')
+ ```
"""
self._flow_system = flow_system
+ self._prefix = prefix
@property
def flow_system(self) -> FlowSystem:
@@ -339,7 +404,7 @@ def flow_system(self) -> FlowSystem:
For Elements, this is set during add_elements().
For parameter classes, this is set recursively when the parent Element is registered.
"""
- if not hasattr(self, '_flow_system') or self._flow_system is None:
+ if self._flow_system is None:
raise RuntimeError(
f'{self.__class__.__name__} is not linked to a FlowSystem. '
f'Ensure the parent element is registered via flow_system.add_elements() first.'
@@ -723,7 +788,34 @@ def _resolve_reference_structure(cls, structure, arrays_dict: dict[str, xr.DataA
resolved_nested_data = cls._resolve_reference_structure(nested_data, arrays_dict)
try:
- return nested_class(**resolved_nested_data)
+ # Get valid constructor parameters for this class
+ init_params = set(inspect.signature(nested_class.__init__).parameters.keys())
+
+ # Check for deferred init attributes (defined as class attribute on Element subclasses)
+ # These are serialized but set after construction, not passed to child __init__
+ deferred_attr_names = getattr(nested_class, '_deferred_init_attrs', set())
+ deferred_attrs = {k: v for k, v in resolved_nested_data.items() if k in deferred_attr_names}
+ constructor_data = {k: v for k, v in resolved_nested_data.items() if k not in deferred_attr_names}
+
+ # Check for unknown parameters - these could be typos or renamed params
+ unknown_params = set(constructor_data.keys()) - init_params
+ if unknown_params:
+ raise TypeError(
+ f'{class_name}.__init__() got unexpected keyword arguments: {unknown_params}. '
+ f'This may indicate renamed parameters that need conversion. '
+ f'Valid parameters are: {init_params - {"self"}}'
+ )
+
+ # Create instance with constructor parameters
+ instance = nested_class(**constructor_data)
+
+ # Set internal attributes after construction
+ for attr_name, attr_value in deferred_attrs.items():
+ setattr(instance, attr_name, attr_value)
+
+ return instance
+ except TypeError as e:
+ raise ValueError(f'Failed to create instance of {class_name}: {e}') from e
except Exception as e:
raise ValueError(f'Failed to create instance of {class_name}: {e}') from e
else:
@@ -799,18 +891,29 @@ def to_dataset(self) -> xr.Dataset:
f'Original Error: {e}'
) from e
- def to_netcdf(self, path: str | pathlib.Path, compression: int = 0):
+ def to_netcdf(self, path: str | pathlib.Path, compression: int = 5, overwrite: bool = False):
"""
Save the object to a NetCDF file.
Args:
- path: Path to save the NetCDF file
+ path: Path to save the NetCDF file. Parent directories are created if they don't exist.
compression: Compression level (0-9)
+ overwrite: If True, overwrite existing file. If False, raise error if file exists.
Raises:
+ FileExistsError: If overwrite=False and file already exists.
ValueError: If serialization fails
IOError: If file cannot be written
"""
+ path = pathlib.Path(path)
+
+ # Check if file exists (unless overwrite is True)
+ if not overwrite and path.exists():
+ raise FileExistsError(f'File already exists: {path}. Use overwrite=True to overwrite existing file.')
+
+ # Create parent directories if they don't exist
+ path.parent.mkdir(parents=True, exist_ok=True)
+
try:
ds = self.to_dataset()
fx_io.save_dataset_to_netcdf(ds, path, compression=compression)
@@ -961,16 +1064,34 @@ class Element(Interface):
submodel: ElementModel | None
- def __init__(self, label: str, meta_data: dict | None = None):
+ # Attributes that are serialized but set after construction (not passed to child __init__)
+ # These are internal state populated during modeling, not user-facing parameters
+ _deferred_init_attrs: ClassVar[set[str]] = {'_variable_names', '_constraint_names'}
+
+ def __init__(
+ self,
+ label: str,
+ meta_data: dict | None = None,
+ color: str | None = None,
+ _variable_names: list[str] | None = None,
+ _constraint_names: list[str] | None = None,
+ ):
"""
Args:
label: The label of the element
meta_data: used to store more information about the Element. Is not used internally, but saved in the results. Only use python native types.
+ color: Optional color for visualizations (e.g., '#FF6B6B'). If not provided, a color will be automatically assigned during FlowSystem.connect_and_transform().
+ _variable_names: Internal. Variable names for this element (populated after modeling).
+ _constraint_names: Internal. Constraint names for this element (populated after modeling).
"""
self.label = Element._valid_label(label)
self.meta_data = meta_data if meta_data is not None else {}
+ self.color = color
self.submodel = None
self._flow_system: FlowSystem | None = None
+ # Variable/constraint names - populated after modeling, serialized for results
+ self._variable_names: list[str] = _variable_names if _variable_names is not None else []
+ self._constraint_names: list[str] = _constraint_names if _constraint_names is not None else []
def _plausibility_checks(self) -> None:
"""This function is used to do some basic plausibility checks for each Element during initialization.
@@ -984,6 +1105,40 @@ def create_model(self, model: FlowSystemModel) -> ElementModel:
def label_full(self) -> str:
return self.label
+ @property
+ def solution(self) -> xr.Dataset:
+ """Solution data for this element's variables.
+
+ Returns a view into FlowSystem.solution containing only this element's variables.
+
+ Raises:
+ ValueError: If no solution is available (optimization not run or not solved).
+ """
+ if self._flow_system is None:
+ raise ValueError(f'Element "{self.label}" is not linked to a FlowSystem.')
+ if self._flow_system.solution is None:
+ raise ValueError(f'No solution available for "{self.label}". Run optimization first or load results.')
+ if not self._variable_names:
+ raise ValueError(f'No variable names available for "{self.label}". Element may not have been modeled yet.')
+ return self._flow_system.solution[self._variable_names]
+
+ def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]:
+ """
+ Override to include _variable_names and _constraint_names in serialization.
+
+ These attributes are defined in Element but may not be in subclass constructors,
+ so we need to add them explicitly.
+ """
+ reference_structure, all_extracted_arrays = super()._create_reference_structure()
+
+ # Always include variable/constraint names for solution access after loading
+ if self._variable_names:
+ reference_structure['_variable_names'] = self._variable_names
+ if self._constraint_names:
+ reference_structure['_constraint_names'] = self._constraint_names
+
+ return reference_structure, all_extracted_arrays
+
def __repr__(self) -> str:
"""Return string representation."""
return fx_io.build_repr_from_init(self, excluded_params={'self', 'label', 'kwargs'}, skip_default_size=True)
@@ -1032,16 +1187,20 @@ def __init__(
elements: list[T] | dict[str, T] | None = None,
element_type_name: str = 'elements',
truncate_repr: int | None = None,
+ item_name: str | None = None,
):
"""
Args:
elements: Initial elements to add (list or dict)
element_type_name: Name for display (e.g., 'components', 'buses')
truncate_repr: Maximum number of items to show in repr. If None, show all items. Default: None
+ item_name: Singular name for error messages (e.g., 'Component', 'Carrier').
+ If None, inferred from first added item's class name.
"""
super().__init__()
self._element_type_name = element_type_name
self._truncate_repr = truncate_repr
+ self._item_name = item_name
if elements is not None:
if isinstance(elements, dict):
@@ -1063,13 +1222,28 @@ def _get_label(self, element: T) -> str:
"""
raise NotImplementedError('Subclasses must implement _get_label()')
+ def _get_item_name(self) -> str:
+ """Get the singular item name for error messages.
+
+ Returns the explicitly set item_name, or infers from the first item's class name.
+ Falls back to 'Item' if container is empty and no name was set.
+ """
+ if self._item_name is not None:
+ return self._item_name
+ # Infer from first item's class name
+ if self:
+ first_item = next(iter(self.values()))
+ return first_item.__class__.__name__
+ return 'Item'
+
def add(self, element: T) -> None:
"""Add an element to the container."""
label = self._get_label(element)
if label in self:
+ item_name = element.__class__.__name__
raise ValueError(
- f'Element with label "{label}" already exists in {self._element_type_name}. '
- f'Each element must have a unique label.'
+ f'{item_name} with label "{label}" already exists in {self._element_type_name}. '
+ f'Each {item_name.lower()} must have a unique label.'
)
self[label] = element
@@ -1100,8 +1274,9 @@ def __getitem__(self, label: str) -> T:
return super().__getitem__(label)
except KeyError:
# Provide helpful error with close matches suggestions
+ item_name = self._get_item_name()
suggestions = get_close_matches(label, self.keys(), n=3, cutoff=0.6)
- error_msg = f'Element "{label}" not found in {self._element_type_name}.'
+ error_msg = f'{item_name} "{label}" not found in {self._element_type_name}.'
if suggestions:
error_msg += f' Did you mean: {", ".join(suggestions)}?'
else:
diff --git a/flixopt/topology_accessor.py b/flixopt/topology_accessor.py
new file mode 100644
index 000000000..eb5f05876
--- /dev/null
+++ b/flixopt/topology_accessor.py
@@ -0,0 +1,579 @@
+"""
+Topology accessor for FlowSystem.
+
+This module provides the TopologyAccessor class that enables the
+`flow_system.topology` pattern for network structure inspection and visualization.
+"""
+
+from __future__ import annotations
+
+import logging
+import pathlib
+import warnings
+from itertools import chain
+from typing import TYPE_CHECKING, Any, Literal
+
+import plotly.graph_objects as go
+import xarray as xr
+
+from .color_processing import ColorType, hex_to_rgba, process_colors
+from .config import CONFIG, DEPRECATION_REMOVAL_VERSION
+from .plot_result import PlotResult
+
+if TYPE_CHECKING:
+ import pyvis
+
+ from .flow_system import FlowSystem
+
+logger = logging.getLogger('flixopt')
+
+
+def _plot_network(
+ node_infos: dict,
+ edge_infos: dict,
+ path: str | pathlib.Path | None = None,
+ controls: bool
+ | list[
+ Literal['nodes', 'edges', 'layout', 'interaction', 'manipulation', 'physics', 'selection', 'renderer']
+ ] = True,
+ show: bool = False,
+) -> pyvis.network.Network | None:
+ """Visualize network structure using PyVis.
+
+ Args:
+ node_infos: Dictionary of node information.
+ edge_infos: Dictionary of edge information.
+ path: Path to save HTML visualization.
+ controls: UI controls to add. True for all, or list of specific controls.
+ show: Whether to open in browser.
+
+ Returns:
+ Network instance, or None if pyvis not installed.
+ """
+ try:
+ from pyvis.network import Network
+ except ImportError:
+ logger.critical("Plotting the flow system network was not possible. Please install pyvis: 'pip install pyvis'")
+ return None
+
+ net = Network(directed=True, height='100%' if controls is False else '800px', font_color='white')
+
+ for node_id, node in node_infos.items():
+ net.add_node(
+ node_id,
+ label=node['label'],
+ shape={'Bus': 'circle', 'Component': 'box'}[node['class']],
+ color={'Bus': '#393E46', 'Component': '#00ADB5'}[node['class']],
+ title=node['infos'].replace(')', '\n)'),
+ font={'size': 14},
+ )
+
+ for edge in edge_infos.values():
+ # Use carrier color if available, otherwise default gray
+ edge_color = edge.get('carrier_color', '#222831') or '#222831'
+ net.add_edge(
+ edge['start'],
+ edge['end'],
+ label=edge['label'],
+ title=edge['infos'].replace(')', '\n)'),
+ font={'color': '#4D4D4D', 'size': 14},
+ color=edge_color,
+ )
+
+ net.barnes_hut(central_gravity=0.8, spring_length=50, spring_strength=0.05, gravity=-10000)
+
+ if controls:
+ net.show_buttons(filter_=controls)
+ if not show and not path:
+ return net
+ elif path:
+ path = pathlib.Path(path) if isinstance(path, str) else path
+ net.write_html(path.as_posix())
+ elif show:
+ path = pathlib.Path('network.html')
+ net.write_html(path.as_posix())
+
+ if show:
+ try:
+ import webbrowser
+
+ worked = webbrowser.open(f'file://{path.resolve()}', 2)
+ if not worked:
+ logger.error(f'Showing the network in the Browser went wrong. Open it manually. Its saved under {path}')
+ except Exception as e:
+ logger.error(
+ f'Showing the network in the Browser went wrong. Open it manually. Its saved under {path}: {e}'
+ )
+
+ return net
+
+
+class TopologyAccessor:
+ """
+ Accessor for network topology inspection and visualization on FlowSystem.
+
+ This class provides the topology API for FlowSystem, accessible via
+ `flow_system.topology`. It offers methods to inspect the network structure
+ and visualize it.
+
+ Examples:
+ Visualize the network:
+
+ >>> flow_system.topology.plot()
+ >>> flow_system.topology.plot(path='my_network.html', show=True)
+
+ Interactive visualization:
+
+ >>> flow_system.topology.start_app()
+ >>> # ... interact with the visualization ...
+ >>> flow_system.topology.stop_app()
+
+ Get network structure info:
+
+ >>> nodes, edges = flow_system.topology.infos()
+ """
+
+ def __init__(self, flow_system: FlowSystem) -> None:
+ """
+ Initialize the accessor with a reference to the FlowSystem.
+
+ Args:
+ flow_system: The FlowSystem to inspect.
+ """
+ self._fs = flow_system
+
+ # Cached color mappings (lazily initialized)
+ self._carrier_colors: dict[str, str] | None = None
+ self._component_colors: dict[str, str] | None = None
+ self._bus_colors: dict[str, str] | None = None
+
+ # Cached unit mappings (lazily initialized)
+ self._carrier_units: dict[str, str] | None = None
+ self._effect_units: dict[str, str] | None = None
+
+ @property
+ def carrier_colors(self) -> dict[str, str]:
+ """Cached mapping of carrier name to hex color.
+
+ Returns:
+ Dict mapping carrier names (lowercase) to hex color strings.
+ Only carriers with a color defined are included.
+
+ Examples:
+ >>> fs.topology.carrier_colors
+ {'electricity': '#FECB52', 'heat': '#D62728', 'gas': '#1F77B4'}
+ """
+ if self._carrier_colors is None:
+ self._carrier_colors = {name: carrier.color for name, carrier in self._fs.carriers.items() if carrier.color}
+ return self._carrier_colors
+
+ @property
+ def component_colors(self) -> dict[str, str]:
+ """Cached mapping of component label to hex color.
+
+ Returns:
+ Dict mapping component labels to hex color strings.
+ Only components with a color defined are included.
+
+ Examples:
+ >>> fs.topology.component_colors
+ {'Boiler': '#1f77b4', 'CHP': '#ff7f0e', 'HeatPump': '#2ca02c'}
+ """
+ if self._component_colors is None:
+ self._component_colors = {label: comp.color for label, comp in self._fs.components.items() if comp.color}
+ return self._component_colors
+
+ @property
+ def bus_colors(self) -> dict[str, str]:
+ """Cached mapping of bus label to hex color (from carrier).
+
+ Bus colors are derived from their associated carrier's color.
+
+ Returns:
+ Dict mapping bus labels to hex color strings.
+ Only buses with a carrier that has a color defined are included.
+
+ Examples:
+ >>> fs.topology.bus_colors
+ {'ElectricityBus': '#FECB52', 'HeatBus': '#D62728'}
+ """
+ if self._bus_colors is None:
+ carrier_colors = self.carrier_colors
+ self._bus_colors = {}
+ for label, bus in self._fs.buses.items():
+ if bus.carrier:
+ color = carrier_colors.get(bus.carrier.lower())
+ if color:
+ self._bus_colors[label] = color
+ return self._bus_colors
+
+ @property
+ def carrier_units(self) -> dict[str, str]:
+ """Cached mapping of carrier name to unit string.
+
+ Returns:
+ Dict mapping carrier names (lowercase) to unit strings.
+ Carriers without a unit defined return an empty string.
+
+ Examples:
+ >>> fs.topology.carrier_units
+ {'electricity': 'kW', 'heat': 'kW', 'gas': 'kW'}
+ """
+ if self._carrier_units is None:
+ self._carrier_units = {name: carrier.unit or '' for name, carrier in self._fs.carriers.items()}
+ return self._carrier_units
+
+ @property
+ def effect_units(self) -> dict[str, str]:
+ """Cached mapping of effect label to unit string.
+
+ Returns:
+ Dict mapping effect labels to unit strings.
+ Effects without a unit defined return an empty string.
+
+ Examples:
+ >>> fs.topology.effect_units
+ {'costs': '€', 'CO2': 'kg'}
+ """
+ if self._effect_units is None:
+ self._effect_units = {effect.label: effect.unit or '' for effect in self._fs.effects.values()}
+ return self._effect_units
+
+ def infos(self) -> tuple[dict[str, dict[str, str]], dict[str, dict[str, str]]]:
+ """
+ Get network topology information as dictionaries.
+
+ Returns node and edge information suitable for visualization or analysis.
+
+ Returns:
+ Tuple of (nodes_dict, edges_dict) where:
+ - nodes_dict maps node labels to their properties (label, class, infos)
+ - edges_dict maps edge labels to their properties (label, start, end, infos)
+
+ Examples:
+ >>> nodes, edges = flow_system.topology.infos()
+ >>> print(nodes.keys()) # All component and bus labels
+ >>> print(edges.keys()) # All flow labels
+ """
+ from .elements import Bus
+
+ if not self._fs.connected_and_transformed:
+ self._fs.connect_and_transform()
+
+ nodes = {
+ node.label_full: {
+ 'label': node.label,
+ 'class': 'Bus' if isinstance(node, Bus) else 'Component',
+ 'infos': node.__str__(),
+ }
+ for node in chain(self._fs.components.values(), self._fs.buses.values())
+ }
+
+ # Use cached colors for efficient lookup
+ flow_carriers = self._fs.flow_carriers
+ carrier_colors = self.carrier_colors
+
+ edges = {}
+ for flow in self._fs.flows.values():
+ carrier_name = flow_carriers.get(flow.label_full)
+ edges[flow.label_full] = {
+ 'label': flow.label,
+ 'start': flow.bus if flow.is_input_in_component else flow.component,
+ 'end': flow.component if flow.is_input_in_component else flow.bus,
+ 'infos': flow.__str__(),
+ 'carrier_color': carrier_colors.get(carrier_name) if carrier_name else None,
+ }
+
+ return nodes, edges
+
+ def plot(
+ self,
+ colors: ColorType | None = None,
+ show: bool | None = None,
+ **plotly_kwargs: Any,
+ ) -> PlotResult:
+ """
+ Visualize the network structure as a Sankey diagram using Plotly.
+
+ Creates a Sankey diagram showing the topology of the flow system,
+ with buses and components as nodes, and flows as links between them.
+ All links have equal width since no solution data is used.
+
+ Args:
+ colors: Color specification for nodes (buses).
+ - `None`: Uses default color palette based on buses.
+ - `str`: Plotly colorscale name (e.g., 'Viridis', 'Blues').
+ - `list`: List of colors to cycle through.
+ - `dict`: Maps bus labels to specific colors.
+ Links inherit colors from their connected bus.
+ show: Whether to display the figure in the browser.
+ - `None`: Uses default from CONFIG.Plotting.default_show.
+ **plotly_kwargs: Additional arguments passed to Plotly layout.
+
+ Returns:
+ PlotResult containing the Sankey diagram figure and topology data
+ (source, target, value for each link).
+
+ Examples:
+ >>> flow_system.topology.plot()
+ >>> flow_system.topology.plot(show=True)
+ >>> flow_system.topology.plot(colors='Viridis')
+ >>> flow_system.topology.plot(colors={'ElectricityBus': 'gold', 'HeatBus': 'red'})
+
+ Notes:
+ This visualization shows the network structure without optimization results.
+ For visualizations that include flow values, use `flow_system.statistics.plot.sankey.flows()`
+ after running an optimization.
+
+ Hover over nodes and links to see detailed element information.
+
+ See Also:
+ - `plot_legacy()`: Previous PyVis-based network visualization.
+ - `statistics.plot.sankey.flows()`: Sankey with actual flow values from optimization.
+ """
+ if not self._fs.connected_and_transformed:
+ self._fs.connect_and_transform()
+
+ # Build nodes and links from topology
+ nodes: set[str] = set()
+ links: dict[str, list] = {
+ 'source': [],
+ 'target': [],
+ 'value': [],
+ 'label': [],
+ 'customdata': [], # For hover text
+ 'color': [], # Carrier-based colors
+ }
+
+ # Collect node hover info (format repr for HTML display)
+ node_hover: dict[str, str] = {}
+ for comp in self._fs.components.values():
+            node_hover[comp.label] = repr(comp).replace('\n', '<br>')
+ for bus in self._fs.buses.values():
+            node_hover[bus.label] = repr(bus).replace('\n', '<br>')
+
+ # Use cached colors for efficient lookup
+ flow_carriers = self._fs.flow_carriers
+ carrier_colors = self.carrier_colors
+
+ for flow in self._fs.flows.values():
+ bus_label = flow.bus
+ comp_label = flow.component
+
+ if flow.is_input_in_component:
+ source = bus_label
+ target = comp_label
+ else:
+ source = comp_label
+ target = bus_label
+
+ nodes.add(source)
+ nodes.add(target)
+ links['source'].append(source)
+ links['target'].append(target)
+ links['value'].append(1) # Equal width for all links (no solution data)
+ links['label'].append(flow.label_full)
+            links['customdata'].append(repr(flow).replace('\n', '<br>'))  # Flow repr for hover
+
+ # Get carrier color for this flow (subtle/semi-transparent) using cached colors
+ carrier_name = flow_carriers.get(flow.label_full)
+ color = carrier_colors.get(carrier_name) if carrier_name else None
+ links['color'].append(hex_to_rgba(color, alpha=0.4) if color else hex_to_rgba('', alpha=0.4))
+
+ # Create figure
+ node_list = list(nodes)
+ node_indices = {n: i for i, n in enumerate(node_list)}
+
+ # Get colors for buses and components using cached colors
+ bus_colors_cached = self.bus_colors
+ component_colors_cached = self.component_colors
+
+ # If user provided colors, process them for buses
+ if colors is not None:
+ bus_labels = [bus.label for bus in self._fs.buses.values()]
+ bus_color_map = process_colors(colors, bus_labels)
+ else:
+ bus_color_map = bus_colors_cached
+
+ # Assign colors to nodes: buses get their color, components get their color or neutral gray
+ node_colors = []
+ for node in node_list:
+ if node in bus_color_map:
+ node_colors.append(bus_color_map[node])
+ elif node in component_colors_cached:
+ node_colors.append(component_colors_cached[node])
+ else:
+ # Fallback - use a neutral gray
+ node_colors.append('#808080')
+
+ # Build hover text for nodes
+ node_customdata = [node_hover.get(node, node) for node in node_list]
+
+ fig = go.Figure(
+ data=[
+ go.Sankey(
+ node=dict(
+ pad=15,
+ thickness=20,
+ line=dict(color='black', width=0.5),
+ label=node_list,
+ color=node_colors,
+ customdata=node_customdata,
+ hovertemplate='%{customdata}',
+ ),
+ link=dict(
+ source=[node_indices[s] for s in links['source']],
+ target=[node_indices[t] for t in links['target']],
+ value=links['value'],
+ label=links['label'],
+ customdata=links['customdata'],
+ hovertemplate='%{customdata}',
+ color=links['color'], # Carrier-based colors
+ ),
+ )
+ ]
+ )
+ title = plotly_kwargs.pop('title', 'Flow System Topology')
+ fig.update_layout(title=title, **plotly_kwargs)
+
+ # Build xarray Dataset with topology data
+ data = xr.Dataset(
+ {
+ 'source': ('link', links['source']),
+ 'target': ('link', links['target']),
+ 'value': ('link', links['value']),
+ },
+ coords={'link': links['label']},
+ )
+ result = PlotResult(data=data, figure=fig)
+
+ if show is None:
+ show = CONFIG.Plotting.default_show
+ if show:
+ result.show()
+
+ return result
+
+ def plot_legacy(
+ self,
+ path: bool | str | pathlib.Path = 'flow_system.html',
+ controls: bool
+ | list[
+ Literal['nodes', 'edges', 'layout', 'interaction', 'manipulation', 'physics', 'selection', 'renderer']
+ ] = True,
+ show: bool | None = None,
+ ) -> pyvis.network.Network | None:
+ """
+ Visualize the network structure using PyVis, saving it as an interactive HTML file.
+
+ .. deprecated::
+ Use `plot()` instead for the new Plotly-based Sankey visualization.
+ This method is kept for backwards compatibility.
+
+ Args:
+ path: Path to save the HTML visualization.
+ - `False`: Visualization is created but not saved.
+ - `str` or `Path`: Specifies file path (default: 'flow_system.html').
+ controls: UI controls to add to the visualization.
+ - `True`: Enables all available controls.
+ - `List`: Specify controls, e.g., ['nodes', 'layout'].
+ - Options: 'nodes', 'edges', 'layout', 'interaction', 'manipulation',
+ 'physics', 'selection', 'renderer'.
+ show: Whether to open the visualization in the web browser.
+
+ Returns:
+ The `pyvis.network.Network` instance representing the visualization,
+ or `None` if `pyvis` is not installed.
+
+ Examples:
+ >>> flow_system.topology.plot_legacy()
+ >>> flow_system.topology.plot_legacy(show=False)
+ >>> flow_system.topology.plot_legacy(path='output/network.html', controls=['nodes', 'layout'])
+
+ Notes:
+ This function requires `pyvis`. If not installed, the function prints
+ a warning and returns `None`.
+ Nodes are styled based on type (circles for buses, boxes for components)
+ and annotated with node information.
+ """
+ warnings.warn(
+ f'This method is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. '
+ 'Use flow_system.topology.plot() instead.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ node_infos, edge_infos = self.infos()
+ # Normalize path=False to None for _plot_network compatibility
+ normalized_path = None if path is False else path
+ return _plot_network(
+ node_infos,
+ edge_infos,
+ normalized_path,
+ controls,
+ show if show is not None else CONFIG.Plotting.default_show,
+ )
+
+    def start_app(self) -> None:
+        """
+        Start an interactive network visualization using Dash and Cytoscape.
+
+        Launches a web-based interactive visualization server that allows
+        exploring the network structure dynamically.
+
+        Raises:
+            ImportError: If required dependencies are not installed.
+
+        Examples:
+            >>> flow_system.topology.start_app()
+            >>> # ... interact with the visualization in browser ...
+            >>> flow_system.topology.stop_app()
+
+        Notes:
+            Requires optional dependencies: dash, dash-cytoscape, dash-daq,
+            networkx, flask, werkzeug.
+            Install with: `pip install flixopt[network_viz]` or `pip install flixopt[full]`
+        """
+        from .network_app import DASH_CYTOSCAPE_AVAILABLE, VISUALIZATION_ERROR, flow_graph, shownetwork
+
+        warnings.warn(
+            'The network visualization is still experimental and might change in the future.',
+            stacklevel=2,
+            category=UserWarning,
+        )
+
+        if not DASH_CYTOSCAPE_AVAILABLE:
+            raise ImportError(
+                f'Network visualization requires optional dependencies. '
+                f'Install with: `pip install flixopt[network_viz]`, `pip install flixopt[full]` '
+                f'or: `pip install dash dash-cytoscape dash-daq networkx werkzeug`. '
+                f'Original error: {VISUALIZATION_ERROR}'
+            )
+
+        if not self._fs.connected_and_transformed:  # public API, consistent with infos()/plot()
+            self._fs.connect_and_transform()
+
+        if self._fs._network_app is not None:
+            logger.warning('The network app is already running. Restarting it.')
+            self.stop_app()
+
+        self._fs._network_app = shownetwork(flow_graph(self._fs))
+
+    def stop_app(self) -> None:
+        """
+        Stop the interactive network visualization server.
+
+        Examples:
+            >>> flow_system.topology.stop_app()
+        """
+        if self._fs._network_app is None:  # nothing to stop; warn instead of raising
+            logger.warning("No network app is currently running. Can't stop it")
+            return
+
+        try:
+            logger.info('Stopping network visualization server...')
+            self._fs._network_app.server_instance.shutdown()  # shut down underlying server (presumably werkzeug) — confirm
+            logger.info('Network visualization stopped.')
+        except Exception as e:
+            logger.error(f'Failed to stop the network visualization app: {e}')
+        finally:
+            self._fs._network_app = None  # always clear reference so start_app() can relaunch
diff --git a/flixopt/transform_accessor.py b/flixopt/transform_accessor.py
new file mode 100644
index 000000000..eaec1a3b6
--- /dev/null
+++ b/flixopt/transform_accessor.py
@@ -0,0 +1,703 @@
+"""
+Transform accessor for FlowSystem.
+
+This module provides the TransformAccessor class that enables
+transformations on FlowSystem like clustering, selection, and resampling.
+"""
+
+from __future__ import annotations
+
+import logging
+from collections import defaultdict
+from typing import TYPE_CHECKING, Any, Literal
+
+import pandas as pd
+import xarray as xr
+
+if TYPE_CHECKING:
+ import numpy as np
+
+ from .clustering import ClusteringParameters
+ from .flow_system import FlowSystem
+
+logger = logging.getLogger('flixopt')
+
+
+class TransformAccessor:
+ """
+ Accessor for transformation methods on FlowSystem.
+
+ This class provides transformations that create new FlowSystem instances
+ with modified structure or data, accessible via `flow_system.transform`.
+
+ Examples:
+ Clustered optimization:
+
+ >>> clustered_fs = flow_system.transform.cluster(params)
+ >>> clustered_fs.optimize(solver)
+ >>> print(clustered_fs.solution)
+
+ Future MGA:
+
+ >>> mga_fs = flow_system.transform.mga(alternatives=5)
+ >>> mga_fs.optimize(solver)
+ """
+
+ def __init__(self, flow_system: FlowSystem) -> None:
+ """
+ Initialize the accessor with a reference to the FlowSystem.
+
+ Args:
+ flow_system: The FlowSystem to transform.
+ """
+ self._fs = flow_system
+
+ def cluster(
+ self,
+ parameters: ClusteringParameters,
+ components_to_clusterize: list | None = None,
+ ) -> FlowSystem:
+ """
+ Create a clustered FlowSystem for time series aggregation.
+
+ This method creates a new FlowSystem that can be optimized with
+ clustered time series data. The clustering reduces computational
+ complexity by identifying representative time periods.
+
+ The returned FlowSystem:
+ - Has the same timesteps as the original (clustering works via constraints, not reduction)
+ - Has aggregated time series data (if `aggregate_data_and_fix_non_binary_vars=True`)
+ - Will have clustering constraints added during `build_model()`
+
+ Args:
+ parameters: Clustering parameters specifying period duration,
+ number of periods, and aggregation settings.
+ components_to_clusterize: List of components to apply clustering to.
+ If None, all components are clustered.
+
+ Returns:
+ A new FlowSystem configured for clustered optimization.
+
+ Raises:
+ ValueError: If timestep sizes are inconsistent.
+ ValueError: If hours_per_period is not a multiple of timestep size.
+
+ Examples:
+ Basic clustered optimization:
+
+ >>> from flixopt import ClusteringParameters
+ >>> params = ClusteringParameters(
+ ... hours_per_period=24,
+ ... nr_of_periods=8,
+ ... fix_storage_flows=True,
+ ... aggregate_data_and_fix_non_binary_vars=True,
+ ... )
+ >>> clustered_fs = flow_system.transform.cluster(params)
+ >>> clustered_fs.optimize(solver)
+ >>> print(clustered_fs.solution)
+
+ With model modifications:
+
+ >>> clustered_fs = flow_system.transform.cluster(params)
+ >>> clustered_fs.build_model()
+ >>> clustered_fs.model.add_constraints(...)
+ >>> clustered_fs.solve(solver)
+ """
+ import numpy as np
+
+ from .clustering import Clustering
+ from .core import DataConverter, TimeSeriesData, drop_constant_arrays
+
+ # Validation
+ dt_min = float(self._fs.hours_per_timestep.min().item())
+ dt_max = float(self._fs.hours_per_timestep.max().item())
+ if dt_min != dt_max:
+ raise ValueError(
+ f'Clustering failed due to inconsistent time step sizes: '
+ f'delta_t varies from {dt_min} to {dt_max} hours.'
+ )
+ ratio = parameters.hours_per_period / dt_max
+ if not np.isclose(ratio, round(ratio), atol=1e-9):
+ raise ValueError(
+ f'The selected hours_per_period={parameters.hours_per_period} does not match the time '
+ f'step size of {dt_max} hours. It must be an integer multiple of {dt_max} hours.'
+ )
+
+ logger.info(f'{"":#^80}')
+ logger.info(f'{" Clustering TimeSeries Data ":#^80}')
+
+ # Get dataset representation
+ ds = self._fs.to_dataset()
+ temporaly_changing_ds = drop_constant_arrays(ds, dim='time')
+
+ # Perform clustering
+ clustering = Clustering(
+ original_data=temporaly_changing_ds.to_dataframe(),
+ hours_per_time_step=float(dt_min),
+ hours_per_period=parameters.hours_per_period,
+ nr_of_periods=parameters.nr_of_periods,
+ weights=self._calculate_clustering_weights(temporaly_changing_ds),
+ time_series_for_high_peaks=parameters.labels_for_high_peaks,
+ time_series_for_low_peaks=parameters.labels_for_low_peaks,
+ )
+ clustering.cluster()
+
+ # Create new FlowSystem (with aggregated data if requested)
+ if parameters.aggregate_data_and_fix_non_binary_vars:
+ # Note: A second to_dataset() call is required here because:
+ # 1. The first 'ds' (created at the top of this method) was processed by drop_constant_arrays()
+ # 2. We need the full unprocessed dataset to apply aggregated data modifications
+ # 3. The clustering used 'temporaly_changing_ds' for input, not the full 'ds'
+ ds = self._fs.to_dataset()
+ for name, series in clustering.aggregated_data.items():
+ da = DataConverter.to_dataarray(series, self._fs.coords).rename(name).assign_attrs(ds[name].attrs)
+ if TimeSeriesData.is_timeseries_data(da):
+ da = TimeSeriesData.from_dataarray(da)
+ ds[name] = da
+
+ from .flow_system import FlowSystem
+
+ clustered_fs = FlowSystem.from_dataset(ds)
+ else:
+ # Copy without data modification
+ clustered_fs = self._fs.copy()
+
+ # Store clustering info for later use
+ clustered_fs._clustering_info = {
+ 'parameters': parameters,
+ 'clustering': clustering,
+ 'components_to_clusterize': components_to_clusterize,
+ 'original_fs': self._fs,
+ }
+
+ return clustered_fs
+
+ @staticmethod
+ def _calculate_clustering_weights(ds) -> dict[str, float]:
+ """Calculate weights for clustering based on dataset attributes."""
+ from collections import Counter
+
+ import numpy as np
+
+ groups = [da.attrs.get('clustering_group') for da in ds.data_vars.values() if 'clustering_group' in da.attrs]
+ group_counts = Counter(groups)
+
+ # Calculate weight for each group (1/count)
+ group_weights = {group: 1 / count for group, count in group_counts.items()}
+
+ weights = {}
+ for name, da in ds.data_vars.items():
+ clustering_group = da.attrs.get('clustering_group')
+ group_weight = group_weights.get(clustering_group)
+ if group_weight is not None:
+ weights[name] = group_weight
+ else:
+ weights[name] = da.attrs.get('clustering_weight', 1)
+
+ if np.all(np.isclose(list(weights.values()), 1, atol=1e-6)):
+ logger.info('All Clustering weights were set to 1')
+
+ return weights
+
+ def sel(
+ self,
+ time: str | slice | list[str] | pd.Timestamp | pd.DatetimeIndex | None = None,
+ period: int | slice | list[int] | pd.Index | None = None,
+ scenario: str | slice | list[str] | pd.Index | None = None,
+ ) -> FlowSystem:
+ """
+ Select a subset of the FlowSystem by label.
+
+ Creates a new FlowSystem with data selected along the specified dimensions.
+ The returned FlowSystem has no solution (it must be re-optimized).
+
+ Args:
+ time: Time selection (e.g., slice('2023-01-01', '2023-12-31'), '2023-06-15')
+ period: Period selection (e.g., slice(2023, 2024), or list of periods)
+ scenario: Scenario selection (e.g., 'scenario1', or list of scenarios)
+
+ Returns:
+ FlowSystem: New FlowSystem with selected data (no solution).
+
+ Examples:
+ >>> # Select specific time range
+ >>> fs_jan = flow_system.transform.sel(time=slice('2023-01-01', '2023-01-31'))
+ >>> fs_jan.optimize(solver)
+
+ >>> # Select single scenario
+ >>> fs_base = flow_system.transform.sel(scenario='Base Case')
+ """
+ from .flow_system import FlowSystem
+
+ if time is None and period is None and scenario is None:
+ result = self._fs.copy()
+ result.solution = None
+ return result
+
+ if not self._fs.connected_and_transformed:
+ self._fs.connect_and_transform()
+
+ ds = self._fs.to_dataset()
+ ds = self._dataset_sel(ds, time=time, period=period, scenario=scenario)
+ return FlowSystem.from_dataset(ds) # from_dataset doesn't include solution
+
+ def isel(
+ self,
+ time: int | slice | list[int] | None = None,
+ period: int | slice | list[int] | None = None,
+ scenario: int | slice | list[int] | None = None,
+ ) -> FlowSystem:
+ """
+ Select a subset of the FlowSystem by integer indices.
+
+ Creates a new FlowSystem with data selected along the specified dimensions.
+ The returned FlowSystem has no solution (it must be re-optimized).
+
+ Args:
+ time: Time selection by integer index (e.g., slice(0, 100), 50, or [0, 5, 10])
+ period: Period selection by integer index
+ scenario: Scenario selection by integer index
+
+ Returns:
+ FlowSystem: New FlowSystem with selected data (no solution).
+
+ Examples:
+ >>> # Select first 24 timesteps
+ >>> fs_day1 = flow_system.transform.isel(time=slice(0, 24))
+ >>> fs_day1.optimize(solver)
+
+ >>> # Select first scenario
+ >>> fs_first = flow_system.transform.isel(scenario=0)
+ """
+ from .flow_system import FlowSystem
+
+ if time is None and period is None and scenario is None:
+ result = self._fs.copy()
+ result.solution = None
+ return result
+
+ if not self._fs.connected_and_transformed:
+ self._fs.connect_and_transform()
+
+ ds = self._fs.to_dataset()
+ ds = self._dataset_isel(ds, time=time, period=period, scenario=scenario)
+ return FlowSystem.from_dataset(ds) # from_dataset doesn't include solution
+
+ def resample(
+ self,
+ time: str,
+ method: Literal['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count'] = 'mean',
+ hours_of_last_timestep: int | float | None = None,
+ hours_of_previous_timesteps: int | float | np.ndarray | None = None,
+ fill_gaps: Literal['ffill', 'bfill', 'interpolate'] | None = None,
+ **kwargs: Any,
+ ) -> FlowSystem:
+ """
+ Create a resampled FlowSystem by resampling data along the time dimension.
+
+ Creates a new FlowSystem with resampled time series data.
+ The returned FlowSystem has no solution (it must be re-optimized).
+
+ Args:
+ time: Resampling frequency (e.g., '3h', '2D', '1M')
+ method: Resampling method. Recommended: 'mean', 'first', 'last', 'max', 'min'
+ hours_of_last_timestep: Duration of the last timestep after resampling.
+ If None, computed from the last time interval.
+ hours_of_previous_timesteps: Duration of previous timesteps after resampling.
+ If None, computed from the first time interval. Can be a scalar or array.
+ fill_gaps: Strategy for filling gaps (NaN values) that arise when resampling
+ irregular timesteps to regular intervals. Options: 'ffill' (forward fill),
+ 'bfill' (backward fill), 'interpolate' (linear interpolation).
+ If None (default), raises an error when gaps are detected.
+ **kwargs: Additional arguments passed to xarray.resample()
+
+ Returns:
+ FlowSystem: New resampled FlowSystem (no solution).
+
+ Raises:
+ ValueError: If resampling creates gaps and fill_gaps is not specified.
+
+ Examples:
+ >>> # Resample to 4-hour intervals
+ >>> fs_4h = flow_system.transform.resample(time='4h', method='mean')
+ >>> fs_4h.optimize(solver)
+
+ >>> # Resample to daily with max values
+ >>> fs_daily = flow_system.transform.resample(time='1D', method='max')
+ """
+ from .flow_system import FlowSystem
+
+ if not self._fs.connected_and_transformed:
+ self._fs.connect_and_transform()
+
+ ds = self._fs.to_dataset()
+ ds = self._dataset_resample(
+ ds,
+ freq=time,
+ method=method,
+ hours_of_last_timestep=hours_of_last_timestep,
+ hours_of_previous_timesteps=hours_of_previous_timesteps,
+ fill_gaps=fill_gaps,
+ **kwargs,
+ )
+ return FlowSystem.from_dataset(ds) # from_dataset doesn't include solution
+
+ # --- Class methods for dataset operations (can be called without instance) ---
+
+ @classmethod
+ def _dataset_sel(
+ cls,
+ dataset: xr.Dataset,
+ time: str | slice | list[str] | pd.Timestamp | pd.DatetimeIndex | None = None,
+ period: int | slice | list[int] | pd.Index | None = None,
+ scenario: str | slice | list[str] | pd.Index | None = None,
+ hours_of_last_timestep: int | float | None = None,
+ hours_of_previous_timesteps: int | float | np.ndarray | None = None,
+ ) -> xr.Dataset:
+ """
+ Select subset of dataset by label.
+
+ Args:
+ dataset: xarray Dataset from FlowSystem.to_dataset()
+ time: Time selection (e.g., '2020-01', slice('2020-01-01', '2020-06-30'))
+ period: Period selection (e.g., 2020, slice(2020, 2022))
+ scenario: Scenario selection (e.g., 'Base Case', ['Base Case', 'High Demand'])
+ hours_of_last_timestep: Duration of the last timestep.
+ hours_of_previous_timesteps: Duration of previous timesteps.
+
+ Returns:
+ xr.Dataset: Selected dataset
+ """
+ from .flow_system import FlowSystem
+
+ indexers = {}
+ if time is not None:
+ indexers['time'] = time
+ if period is not None:
+ indexers['period'] = period
+ if scenario is not None:
+ indexers['scenario'] = scenario
+
+ if not indexers:
+ return dataset
+
+ result = dataset.sel(**indexers)
+
+ if 'time' in indexers:
+ result = FlowSystem._update_time_metadata(result, hours_of_last_timestep, hours_of_previous_timesteps)
+
+ if 'period' in indexers:
+ result = FlowSystem._update_period_metadata(result)
+
+ if 'scenario' in indexers:
+ result = FlowSystem._update_scenario_metadata(result)
+
+ return result
+
+ @classmethod
+ def _dataset_isel(
+ cls,
+ dataset: xr.Dataset,
+ time: int | slice | list[int] | None = None,
+ period: int | slice | list[int] | None = None,
+ scenario: int | slice | list[int] | None = None,
+ hours_of_last_timestep: int | float | None = None,
+ hours_of_previous_timesteps: int | float | np.ndarray | None = None,
+ ) -> xr.Dataset:
+ """
+ Select subset of dataset by integer index.
+
+ Args:
+ dataset: xarray Dataset from FlowSystem.to_dataset()
+ time: Time selection by index
+ period: Period selection by index
+ scenario: Scenario selection by index
+ hours_of_last_timestep: Duration of the last timestep.
+ hours_of_previous_timesteps: Duration of previous timesteps.
+
+ Returns:
+ xr.Dataset: Selected dataset
+ """
+ from .flow_system import FlowSystem
+
+ indexers = {}
+ if time is not None:
+ indexers['time'] = time
+ if period is not None:
+ indexers['period'] = period
+ if scenario is not None:
+ indexers['scenario'] = scenario
+
+ if not indexers:
+ return dataset
+
+ result = dataset.isel(**indexers)
+
+ if 'time' in indexers:
+ result = FlowSystem._update_time_metadata(result, hours_of_last_timestep, hours_of_previous_timesteps)
+
+ if 'period' in indexers:
+ result = FlowSystem._update_period_metadata(result)
+
+ if 'scenario' in indexers:
+ result = FlowSystem._update_scenario_metadata(result)
+
+ return result
+
+ @classmethod
+ def _dataset_resample(
+ cls,
+ dataset: xr.Dataset,
+ freq: str,
+ method: Literal['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count'] = 'mean',
+ hours_of_last_timestep: int | float | None = None,
+ hours_of_previous_timesteps: int | float | np.ndarray | None = None,
+ fill_gaps: Literal['ffill', 'bfill', 'interpolate'] | None = None,
+ **kwargs: Any,
+ ) -> xr.Dataset:
+ """
+ Resample dataset along time dimension.
+
+ Args:
+ dataset: xarray Dataset from FlowSystem.to_dataset()
+ freq: Resampling frequency (e.g., '2h', '1D', '1M')
+ method: Resampling method (e.g., 'mean', 'sum', 'first')
+ hours_of_last_timestep: Duration of the last timestep after resampling.
+ hours_of_previous_timesteps: Duration of previous timesteps after resampling.
+ fill_gaps: Strategy for filling gaps (NaN values) that arise when resampling
+ irregular timesteps to regular intervals. Options: 'ffill' (forward fill),
+ 'bfill' (backward fill), 'interpolate' (linear interpolation).
+ If None (default), raises an error when gaps are detected.
+ **kwargs: Additional arguments passed to xarray.resample()
+
+ Returns:
+ xr.Dataset: Resampled dataset
+
+ Raises:
+ ValueError: If resampling creates gaps and fill_gaps is not specified.
+ """
+ from .flow_system import FlowSystem
+
+ available_methods = ['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median', 'count']
+ if method not in available_methods:
+ raise ValueError(f'Unsupported resampling method: {method}. Available: {available_methods}')
+
+ original_attrs = dict(dataset.attrs)
+
+ time_var_names = [v for v in dataset.data_vars if 'time' in dataset[v].dims]
+ non_time_var_names = [v for v in dataset.data_vars if v not in time_var_names]
+
+ time_dataset = dataset[time_var_names]
+ resampled_time_dataset = cls._resample_by_dimension_groups(time_dataset, freq, method, **kwargs)
+
+ # Handle NaN values that may arise from resampling irregular timesteps to regular intervals.
+ # When irregular data (e.g., [00:00, 01:00, 03:00]) is resampled to regular intervals (e.g., '1h'),
+ # bins without data (e.g., 02:00) get NaN.
+ if resampled_time_dataset.isnull().any().to_array().any():
+ if fill_gaps is None:
+ # Find which variables have NaN values for a helpful error message
+ vars_with_nans = [
+ name for name in resampled_time_dataset.data_vars if resampled_time_dataset[name].isnull().any()
+ ]
+ raise ValueError(
+ f'Resampling created gaps (NaN values) in variables: {vars_with_nans}. '
+ f'This typically happens when resampling irregular timesteps to regular intervals. '
+ f"Specify fill_gaps='ffill', 'bfill', or 'interpolate' to handle gaps, "
+ f'or resample to a coarser frequency.'
+ )
+ elif fill_gaps == 'ffill':
+ resampled_time_dataset = resampled_time_dataset.ffill(dim='time').bfill(dim='time')
+ elif fill_gaps == 'bfill':
+ resampled_time_dataset = resampled_time_dataset.bfill(dim='time').ffill(dim='time')
+ elif fill_gaps == 'interpolate':
+ resampled_time_dataset = resampled_time_dataset.interpolate_na(dim='time', method='linear')
+ # Handle edges that can't be interpolated
+ resampled_time_dataset = resampled_time_dataset.ffill(dim='time').bfill(dim='time')
+
+ if non_time_var_names:
+ non_time_dataset = dataset[non_time_var_names]
+ result = xr.merge([resampled_time_dataset, non_time_dataset])
+ else:
+ result = resampled_time_dataset
+
+ result.attrs.update(original_attrs)
+ return FlowSystem._update_time_metadata(result, hours_of_last_timestep, hours_of_previous_timesteps)
+
+ @staticmethod
+ def _resample_by_dimension_groups(
+ time_dataset: xr.Dataset,
+ time: str,
+ method: str,
+ **kwargs: Any,
+ ) -> xr.Dataset:
+ """
+ Resample variables grouped by their dimension structure to avoid broadcasting.
+
+ Groups variables by their non-time dimensions before resampling for performance
+ and to prevent xarray from broadcasting variables with different dimensions.
+
+ Args:
+ time_dataset: Dataset containing only variables with time dimension
+ time: Resampling frequency (e.g., '2h', '1D', '1M')
+ method: Resampling method name (e.g., 'mean', 'sum', 'first')
+ **kwargs: Additional arguments passed to xarray.resample()
+
+ Returns:
+ Resampled dataset with original dimension structure preserved
+ """
+ dim_groups = defaultdict(list)
+ for var_name, var in time_dataset.data_vars.items():
+ dims_key = tuple(sorted(d for d in var.dims if d != 'time'))
+ dim_groups[dims_key].append(var_name)
+
+ # No variables carry a time dimension: resample the dataset directly (coordinates only)
+ if len(dim_groups) == 0:
+ return getattr(time_dataset.resample(time=time, **kwargs), method)()
+
+ resampled_groups = []
+ for var_names in dim_groups.values():
+ if not var_names:
+ continue
+
+ stacked = xr.concat(
+ [time_dataset[name] for name in var_names],
+ dim=pd.Index(var_names, name='variable'),
+ combine_attrs='drop_conflicts',
+ )
+ resampled = getattr(stacked.resample(time=time, **kwargs), method)()
+ resampled_dataset = resampled.to_dataset(dim='variable')
+ resampled_groups.append(resampled_dataset)
+
+ if not resampled_groups:
+ # No data variables to resample, but still resample coordinates
+ return getattr(time_dataset.resample(time=time, **kwargs), method)()
+
+ if len(resampled_groups) == 1:
+ return resampled_groups[0]
+
+ return xr.merge(resampled_groups, combine_attrs='drop_conflicts')
+
+ def fix_sizes(
+ self,
+ sizes: xr.Dataset | dict[str, float] | None = None,
+ decimal_rounding: int | None = 5,
+ ) -> FlowSystem:
+ """
+ Create a new FlowSystem with investment sizes fixed to specified values.
+
+ This is useful for two-stage optimization workflows:
+ 1. Solve a sizing problem (possibly resampled for speed)
+ 2. Fix sizes and solve dispatch at full resolution
+
+ The returned FlowSystem has InvestParameters with fixed_size set,
+ making those sizes mandatory rather than decision variables.
+
+ Args:
+ sizes: The sizes to fix. Can be:
+ - None: Uses sizes from this FlowSystem's solution (must be solved)
+ - xr.Dataset: Dataset with size variables (e.g., from statistics.sizes)
+ - dict: Mapping of component names to sizes (e.g., {'Boiler(Q_fu)': 100})
+ decimal_rounding: Number of decimal places to round sizes to.
+ Rounding helps avoid numerical infeasibility. Set to None to disable.
+
+ Returns:
+ FlowSystem: New FlowSystem with fixed sizes (no solution).
+
+ Raises:
+ ValueError: If no sizes provided and FlowSystem has no solution.
+ KeyError: If a specified size doesn't match any InvestParameters.
+
+ Examples:
+ Two-stage optimization:
+
+ >>> # Stage 1: Size with resampled data
+ >>> fs_sizing = flow_system.transform.resample('2h')
+ >>> fs_sizing.optimize(solver)
+ >>>
+ >>> # Stage 2: Fix sizes and optimize at full resolution
+ >>> fs_dispatch = flow_system.transform.fix_sizes(fs_sizing.statistics.sizes)
+ >>> fs_dispatch.optimize(solver)
+
+ Using a dict:
+
+ >>> fs_fixed = flow_system.transform.fix_sizes(
+ ... {
+ ... 'Boiler(Q_fu)': 100,
+ ... 'Storage': 500,
+ ... }
+ ... )
+ >>> fs_fixed.optimize(solver)
+ """
+ from .flow_system import FlowSystem
+ from .interface import InvestParameters
+
+ # Get sizes from solution if not provided
+ if sizes is None:
+ if self._fs.solution is None:
+ raise ValueError(
+ 'No sizes provided and FlowSystem has no solution. '
+ 'Either provide sizes or optimize the FlowSystem first.'
+ )
+ sizes = self._fs.statistics.sizes
+
+ # Convert dict to Dataset format
+ if isinstance(sizes, dict):
+ sizes = xr.Dataset({k: xr.DataArray(v) for k, v in sizes.items()})
+
+ # Apply rounding
+ if decimal_rounding is not None:
+ sizes = sizes.round(decimal_rounding)
+
+ # Create copy of FlowSystem
+ if not self._fs.connected_and_transformed:
+ self._fs.connect_and_transform()
+
+ ds = self._fs.to_dataset()
+ new_fs = FlowSystem.from_dataset(ds)
+
+ # Fix sizes in the new FlowSystem's InvestParameters
+ # Note: statistics.sizes returns keys without '|size' suffix (e.g., 'Boiler(Q_fu)')
+ # but dicts may have either format
+ for size_var in sizes.data_vars:
+ # Normalize: strip '|size' suffix if present
+ base_name = size_var.replace('|size', '') if size_var.endswith('|size') else size_var
+ fixed_value = float(sizes[size_var].item())
+
+ # Find matching element with InvestParameters
+ found = False
+
+ # Check flows
+ for flow in new_fs.flows.values():
+ if flow.label_full == base_name and isinstance(flow.size, InvestParameters):
+ flow.size.fixed_size = fixed_value
+ flow.size.mandatory = True
+ found = True
+ logger.debug(f'Fixed size of {base_name} to {fixed_value}')
+ break
+
+ # Check storage capacity
+ if not found:
+ for component in new_fs.components.values():
+ if hasattr(component, 'capacity_in_flow_hours'):
+ if component.label == base_name and isinstance(
+ component.capacity_in_flow_hours, InvestParameters
+ ):
+ component.capacity_in_flow_hours.fixed_size = fixed_value
+ component.capacity_in_flow_hours.mandatory = True
+ found = True
+ logger.debug(f'Fixed size of {base_name} to {fixed_value}')
+ break
+
+ if not found:
+ logger.warning(
+ f'Size variable "{base_name}" not found as InvestParameters in FlowSystem. '
+ f'It may be a fixed-size component or the name may not match.'
+ )
+
+ return new_fs
+
+ # Future methods can be added here:
+ #
+ # def mga(self, alternatives: int = 5) -> FlowSystem:
+ # """Create a FlowSystem configured for Modeling to Generate Alternatives."""
+ # ...
diff --git a/mkdocs.yml b/mkdocs.yml
index 8fb6765ae..186e109fd 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -23,9 +23,12 @@ nav:
- User Guide:
- Overview: user-guide/index.md
- Core Concepts: user-guide/core-concepts.md
- - Building Models: user-guide/building-models/index.md
+ - Building Models:
+ - Overview: user-guide/building-models/index.md
+ - Choosing Components: user-guide/building-models/choosing-components.md
- Running Optimizations: user-guide/optimization/index.md
- Analyzing Results: user-guide/results/index.md
+ - Plotting Results: user-guide/results-plotting.md
- Mathematical Notation:
- Overview: user-guide/mathematical-notation/index.md
- Bus: user-guide/mathematical-notation/elements/Bus.md
@@ -36,27 +39,39 @@ nav:
- Investment: user-guide/mathematical-notation/features/InvestParameters.md
- Status: user-guide/mathematical-notation/features/StatusParameters.md
- Piecewise: user-guide/mathematical-notation/features/Piecewise.md
- - Recipes: user-guide/recipes/index.md
+ - Recipes:
+ - user-guide/recipes/index.md
+ - Plotting Custom Data: user-guide/recipes/plotting-custom-data.md
- Support:
- FAQ: user-guide/faq.md
- Troubleshooting: user-guide/troubleshooting.md
- Community: user-guide/support.md
- Migration & Updates:
+ - Migration Guide v5: user-guide/migration-guide-v5.md
- Migration Guide v3: user-guide/migration-guide-v3.md
- Release Notes: changelog.md
- Roadmap: roadmap.md
- Examples:
- - Overview: examples/index.md
- - Basic Examples:
- - examples/00-Minimal Example.md
- - examples/01-Basic Example.md
- - Operational Optimization:
- - examples/02-Complex Example.md
- - examples/03-Optimization Modes.md
- - Planning & Investment:
- - examples/04-Scenarios.md
- - examples/05-Two-stage-optimization.md
+ - Overview: notebooks/index.md
+ - Basics:
+ - Quickstart: notebooks/01-quickstart.ipynb
+ - Heat System: notebooks/02-heat-system.ipynb
+ - Investment:
+ - Sizing: notebooks/03-investment-optimization.ipynb
+ - Constraints: notebooks/04-operational-constraints.ipynb
+ - Advanced:
+ - Multi-Carrier: notebooks/05-multi-carrier-system.ipynb
+ - Transmission: notebooks/10-transmission.ipynb
+ - Non-Linear Modeling:
+ - Time-Varying Parameters: notebooks/06a-time-varying-parameters.ipynb
+ - Piecewise Conversion: notebooks/06b-piecewise-conversion.ipynb
+ - Piecewise Effects: notebooks/06c-piecewise-effects.ipynb
+ - Scaling:
+ - Scenarios: notebooks/07-scenarios-and-periods.ipynb
+ - Large-Scale: notebooks/08-large-scale-optimization.ipynb
+ - Results:
+ - Plotting: notebooks/09-plotting-and-data-access.ipynb
- API Reference: api-reference/
@@ -65,6 +80,7 @@ nav:
theme:
name: material
language: en
+ custom_dir: docs/overrides
palette:
# Palette toggle for automatic mode
@@ -211,6 +227,12 @@ plugins:
- search:
separator: '[\s\u200b\-_,:!=\[\]()"`/]+|\.(?!\d)|&[lg]t;|(?!\b)(?=[A-Z][a-z])'
+ - mkdocs-jupyter:
+ execute: true # Execute notebooks during build
+ allow_errors: false
+ include_source: true
+ include_requirejs: true
+
- plotly
- table-reader
diff --git a/pyproject.toml b/pyproject.toml
index 206283767..88691e468 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -105,12 +105,14 @@ docs = [
"mkdocs-include-markdown-plugin==7.2.0",
"mkdocs-literate-nav==0.6.2",
"mkdocs-plotly-plugin==0.1.3",
+ "mkdocs-jupyter==0.25.1",
"markdown-include==0.8.1",
"pymdown-extensions==10.16.1",
"pygments==2.19.2",
"mike==2.1.3",
"mkdocs-git-revision-date-localized-plugin==1.5.0",
"mkdocs-minify-plugin==0.8.0",
+ "notebook>=7.5.0",
]
[project.urls]
@@ -187,6 +189,7 @@ keep-runtime-typing = false # Allow pyupgrade to drop runtime typing; prefer po
markers = [
"slow: marks tests as slow",
"examples: marks example tests (run only on releases)",
+ "deprecated_api: marks tests using deprecated Optimization/Results API (remove in v6.0.0)",
]
addopts = '-m "not examples"' # Skip examples by default
@@ -197,6 +200,14 @@ filterwarnings = [
# === Default behavior: show all warnings ===
"default",
+ # === Ignore specific deprecation warnings for backward compatibility tests ===
+ # Raised by deprecated classes (Optimization, Results) in tests/deprecated/ — NOTE(review): pytest gives later filterwarnings entries precedence; confirm the 'error::DeprecationWarning:flixopt' entry below does not override these ignores
+ "ignore:Results is deprecated:DeprecationWarning:flixopt",
+ "ignore:Optimization is deprecated:DeprecationWarning:flixopt",
+ "ignore:SegmentedOptimization is deprecated:DeprecationWarning:flixopt",
+ "ignore:SegmentedResults is deprecated:DeprecationWarning:flixopt",
+ "ignore:ClusteredOptimization is deprecated:DeprecationWarning:flixopt",
+
# === Treat flixopt warnings as errors (strict mode for our code) ===
# This ensures we catch deprecations, future changes, and user warnings in our own code
"error::DeprecationWarning:flixopt",
diff --git a/scripts/format_changelog.py b/scripts/format_changelog.py
index 48feddc09..891b10c77 100644
--- a/scripts/format_changelog.py
+++ b/scripts/format_changelog.py
@@ -46,16 +46,21 @@ def format_version_header(match) -> str:
# Format the date
formatted_date = format_date(date_str)
+ # Normalise to a Git tag-style version (e.g. "v4.0.0") for URL, display text and id
+ version_tag = version if version.startswith('v') else f'v{version}'
+
# Create the new header
- github_url = f'https://github.com/flixOpt/flixopt/releases/tag/v{version}'
- new_header = f'## [**{version}**]({github_url}) {formatted_date} {{ id="{version}" }}'
+ github_url = f'https://github.com/flixOpt/flixopt/releases/tag/{version_tag}'
+ new_header = f'## [**{version_tag}**]({github_url}) {formatted_date} {{ id="{version_tag}" }}'
return new_header
def main():
"""Process the changelog file."""
- changelog_path = Path('docs/changelog.md')
+ script_dir = Path(__file__).resolve().parent
+ repo_root = script_dir.parent # assumes this file lives in /scripts/
+ changelog_path = repo_root / 'docs' / 'changelog.md'
if not changelog_path.exists():
print(f'❌ {changelog_path} not found')
diff --git a/tests/conftest.py b/tests/conftest.py
index 11d35f536..ee2c0f4e2 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -5,6 +5,7 @@
"""
import os
+import warnings
from collections.abc import Iterable
import linopy.testing
@@ -212,10 +213,10 @@ def piecewise():
"""Piecewise converter from flow_system_piecewise_conversion"""
return fx.LinearConverter(
'KWK',
- inputs=[fx.Flow('Q_fu', bus='Gas')],
+ inputs=[fx.Flow('Q_fu', bus='Gas', size=200)],
outputs=[
fx.Flow('P_el', bus='Strom', size=60, relative_maximum=55, previous_flow_rate=10),
- fx.Flow('Q_th', bus='Fernwärme'),
+ fx.Flow('Q_th', bus='Fernwärme', size=100),
],
piecewise_conversion=fx.PiecewiseConversion(
{
@@ -232,10 +233,10 @@ def segments(timesteps_length):
"""Segments converter with time-varying piecewise conversion"""
return fx.LinearConverter(
'KWK',
- inputs=[fx.Flow('Q_fu', bus='Gas')],
+ inputs=[fx.Flow('Q_fu', bus='Gas', size=200)],
outputs=[
fx.Flow('P_el', bus='Strom', size=60, relative_maximum=55, previous_flow_rate=10),
- fx.Flow('Q_th', bus='Fernwärme'),
+ fx.Flow('Q_th', bus='Fernwärme', size=100),
],
piecewise_conversion=fx.PiecewiseConversion(
{
@@ -265,7 +266,11 @@ def simple(timesteps_length=9):
return fx.Storage(
'Speicher',
- charging=fx.Flow('Q_th_load', bus='Fernwärme', size=1e4),
+ charging=fx.Flow(
+ 'Q_th_load',
+ bus='Fernwärme',
+ size=fx.InvestParameters(fixed_size=1e4, mandatory=True), # Investment for testing sizes
+ ),
discharging=fx.Flow('Q_th_unload', bus='Fernwärme', size=1e4),
capacity_in_flow_hours=fx.InvestParameters(effects_of_investment=20, fixed_size=30, mandatory=True),
initial_charge_state=0,
@@ -609,12 +614,12 @@ def flow_system_long():
),
fx.linear_converters.CHP(
'BHKW2',
- thermal_efficiency=0.58,
- electrical_efficiency=0.22,
+ thermal_efficiency=(eta_th := 0.58),
+ electrical_efficiency=(eta_el := 0.22),
status_parameters=fx.StatusParameters(effects_per_startup=24000),
- electrical_flow=fx.Flow('P_el', bus='Strom'),
- thermal_flow=fx.Flow('Q_th', bus='Fernwärme'),
- fuel_flow=fx.Flow('Q_fu', bus='Kohle', size=288, relative_minimum=87 / 288),
+ fuel_flow=fx.Flow('Q_fu', bus='Kohle', size=(fuel_size := 288), relative_minimum=87 / fuel_size),
+ electrical_flow=fx.Flow('P_el', bus='Strom', size=fuel_size * eta_el),
+ thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=fuel_size * eta_th),
),
fx.Storage(
'Speicher',
@@ -692,7 +697,10 @@ def assert_almost_equal_numeric(
actual, desired, err_msg, relative_error_range_in_percent=0.011, absolute_tolerance=1e-7
):
"""
- Custom assertion function for comparing numeric values with relative and absolute tolerances
+ Custom assertion function for comparing numeric values with relative and absolute tolerances.
+
+ Handles the extra timestep in solutions by trimming actual arrays to match desired length
+ when the extra values are NaN (from storage charge_state variables using extra_timestep).
"""
relative_tol = relative_error_range_in_percent / 100
@@ -700,6 +708,20 @@ def assert_almost_equal_numeric(
delta = abs(relative_tol * desired) if desired != 0 else absolute_tolerance
assert np.isclose(actual, desired, atol=delta), err_msg
else:
+ actual = np.asarray(actual)
+ desired = np.asarray(desired)
+ # Handle extra timestep: trim actual to desired length if extra values are NaN
+ if actual.shape != desired.shape and actual.ndim == 1 and desired.ndim == 1:
+ if len(actual) > len(desired):
+ extra = actual[len(desired) :]
+ if np.all(np.isnan(extra)):
+ # Warn if trimming more than the expected single extra timestep
+ if len(extra) > 1:
+ warnings.warn(
+ f'Trimming {len(extra)} NaN values from actual array (expected 1)',
+ stacklevel=2,
+ )
+ actual = actual[: len(desired)]
np.testing.assert_allclose(actual, desired, rtol=relative_tol, atol=absolute_tolerance, err_msg=err_msg)
@@ -724,11 +746,10 @@ def create_linopy_model(flow_system: fx.FlowSystem) -> FlowSystemModel:
flow_system: The FlowSystem to build the model from.
Returns:
- FlowSystemModel: The built model from Optimization.do_modeling().
+ FlowSystemModel: The built model from FlowSystem.build_model().
"""
- optimization = fx.Optimization('GenericName', flow_system)
- optimization.do_modeling()
- return optimization.model
+ flow_system.build_model()
+ return flow_system.model
def assert_conequal(actual: linopy.Constraint, desired: linopy.Constraint):
diff --git a/tests/deprecated/__init__.py b/tests/deprecated/__init__.py
new file mode 100644
index 000000000..7a05453a2
--- /dev/null
+++ b/tests/deprecated/__init__.py
@@ -0,0 +1,5 @@
+"""Tests for deprecated Optimization/Results API.
+
+This folder contains tests for the deprecated API that will be removed in v6.0.0.
+Delete this entire folder when the deprecation cycle ends.
+"""
diff --git a/tests/deprecated/conftest.py b/tests/deprecated/conftest.py
new file mode 100644
index 000000000..65434f04c
--- /dev/null
+++ b/tests/deprecated/conftest.py
@@ -0,0 +1,890 @@
+"""
+The conftest.py file is used by pytest to define shared fixtures, hooks, and configuration
+that apply to multiple test files without needing explicit imports.
+It helps avoid redundancy and centralizes reusable test logic.
+
+This folder contains tests for the deprecated Optimization/Results API.
+Delete this entire folder when the deprecation cycle ends in v6.0.0.
+"""
+
+import os
+import warnings
+from collections.abc import Iterable
+
+import linopy.testing
+import numpy as np
+import pandas as pd
+import pytest
+import xarray as xr
+
+import flixopt as fx
+from flixopt.structure import FlowSystemModel
+
+# ============================================================================
+# SOLVER FIXTURES
+# ============================================================================
+
+
@pytest.fixture()
def highs_solver():
    """HiGHS solver configured for exact MIP solutions (gap 0) with a 300 s time limit."""
    return fx.solvers.HighsSolver(mip_gap=0, time_limit_seconds=300)
+
+
@pytest.fixture()
def gurobi_solver():
    """Gurobi solver with the same settings as highs_solver; skips the test when gurobipy is absent."""
    pytest.importorskip('gurobipy', reason='Gurobi not available in this environment')
    return fx.solvers.GurobiSolver(mip_gap=0, time_limit_seconds=300)
+
+
@pytest.fixture(params=[highs_solver, gurobi_solver], ids=['highs', 'gurobi'])
def solver_fixture(request):
    """Parametrized solver fixture.

    The params are the fixture *functions* themselves; their ``__name__`` is used to
    resolve the actual fixture value via ``request.getfixturevalue``.
    """
    return request.getfixturevalue(request.param.__name__)
+
+
+# =================================
+# COORDINATE CONFIGURATION FIXTURES
+# =================================
+
+
@pytest.fixture(
    params=[
        {
            'timesteps': pd.date_range('2020-01-01', periods=10, freq='h', name='time'),
            'periods': None,
            'scenarios': None,
        },
        {
            'timesteps': pd.date_range('2020-01-01', periods=10, freq='h', name='time'),
            'periods': None,
            'scenarios': pd.Index(['A', 'B'], name='scenario'),
        },
        {
            'timesteps': pd.date_range('2020-01-01', periods=10, freq='h', name='time'),
            'periods': pd.Index([2020, 2030, 2040], name='period'),
            'scenarios': None,
        },
        {
            'timesteps': pd.date_range('2020-01-01', periods=10, freq='h', name='time'),
            'periods': pd.Index([2020, 2030, 2040], name='period'),
            'scenarios': pd.Index(['A', 'B'], name='scenario'),
        },
    ],
    ids=['time_only', 'time+scenarios', 'time+periods', 'time+periods+scenarios'],
)
def coords_config(request):
    """Coordinate configurations for parametrized testing.

    Each param dict is passed as keyword arguments to ``fx.FlowSystem(**coords_config)``
    (see ``basic_flow_system_linopy_coords``); all variants use 10 hourly timesteps.
    """
    return request.param
+
+
+# ============================================================================
+# HIERARCHICAL ELEMENT LIBRARY
+# ============================================================================
+
+
class Buses:
    """Standard buses used across flow systems."""

    @staticmethod
    def electricity():
        """Electricity bus ('Strom')."""
        return fx.Bus('Strom')

    @staticmethod
    def heat():
        """District heating bus ('Fernwärme')."""
        return fx.Bus('Fernwärme')

    @staticmethod
    def gas():
        """Gas bus ('Gas')."""
        return fx.Bus('Gas')

    @staticmethod
    def coal():
        """Coal bus ('Kohle'); only used by flow_system_long."""
        return fx.Bus('Kohle')

    @staticmethod
    def defaults():
        """Get all standard buses at once (electricity, heat, gas — coal excluded)."""
        return [Buses.electricity(), Buses.heat(), Buses.gas()]
+
+
class Effects:
    """Standard effects used across flow systems."""

    @staticmethod
    def costs():
        """Cost effect in €, marked as standard effect and objective."""
        return fx.Effect('costs', '€', 'Kosten', is_standard=True, is_objective=True)

    @staticmethod
    def costs_with_co2_share():
        """Cost effect that additionally receives a temporal share of 0.2 from the 'CO2' effect."""
        return fx.Effect('costs', '€', 'Kosten', is_standard=True, is_objective=True, share_from_temporal={'CO2': 0.2})

    @staticmethod
    def co2():
        """CO2 emission effect in kg."""
        return fx.Effect('CO2', 'kg', 'CO2_e-Emissionen')

    @staticmethod
    def primary_energy():
        """Primary-energy effect in kWh_PE."""
        return fx.Effect('PE', 'kWh_PE', 'Primärenergie')
+
+
class Converters:
    """Energy conversion components, grouped by converter type."""

    class Boilers:
        @staticmethod
        def simple():
            """Simple boiler from simple_flow_system (fixed size 50, min load 10%)."""
            return fx.linear_converters.Boiler(
                'Boiler',
                thermal_efficiency=0.5,
                thermal_flow=fx.Flow(
                    'Q_th',
                    bus='Fernwärme',
                    size=50,
                    relative_minimum=5 / 50,
                    relative_maximum=1,
                    status_parameters=fx.StatusParameters(),
                ),
                fuel_flow=fx.Flow('Q_fu', bus='Gas'),
            )

        @staticmethod
        def complex():
            """Complex boiler with investment parameters from flow_system_complex.

            Exercises load factors, investment (fixed size 50, mandatory), detailed
            status parameters (uptime/downtime/startup limits) and a flow-hours cap.
            """
            return fx.linear_converters.Boiler(
                'Kessel',
                thermal_efficiency=0.5,
                status_parameters=fx.StatusParameters(effects_per_active_hour={'costs': 0, 'CO2': 1000}),
                thermal_flow=fx.Flow(
                    'Q_th',
                    bus='Fernwärme',
                    load_factor_max=1.0,
                    load_factor_min=0.1,
                    relative_minimum=5 / 50,
                    relative_maximum=1,
                    previous_flow_rate=50,
                    size=fx.InvestParameters(
                        effects_of_investment=1000,
                        fixed_size=50,
                        mandatory=True,
                        effects_of_investment_per_size={'costs': 10, 'PE': 2},
                    ),
                    status_parameters=fx.StatusParameters(
                        active_hours_min=0,
                        active_hours_max=1000,
                        max_uptime=10,
                        min_uptime=1,
                        max_downtime=10,
                        effects_per_startup=0.01,
                        startup_limit=1000,
                    ),
                    flow_hours_max=1e6,
                ),
                fuel_flow=fx.Flow('Q_fu', bus='Gas', size=200, relative_minimum=0, relative_maximum=1),
            )

    class CHPs:
        @staticmethod
        def simple():
            """Simple CHP from simple_flow_system."""
            return fx.linear_converters.CHP(
                'CHP_unit',
                thermal_efficiency=0.5,
                electrical_efficiency=0.4,
                electrical_flow=fx.Flow(
                    'P_el', bus='Strom', size=60, relative_minimum=5 / 60, status_parameters=fx.StatusParameters()
                ),
                thermal_flow=fx.Flow('Q_th', bus='Fernwärme'),
                fuel_flow=fx.Flow('Q_fu', bus='Gas'),
            )

        @staticmethod
        def base():
            """CHP from flow_system_base (adds startup costs and sized thermal/fuel flows)."""
            return fx.linear_converters.CHP(
                'KWK',
                thermal_efficiency=0.5,
                electrical_efficiency=0.4,
                status_parameters=fx.StatusParameters(effects_per_startup=0.01),
                electrical_flow=fx.Flow('P_el', bus='Strom', size=60, relative_minimum=5 / 60, previous_flow_rate=10),
                thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=1e3),
                fuel_flow=fx.Flow('Q_fu', bus='Gas', size=1e3),
            )

    class LinearConverters:
        @staticmethod
        def piecewise():
            """Piecewise converter from flow_system_piecewise_conversion (constant breakpoints)."""
            return fx.LinearConverter(
                'KWK',
                inputs=[fx.Flow('Q_fu', bus='Gas', size=200)],
                outputs=[
                    fx.Flow('P_el', bus='Strom', size=60, relative_maximum=55, previous_flow_rate=10),
                    fx.Flow('Q_th', bus='Fernwärme', size=100),
                ],
                piecewise_conversion=fx.PiecewiseConversion(
                    {
                        'P_el': fx.Piecewise([fx.Piece(5, 30), fx.Piece(40, 60)]),
                        'Q_th': fx.Piecewise([fx.Piece(6, 35), fx.Piece(45, 100)]),
                        'Q_fu': fx.Piecewise([fx.Piece(12, 70), fx.Piece(90, 200)]),
                    }
                ),
                status_parameters=fx.StatusParameters(effects_per_startup=0.01),
            )

        @staticmethod
        def segments(timesteps_length):
            """Segments converter with time-varying piecewise conversion.

            Same as ``piecewise`` except the 'P_el' breakpoints vary over time via
            np.linspace arrays of length ``timesteps_length``.
            """
            return fx.LinearConverter(
                'KWK',
                inputs=[fx.Flow('Q_fu', bus='Gas', size=200)],
                outputs=[
                    fx.Flow('P_el', bus='Strom', size=60, relative_maximum=55, previous_flow_rate=10),
                    fx.Flow('Q_th', bus='Fernwärme', size=100),
                ],
                piecewise_conversion=fx.PiecewiseConversion(
                    {
                        'P_el': fx.Piecewise(
                            [
                                fx.Piece(np.linspace(5, 6, timesteps_length), 30),
                                fx.Piece(40, np.linspace(60, 70, timesteps_length)),
                            ]
                        ),
                        'Q_th': fx.Piecewise([fx.Piece(6, 35), fx.Piece(45, 100)]),
                        'Q_fu': fx.Piecewise([fx.Piece(12, 70), fx.Piece(90, 200)]),
                    }
                ),
                status_parameters=fx.StatusParameters(effects_per_startup=0.01),
            )
+
+
class Storage:
    """Energy storage factories.

    NOTE: this local factory namespace shadows nothing at runtime but shares its
    name with ``fx.Storage``; the fx class is always referenced via the ``fx.`` prefix.
    """

    @staticmethod
    def simple(timesteps_length=9):
        """Simple storage from simple_flow_system."""
        # Create pattern [80.0, 70.0, 80.0] and repeat/slice to match timesteps_length
        pattern = [80.0, 70.0, 80.0, 80, 80, 80, 80, 80, 80]
        charge_state_values = (pattern * ((timesteps_length // len(pattern)) + 1))[:timesteps_length]

        return fx.Storage(
            'Speicher',
            charging=fx.Flow('Q_th_load', bus='Fernwärme', size=1e4),
            discharging=fx.Flow('Q_th_unload', bus='Fernwärme', size=1e4),
            capacity_in_flow_hours=fx.InvestParameters(effects_of_investment=20, fixed_size=30, mandatory=True),
            initial_charge_state=0,
            # Time-varying upper bound on the relative charge state (fractions of capacity).
            relative_maximum_charge_state=1 / 100 * np.array(charge_state_values),
            relative_maximum_final_charge_state=0.8,
            eta_charge=0.9,
            eta_discharge=1,
            relative_loss_per_hour=0.08,
            prevent_simultaneous_charge_and_discharge=True,
        )

    @staticmethod
    def complex():
        """Complex storage with piecewise investment from flow_system_complex."""
        invest_speicher = fx.InvestParameters(
            effects_of_investment=0,
            piecewise_effects_of_investment=fx.PiecewiseEffects(
                piecewise_origin=fx.Piecewise([fx.Piece(5, 25), fx.Piece(25, 100)]),
                piecewise_shares={
                    'costs': fx.Piecewise([fx.Piece(50, 250), fx.Piece(250, 800)]),
                    'PE': fx.Piecewise([fx.Piece(5, 25), fx.Piece(25, 100)]),
                },
            ),
            mandatory=True,
            effects_of_investment_per_size={'costs': 0.01, 'CO2': 0.01},
            minimum_size=0,
            maximum_size=1000,
        )
        return fx.Storage(
            'Speicher',
            charging=fx.Flow('Q_th_load', bus='Fernwärme', size=1e4),
            discharging=fx.Flow('Q_th_unload', bus='Fernwärme', size=1e4),
            capacity_in_flow_hours=invest_speicher,
            initial_charge_state=0,
            maximal_final_charge_state=10,
            eta_charge=0.9,
            eta_discharge=1,
            relative_loss_per_hour=0.08,
            prevent_simultaneous_charge_and_discharge=True,
        )
+
+
class LoadProfiles:
    """Standard load and price profiles used by the fixtures in this module."""

    @staticmethod
    def thermal_simple(timesteps_length=9):
        """Tile the 9-value base heat-demand pattern and cut it to ``timesteps_length``."""
        base = [30.0, 0.0, 90.0, 110, 110, 20, 20, 20, 20]
        repeats = timesteps_length // len(base) + 1
        return np.array((base * repeats)[:timesteps_length])

    @staticmethod
    def thermal_complex():
        """Fixed 9-step heat-demand profile for the complex flow systems."""
        return np.array([30, 0, 90, 110, 110, 20, 20, 20, 20])

    @staticmethod
    def electrical_simple(timesteps_length=9):
        """Constant electricity price of 0.08 (= 80/1000) per timestep."""
        return np.full(timesteps_length, 80.0 / 1000)

    @staticmethod
    def electrical_scenario():
        """Three scenario-specific electricity prices."""
        return np.array([0.08, 0.1, 0.15])

    @staticmethod
    def electrical_complex(timesteps_length=9):
        """Constant electricity price of 40 per timestep."""
        return np.array([40 for _ in range(timesteps_length)])

    @staticmethod
    def random_thermal(length=10, seed=42):
        """Reproducible random heat demand in [0, 180)."""
        return np.random.default_rng(seed).random(length) * 180

    @staticmethod
    def random_electrical(length=10, seed=42):
        """Reproducible random electricity price in [50/3, 50)."""
        samples = np.random.default_rng(seed).random(length)
        return (samples + 0.5) / 1.5 * 50
+
+
class Sinks:
    """Energy sinks (loads)."""

    @staticmethod
    def heat_load(thermal_profile):
        """Create thermal heat load sink with a fixed relative profile (size 1)."""
        return fx.Sink(
            'Wärmelast', inputs=[fx.Flow('Q_th_Last', bus='Fernwärme', size=1, fixed_relative_profile=thermal_profile)]
        )

    @staticmethod
    def electricity_feed_in(electrical_price_profile):
        """Create electricity feed-in sink; negated price means feed-in is remunerated."""
        return fx.Sink(
            'Einspeisung', inputs=[fx.Flow('P_el', bus='Strom', effects_per_flow_hour=-1 * electrical_price_profile)]
        )

    @staticmethod
    def electricity_load(electrical_profile):
        """Create electrical load sink (for flow_system_long)."""
        return fx.Sink(
            'Stromlast', inputs=[fx.Flow('P_el_Last', bus='Strom', size=1, fixed_relative_profile=electrical_profile)]
        )
+
+
class Sources:
    """Energy sources."""

    @staticmethod
    def gas_with_costs_and_co2():
        """Standard gas tariff with CO2 emissions.

        Builds on ``gas_with_costs`` and overwrites the flow's effects in place
        to add the CO2 component.
        """
        source = Sources.gas_with_costs()
        source.outputs[0].effects_per_flow_hour = {'costs': 0.04, 'CO2': 0.3}
        return source

    @staticmethod
    def gas_with_costs():
        """Simple gas tariff without CO2 (0.04 €/flow-hour, size 1000)."""
        return fx.Source(
            'Gastarif', outputs=[fx.Flow(label='Q_Gas', bus='Gas', size=1000, effects_per_flow_hour={'costs': 0.04})]
        )
+
+
+# ============================================================================
+# RECREATED FIXTURES USING HIERARCHICAL LIBRARY
+# ============================================================================
+
+
@pytest.fixture
def simple_flow_system() -> fx.FlowSystem:
    """
    Create a simple energy system for testing.

    Nine hourly timesteps; boiler + CHP + storage serving a fixed heat load, with a
    gas tariff (costs + CO2) and electricity feed-in. CO2 is capped per hour.
    """
    base_timesteps = pd.date_range('2020-01-01', periods=9, freq='h', name='time')
    timesteps_length = len(base_timesteps)
    base_thermal_load = LoadProfiles.thermal_simple(timesteps_length)
    base_electrical_price = LoadProfiles.electrical_simple(timesteps_length)

    # Define effects
    costs = Effects.costs_with_co2_share()
    co2 = Effects.co2()
    co2.maximum_per_hour = 1000

    # Create components
    boiler = Converters.Boilers.simple()
    chp = Converters.CHPs.simple()
    storage = Storage.simple(timesteps_length)
    heat_load = Sinks.heat_load(base_thermal_load)
    gas_tariff = Sources.gas_with_costs_and_co2()
    electricity_feed_in = Sinks.electricity_feed_in(base_electrical_price)

    # Create flow system
    flow_system = fx.FlowSystem(base_timesteps)
    flow_system.add_elements(*Buses.defaults())
    flow_system.add_elements(storage, costs, co2, boiler, heat_load, gas_tariff, electricity_feed_in, chp)

    return flow_system
+
+
@pytest.fixture
def simple_flow_system_scenarios() -> fx.FlowSystem:
    """
    Variant of simple_flow_system with three weighted scenarios (A: 0.5, B/C: 0.25 each)
    and scenario-specific electricity prices.
    """
    base_timesteps = pd.date_range('2020-01-01', periods=9, freq='h', name='time')
    timesteps_length = len(base_timesteps)
    base_thermal_load = LoadProfiles.thermal_simple(timesteps_length)
    base_electrical_price = LoadProfiles.electrical_scenario()

    # Define effects
    costs = Effects.costs_with_co2_share()
    co2 = Effects.co2()
    co2.maximum_per_hour = 1000

    # Create components
    boiler = Converters.Boilers.simple()
    chp = Converters.CHPs.simple()
    storage = Storage.simple(timesteps_length)
    heat_load = Sinks.heat_load(base_thermal_load)
    gas_tariff = Sources.gas_with_costs_and_co2()
    electricity_feed_in = Sinks.electricity_feed_in(base_electrical_price)

    # Create flow system
    flow_system = fx.FlowSystem(
        base_timesteps, scenarios=pd.Index(['A', 'B', 'C']), scenario_weights=np.array([0.5, 0.25, 0.25])
    )
    flow_system.add_elements(*Buses.defaults())
    flow_system.add_elements(storage, costs, co2, boiler, heat_load, gas_tariff, electricity_feed_in, chp)

    return flow_system
+
+
@pytest.fixture
def basic_flow_system() -> fx.FlowSystem:
    """Create basic elements for component testing.

    Ten hourly timesteps; only loads, a gas source and an electricity sink — tests
    add the converter/storage under test themselves.
    """
    flow_system = fx.FlowSystem(pd.date_range('2020-01-01', periods=10, freq='h', name='time'))

    thermal_load = LoadProfiles.random_thermal(10)
    p_el = LoadProfiles.random_electrical(10)

    costs = Effects.costs()
    heat_load = Sinks.heat_load(thermal_load)
    gas_source = Sources.gas_with_costs()
    electricity_sink = Sinks.electricity_feed_in(p_el)

    flow_system.add_elements(*Buses.defaults())
    flow_system.add_elements(costs, heat_load, gas_source, electricity_sink)

    return flow_system
+
+
@pytest.fixture
def flow_system_complex() -> fx.FlowSystem:
    """
    Complex 9-step flow system: three effects (costs with CO2 share, CO2, PE with a
    total cap), a boiler with investment parameters and a storage with piecewise
    investment effects.
    """
    thermal_load = LoadProfiles.thermal_complex()
    electrical_load = LoadProfiles.electrical_complex()
    flow_system = fx.FlowSystem(pd.date_range('2020-01-01', periods=9, freq='h', name='time'))

    # Define the components and flow_system
    costs = Effects.costs()
    co2 = Effects.co2()
    costs.share_from_temporal = {'CO2': 0.2}
    pe = Effects.primary_energy()
    pe.maximum_total = 3.5e3

    heat_load = Sinks.heat_load(thermal_load)
    gas_tariff = Sources.gas_with_costs_and_co2()
    electricity_feed_in = Sinks.electricity_feed_in(electrical_load)

    flow_system.add_elements(*Buses.defaults())
    flow_system.add_elements(costs, co2, pe, heat_load, gas_tariff, electricity_feed_in)

    boiler = Converters.Boilers.complex()
    speicher = Storage.complex()

    flow_system.add_elements(boiler, speicher)

    return flow_system
+
+
@pytest.fixture
def flow_system_base(flow_system_complex) -> fx.FlowSystem:
    """flow_system_complex extended with the base CHP unit ('KWK')."""
    flow_system = flow_system_complex
    chp = Converters.CHPs.base()
    flow_system.add_elements(chp)
    return flow_system
+
+
@pytest.fixture
def flow_system_piecewise_conversion(flow_system_complex) -> fx.FlowSystem:
    """flow_system_complex extended with a piecewise-conversion LinearConverter."""
    flow_system = flow_system_complex
    converter = Converters.LinearConverters.piecewise()
    flow_system.add_elements(converter)
    return flow_system
+
+
@pytest.fixture
def flow_system_segments_of_flows_2(flow_system_complex) -> fx.FlowSystem:
    """
    Use segments/Piecewise with numeric (time-varying) data, sized to the
    fixture's timestep count.
    """
    flow_system = flow_system_complex
    converter = Converters.LinearConverters.segments(len(flow_system.timesteps))
    flow_system.add_elements(converter)
    return flow_system
+
+
@pytest.fixture
def flow_system_long():
    """
    Special fixture with CSV data loading - kept separate for backward compatibility.
    Uses library components where possible, but has special elements inline.

    Returns:
        Tuple of (flow_system, dict) where the dict exposes the thermal/electrical
        TimeSeriesData objects for assertions.
    """
    # Load data - use parent folder's ressources
    filename = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'ressources', 'Zeitreihen2020.csv')
    ts_raw = pd.read_csv(filename, index_col=0).sort_index()
    # NOTE(review): the index is sliced as strings (no parse_dates); the second slice
    # narrows the first to Jan 1-3, making the first slice look redundant — confirm intent.
    data = ts_raw['2020-01-01 00:00:00':'2020-12-31 23:45:00']['2020-01-01':'2020-01-03 23:45:00']

    # Extract data columns
    electrical_load = data['P_Netz/MW'].values
    thermal_load = data['Q_Netz/MW'].values
    p_el = data['Strompr.€/MWh'].values
    gas_price = data['Gaspr.€/MWh'].values

    thermal_load_ts, electrical_load_ts = (
        fx.TimeSeriesData(thermal_load),
        fx.TimeSeriesData(electrical_load, clustering_weight=0.7),
    )
    # Feed-in and purchase prices share a clustering group; feed-in is negated (revenue).
    p_feed_in, p_sell = (
        fx.TimeSeriesData(-(p_el - 0.5), clustering_group='p_el'),
        fx.TimeSeriesData(p_el + 0.5, clustering_group='p_el'),
    )

    flow_system = fx.FlowSystem(pd.DatetimeIndex(data.index))
    flow_system.add_elements(
        *Buses.defaults(),
        Buses.coal(),
        Effects.costs(),
        Effects.co2(),
        Effects.primary_energy(),
        fx.Sink(
            'Wärmelast', inputs=[fx.Flow('Q_th_Last', bus='Fernwärme', size=1, fixed_relative_profile=thermal_load_ts)]
        ),
        fx.Sink(
            'Stromlast', inputs=[fx.Flow('P_el_Last', bus='Strom', size=1, fixed_relative_profile=electrical_load_ts)]
        ),
        fx.Source(
            'Kohletarif',
            outputs=[fx.Flow('Q_Kohle', bus='Kohle', size=1000, effects_per_flow_hour={'costs': 4.6, 'CO2': 0.3})],
        ),
        fx.Source(
            'Gastarif',
            outputs=[fx.Flow('Q_Gas', bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.3})],
        ),
        fx.Sink('Einspeisung', inputs=[fx.Flow('P_el', bus='Strom', size=1000, effects_per_flow_hour=p_feed_in)]),
        fx.Source(
            'Stromtarif',
            outputs=[fx.Flow('P_el', bus='Strom', size=1000, effects_per_flow_hour={'costs': p_sell, 'CO2': 0.3})],
        ),
    )

    flow_system.add_elements(
        fx.linear_converters.Boiler(
            'Kessel',
            thermal_efficiency=0.85,
            thermal_flow=fx.Flow(label='Q_th', bus='Fernwärme'),
            fuel_flow=fx.Flow(
                label='Q_fu',
                bus='Gas',
                size=95,
                relative_minimum=12 / 95,
                previous_flow_rate=0,
                status_parameters=fx.StatusParameters(effects_per_startup=1000),
            ),
        ),
        fx.linear_converters.CHP(
            'BHKW2',
            thermal_efficiency=(eta_th := 0.58),
            electrical_efficiency=(eta_el := 0.22),
            status_parameters=fx.StatusParameters(effects_per_startup=24000),
            # Output sizes are derived from the fuel size and the efficiencies.
            fuel_flow=fx.Flow('Q_fu', bus='Kohle', size=(fuel_size := 288), relative_minimum=87 / fuel_size),
            electrical_flow=fx.Flow('P_el', bus='Strom', size=fuel_size * eta_el),
            thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=fuel_size * eta_th),
        ),
        fx.Storage(
            'Speicher',
            charging=fx.Flow('Q_th_load', size=137, bus='Fernwärme'),
            discharging=fx.Flow('Q_th_unload', size=158, bus='Fernwärme'),
            capacity_in_flow_hours=684,
            initial_charge_state=137,
            minimal_final_charge_state=137,
            maximal_final_charge_state=158,
            eta_charge=1,
            eta_discharge=1,
            relative_loss_per_hour=0.001,
            prevent_simultaneous_charge_and_discharge=True,
        ),
    )

    # Return all the necessary data
    return flow_system, {
        'thermal_load_ts': thermal_load_ts,
        'electrical_load_ts': electrical_load_ts,
    }
+
+
@pytest.fixture(params=['h', '3h'], ids=['hourly', '3-hourly'])
def timesteps_linopy(request):
    """Ten timesteps at hourly or 3-hourly resolution (parametrized)."""
    return pd.date_range('2020-01-01', periods=10, freq=request.param, name='time')
+
+
@pytest.fixture
def basic_flow_system_linopy(timesteps_linopy) -> fx.FlowSystem:
    """Create basic elements for component testing on the parametrized timestep grid."""
    flow_system = fx.FlowSystem(timesteps_linopy)

    # Profiles are sized to the actual number of timesteps.
    n = len(flow_system.timesteps)
    thermal_load = LoadProfiles.random_thermal(n)
    p_el = LoadProfiles.random_electrical(n)

    costs = Effects.costs()
    heat_load = Sinks.heat_load(thermal_load)
    gas_source = Sources.gas_with_costs()
    electricity_sink = Sinks.electricity_feed_in(p_el)

    flow_system.add_elements(*Buses.defaults())
    flow_system.add_elements(costs, heat_load, gas_source, electricity_sink)

    return flow_system
+
+
@pytest.fixture
def basic_flow_system_linopy_coords(coords_config) -> fx.FlowSystem:
    """Create basic elements for component testing with coordinate parametrization.

    Profiles use a fixed length of 10, matching the 10 timesteps in every
    coords_config variant.
    """
    flow_system = fx.FlowSystem(**coords_config)

    thermal_load = LoadProfiles.random_thermal(10)
    p_el = LoadProfiles.random_electrical(10)

    costs = Effects.costs()
    heat_load = Sinks.heat_load(thermal_load)
    gas_source = Sources.gas_with_costs()
    electricity_sink = Sinks.electricity_feed_in(p_el)

    flow_system.add_elements(*Buses.defaults())
    flow_system.add_elements(costs, heat_load, gas_source, electricity_sink)

    return flow_system
+
+
+# ============================================================================
+# UTILITY FUNCTIONS (kept for backward compatibility)
+# ============================================================================
+
+
+# Custom assertion function
def assert_almost_equal_numeric(
    actual, desired, err_msg, relative_error_range_in_percent=0.011, absolute_tolerance=1e-7
):
    """
    Custom assertion function for comparing numeric values with relative and absolute tolerances.

    Scalars are compared within ``relative_error_range_in_percent`` of ``desired``
    (falling back to ``absolute_tolerance`` when ``desired`` is 0). Array-likes are
    compared element-wise via ``np.testing.assert_allclose``.

    Handles the extra timestep in solutions by trimming actual arrays to match desired length
    when the extra values are NaN (from storage charge_state variables using extra_timestep).

    Args:
        actual: Value produced by the code under test (scalar or array-like).
        desired: Expected value (scalar or array-like).
        err_msg: Message emitted when the assertion fails.
        relative_error_range_in_percent: Allowed relative deviation, in percent.
        absolute_tolerance: Absolute tolerance for zero (scalar) / near-zero (array) targets.
    """
    relative_tol = relative_error_range_in_percent / 100

    if isinstance(desired, (int, float)):
        delta = abs(relative_tol * desired) if desired != 0 else absolute_tolerance
        # rtol=0: the relative part is already folded into delta. Without it,
        # np.isclose's default rtol (1e-5) would silently widen the tolerance
        # beyond the delta computed above.
        assert np.isclose(actual, desired, rtol=0, atol=delta), err_msg
    else:
        actual = np.asarray(actual)
        desired = np.asarray(desired)
        # Handle extra timestep: trim actual to desired length if extra values are NaN
        if actual.shape != desired.shape and actual.ndim == 1 and desired.ndim == 1:
            if len(actual) > len(desired):
                extra = actual[len(desired) :]
                if np.all(np.isnan(extra)):
                    # Warn if trimming more than the expected single extra timestep
                    if len(extra) > 1:
                        warnings.warn(
                            f'Trimming {len(extra)} NaN values from actual array (expected 1)',
                            stacklevel=2,
                        )
                    actual = actual[: len(desired)]
        np.testing.assert_allclose(actual, desired, rtol=relative_tol, atol=absolute_tolerance, err_msg=err_msg)
+
+
def create_optimization_and_solve(
    flow_system: fx.FlowSystem, solver, name: str, allow_infeasible: bool = False
) -> fx.Optimization:
    """Model and solve ``flow_system`` via the deprecated Optimization API.

    Args:
        flow_system: The system to optimize.
        solver: Solver instance passed to ``Optimization.solve``.
        name: Name given to the Optimization.
        allow_infeasible: If True, a RuntimeError from solve() is swallowed and the
            (unsolved) Optimization is still returned; otherwise it propagates.

    Returns:
        The Optimization object (solved, unless an allowed failure occurred).
    """
    optimization = fx.Optimization(name, flow_system)
    optimization.do_modeling()
    try:
        optimization.solve(solver)
    except RuntimeError:
        if not allow_infeasible:
            raise
    return optimization
+
+
def create_linopy_model(flow_system: fx.FlowSystem) -> FlowSystemModel:
    """
    Create a FlowSystemModel from a FlowSystem by performing the modeling phase.

    Args:
        flow_system: The FlowSystem to build the model from.

    Returns:
        FlowSystemModel: The built model from FlowSystem.build_model().
    """
    # build_model() attaches the model to the flow_system; return that attribute.
    flow_system.build_model()
    return flow_system.model
+
+
def assert_conequal(actual: linopy.Constraint, desired: linopy.Constraint):
    """Assert that two constraints are equal with detailed error messages.

    Compares, in order: left-hand sides, signs, right-hand sides. The first
    mismatch raises with the constraint name and the underlying diff.
    """
    checks = (
        ('left-hand sides', lambda: linopy.testing.assert_linequal(actual.lhs, desired.lhs)),
        ('signs', lambda: xr.testing.assert_equal(actual.sign, desired.sign)),
        ('right-hand sides', lambda: xr.testing.assert_equal(actual.rhs, desired.rhs)),
    )
    for part, check in checks:
        try:
            check()
        except AssertionError as e:
            raise AssertionError(f"{actual.name} {part} don't match:\n{e}") from e
+
+
def assert_var_equal(actual: linopy.Variable, desired: linopy.Variable):
    """Assert that two variables are equal with detailed error messages.

    Check order: lower bounds, upper bounds, type, size, shape, coordinates,
    coordinate dimensions. The first mismatch raises with the variable name.
    """
    name = actual.name

    # Bound comparisons go through xarray and include both values in the message.
    for attr, label in (('lower', 'lower bounds'), ('upper', 'upper bounds')):
        a, d = getattr(actual, attr), getattr(desired, attr)
        try:
            xr.testing.assert_equal(a, d)
        except AssertionError as e:
            raise AssertionError(f"{name} {label} don't match:\nActual: {a}\nExpected: {d}") from e

    # Simple scalar attributes compare with plain equality.
    for attr, label in (('type', 'types'), ('size', 'sizes'), ('shape', 'shapes')):
        a, d = getattr(actual, attr), getattr(desired, attr)
        if a != d:
            raise AssertionError(f"{name} {label} don't match: {a} != {d}")

    try:
        xr.testing.assert_equal(actual.coords, desired.coords)
    except AssertionError as e:
        raise AssertionError(
            f"{name} coordinates don't match:\nActual: {actual.coords}\nExpected: {desired.coords}"
        ) from e

    if actual.coord_dims != desired.coord_dims:
        raise AssertionError(f"{name} coordinate dimensions don't match: {actual.coord_dims} != {desired.coord_dims}")
+
+
def assert_sets_equal(set1: Iterable, set2: Iterable, msg=''):
    """Assert two sets are equal with custom error message.

    On mismatch, raises AssertionError listing elements only in ``set1`` ("Extra")
    and only in ``set2`` ("Missing"), optionally prefixed by ``msg``.
    """
    first, second = set(set1), set(set2)
    if first == second:
        return

    fragments = []
    surplus = first - second
    if surplus:
        fragments.append(f'Extra: {sorted(surplus, key=repr)}')
    absent = second - first
    if absent:
        fragments.append(f'Missing: {sorted(absent, key=repr)}')

    details = ', '.join(fragments)
    raise AssertionError(f'{msg}: {details}' if msg else details)
+
+
+# ============================================================================
+# PLOTTING CLEANUP FIXTURES
+# ============================================================================
+
+
@pytest.fixture(autouse=True)
def cleanup_figures():
    """
    Cleanup matplotlib figures after each test.

    This fixture runs automatically after every test to:
    - Close all matplotlib figures to prevent memory leaks
    """
    yield
    # Close all matplotlib figures (import deferred so tests without plotting
    # don't pay for it up front).
    import matplotlib.pyplot as plt

    plt.close('all')
+
+
@pytest.fixture(scope='session', autouse=True)
def set_test_environment():
    """
    Configure plotting for test environment.

    This fixture runs once per test session to:
    - Set matplotlib to use non-interactive 'Agg' backend
    - Set plotly to use non-interactive 'json' renderer
    - Prevent GUI windows from opening during tests
    """
    import matplotlib

    matplotlib.use('Agg')  # Use non-interactive backend

    import plotly.io as pio

    pio.renderers.default = 'json'  # Use non-interactive renderer

    # Also silence flixopt's own default plot display.
    fx.CONFIG.Plotting.default_show = False

    yield
+
+
+# ============================================================================
+# DEPRECATED API MARKERS
+# ============================================================================
+
+
def pytest_collection_modifyitems(items):
    """Auto-apply markers to all tests in the deprecated folder.

    This hook adds:
    - deprecated_api marker for filtering
    - filterwarnings to ignore DeprecationWarning

    NOTE(review): ``item.fspath`` is the legacy py.path attribute (``item.path`` is
    the modern equivalent), and the substring check would also match any other path
    containing 'deprecated' — confirm before reuse outside this folder.
    """
    for item in items:
        # Only apply to tests in this folder
        if 'deprecated' in str(item.fspath):
            item.add_marker(pytest.mark.deprecated_api)
            item.add_marker(pytest.mark.filterwarnings('ignore::DeprecationWarning'))
diff --git a/tests/deprecated/test_bus.py b/tests/deprecated/test_bus.py
new file mode 100644
index 000000000..cc49a2073
--- /dev/null
+++ b/tests/deprecated/test_bus.py
@@ -0,0 +1,105 @@
+import flixopt as fx
+
+from .conftest import assert_conequal, assert_var_equal, create_linopy_model
+
+
class TestBusModel:
    """Tests for the Bus model (balance constraint and imbalance penalty)."""

    def test_bus(self, basic_flow_system_linopy_coords, coords_config):
        """Test that the bus balance constraint is generated when no imbalance penalty is set."""
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
        bus = fx.Bus('TestBus', imbalance_penalty_per_flow_hour=None)
        flow_system.add_elements(
            bus,
            fx.Sink('WärmelastTest', inputs=[fx.Flow('Q_th_Last', 'TestBus')]),
            fx.Source('GastarifTest', outputs=[fx.Flow('Q_Gas', 'TestBus')]),
        )
        model = create_linopy_model(flow_system)

        # Without a penalty the bus only references the connected flow-rate
        # variables and contributes a single balance constraint.
        assert set(bus.submodel.variables) == {'WärmelastTest(Q_th_Last)|flow_rate', 'GastarifTest(Q_Gas)|flow_rate'}
        assert set(bus.submodel.constraints) == {'TestBus|balance'}

        assert_conequal(
            model.constraints['TestBus|balance'],
            model.variables['GastarifTest(Q_Gas)|flow_rate'] == model.variables['WärmelastTest(Q_th_Last)|flow_rate'],
        )

    def test_bus_penalty(self, basic_flow_system_linopy_coords, coords_config):
        """Test that virtual supply/demand variables and penalty shares are generated for an imbalance penalty."""
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
        bus = fx.Bus('TestBus', imbalance_penalty_per_flow_hour=1e5)
        flow_system.add_elements(
            bus,
            fx.Sink('WärmelastTest', inputs=[fx.Flow('Q_th_Last', 'TestBus')]),
            fx.Source('GastarifTest', outputs=[fx.Flow('Q_Gas', 'TestBus')]),
        )
        model = create_linopy_model(flow_system)

        assert set(bus.submodel.variables) == {
            'TestBus|virtual_supply',
            'TestBus|virtual_demand',
            'WärmelastTest(Q_th_Last)|flow_rate',
            'GastarifTest(Q_Gas)|flow_rate',
        }
        assert set(bus.submodel.constraints) == {'TestBus|balance'}

        assert_var_equal(
            model.variables['TestBus|virtual_supply'], model.add_variables(lower=0, coords=model.get_coords())
        )
        assert_var_equal(
            model.variables['TestBus|virtual_demand'], model.add_variables(lower=0, coords=model.get_coords())
        )

        assert_conequal(
            model.constraints['TestBus|balance'],
            model.variables['GastarifTest(Q_Gas)|flow_rate']
            - model.variables['WärmelastTest(Q_th_Last)|flow_rate']
            + model.variables['TestBus|virtual_supply']
            - model.variables['TestBus|virtual_demand']
            == 0,
        )

        # Penalty is now added as shares to the Penalty effect's temporal model
        # Check that the penalty shares exist
        assert 'TestBus->Penalty(temporal)' in model.constraints
        assert 'TestBus->Penalty(temporal)' in model.variables

        # The penalty share should equal the imbalance (virtual_supply + virtual_demand) times the penalty cost
        # Let's verify the total penalty contribution by checking the effect's temporal model
        penalty_effect = flow_system.effects.penalty_effect
        assert penalty_effect.submodel is not None
        assert 'TestBus' in penalty_effect.submodel.temporal.shares

        assert_conequal(
            model.constraints['TestBus->Penalty(temporal)'],
            model.variables['TestBus->Penalty(temporal)']
            == model.variables['TestBus|virtual_supply'] * 1e5 * model.hours_per_step
            + model.variables['TestBus|virtual_demand'] * 1e5 * model.hours_per_step,
        )

    def test_bus_with_coords(self, basic_flow_system_linopy_coords, coords_config):
        """Test bus behavior across different coordinate configurations."""
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
        bus = fx.Bus('TestBus', imbalance_penalty_per_flow_hour=None)
        flow_system.add_elements(
            bus,
            fx.Sink('WärmelastTest', inputs=[fx.Flow('Q_th_Last', 'TestBus')]),
            fx.Source('GastarifTest', outputs=[fx.Flow('Q_Gas', 'TestBus')]),
        )
        model = create_linopy_model(flow_system)

        # Same core assertions as test_bus above
        assert set(bus.submodel.variables) == {'WärmelastTest(Q_th_Last)|flow_rate', 'GastarifTest(Q_Gas)|flow_rate'}
        assert set(bus.submodel.constraints) == {'TestBus|balance'}

        assert_conequal(
            model.constraints['TestBus|balance'],
            model.variables['GastarifTest(Q_Gas)|flow_rate'] == model.variables['WärmelastTest(Q_th_Last)|flow_rate'],
        )

        # Just verify coordinate dimensions are correct
        gas_var = model.variables['GastarifTest(Q_Gas)|flow_rate']
        if flow_system.scenarios is not None:
            assert 'scenario' in gas_var.dims
        assert 'time' in gas_var.dims
diff --git a/tests/deprecated/test_component.py b/tests/deprecated/test_component.py
new file mode 100644
index 000000000..497a5c3aa
--- /dev/null
+++ b/tests/deprecated/test_component.py
@@ -0,0 +1,623 @@
+import numpy as np
+import pytest
+
+import flixopt as fx
+import flixopt.elements
+
+from .conftest import (
+ assert_almost_equal_numeric,
+ assert_conequal,
+ assert_sets_equal,
+ assert_var_equal,
+ create_linopy_model,
+)
+
+
class TestComponentModel:
    """Tests for the Component model (flow aggregation, status coupling, previous states)."""

    def test_flow_label_check(self):
        """Test that duplicate flow labels within one component raise a ValueError."""
        inputs = [
            fx.Flow('Q_th_Last', 'Fernwärme', relative_minimum=np.ones(10) * 0.1),
            fx.Flow('Q_Gas', 'Fernwärme', relative_minimum=np.ones(10) * 0.1),
        ]
        outputs = [
            fx.Flow('Q_th_Last', 'Gas', relative_minimum=np.ones(10) * 0.01),
            fx.Flow('Q_Gas', 'Gas', relative_minimum=np.ones(10) * 0.01),
        ]
        with pytest.raises(ValueError, match='Flow names must be unique!'):
            _ = flixopt.elements.Component('TestComponent', inputs=inputs, outputs=outputs)

    def test_component(self, basic_flow_system_linopy_coords, coords_config):
        """Test that variables and constraints are generated for every flow of a component."""
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
        inputs = [
            fx.Flow('In1', 'Fernwärme', size=100, relative_minimum=np.ones(10) * 0.1),
            fx.Flow('In2', 'Fernwärme', size=100, relative_minimum=np.ones(10) * 0.1),
        ]
        outputs = [
            fx.Flow('Out1', 'Gas', size=100, relative_minimum=np.ones(10) * 0.01),
            fx.Flow('Out2', 'Gas', size=100, relative_minimum=np.ones(10) * 0.01),
        ]
        comp = flixopt.elements.Component('TestComponent', inputs=inputs, outputs=outputs)
        flow_system.add_elements(comp)
        _ = create_linopy_model(flow_system)

        assert_sets_equal(
            set(comp.submodel.variables),
            {
                'TestComponent(In1)|flow_rate',
                'TestComponent(In1)|total_flow_hours',
                'TestComponent(In2)|flow_rate',
                'TestComponent(In2)|total_flow_hours',
                'TestComponent(Out1)|flow_rate',
                'TestComponent(Out1)|total_flow_hours',
                'TestComponent(Out2)|flow_rate',
                'TestComponent(Out2)|total_flow_hours',
            },
            msg='Incorrect variables',
        )

        assert_sets_equal(
            set(comp.submodel.constraints),
            {
                'TestComponent(In1)|total_flow_hours',
                'TestComponent(In2)|total_flow_hours',
                'TestComponent(Out1)|total_flow_hours',
                'TestComponent(Out2)|total_flow_hours',
            },
            msg='Incorrect constraints',
        )

    def test_on_with_multiple_flows(self, basic_flow_system_linopy_coords, coords_config):
        """Test status variables and constraints for a component with StatusParameters and multiple flows."""
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config

        ub_out2 = np.linspace(1, 1.5, 10).round(2)
        inputs = [
            fx.Flow('In1', 'Fernwärme', relative_minimum=np.ones(10) * 0.1, size=100),
        ]
        outputs = [
            fx.Flow('Out1', 'Gas', relative_minimum=np.ones(10) * 0.2, size=200),
            fx.Flow('Out2', 'Gas', relative_minimum=np.ones(10) * 0.3, relative_maximum=ub_out2, size=300),
        ]
        comp = flixopt.elements.Component(
            'TestComponent', inputs=inputs, outputs=outputs, status_parameters=fx.StatusParameters()
        )
        flow_system.add_elements(comp)
        model = create_linopy_model(flow_system)

        assert_sets_equal(
            set(comp.submodel.variables),
            {
                'TestComponent(In1)|flow_rate',
                'TestComponent(In1)|total_flow_hours',
                'TestComponent(In1)|status',
                'TestComponent(In1)|active_hours',
                'TestComponent(Out1)|flow_rate',
                'TestComponent(Out1)|total_flow_hours',
                'TestComponent(Out1)|status',
                'TestComponent(Out1)|active_hours',
                'TestComponent(Out2)|flow_rate',
                'TestComponent(Out2)|total_flow_hours',
                'TestComponent(Out2)|status',
                'TestComponent(Out2)|active_hours',
                'TestComponent|status',
                'TestComponent|active_hours',
            },
            msg='Incorrect variables',
        )

        assert_sets_equal(
            set(comp.submodel.constraints),
            {
                'TestComponent(In1)|total_flow_hours',
                'TestComponent(In1)|flow_rate|lb',
                'TestComponent(In1)|flow_rate|ub',
                'TestComponent(In1)|active_hours',
                'TestComponent(Out1)|total_flow_hours',
                'TestComponent(Out1)|flow_rate|lb',
                'TestComponent(Out1)|flow_rate|ub',
                'TestComponent(Out1)|active_hours',
                'TestComponent(Out2)|total_flow_hours',
                'TestComponent(Out2)|flow_rate|lb',
                'TestComponent(Out2)|flow_rate|ub',
                'TestComponent(Out2)|active_hours',
                'TestComponent|status|lb',
                'TestComponent|status|ub',
                'TestComponent|active_hours',
            },
            msg='Incorrect constraints',
        )

        upper_bound_flow_rate = outputs[1].relative_maximum

        assert upper_bound_flow_rate.dims == tuple(model.get_coords())

        assert_var_equal(
            model['TestComponent(Out2)|flow_rate'],
            model.add_variables(lower=0, upper=300 * upper_bound_flow_rate, coords=model.get_coords()),
        )
        assert_var_equal(model['TestComponent|status'], model.add_variables(binary=True, coords=model.get_coords()))
        assert_var_equal(
            model['TestComponent(Out2)|status'], model.add_variables(binary=True, coords=model.get_coords())
        )

        assert_conequal(
            model.constraints['TestComponent(Out2)|flow_rate|lb'],
            model.variables['TestComponent(Out2)|flow_rate']
            >= model.variables['TestComponent(Out2)|status'] * 0.3 * 300,
        )
        assert_conequal(
            model.constraints['TestComponent(Out2)|flow_rate|ub'],
            model.variables['TestComponent(Out2)|flow_rate']
            <= model.variables['TestComponent(Out2)|status'] * 300 * upper_bound_flow_rate,
        )

        # Component status couples the flow statuses: active if any flow is active.
        assert_conequal(
            model.constraints['TestComponent|status|lb'],
            model.variables['TestComponent|status']
            >= (
                model.variables['TestComponent(In1)|status']
                + model.variables['TestComponent(Out1)|status']
                + model.variables['TestComponent(Out2)|status']
            )
            / (3 + 1e-5),
        )
        assert_conequal(
            model.constraints['TestComponent|status|ub'],
            model.variables['TestComponent|status']
            <= (
                model.variables['TestComponent(In1)|status']
                + model.variables['TestComponent(Out1)|status']
                + model.variables['TestComponent(Out2)|status']
            )
            + 1e-5,
        )

    def test_on_with_single_flow(self, basic_flow_system_linopy_coords, coords_config):
        """Test status variables and constraints for a component with StatusParameters and a single flow."""
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
        inputs = [
            fx.Flow('In1', 'Fernwärme', relative_minimum=np.ones(10) * 0.1, size=100),
        ]
        outputs = []
        comp = flixopt.elements.Component(
            'TestComponent', inputs=inputs, outputs=outputs, status_parameters=fx.StatusParameters()
        )
        flow_system.add_elements(comp)
        model = create_linopy_model(flow_system)

        assert_sets_equal(
            set(comp.submodel.variables),
            {
                'TestComponent(In1)|flow_rate',
                'TestComponent(In1)|total_flow_hours',
                'TestComponent(In1)|status',
                'TestComponent(In1)|active_hours',
                'TestComponent|status',
                'TestComponent|active_hours',
            },
            msg='Incorrect variables',
        )

        assert_sets_equal(
            set(comp.submodel.constraints),
            {
                'TestComponent(In1)|total_flow_hours',
                'TestComponent(In1)|flow_rate|lb',
                'TestComponent(In1)|flow_rate|ub',
                'TestComponent(In1)|active_hours',
                'TestComponent|status',
                'TestComponent|active_hours',
            },
            msg='Incorrect constraints',
        )

        assert_var_equal(
            model['TestComponent(In1)|flow_rate'], model.add_variables(lower=0, upper=100, coords=model.get_coords())
        )
        assert_var_equal(model['TestComponent|status'], model.add_variables(binary=True, coords=model.get_coords()))
        assert_var_equal(
            model['TestComponent(In1)|status'], model.add_variables(binary=True, coords=model.get_coords())
        )

        assert_conequal(
            model.constraints['TestComponent(In1)|flow_rate|lb'],
            model.variables['TestComponent(In1)|flow_rate'] >= model.variables['TestComponent(In1)|status'] * 0.1 * 100,
        )
        assert_conequal(
            model.constraints['TestComponent(In1)|flow_rate|ub'],
            model.variables['TestComponent(In1)|flow_rate'] <= model.variables['TestComponent(In1)|status'] * 100,
        )

        # With a single flow the component status equals that flow's status directly.
        assert_conequal(
            model.constraints['TestComponent|status'],
            model.variables['TestComponent|status'] == model.variables['TestComponent(In1)|status'],
        )

    def test_previous_states_with_multiple_flows(self, basic_flow_system_linopy_coords, coords_config):
        """Test model generation when flows provide previous_flow_rate values (arrays, lists, and scalars)."""
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config

        ub_out2 = np.linspace(1, 1.5, 10).round(2)
        inputs = [
            fx.Flow(
                'In1',
                'Fernwärme',
                relative_minimum=np.ones(10) * 0.1,
                size=100,
                previous_flow_rate=np.array([0, 0, 1e-6, 1e-5, 1e-4, 3, 4]),
            ),
        ]
        outputs = [
            fx.Flow('Out1', 'Gas', relative_minimum=np.ones(10) * 0.2, size=200, previous_flow_rate=[3, 4, 5]),
            fx.Flow(
                'Out2',
                'Gas',
                relative_minimum=np.ones(10) * 0.3,
                relative_maximum=ub_out2,
                size=300,
                previous_flow_rate=20,
            ),
        ]
        comp = flixopt.elements.Component(
            'TestComponent', inputs=inputs, outputs=outputs, status_parameters=fx.StatusParameters()
        )
        flow_system.add_elements(comp)
        model = create_linopy_model(flow_system)

        assert_sets_equal(
            set(comp.submodel.variables),
            {
                'TestComponent(In1)|flow_rate',
                'TestComponent(In1)|total_flow_hours',
                'TestComponent(In1)|status',
                'TestComponent(In1)|active_hours',
                'TestComponent(Out1)|flow_rate',
                'TestComponent(Out1)|total_flow_hours',
                'TestComponent(Out1)|status',
                'TestComponent(Out1)|active_hours',
                'TestComponent(Out2)|flow_rate',
                'TestComponent(Out2)|total_flow_hours',
                'TestComponent(Out2)|status',
                'TestComponent(Out2)|active_hours',
                'TestComponent|status',
                'TestComponent|active_hours',
            },
            msg='Incorrect variables',
        )

        assert_sets_equal(
            set(comp.submodel.constraints),
            {
                'TestComponent(In1)|total_flow_hours',
                'TestComponent(In1)|flow_rate|lb',
                'TestComponent(In1)|flow_rate|ub',
                'TestComponent(In1)|active_hours',
                'TestComponent(Out1)|total_flow_hours',
                'TestComponent(Out1)|flow_rate|lb',
                'TestComponent(Out1)|flow_rate|ub',
                'TestComponent(Out1)|active_hours',
                'TestComponent(Out2)|total_flow_hours',
                'TestComponent(Out2)|flow_rate|lb',
                'TestComponent(Out2)|flow_rate|ub',
                'TestComponent(Out2)|active_hours',
                'TestComponent|status|lb',
                'TestComponent|status|ub',
                'TestComponent|active_hours',
            },
            msg='Incorrect constraints',
        )

        upper_bound_flow_rate = outputs[1].relative_maximum

        assert upper_bound_flow_rate.dims == tuple(model.get_coords())

        assert_var_equal(
            model['TestComponent(Out2)|flow_rate'],
            model.add_variables(lower=0, upper=300 * upper_bound_flow_rate, coords=model.get_coords()),
        )
        assert_var_equal(model['TestComponent|status'], model.add_variables(binary=True, coords=model.get_coords()))
        assert_var_equal(
            model['TestComponent(Out2)|status'], model.add_variables(binary=True, coords=model.get_coords())
        )

        assert_conequal(
            model.constraints['TestComponent(Out2)|flow_rate|lb'],
            model.variables['TestComponent(Out2)|flow_rate']
            >= model.variables['TestComponent(Out2)|status'] * 0.3 * 300,
        )
        assert_conequal(
            model.constraints['TestComponent(Out2)|flow_rate|ub'],
            model.variables['TestComponent(Out2)|flow_rate']
            <= model.variables['TestComponent(Out2)|status'] * 300 * upper_bound_flow_rate,
        )

        assert_conequal(
            model.constraints['TestComponent|status|lb'],
            model.variables['TestComponent|status']
            >= (
                model.variables['TestComponent(In1)|status']
                + model.variables['TestComponent(Out1)|status']
                + model.variables['TestComponent(Out2)|status']
            )
            / (3 + 1e-5),
        )
        assert_conequal(
            model.constraints['TestComponent|status|ub'],
            model.variables['TestComponent|status']
            <= (
                model.variables['TestComponent(In1)|status']
                + model.variables['TestComponent(Out1)|status']
                + model.variables['TestComponent(Out2)|status']
            )
            + 1e-5,
        )

    @pytest.mark.parametrize(
        'in1_previous_flow_rate, out1_previous_flow_rate, out2_previous_flow_rate, previous_on_hours',
        [
            (None, None, None, 0),
            (np.array([0, 1e-6, 1e-4, 5]), None, None, 2),
            (np.array([0, 5, 0, 5]), None, None, 1),
            (np.array([0, 5, 0, 0]), 3, 0, 1),
            (np.array([0, 0, 2, 0, 4, 5]), [3, 4, 5], None, 4),
        ],
    )
    def test_previous_states_with_multiple_flows_parameterized(
        self,
        basic_flow_system_linopy_coords,
        coords_config,
        in1_previous_flow_rate,
        out1_previous_flow_rate,
        out2_previous_flow_rate,
        previous_on_hours,
    ):
        """Test that the initial uptime constraint reflects the expected previous active hours for different previous flow rates."""
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config

        ub_out2 = np.linspace(1, 1.5, 10).round(2)
        inputs = [
            fx.Flow(
                'In1',
                'Fernwärme',
                relative_minimum=np.ones(10) * 0.1,
                size=100,
                previous_flow_rate=in1_previous_flow_rate,
                status_parameters=fx.StatusParameters(min_uptime=3),
            ),
        ]
        outputs = [
            fx.Flow(
                'Out1', 'Gas', relative_minimum=np.ones(10) * 0.2, size=200, previous_flow_rate=out1_previous_flow_rate
            ),
            fx.Flow(
                'Out2',
                'Gas',
                relative_minimum=np.ones(10) * 0.3,
                relative_maximum=ub_out2,
                size=300,
                previous_flow_rate=out2_previous_flow_rate,
            ),
        ]
        comp = flixopt.elements.Component(
            'TestComponent',
            inputs=inputs,
            outputs=outputs,
            status_parameters=fx.StatusParameters(min_uptime=3),
        )
        flow_system.add_elements(comp)
        create_linopy_model(flow_system)

        # The first uptime step continues the streak of previous active hours.
        assert_conequal(
            comp.submodel.constraints['TestComponent|uptime|initial'],
            comp.submodel.variables['TestComponent|uptime'].isel(time=0)
            == comp.submodel.variables['TestComponent|status'].isel(time=0) * (previous_on_hours + 1),
        )
+
+
class TestTransmissionModel:
    """Tests for the Transmission component (losses, investments, balancing)."""

    def test_transmission_basic(self, basic_flow_system, highs_solver):
        """Test a one-directional transmission: status behavior and relative/absolute losses."""
        flow_system = basic_flow_system
        flow_system.add_elements(fx.Bus('Wärme lokal'))

        boiler = fx.linear_converters.Boiler(
            'Boiler',
            thermal_efficiency=0.5,
            thermal_flow=fx.Flow('Q_th', bus='Wärme lokal'),
            fuel_flow=fx.Flow('Q_fu', bus='Gas'),
        )

        transmission = fx.Transmission(
            'Rohr',
            relative_losses=0.2,
            absolute_losses=20,
            in1=fx.Flow(
                'Rohr1', 'Wärme lokal', size=fx.InvestParameters(effects_of_investment_per_size=5, maximum_size=1e6)
            ),
            out1=fx.Flow('Rohr2', 'Fernwärme', size=1000),
        )

        flow_system.add_elements(transmission, boiler)

        flow_system.optimize(highs_solver)

        # Assertions
        assert_almost_equal_numeric(
            transmission.in1.submodel.status.status.solution.values,
            np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1]),
            'Status does not work properly',
        )

        # out = in * (1 - relative_losses) - absolute_losses
        assert_almost_equal_numeric(
            transmission.in1.submodel.flow_rate.solution.values * 0.8 - 20,
            transmission.out1.submodel.flow_rate.solution.values,
            'Losses are not computed correctly',
        )

    def test_transmission_balanced(self, basic_flow_system, highs_solver):
        """Test a bidirectional transmission with balanced=True: both direction investments must be equal."""
        flow_system = basic_flow_system
        flow_system.add_elements(fx.Bus('Wärme lokal'))

        boiler = fx.linear_converters.Boiler(
            'Boiler_Standard',
            thermal_efficiency=0.9,
            thermal_flow=fx.Flow(
                'Q_th', bus='Fernwärme', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])
            ),
            fuel_flow=fx.Flow('Q_fu', bus='Gas'),
        )

        boiler2 = fx.linear_converters.Boiler(
            'Boiler_backup',
            thermal_efficiency=0.4,
            thermal_flow=fx.Flow('Q_th', bus='Wärme lokal'),
            fuel_flow=fx.Flow('Q_fu', bus='Gas'),
        )

        last2 = fx.Sink(
            'Wärmelast2',
            inputs=[
                fx.Flow(
                    'Q_th_Last',
                    bus='Wärme lokal',
                    size=1,
                    fixed_relative_profile=flow_system.components['Wärmelast'].inputs[0].fixed_relative_profile
                    * np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 1]),
                )
            ],
        )

        transmission = fx.Transmission(
            'Rohr',
            relative_losses=0.2,
            absolute_losses=20,
            in1=fx.Flow(
                'Rohr1a',
                bus='Wärme lokal',
                size=fx.InvestParameters(effects_of_investment_per_size=5, maximum_size=1000),
            ),
            out1=fx.Flow('Rohr1b', 'Fernwärme', size=1000),
            in2=fx.Flow('Rohr2a', 'Fernwärme', size=fx.InvestParameters(maximum_size=1000)),
            out2=fx.Flow('Rohr2b', bus='Wärme lokal', size=1000),
            balanced=True,
        )

        flow_system.add_elements(transmission, boiler, boiler2, last2)

        flow_system.optimize(highs_solver)

        # Assertions
        assert_almost_equal_numeric(
            transmission.in1.submodel.status.status.solution.values,
            np.array([1, 1, 1, 0, 0, 0, 0, 0, 0, 0]),
            'Status does not work properly',
        )

        assert_almost_equal_numeric(
            flow_system.model.variables['Rohr(Rohr1b)|flow_rate'].solution.values,
            transmission.out1.submodel.flow_rate.solution.values,
            'Flow rate of Rohr__Rohr1b is not correct',
        )

        # Absolute losses only apply when the line is actually transmitting.
        assert_almost_equal_numeric(
            transmission.in1.submodel.flow_rate.solution.values * 0.8
            - np.array([20 if val > 0.1 else 0 for val in transmission.in1.submodel.flow_rate.solution.values]),
            transmission.out1.submodel.flow_rate.solution.values,
            'Losses are not computed correctly',
        )

        # balanced=True forces identical sizes for both directions.
        assert_almost_equal_numeric(
            transmission.in1.submodel._investment.size.solution.item(),
            transmission.in2.submodel._investment.size.solution.item(),
            'The Investments are not equated correctly',
        )

    def test_transmission_unbalanced(self, basic_flow_system, highs_solver):
        """Test a bidirectional transmission with balanced=False: each direction is sized independently."""
        flow_system = basic_flow_system
        flow_system.add_elements(fx.Bus('Wärme lokal'))

        boiler = fx.linear_converters.Boiler(
            'Boiler_Standard',
            thermal_efficiency=0.9,
            thermal_flow=fx.Flow(
                'Q_th', bus='Fernwärme', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])
            ),
            fuel_flow=fx.Flow('Q_fu', bus='Gas'),
        )

        boiler2 = fx.linear_converters.Boiler(
            'Boiler_backup',
            thermal_efficiency=0.4,
            thermal_flow=fx.Flow('Q_th', bus='Wärme lokal'),
            fuel_flow=fx.Flow('Q_fu', bus='Gas'),
        )

        last2 = fx.Sink(
            'Wärmelast2',
            inputs=[
                fx.Flow(
                    'Q_th_Last',
                    bus='Wärme lokal',
                    size=1,
                    fixed_relative_profile=flow_system.components['Wärmelast'].inputs[0].fixed_relative_profile
                    * np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 1]),
                )
            ],
        )

        transmission = fx.Transmission(
            'Rohr',
            relative_losses=0.2,
            absolute_losses=20,
            in1=fx.Flow(
                'Rohr1a',
                bus='Wärme lokal',
                size=fx.InvestParameters(effects_of_investment_per_size=50, maximum_size=1000),
            ),
            out1=fx.Flow('Rohr1b', 'Fernwärme', size=1000),
            in2=fx.Flow(
                'Rohr2a',
                'Fernwärme',
                size=fx.InvestParameters(
                    effects_of_investment_per_size=100, minimum_size=10, maximum_size=1000, mandatory=True
                ),
            ),
            out2=fx.Flow('Rohr2b', bus='Wärme lokal', size=1000),
            balanced=False,
        )

        flow_system.add_elements(transmission, boiler, boiler2, last2)

        flow_system.optimize(highs_solver)

        # Assertions
        assert_almost_equal_numeric(
            transmission.in1.submodel.status.status.solution.values,
            np.array([1, 1, 1, 0, 0, 0, 0, 0, 0, 0]),
            'Status does not work properly',
        )

        assert_almost_equal_numeric(
            flow_system.model.variables['Rohr(Rohr1b)|flow_rate'].solution.values,
            transmission.out1.submodel.flow_rate.solution.values,
            'Flow rate of Rohr__Rohr1b is not correct',
        )

        assert_almost_equal_numeric(
            transmission.in1.submodel.flow_rate.solution.values * 0.8
            - np.array([20 if val > 0.1 else 0 for val in transmission.in1.submodel.flow_rate.solution.values]),
            transmission.out1.submodel.flow_rate.solution.values,
            'Losses are not computed correctly',
        )

        # Direction 1 is sized by demand; direction 2 only by its mandatory minimum.
        assert transmission.in1.submodel._investment.size.solution.item() > 11

        assert_almost_equal_numeric(
            transmission.in2.submodel._investment.size.solution.item(),
            10,
            'Sizing does not work properly',
        )
diff --git a/tests/deprecated/test_config.py b/tests/deprecated/test_config.py
new file mode 100644
index 000000000..94d626af2
--- /dev/null
+++ b/tests/deprecated/test_config.py
@@ -0,0 +1,282 @@
+"""Tests for the config module."""
+
+import logging
+import sys
+
+import pytest
+
+from flixopt.config import CONFIG, SUCCESS_LEVEL, MultilineFormatter
+
+logger = logging.getLogger('flixopt')
+
+
@pytest.mark.xdist_group(name='config_tests')
class TestConfigModule:
    """Test the CONFIG class and logging setup.

    Grouped for pytest-xdist because the tests mutate global logging state
    on the shared 'flixopt' logger and must not run concurrently.
    """

    def setup_method(self):
        """Reset CONFIG to defaults before each test."""
        CONFIG.reset()

    def teardown_method(self):
        """Clean up after each test."""
        CONFIG.reset()

    def test_config_defaults(self):
        """Test that CONFIG has correct default values."""
        assert CONFIG.Modeling.big == 10_000_000
        assert CONFIG.Modeling.epsilon == 1e-5
        assert CONFIG.Solving.mip_gap == 0.01
        assert CONFIG.Solving.time_limit_seconds == 300
        assert CONFIG.config_name == 'flixopt'

    def test_silent_by_default(self, capfd):
        """Test that flixopt is silent by default."""
        logger.info('should not appear')
        captured = capfd.readouterr()
        assert 'should not appear' not in captured.out

    def test_enable_console_logging(self, capfd):
        """Test enabling console logging."""
        CONFIG.Logging.enable_console('INFO')
        logger.info('test message')
        captured = capfd.readouterr()
        assert 'test message' in captured.out

    def test_enable_file_logging(self, tmp_path):
        """Test enabling file logging."""
        log_file = tmp_path / 'test.log'
        CONFIG.Logging.enable_file('INFO', str(log_file))
        logger.info('test file message')

        assert log_file.exists()
        assert 'test file message' in log_file.read_text()

    def test_console_and_file_together(self, tmp_path, capfd):
        """Test logging to both console and file."""
        log_file = tmp_path / 'test.log'
        CONFIG.Logging.enable_console('INFO')
        CONFIG.Logging.enable_file('INFO', str(log_file))

        logger.info('test both')

        # Check both outputs
        assert 'test both' in capfd.readouterr().out
        assert 'test both' in log_file.read_text()

    def test_disable_logging(self, capfd):
        """Test disabling logging."""
        CONFIG.Logging.enable_console('INFO')
        CONFIG.Logging.disable()

        logger.info('should not appear')
        assert 'should not appear' not in capfd.readouterr().out

    def test_custom_success_level(self, capfd):
        """Test custom SUCCESS log level."""
        CONFIG.Logging.enable_console('INFO')
        logger.log(SUCCESS_LEVEL, 'success message')
        assert 'success message' in capfd.readouterr().out

    def test_success_level_as_minimum(self, capfd):
        """Test setting SUCCESS as minimum log level."""
        CONFIG.Logging.enable_console('SUCCESS')

        # INFO should not appear (level 20 < 25)
        logger.info('info message')
        assert 'info message' not in capfd.readouterr().out

        # SUCCESS should appear (level 25)
        logger.log(SUCCESS_LEVEL, 'success message')
        assert 'success message' in capfd.readouterr().out

        # WARNING should appear (level 30 > 25)
        logger.warning('warning message')
        assert 'warning message' in capfd.readouterr().out

    def test_success_level_numeric(self, capfd):
        """Test setting SUCCESS level using numeric value."""
        CONFIG.Logging.enable_console(25)
        logger.log(25, 'success with numeric level')
        assert 'success with numeric level' in capfd.readouterr().out

    def test_success_level_constant(self, capfd):
        """Test using SUCCESS_LEVEL constant."""
        CONFIG.Logging.enable_console(SUCCESS_LEVEL)
        logger.log(SUCCESS_LEVEL, 'success with constant')
        assert 'success with constant' in capfd.readouterr().out
        assert SUCCESS_LEVEL == 25

    def test_success_file_logging(self, tmp_path):
        """Test SUCCESS level with file logging."""
        log_file = tmp_path / 'test_success.log'
        CONFIG.Logging.enable_file('SUCCESS', str(log_file))

        # INFO should not be logged
        logger.info('info not logged')

        # SUCCESS should be logged
        logger.log(SUCCESS_LEVEL, 'success logged to file')

        content = log_file.read_text()
        assert 'info not logged' not in content
        assert 'success logged to file' in content

    def test_success_color_customization(self, capfd):
        """Test customizing SUCCESS level color."""
        CONFIG.Logging.enable_console('SUCCESS')

        # Customize SUCCESS color
        CONFIG.Logging.set_colors(
            {
                'SUCCESS': 'bold_green,bg_black',
                'WARNING': 'yellow',
            }
        )

        # Only verifies the message still gets through; the actual color codes
        # are not asserted here.
        logger.log(SUCCESS_LEVEL, 'colored success')
        output = capfd.readouterr().out
        assert 'colored success' in output

    def test_multiline_formatting(self):
        """Test that multi-line messages get box borders."""
        formatter = MultilineFormatter()
        record = logging.LogRecord('test', logging.INFO, '', 1, 'Line 1\nLine 2\nLine 3', (), None)
        formatted = formatter.format(record)
        assert '┌─' in formatted
        assert '└─' in formatted

    def test_console_stderr(self, capfd):
        """Test logging to stderr."""
        CONFIG.Logging.enable_console('INFO', stream=sys.stderr)
        logger.info('stderr test')
        assert 'stderr test' in capfd.readouterr().err

    def test_non_colored_output(self, capfd):
        """Test non-colored console output."""
        CONFIG.Logging.enable_console('INFO', colored=False)
        logger.info('plain text')
        assert 'plain text' in capfd.readouterr().out

    def test_preset_exploring(self, capfd):
        """Test exploring preset."""
        CONFIG.exploring()
        logger.info('exploring')
        assert 'exploring' in capfd.readouterr().out
        assert CONFIG.Solving.log_to_console is True

    def test_preset_debug(self, capfd):
        """Test debug preset."""
        CONFIG.debug()
        logger.debug('debug')
        assert 'debug' in capfd.readouterr().out

    def test_preset_production(self, tmp_path):
        """Test production preset."""
        log_file = tmp_path / 'prod.log'
        CONFIG.production(str(log_file))
        logger.info('production')

        assert log_file.exists()
        assert 'production' in log_file.read_text()
        assert CONFIG.Plotting.default_show is False

    def test_preset_silent(self, capfd):
        """Test silent preset."""
        CONFIG.silent()
        logger.info('should not appear')
        assert 'should not appear' not in capfd.readouterr().out

    def test_config_reset(self):
        """Test that reset() restores defaults and disables logging."""
        CONFIG.Modeling.big = 99999999
        CONFIG.Logging.enable_console('DEBUG')

        CONFIG.reset()

        # Both the value change and the added handler must be rolled back.
        assert CONFIG.Modeling.big == 10_000_000
        assert len(logger.handlers) == 0

    def test_config_to_dict(self):
        """Test converting CONFIG to dictionary."""
        config_dict = CONFIG.to_dict()
        assert config_dict['modeling']['big'] == 10_000_000
        assert config_dict['solving']['mip_gap'] == 0.01

    def test_attribute_modification(self):
        """Test modifying config attributes."""
        CONFIG.Modeling.big = 12345678
        CONFIG.Solving.mip_gap = 0.001

        assert CONFIG.Modeling.big == 12345678
        assert CONFIG.Solving.mip_gap == 0.001

    def test_exception_logging(self, capfd):
        """Test that exceptions are properly logged with tracebacks."""
        CONFIG.Logging.enable_console('INFO')

        try:
            raise ValueError('Test exception')
        except ValueError:
            logger.exception('An error occurred')

        captured = capfd.readouterr().out
        assert 'An error occurred' in captured
        assert 'ValueError' in captured
        assert 'Test exception' in captured
        assert 'Traceback' in captured

    def test_exception_logging_non_colored(self, capfd):
        """Test that exceptions are properly logged with tracebacks in non-colored mode."""
        CONFIG.Logging.enable_console('INFO', colored=False)

        try:
            raise ValueError('Test exception non-colored')
        except ValueError:
            logger.exception('An error occurred')

        captured = capfd.readouterr().out
        assert 'An error occurred' in captured
        assert 'ValueError: Test exception non-colored' in captured
        assert 'Traceback' in captured

    def test_enable_file_preserves_custom_handlers(self, tmp_path, capfd):
        """Test that enable_file preserves custom non-file handlers."""
        # Add a custom console handler first
        CONFIG.Logging.enable_console('INFO')
        logger.info('console test')
        assert 'console test' in capfd.readouterr().out

        # Now add file logging - should keep the console handler
        log_file = tmp_path / 'test.log'
        CONFIG.Logging.enable_file('INFO', str(log_file))

        logger.info('both outputs')

        # Check console still works
        console_output = capfd.readouterr().out
        assert 'both outputs' in console_output

        # Check file was created and has the message
        assert log_file.exists()
        assert 'both outputs' in log_file.read_text()

    def test_enable_file_removes_duplicate_file_handlers(self, tmp_path):
        """Test that enable_file removes existing file handlers to avoid duplicates."""
        log_file = tmp_path / 'test.log'

        # Enable file logging twice
        CONFIG.Logging.enable_file('INFO', str(log_file))
        CONFIG.Logging.enable_file('INFO', str(log_file))

        logger.info('duplicate test')

        # Count file handlers - should only be 1
        from logging.handlers import RotatingFileHandler

        file_handlers = [h for h in logger.handlers if isinstance(h, (logging.FileHandler, RotatingFileHandler))]
        assert len(file_handlers) == 1

        # Message should appear only once in the file
        log_content = log_file.read_text()
        assert log_content.count('duplicate test') == 1
diff --git a/tests/deprecated/test_cycle_detection.py b/tests/deprecated/test_cycle_detection.py
new file mode 100644
index 000000000..753a9a3e5
--- /dev/null
+++ b/tests/deprecated/test_cycle_detection.py
@@ -0,0 +1,200 @@
+import pytest
+
+from flixopt.effects import detect_cycles
+
+
def test_empty_graph():
    """A graph with no nodes cannot contain a cycle."""
    result = detect_cycles({})
    assert result == []
+
+
def test_single_node():
    """One isolated node with no outgoing edges yields no cycles."""
    result = detect_cycles({'A': []})
    assert result == []
+
+
def test_self_loop():
    """An edge from a node to itself is reported as a single short cycle."""
    found = detect_cycles({'A': ['A']})
    # Exactly one cycle, reported with the start node repeated at the end.
    assert found == [['A', 'A']]
+
+
def test_simple_cycle():
    """A three-node ring is detected regardless of the starting node."""
    cycles = detect_cycles({'A': ['B'], 'B': ['C'], 'C': ['A']})
    assert len(cycles) == 1
    # Any rotation of the ring is an acceptable report.
    assert cycles[0] in (['A', 'B', 'C', 'A'], ['B', 'C', 'A', 'B'], ['C', 'A', 'B', 'C'])
+
+
def test_no_cycles():
    """A small directed acyclic graph produces an empty cycle list."""
    dag = {'A': ['B', 'C'], 'B': ['D', 'E'], 'C': ['F'], 'D': [], 'E': [], 'F': []}
    assert detect_cycles(dag) == []
+
+
def test_multiple_cycles():
    """Two independent cycles in one graph are both reported."""
    graph = {'A': ['B', 'D'], 'B': ['C'], 'C': ['A'], 'D': ['E'], 'E': ['D']}
    cycles = detect_cycles(graph)
    assert len(cycles) == 2

    # Compare against comma-joined rotations since the start node may vary.
    joined = [','.join(cycle) for cycle in cycles]
    abc_rotations = ('A,B,C,A', 'B,C,A,B', 'C,A,B,C')
    de_rotations = ('D,E,D', 'E,D,E')
    assert any(rot in s for rot in abc_rotations for s in joined)
    assert any(rot in s for rot in de_rotations for s in joined)
+
+
def test_hidden_cycle():
    """A cycle buried behind a long chain of edges is still detected."""
    graph = {
        'A': ['B', 'C'],
        'B': ['D'],
        'C': ['E'],
        'D': ['F'],
        'E': ['G'],
        'F': ['H'],
        'G': ['I'],
        'H': ['J'],
        'I': ['K'],
        'J': ['L'],
        'K': ['M'],
        'L': ['N'],
        'M': ['N'],
        'N': ['O'],
        'O': ['P'],
        'P': ['Q'],
        'Q': ['O'],  # Hidden cycle O->P->Q->O
    }
    cycles = detect_cycles(graph)
    assert len(cycles) == 1

    cycle = cycles[0]
    assert 'O' in cycle and 'P' in cycle and 'Q' in cycle

    # Reported cycles repeat the start node at the end (e.g. ['O','P','Q','O']).
    # Drop the duplicated endpoint, rotate so the cycle starts at 'O', and
    # compare directly: this accepts every rotation while still verifying the
    # traversal order. (The previous modular-index check took indices modulo
    # len(cycle) *including* the repeated endpoint, which wrongly rejected
    # valid rotations such as ['P','Q','O','P'], and its third disjunct
    # encoded the reversed direction P->O->Q.)
    nodes = cycle[:-1] if cycle[0] == cycle[-1] else cycle
    start = nodes.index('O')
    rotated = nodes[start:] + nodes[:start]
    assert rotated == ['O', 'P', 'Q']
+
+
def test_disconnected_graph():
    """Two separate acyclic components yield no cycles."""
    components = {'A': ['B'], 'B': ['C'], 'C': [], 'D': ['E'], 'E': ['F'], 'F': []}
    assert detect_cycles(components) == []
+
+
def test_disconnected_graph_with_cycle():
    """A cycle in one component is found even when other components are acyclic."""
    graph = {
        'A': ['B'],
        'B': ['C'],
        'C': [],
        'D': ['E'],
        'E': ['F'],
        'F': ['D'],  # Cycle in D->E->F->D
    }
    cycles = detect_cycles(graph)
    assert len(cycles) == 1

    cycle = cycles[0]
    assert 'D' in cycle and 'E' in cycle and 'F' in cycle

    # Normalize the reported cycle (strip the repeated endpoint, rotate to
    # start at 'D') and compare against the expected order. The previous
    # modular-index check used the full list length *including* the repeated
    # endpoint, so valid rotations such as ['E','F','D','E'] failed all three
    # disjuncts; this normalization accepts every rotation.
    nodes = cycle[:-1] if cycle[0] == cycle[-1] else cycle
    start = nodes.index('D')
    rotated = nodes[start:] + nodes[:start]
    assert rotated == ['D', 'E', 'F']
+
+
def test_complex_dag():
    """A wide, multi-level DAG must not trigger any false positives."""
    dag = {
        'A': ['B', 'C', 'D'],
        'B': ['E', 'F'],
        'C': ['E', 'G'],
        'D': ['G', 'H'],
        'E': ['I', 'J'],
        'F': ['J', 'K'],
        'G': ['K', 'L'],
        'H': ['L', 'M'],
        'I': ['N'],
        'J': ['N', 'O'],
        'K': ['O', 'P'],
        'L': ['P', 'Q'],
        'M': ['Q'],
        'N': ['R'],
        'O': ['R', 'S'],
        'P': ['S'],
        'Q': ['S'],
        'R': [],
        'S': [],
    }
    assert detect_cycles(dag) == []
+
+
def test_missing_node_in_connections():
    """Edges pointing at nodes without their own adjacency entry are tolerated."""
    graph = {
        'A': ['B', 'C'],
        'B': ['D'],
        # 'C' and 'D' intentionally have no adjacency entries of their own.
    }
    assert detect_cycles(graph) == []
+
+
def test_non_string_keys():
    """The algorithm must work for arbitrary hashable node keys, not just strings."""
    acyclic = {1: [2, 3], 2: [4], 3: [4], 4: []}
    assert detect_cycles(acyclic) == []

    ring = {1: [2], 2: [3], 3: [1]}
    cycles = detect_cycles(ring)
    assert len(cycles) == 1
    # Any rotation of the ring is an acceptable report.
    assert cycles[0] in ([1, 2, 3, 1], [2, 3, 1, 2], [3, 1, 2, 3])
+
+
def test_complex_network_with_many_nodes():
    """A 100-node graph checks both performance and correctness at scale."""
    # Layered DAG: each node points at up to four higher-numbered nodes.
    graph = {i: list(range(i + 1, min(i + 5, 100))) for i in range(100)}
    assert detect_cycles(graph) == []

    # A single back edge from the last node to the first closes a cycle.
    graph[99] = [0]
    cycles = detect_cycles(graph)
    assert len(cycles) >= 1
    # Whatever path is reported, it must pass through both endpoints of the
    # back edge.
    assert any(0 in cycle and 99 in cycle for cycle in cycles)
+
+
if __name__ == '__main__':
    # Run only this module (pytest.main(['-v']) would collect from the whole
    # cwd) and propagate pytest's exit status to the shell so failures are
    # visible to callers/CI.
    raise SystemExit(pytest.main([__file__, '-v']))
diff --git a/tests/deprecated/test_dataconverter.py b/tests/deprecated/test_dataconverter.py
new file mode 100644
index 000000000..a5774fd6b
--- /dev/null
+++ b/tests/deprecated/test_dataconverter.py
@@ -0,0 +1,1262 @@
+import numpy as np
+import pandas as pd
+import pytest
+import xarray as xr
+
+from flixopt.core import ( # Adjust this import to match your project structure
+ ConversionError,
+ DataConverter,
+ TimeSeriesData,
+)
+
+
@pytest.fixture
def time_coords():
    """Five consecutive daily timestamps named 'time'."""
    return pd.date_range(start='2024-01-01', periods=5, freq='D', name='time')
+
+
@pytest.fixture
def scenario_coords():
    """Three scenario labels in an index named 'scenario'."""
    return pd.Index(['baseline', 'high', 'low'], name='scenario')
+
+
@pytest.fixture
def region_coords():
    """Three region labels in an index named 'region'."""
    return pd.Index(['north', 'south', 'east'], name='region')
+
+
@pytest.fixture
def standard_coords():
    """Three coordinates whose lengths (5, 3, 2) are pairwise distinct.

    Distinct lengths let the converter map bare arrays onto dimensions
    unambiguously, which most of the tests below rely on.
    """
    return {
        'time': pd.date_range('2024-01-01', periods=5, freq='D', name='time'),  # length 5
        'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),  # length 3
        'region': pd.Index(['north', 'south'], name='region'),  # length 2
    }
+
+
class TestScalarConversion:
    """Conversions of plain scalars under various coordinate setups."""

    def test_scalar_no_coords(self):
        """Without coordinates a scalar becomes a 0-dimensional DataArray."""
        da = DataConverter.to_dataarray(42)
        assert da.dims == ()
        assert da.shape == ()
        assert da.item() == 42

    def test_scalar_single_coord(self, time_coords):
        """A scalar is broadcast along a single coordinate."""
        da = DataConverter.to_dataarray(42, coords={'time': time_coords})
        assert da.dims == ('time',)
        assert da.shape == (5,)
        assert (da.values == 42).all()

    def test_scalar_multiple_coords(self, time_coords, scenario_coords):
        """A scalar is broadcast across every supplied coordinate."""
        da = DataConverter.to_dataarray(42, coords={'time': time_coords, 'scenario': scenario_coords})
        assert da.dims == ('time', 'scenario')
        assert da.shape == (5, 3)
        assert (da.values == 42).all()

    def test_numpy_scalars(self, time_coords):
        """Numpy integer and float scalars behave like Python scalars."""
        for value in (np.int32(42), np.int64(42), np.float32(42.5), np.float64(42.5)):
            da = DataConverter.to_dataarray(value, coords={'time': time_coords})
            assert da.shape == (5,)
            assert (da.values == value.item()).all()

    def test_scalar_many_dimensions(self, standard_coords):
        """Broadcasting works for an arbitrary number of dimensions."""
        coords = dict(standard_coords)
        coords['technology'] = pd.Index(['solar', 'wind'], name='technology')

        da = DataConverter.to_dataarray(42, coords=coords)
        assert da.dims == ('time', 'scenario', 'region', 'technology')
        assert da.shape == (5, 3, 2, 2)
        assert (da.values == 42).all()
+
+
class TestOneDimensionalArrayConversion:
    """Conversions of 1D numpy arrays."""

    def test_1d_array_no_coords(self):
        """Without target dimensions, only single-element arrays convert."""
        with pytest.raises(ConversionError):
            DataConverter.to_dataarray(np.array([1, 2, 3]))

        # A one-element array collapses to a 0D scalar result.
        da = DataConverter.to_dataarray(np.array([42]))
        assert da.shape == ()
        assert da.item() == 42

    def test_1d_array_matching_coord(self, time_coords):
        """An array whose length equals the coordinate length maps onto it."""
        values = np.array([10, 20, 30, 40, 50])
        da = DataConverter.to_dataarray(values, coords={'time': time_coords})
        assert da.dims == ('time',)
        assert da.shape == (5,)
        assert np.array_equal(da.values, values)

    def test_1d_array_mismatched_coord(self, time_coords):
        """A length mismatch with the only coordinate is an error."""
        with pytest.raises(ConversionError):
            DataConverter.to_dataarray(np.array([10, 20, 30]), coords={'time': time_coords})

    def test_1d_array_broadcast_to_multiple_coords(self, time_coords, scenario_coords):
        """The array binds to the matching-length dimension and broadcasts over the rest."""
        both = {'time': time_coords, 'scenario': scenario_coords}

        # Length 5 -> binds to 'time', replicated per scenario.
        time_values = np.array([10, 20, 30, 40, 50])
        da = DataConverter.to_dataarray(time_values, coords=both)
        assert da.dims == ('time', 'scenario')
        assert da.shape == (5, 3)
        for scenario in scenario_coords:
            assert np.array_equal(da.sel(scenario=scenario).values, time_values)

        # Length 3 -> binds to 'scenario', replicated per timestamp.
        scenario_values = np.array([100, 200, 300])
        da = DataConverter.to_dataarray(scenario_values, coords=both)
        assert da.dims == ('time', 'scenario')
        assert da.shape == (5, 3)
        for timestamp in time_coords:
            assert np.array_equal(da.sel(time=timestamp).values, scenario_values)

    def test_1d_array_ambiguous_length(self):
        """If two dimensions share the array's length, conversion must refuse."""
        coords = {
            'time': pd.date_range('2024-01-01', periods=3, freq='D', name='time'),
            'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),
        }
        with pytest.raises(ConversionError, match='matches multiple dimension'):
            DataConverter.to_dataarray(np.array([1, 2, 3]), coords=coords)

    def test_1d_array_broadcast_to_many_dimensions(self, standard_coords):
        """A time-length array is replicated across every other dimension."""
        time_values = np.array([10, 20, 30, 40, 50])
        da = DataConverter.to_dataarray(time_values, coords=standard_coords)

        assert da.dims == ('time', 'scenario', 'region')
        assert da.shape == (5, 3, 2)
        for scenario in standard_coords['scenario']:
            for region in standard_coords['region']:
                assert np.array_equal(da.sel(scenario=scenario, region=region).values, time_values)
+
+
class TestSeriesConversion:
    """Conversions of pandas Series."""

    def test_series_no_coords(self):
        """Without target dimensions, only single-element Series convert."""
        with pytest.raises(ConversionError):
            DataConverter.to_dataarray(pd.Series([1, 2, 3]))

        da = DataConverter.to_dataarray(pd.Series([42]))
        assert da.shape == ()
        assert da.item() == 42

    def test_series_matching_index(self, time_coords, scenario_coords):
        """A Series whose index equals a coordinate maps onto that dimension."""
        by_time = pd.Series([10, 20, 30, 40, 50], index=time_coords)
        da = DataConverter.to_dataarray(by_time, coords={'time': time_coords})
        assert da.dims == ('time',)
        assert da.shape == (5,)
        assert np.array_equal(da.values, by_time.values)

        by_scenario = pd.Series([100, 200, 300], index=scenario_coords)
        da = DataConverter.to_dataarray(by_scenario, coords={'scenario': scenario_coords})
        assert da.dims == ('scenario',)
        assert da.shape == (3,)
        assert np.array_equal(da.values, by_scenario.values)

    def test_series_mismatched_index(self, time_coords):
        """A Series indexed by different timestamps must be rejected."""
        other_year = pd.date_range('2025-01-01', periods=5, freq='D', name='time')
        mismatched = pd.Series([10, 20, 30, 40, 50], index=other_year)
        with pytest.raises(ConversionError):
            DataConverter.to_dataarray(mismatched, coords={'time': time_coords})

    def test_series_broadcast_to_multiple_coords(self, time_coords, scenario_coords):
        """A time-indexed Series is replicated across the scenario dimension."""
        by_time = pd.Series([10, 20, 30, 40, 50], index=time_coords)
        da = DataConverter.to_dataarray(by_time, coords={'time': time_coords, 'scenario': scenario_coords})
        assert da.shape == (5, 3)
        for scenario in scenario_coords:
            assert np.array_equal(da.sel(scenario=scenario).values, by_time.values)

    def test_series_wrong_dimension(self, time_coords, region_coords):
        """A Series indexed by a dimension absent from coords is an error."""
        by_region = pd.Series([1, 2, 3], index=region_coords)
        with pytest.raises(ConversionError):
            DataConverter.to_dataarray(by_region, coords={'time': time_coords})

    def test_series_broadcast_to_many_dimensions(self, standard_coords):
        """Broadcasting a time-indexed Series covers all remaining dimensions."""
        by_time = pd.Series([100, 200, 300, 400, 500], index=standard_coords['time'])
        da = DataConverter.to_dataarray(by_time, coords=standard_coords)

        assert da.dims == ('time', 'scenario', 'region')
        assert da.shape == (5, 3, 2)
        for scenario in standard_coords['scenario']:
            for region in standard_coords['region']:
                assert np.array_equal(da.sel(scenario=scenario, region=region).values, by_time.values)
+
+
class TestDataFrameConversion:
    """Conversions of pandas DataFrames."""

    def test_single_column_dataframe(self, time_coords):
        """A one-column DataFrame behaves exactly like a Series."""
        frame = pd.DataFrame({'value': [10, 20, 30, 40, 50]}, index=time_coords)
        da = DataConverter.to_dataarray(frame, coords={'time': time_coords})
        assert da.dims == ('time',)
        assert da.shape == (5,)
        assert np.array_equal(da.values, frame['value'].values)

    def test_multi_column_dataframe_accepted(self, time_coords, scenario_coords):
        """A multi-column DataFrame is converted through its numpy representation."""
        frame = pd.DataFrame(
            {'value1': [10, 20, 30, 40, 50], 'value2': [15, 25, 35, 45, 55], 'value3': [12, 22, 32, 42, 52]},
            index=time_coords,
        )

        # The 5x3 value block is matched onto the time x scenario grid.
        da = DataConverter.to_dataarray(frame, coords={'time': time_coords, 'scenario': scenario_coords})
        assert da.dims == ('time', 'scenario')
        assert da.shape == (5, 3)
        assert np.array_equal(da.values, frame.to_numpy())

    def test_empty_dataframe_rejected(self, time_coords):
        """A DataFrame without any columns cannot be converted."""
        no_columns = pd.DataFrame(index=time_coords)
        with pytest.raises(ConversionError, match='DataFrame must have at least one column'):
            DataConverter.to_dataarray(no_columns, coords={'time': time_coords})

    def test_dataframe_broadcast(self, time_coords, scenario_coords):
        """A one-column DataFrame broadcasts across extra dimensions like a Series."""
        frame = pd.DataFrame({'power': [10, 20, 30, 40, 50]}, index=time_coords)
        da = DataConverter.to_dataarray(frame, coords={'time': time_coords, 'scenario': scenario_coords})
        assert da.shape == (5, 3)
        for scenario in scenario_coords:
            assert np.array_equal(da.sel(scenario=scenario).values, frame['power'].values)
+
+
class TestMultiDimensionalArrayConversion:
    """Test multi-dimensional numpy array conversions.

    The converter maps bare array axes onto coordinates by length, so these
    tests use coordinates with pairwise-distinct lengths except where
    ambiguity is the point of the test. Several assertions regex-match the
    converter's exact error messages.
    """

    def test_2d_array_unique_dimensions(self, standard_coords):
        """2D array with unique dimension lengths should work."""
        # 5x3 array should map to time x scenario
        data_2d = np.random.rand(5, 3)
        result = DataConverter.to_dataarray(
            data_2d, coords={'time': standard_coords['time'], 'scenario': standard_coords['scenario']}
        )

        assert result.shape == (5, 3)
        assert result.dims == ('time', 'scenario')
        assert np.array_equal(result.values, data_2d)

        # 3x5 array should map to scenario x time
        data_2d_flipped = np.random.rand(3, 5)
        result_flipped = DataConverter.to_dataarray(
            data_2d_flipped, coords={'time': standard_coords['time'], 'scenario': standard_coords['scenario']}
        )

        # The result comes back in coords order (time, scenario), so the input
        # must equal the transpose of the result's values.
        assert result_flipped.shape == (5, 3)
        assert result_flipped.dims == ('time', 'scenario')
        assert np.array_equal(result_flipped.values.transpose(), data_2d_flipped)

    def test_2d_array_broadcast_to_3d(self, standard_coords):
        """2D array should broadcast to additional dimensions when using partial matching."""
        # With improved integration, 2D array (5x3) should match time×scenario and broadcast to region
        data_2d = np.random.rand(5, 3)
        result = DataConverter.to_dataarray(data_2d, coords=standard_coords)

        assert result.shape == (5, 3, 2)
        assert result.dims == ('time', 'scenario', 'region')

        # Check that all regions have the same time x scenario data
        for region in standard_coords['region']:
            assert np.array_equal(result.sel(region=region).values, data_2d)

    def test_3d_array_unique_dimensions(self, standard_coords):
        """3D array with unique dimension lengths should work."""
        # 5x3x2 array should map to time x scenario x region
        data_3d = np.random.rand(5, 3, 2)
        result = DataConverter.to_dataarray(data_3d, coords=standard_coords)

        assert result.shape == (5, 3, 2)
        assert result.dims == ('time', 'scenario', 'region')
        assert np.array_equal(result.values, data_3d)

    def test_3d_array_different_permutation(self, standard_coords):
        """3D array with different dimension order should work."""
        # 2x5x3 array should map to region x time x scenario
        data_3d = np.random.rand(2, 5, 3)
        result = DataConverter.to_dataarray(data_3d, coords=standard_coords)

        # Output is normalized to coords order; transposing back must recover
        # the input layout exactly.
        assert result.shape == (5, 3, 2)
        assert result.dims == ('time', 'scenario', 'region')
        assert np.array_equal(result.transpose('region', 'time', 'scenario').values, data_3d)

    def test_4d_array_unique_dimensions(self):
        """4D array with unique dimension lengths should work."""
        coords = {
            'time': pd.date_range('2024-01-01', periods=2, freq='D', name='time'),  # length 2
            'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),  # length 3
            'region': pd.Index(['north', 'south', 'east', 'west'], name='region'),  # length 4
            'technology': pd.Index(['solar', 'wind', 'gas', 'coal', 'hydro'], name='technology'),  # length 5
        }

        # 3x5x2x4 array should map to scenario x technology x time x region
        data_4d = np.random.rand(3, 5, 2, 4)
        result = DataConverter.to_dataarray(data_4d, coords=coords)

        assert result.shape == (2, 3, 4, 5)
        assert result.dims == ('time', 'scenario', 'region', 'technology')
        assert np.array_equal(result.transpose('scenario', 'technology', 'time', 'region').values, data_4d)

    def test_2d_array_ambiguous_dimensions_error(self):
        """2D array with ambiguous dimension lengths should fail."""
        # Both dimensions have length 3
        coords_ambiguous = {
            'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),  # length 3
            'region': pd.Index(['north', 'south', 'east'], name='region'),  # length 3
        }

        data_2d = np.random.rand(3, 3)
        with pytest.raises(ConversionError, match='matches multiple dimension combinations'):
            DataConverter.to_dataarray(data_2d, coords=coords_ambiguous)

    def test_multid_array_no_coords(self):
        """Multi-D arrays without coords should fail unless scalar."""
        # Multi-element fails
        data_2d = np.random.rand(2, 3)
        with pytest.raises(ConversionError, match='Cannot convert multi-element array without target dimensions'):
            DataConverter.to_dataarray(data_2d)

        # Single element succeeds: a 1x1 array collapses to a 0D result.
        single_element = np.array([[42]])
        result = DataConverter.to_dataarray(single_element)
        assert result.shape == ()
        assert result.item() == 42

    def test_array_no_matching_dimensions_error(self, standard_coords):
        """Array with no matching dimension lengths should fail."""
        # 7x8 array - no dimension has length 7 or 8
        data_2d = np.random.rand(7, 8)
        coords_2d = {
            'time': standard_coords['time'],  # length 5
            'scenario': standard_coords['scenario'],  # length 3
        }

        with pytest.raises(ConversionError, match='cannot be mapped to any combination'):
            DataConverter.to_dataarray(data_2d, coords=coords_2d)

    def test_multid_array_special_values(self, standard_coords):
        """Multi-D arrays should preserve special values."""
        # Create 2D array with special values (NaN and +/-inf must survive)
        data_2d = np.array(
            [[1.0, np.nan, 3.0], [np.inf, 5.0, -np.inf], [7.0, 8.0, 9.0], [10.0, np.nan, 12.0], [13.0, 14.0, np.inf]]
        )

        result = DataConverter.to_dataarray(
            data_2d, coords={'time': standard_coords['time'], 'scenario': standard_coords['scenario']}
        )

        assert result.shape == (5, 3)
        # Compare NaN/inf masks instead of values: NaN != NaN by definition.
        assert np.array_equal(np.isnan(result.values), np.isnan(data_2d))
        assert np.array_equal(np.isinf(result.values), np.isinf(data_2d))

    def test_multid_array_dtype_preservation(self, standard_coords):
        """Multi-D arrays should preserve data types."""
        # Integer array
        int_data = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12], [13, 14, 15]], dtype=np.int32)

        result_int = DataConverter.to_dataarray(
            int_data, coords={'time': standard_coords['time'], 'scenario': standard_coords['scenario']}
        )

        assert result_int.dtype == np.int32
        assert np.array_equal(result_int.values, int_data)

        # Boolean array
        bool_data = np.array(
            [[True, False, True], [False, True, False], [True, True, False], [False, False, True], [True, False, True]]
        )

        result_bool = DataConverter.to_dataarray(
            bool_data, coords={'time': standard_coords['time'], 'scenario': standard_coords['scenario']}
        )

        assert result_bool.dtype == bool
        assert np.array_equal(result_bool.values, bool_data)
+
+
class TestDataArrayConversion:
    """Conversions of existing xarray DataArrays."""

    def test_compatible_dataarray(self, time_coords):
        """A DataArray that already matches the target coords passes through."""
        source = xr.DataArray([10, 20, 30, 40, 50], coords={'time': time_coords}, dims='time')
        da = DataConverter.to_dataarray(source, coords={'time': time_coords})

        assert da.dims == ('time',)
        assert da.shape == (5,)
        assert np.array_equal(da.values, source.values)

        # The result must be an independent copy, not a view on the input.
        da[0] = 999
        assert source[0].item() == 10

    def test_incompatible_dataarray_coords(self, time_coords):
        """Mismatching coordinate values must be rejected."""
        other_year = pd.date_range('2025-01-01', periods=5, freq='D', name='time')
        source = xr.DataArray([10, 20, 30, 40, 50], coords={'time': other_year}, dims='time')
        with pytest.raises(ConversionError):
            DataConverter.to_dataarray(source, coords={'time': time_coords})

    def test_incompatible_dataarray_dims(self, time_coords):
        """A DataArray over an unknown dimension must be rejected."""
        source = xr.DataArray([10, 20, 30, 40, 50], coords={'wrong_dim': range(5)}, dims='wrong_dim')
        with pytest.raises(ConversionError):
            DataConverter.to_dataarray(source, coords={'time': time_coords})

    def test_dataarray_broadcast(self, time_coords, scenario_coords):
        """A 1D DataArray is replicated along newly requested dimensions."""
        source = xr.DataArray([10, 20, 30, 40, 50], coords={'time': time_coords}, dims='time')
        da = DataConverter.to_dataarray(source, coords={'time': time_coords, 'scenario': scenario_coords})

        assert da.dims == ('time', 'scenario')
        assert da.shape == (5, 3)
        for scenario in scenario_coords:
            assert np.array_equal(da.sel(scenario=scenario).values, source.values)

    def test_scalar_dataarray_broadcast(self, time_coords, scenario_coords):
        """A 0D DataArray broadcasts to every requested dimension."""
        da = DataConverter.to_dataarray(xr.DataArray(42), coords={'time': time_coords, 'scenario': scenario_coords})
        assert da.shape == (5, 3)
        assert (da.values == 42).all()

    def test_2d_dataarray_broadcast_to_more_dimensions(self, standard_coords):
        """A 2D DataArray gains the missing region dimension by replication."""
        source = xr.DataArray(
            [[10, 20, 30], [40, 50, 60], [70, 80, 90], [100, 110, 120], [130, 140, 150]],
            coords={'time': standard_coords['time'], 'scenario': standard_coords['scenario']},
            dims=('time', 'scenario'),
        )

        da = DataConverter.to_dataarray(source, coords=standard_coords)

        assert da.dims == ('time', 'scenario', 'region')
        assert da.shape == (5, 3, 2)
        # Every region slice carries the same time x scenario values.
        for region in standard_coords['region']:
            assert np.array_equal(da.sel(region=region).values, source.values)
+
+
class TestTimeSeriesDataConversion:
    """Conversions of TimeSeriesData wrappers."""

    def test_timeseries_data_basic(self, time_coords):
        """The wrapped DataArray converts exactly like a bare DataArray."""
        wrapped = TimeSeriesData(
            xr.DataArray([10, 20, 30, 40, 50], coords={'time': time_coords}, dims='time'),
            clustering_group='test',
        )

        da = DataConverter.to_dataarray(wrapped, coords={'time': time_coords})
        assert da.dims == ('time',)
        assert da.shape == (5,)
        assert np.array_equal(da.values, [10, 20, 30, 40, 50])

    def test_timeseries_data_broadcast(self, time_coords, scenario_coords):
        """The wrapper broadcasts across additional dimensions like its payload."""
        wrapped = TimeSeriesData(xr.DataArray([10, 20, 30, 40, 50], coords={'time': time_coords}, dims='time'))

        da = DataConverter.to_dataarray(wrapped, coords={'time': time_coords, 'scenario': scenario_coords})
        assert da.shape == (5, 3)
        for scenario in scenario_coords:
            assert np.array_equal(da.sel(scenario=scenario).values, [10, 20, 30, 40, 50])
+
+
class TestAsDataArrayAlias:
    """Checks intended to verify an alias of ``to_dataarray``.

    NOTE(review): every comparison below calls ``DataConverter.to_dataarray``
    on both sides, so the test compares the method with itself and can never
    fail on an alias regression — it is currently vacuous. The class name and
    the ``result_as`` variable names suggest the second call was once
    ``as_dataarray``; confirm whether that alias still exists and, if so,
    restore it here.
    """

    def test_to_dataarray_is_alias(self, time_coords, scenario_coords):
        """to_dataarray should work identically to to_dataarray."""
        # Test with scalar
        result_to = DataConverter.to_dataarray(42, coords={'time': time_coords})
        result_as = DataConverter.to_dataarray(42, coords={'time': time_coords})
        assert np.array_equal(result_to.values, result_as.values)
        assert result_to.dims == result_as.dims
        assert result_to.shape == result_as.shape

        # Test with array
        arr = np.array([10, 20, 30, 40, 50])
        result_to_arr = DataConverter.to_dataarray(arr, coords={'time': time_coords})
        result_as_arr = DataConverter.to_dataarray(arr, coords={'time': time_coords})
        assert np.array_equal(result_to_arr.values, result_as_arr.values)
        assert result_to_arr.dims == result_as_arr.dims

        # Test with Series
        series = pd.Series([100, 200, 300, 400, 500], index=time_coords)
        result_to_series = DataConverter.to_dataarray(series, coords={'time': time_coords, 'scenario': scenario_coords})
        result_as_series = DataConverter.to_dataarray(series, coords={'time': time_coords, 'scenario': scenario_coords})
        assert np.array_equal(result_to_series.values, result_as_series.values)
        assert result_to_series.dims == result_as_series.dims
+
+
class TestCustomDimensions:
    """Conversions over dimension names other than time/scenario."""

    def test_custom_single_dimension(self, region_coords):
        """A scalar broadcasts over a custom-named dimension."""
        da = DataConverter.to_dataarray(42, coords={'region': region_coords})
        assert da.dims == ('region',)
        assert da.shape == (3,)
        assert (da.values == 42).all()

    def test_custom_multiple_dimensions(self):
        """An array binds to the custom dimension with the matching length."""
        products = pd.Index(['A', 'B'], name='product')
        technologies = pd.Index(['solar', 'wind', 'gas'], name='technology')

        capacities = np.array([100, 150, 80])
        da = DataConverter.to_dataarray(capacities, coords={'product': products, 'technology': technologies})

        assert da.dims == ('product', 'technology')
        assert da.shape == (2, 3)
        # Replicated across the non-matching 'product' dimension.
        for product in products:
            assert np.array_equal(da.sel(product=product).values, capacities)

    def test_mixed_dimension_types(self):
        """The time dimension combines freely with custom dimensions."""
        times = pd.date_range('2024-01-01', periods=3, freq='D', name='time')
        regions = pd.Index(['north', 'south'], name='region')

        by_time = pd.Series([10, 20, 30], index=times)
        da = DataConverter.to_dataarray(by_time, coords={'time': times, 'region': regions})

        assert da.dims == ('time', 'region')
        assert da.shape == (3, 2)

    def test_custom_dimensions_complex(self):
        """Broadcasting works over three entirely custom dimensions."""
        coords = {
            'product': pd.Index(['A', 'B'], name='product'),
            'factory': pd.Index(['F1', 'F2', 'F3'], name='factory'),
            'quarter': pd.Index(['Q1', 'Q2', 'Q3', 'Q4'], name='quarter'),
        }

        per_factory = np.array([100, 200, 300])
        da = DataConverter.to_dataarray(per_factory, coords=coords)

        assert da.dims == ('product', 'factory', 'quarter')
        assert da.shape == (2, 3, 4)
        # Every (product, quarter) slice carries the factory values verbatim.
        for product in coords['product']:
            for quarter in coords['quarter']:
                assert np.array_equal(da.sel(product=product, quarter=quarter).values, per_factory)
+
+
class TestValidation:
    """Validation of the coords argument itself."""

    def test_empty_coords(self):
        """An empty coords mapping is fine for scalars (0D result)."""
        da = DataConverter.to_dataarray(42, coords={})
        assert da.shape == ()
        assert da.item() == 42

    def test_invalid_coord_type(self):
        """Coordinate values must be pandas Index objects, not plain lists."""
        with pytest.raises(ConversionError):
            DataConverter.to_dataarray(42, coords={'time': [1, 2, 3]})

    def test_empty_coord_index(self):
        """A zero-length coordinate index is rejected."""
        with pytest.raises(ConversionError):
            DataConverter.to_dataarray(42, coords={'time': pd.Index([], name='time')})

    def test_time_coord_validation(self):
        """The 'time' coordinate must be a DatetimeIndex."""
        not_datetimes = pd.Index([1, 2, 3], name='time')
        with pytest.raises(ConversionError, match='DatetimeIndex'):
            DataConverter.to_dataarray(42, coords={'time': not_datetimes})

    def test_coord_naming(self, time_coords):
        """An unnamed index is renamed to its dimension key automatically."""
        da = DataConverter.to_dataarray(42, coords={'time': time_coords.rename(None)})
        assert da.coords['time'].name == 'time'
+
+
class TestErrorHandling:
    """Error handling and quality of error messages."""

    def test_unsupported_data_types(self, time_coords):
        """Unsupported input types raise ConversionError with a clear message."""
        unsupported = ['string', object(), None, {'dict': 'value'}, [1, 2, 3]]
        for data in unsupported:
            with pytest.raises(ConversionError):
                DataConverter.to_dataarray(data, coords={'time': time_coords})

    def test_dimension_mismatch_messages(self, time_coords, scenario_coords):
        """Length-mismatch errors should name the problem explicitly."""
        wrong_arr = np.array([1, 2])  # length 2 matches neither 5 nor 3
        with pytest.raises(ConversionError, match='does not match any target dimension lengths'):
            DataConverter.to_dataarray(wrong_arr, coords={'time': time_coords, 'scenario': scenario_coords})

    def test_multidimensional_array_dimension_count_mismatch(self, standard_coords):
        """An array with more dims than coords fails with a clear error."""
        data_4d = np.random.rand(5, 3, 2, 4)
        with pytest.raises(ConversionError, match='cannot be mapped to any combination'):
            DataConverter.to_dataarray(data_4d, coords=standard_coords)

    def test_error_message_quality(self, standard_coords):
        """The error text should report the offending shape and the targets."""
        data_2d = np.random.rand(7, 8)  # no target dimension has length 7 or 8
        coords_2d = {
            'time': standard_coords['time'],  # length 5
            'scenario': standard_coords['scenario'],  # length 3
        }

        # pytest.raises replaces the previous manual try/except plus
        # `raise AssertionError(...)` pattern and still exposes the raised
        # exception for message inspection.
        with pytest.raises(ConversionError) as exc_info:
            DataConverter.to_dataarray(data_2d, coords=coords_2d)

        message = str(exc_info.value)
        assert 'Array shape (7, 8)' in message
        assert 'target coordinate lengths:' in message
+
+
+class TestDataIntegrity:
+    """Test data copying and integrity.
+
+    Mutating the returned DataArray must never write through to the input object.
+    """
+
+    def test_array_copy_independence(self, time_coords):
+        """Converted arrays should be independent copies."""
+        original_arr = np.array([10, 20, 30, 40, 50])
+        result = DataConverter.to_dataarray(original_arr, coords={'time': time_coords})
+
+        # Modify result
+        result[0] = 999
+
+        # Original should be unchanged
+        assert original_arr[0] == 10
+
+    def test_series_copy_independence(self, time_coords):
+        """Converted Series should be independent copies."""
+        original_series = pd.Series([10, 20, 30, 40, 50], index=time_coords)
+        result = DataConverter.to_dataarray(original_series, coords={'time': time_coords})
+
+        # Modify result
+        result[0] = 999
+
+        # Original should be unchanged
+        assert original_series.iloc[0] == 10
+
+    def test_dataframe_copy_independence(self, time_coords):
+        """Converted DataFrames should be independent copies."""
+        original_df = pd.DataFrame({'value': [10, 20, 30, 40, 50]}, index=time_coords)
+        result = DataConverter.to_dataarray(original_df, coords={'time': time_coords})
+
+        # Modify result
+        result[0] = 999
+
+        # Original should be unchanged
+        assert original_df.loc[time_coords[0], 'value'] == 10
+
+    def test_multid_array_copy_independence(self, standard_coords):
+        """Multi-D arrays should be independent copies."""
+        original_data = np.random.rand(5, 3)
+        result = DataConverter.to_dataarray(
+            original_data, coords={'time': standard_coords['time'], 'scenario': standard_coords['scenario']}
+        )
+
+        # Modify result
+        result[0, 0] = 999
+
+        # Original should be unchanged
+        assert original_data[0, 0] != 999
+
+
+class TestBooleanValues:
+    """Test handling of boolean values and arrays."""
+
+    def test_scalar_boolean_to_dataarray(self, time_coords):
+        """Scalar boolean values should work with to_dataarray."""
+        result_true = DataConverter.to_dataarray(True, coords={'time': time_coords})
+        assert result_true.shape == (5,)
+        assert result_true.dtype == bool
+        assert np.all(result_true.values)
+
+        result_false = DataConverter.to_dataarray(False, coords={'time': time_coords})
+        assert result_false.shape == (5,)
+        assert result_false.dtype == bool
+        assert not np.any(result_false.values)
+
+    def test_numpy_boolean_scalar(self, time_coords):
+        """Numpy boolean scalars should work."""
+        result_np_true = DataConverter.to_dataarray(np.bool_(True), coords={'time': time_coords})
+        assert result_np_true.shape == (5,)
+        assert result_np_true.dtype == bool
+        assert np.all(result_np_true.values)
+
+        result_np_false = DataConverter.to_dataarray(np.bool_(False), coords={'time': time_coords})
+        assert result_np_false.shape == (5,)
+        assert result_np_false.dtype == bool
+        assert not np.any(result_np_false.values)
+
+    def test_boolean_array_to_dataarray(self, time_coords):
+        """Boolean arrays should work with to_dataarray."""
+        bool_arr = np.array([True, False, True, False, True])
+        result = DataConverter.to_dataarray(bool_arr, coords={'time': time_coords})
+        assert result.shape == (5,)
+        assert result.dims == ('time',)
+        assert result.dtype == bool
+        assert np.array_equal(result.values, bool_arr)
+
+    def test_boolean_no_coords(self):
+        """Boolean scalar without coordinates should create 0D DataArray."""
+        result = DataConverter.to_dataarray(True)
+        assert result.shape == ()
+        assert result.dims == ()
+        assert result.item()
+
+        result_as = DataConverter.to_dataarray(False)
+        assert result_as.shape == ()
+        assert result_as.dims == ()
+        assert not result_as.item()
+
+    def test_boolean_multidimensional_broadcast(self, standard_coords):
+        """Boolean values should broadcast to multiple dimensions."""
+        result = DataConverter.to_dataarray(True, coords=standard_coords)
+        assert result.shape == (5, 3, 2)
+        assert result.dims == ('time', 'scenario', 'region')
+        assert result.dtype == bool
+        assert np.all(result.values)
+
+        result_as = DataConverter.to_dataarray(False, coords=standard_coords)
+        assert result_as.shape == (5, 3, 2)
+        assert result_as.dims == ('time', 'scenario', 'region')
+        assert result_as.dtype == bool
+        assert not np.any(result_as.values)
+
+    def test_boolean_series(self, time_coords):
+        """Boolean Series should work."""
+        bool_series = pd.Series([True, False, True, False, True], index=time_coords)
+        result = DataConverter.to_dataarray(bool_series, coords={'time': time_coords})
+        assert result.shape == (5,)
+        assert result.dtype == bool
+        assert np.array_equal(result.values, bool_series.values)
+
+        # NOTE(review): identical to the conversion above; the `result_as` naming
+        # suggests a leftover from a removed `as_dataarray` API — candidate for removal.
+        result_as = DataConverter.to_dataarray(bool_series, coords={'time': time_coords})
+        assert result_as.shape == (5,)
+        assert result_as.dtype == bool
+        assert np.array_equal(result_as.values, bool_series.values)
+
+    def test_boolean_dataframe(self, time_coords):
+        """Boolean DataFrame should work."""
+        bool_df = pd.DataFrame({'values': [True, False, True, False, True]}, index=time_coords)
+        result = DataConverter.to_dataarray(bool_df, coords={'time': time_coords})
+        assert result.shape == (5,)
+        assert result.dtype == bool
+        assert np.array_equal(result.values, bool_df['values'].values)
+
+        # NOTE(review): duplicate of the conversion/assertions above — see test_boolean_series.
+        result_as = DataConverter.to_dataarray(bool_df, coords={'time': time_coords})
+        assert result_as.shape == (5,)
+        assert result_as.dtype == bool
+        assert np.array_equal(result_as.values, bool_df['values'].values)
+
+    def test_multidimensional_boolean_array(self, standard_coords):
+        """Multi-dimensional boolean arrays should work."""
+        bool_data = np.array(
+            [[True, False, True], [False, True, False], [True, True, False], [False, False, True], [True, False, True]]
+        )
+        result = DataConverter.to_dataarray(
+            bool_data, coords={'time': standard_coords['time'], 'scenario': standard_coords['scenario']}
+        )
+        assert result.shape == (5, 3)
+        assert result.dtype == bool
+        assert np.array_equal(result.values, bool_data)
+
+        # NOTE(review): duplicate of the conversion/assertions above — see test_boolean_series.
+        result_as = DataConverter.to_dataarray(
+            bool_data, coords={'time': standard_coords['time'], 'scenario': standard_coords['scenario']}
+        )
+        assert result_as.shape == (5, 3)
+        assert result_as.dtype == bool
+        assert np.array_equal(result_as.values, bool_data)
+
+
+class TestSpecialValues:
+    """Test handling of special numeric values (NaN, inf, bool, mixed dtypes)."""
+
+    def test_nan_values(self, time_coords):
+        """NaN values should be preserved."""
+        arr_with_nan = np.array([1, np.nan, 3, np.nan, 5])
+        result = DataConverter.to_dataarray(arr_with_nan, coords={'time': time_coords})
+
+        # NaN positions match, and the non-NaN values are unchanged
+        # (NaN != NaN, so direct equality cannot be used).
+        assert np.array_equal(np.isnan(result.values), np.isnan(arr_with_nan))
+        assert np.array_equal(result.values[~np.isnan(result.values)], arr_with_nan[~np.isnan(arr_with_nan)])
+
+    def test_infinite_values(self, time_coords):
+        """Infinite values should be preserved."""
+        arr_with_inf = np.array([1, np.inf, 3, -np.inf, 5])
+        result = DataConverter.to_dataarray(arr_with_inf, coords={'time': time_coords})
+
+        assert np.array_equal(result.values, arr_with_inf)
+
+    def test_boolean_values(self, time_coords):
+        """Boolean values should be preserved."""
+        bool_arr = np.array([True, False, True, False, True])
+        result = DataConverter.to_dataarray(bool_arr, coords={'time': time_coords})
+
+        assert result.dtype == bool
+        assert np.array_equal(result.values, bool_arr)
+
+    def test_mixed_numeric_types(self, time_coords):
+        """Mixed integer/float should become float."""
+        mixed_arr = np.array([1, 2.5, 3, 4.5, 5])
+        result = DataConverter.to_dataarray(mixed_arr, coords={'time': time_coords})
+
+        assert np.issubdtype(result.dtype, np.floating)
+        assert np.array_equal(result.values, mixed_arr)
+
+    def test_special_values_in_multid_arrays(self, standard_coords):
+        """Special values should be preserved in multi-D arrays and broadcasting."""
+        # Array with NaN and inf
+        special_arr = np.array([1, np.nan, np.inf, -np.inf, 5])
+        result = DataConverter.to_dataarray(special_arr, coords=standard_coords)
+
+        assert result.shape == (5, 3, 2)
+
+        # Check that special values are preserved in all broadcasts
+        for scenario in standard_coords['scenario']:
+            for region in standard_coords['region']:
+                slice_data = result.sel(scenario=scenario, region=region)
+                assert np.array_equal(np.isnan(slice_data.values), np.isnan(special_arr))
+                assert np.array_equal(np.isinf(slice_data.values), np.isinf(special_arr))
+
+
+class TestAdvancedBroadcasting:
+    """Test advanced broadcasting scenarios and edge cases."""
+
+    def test_partial_dimension_matching_with_broadcasting(self, standard_coords):
+        """Test that partial dimension matching works with the improved integration."""
+        # 1D array matching one dimension should broadcast to all target dimensions
+        time_arr = np.array([10, 20, 30, 40, 50])  # matches time (length 5)
+        result = DataConverter.to_dataarray(time_arr, coords=standard_coords)
+
+        assert result.shape == (5, 3, 2)
+        assert result.dims == ('time', 'scenario', 'region')
+
+        # Verify broadcasting
+        for scenario in standard_coords['scenario']:
+            for region in standard_coords['region']:
+                assert np.array_equal(result.sel(scenario=scenario, region=region).values, time_arr)
+
+    def test_complex_multid_scenario(self):
+        """Complex real-world scenario with multi-D array and broadcasting."""
+        # Energy system data: time x technology, broadcast to regions
+        coords = {
+            'time': pd.date_range('2024-01-01', periods=24, freq='h', name='time'),  # 24 hours
+            'technology': pd.Index(['solar', 'wind', 'gas', 'coal'], name='technology'),  # 4 technologies
+            'region': pd.Index(['north', 'south', 'east'], name='region'),  # 3 regions
+        }
+
+        # Capacity factors: 24 x 4 (will broadcast to 24 x 4 x 3)
+        capacity_factors = np.random.rand(24, 4)
+
+        result = DataConverter.to_dataarray(capacity_factors, coords=coords)
+
+        assert result.shape == (24, 4, 3)
+        assert result.dims == ('time', 'technology', 'region')
+        assert isinstance(result.indexes['time'], pd.DatetimeIndex)
+
+        # Verify broadcasting: all regions should have same time×technology data
+        for region in coords['region']:
+            assert np.array_equal(result.sel(region=region).values, capacity_factors)
+
+    def test_ambiguous_length_handling(self):
+        """Test handling of ambiguous length scenarios across different data types."""
+        # All dimensions have length 3, so any 3-sized array axis is ambiguous.
+        coords_3x3x3 = {
+            'time': pd.date_range('2024-01-01', periods=3, freq='D', name='time'),
+            'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),
+            'region': pd.Index(['X', 'Y', 'Z'], name='region'),
+        }
+
+        # 1D array - should fail
+        arr_1d = np.array([1, 2, 3])
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(arr_1d, coords=coords_3x3x3)
+
+        # 2D array - should fail
+        arr_2d = np.random.rand(3, 3)
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(arr_2d, coords=coords_3x3x3)
+
+        # 3D array - should fail
+        arr_3d = np.random.rand(3, 3, 3)
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(arr_3d, coords=coords_3x3x3)
+
+    def test_mixed_broadcasting_scenarios(self):
+        """Test various broadcasting scenarios with different input types."""
+        # All four dimension lengths are distinct, so shape matching is unambiguous.
+        coords = {
+            'time': pd.date_range('2024-01-01', periods=4, freq='D', name='time'),  # length 4
+            'scenario': pd.Index(['A', 'B'], name='scenario'),  # length 2
+            'region': pd.Index(['north', 'south', 'east'], name='region'),  # length 3
+            'product': pd.Index(['X', 'Y', 'Z', 'W', 'V'], name='product'),  # length 5
+        }
+
+        # Scalar to 4D
+        scalar_result = DataConverter.to_dataarray(42, coords=coords)
+        assert scalar_result.shape == (4, 2, 3, 5)
+        assert np.all(scalar_result.values == 42)
+
+        # 1D array (length 4, matches time) to 4D
+        arr_1d = np.array([10, 20, 30, 40])
+        arr_result = DataConverter.to_dataarray(arr_1d, coords=coords)
+        assert arr_result.shape == (4, 2, 3, 5)
+        # Verify broadcasting
+        for scenario in coords['scenario']:
+            for region in coords['region']:
+                for product in coords['product']:
+                    assert np.array_equal(
+                        arr_result.sel(scenario=scenario, region=region, product=product).values, arr_1d
+                    )
+
+        # 2D array (4x2, matches time×scenario) to 4D
+        arr_2d = np.random.rand(4, 2)
+        arr_2d_result = DataConverter.to_dataarray(arr_2d, coords=coords)
+        assert arr_2d_result.shape == (4, 2, 3, 5)
+        # Verify broadcasting
+        for region in coords['region']:
+            for product in coords['product']:
+                assert np.array_equal(arr_2d_result.sel(region=region, product=product).values, arr_2d)
+
+
+class TestAmbiguousDimensionLengthHandling:
+    """Test that DataConverter correctly raises errors when multiple dimensions have the same length.
+
+    An array axis whose length matches more than one target dimension cannot be mapped
+    unambiguously, so conversion must fail; index-matched pandas objects are the exception,
+    because the index identifies the dimension explicitly.
+    """
+
+    def test_1d_array_ambiguous_dimensions_simple(self):
+        """Test 1D array with two dimensions of same length should fail."""
+        # Both dimensions have length 3
+        coords_ambiguous = {
+            'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),  # length 3
+            'region': pd.Index(['north', 'south', 'east'], name='region'),  # length 3
+        }
+
+        arr_1d = np.array([1, 2, 3])  # length 3 - matches both dimensions
+
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(arr_1d, coords=coords_ambiguous)
+
+    def test_1d_array_ambiguous_dimensions_complex(self):
+        """Test 1D array with multiple dimensions of same length."""
+        # Three dimensions have length 4
+        coords_4x4x4 = {
+            'time': pd.date_range('2024-01-01', periods=4, freq='D', name='time'),  # length 4
+            'scenario': pd.Index(['A', 'B', 'C', 'D'], name='scenario'),  # length 4
+            'region': pd.Index(['north', 'south', 'east', 'west'], name='region'),  # length 4
+            'product': pd.Index(['X', 'Y'], name='product'),  # length 2 - unique
+        }
+
+        # Array matching the ambiguous length
+        arr_1d = np.array([10, 20, 30, 40])  # length 4 - matches time, scenario, region
+
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(arr_1d, coords=coords_4x4x4)
+
+        # Array matching the unique length should work
+        arr_1d_unique = np.array([100, 200])  # length 2 - matches only product
+        result = DataConverter.to_dataarray(arr_1d_unique, coords=coords_4x4x4)
+        assert result.shape == (4, 4, 4, 2)  # broadcast to all dimensions
+        assert result.dims == ('time', 'scenario', 'region', 'product')
+
+    def test_2d_array_ambiguous_dimensions_both_same(self):
+        """Test 2D array where both dimensions have the same ambiguous length."""
+        # All dimensions have length 3
+        coords_3x3x3 = {
+            'time': pd.date_range('2024-01-01', periods=3, freq='D', name='time'),  # length 3
+            'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),  # length 3
+            'region': pd.Index(['X', 'Y', 'Z'], name='region'),  # length 3
+        }
+
+        # 3x3 array - could be any combination of the three dimensions
+        arr_2d = np.random.rand(3, 3)
+
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(arr_2d, coords=coords_3x3x3)
+
+    def test_2d_array_one_dimension_ambiguous(self):
+        """Test 2D array where only one dimension length is ambiguous."""
+        coords_mixed = {
+            'time': pd.date_range('2024-01-01', periods=5, freq='D', name='time'),  # length 5 - unique
+            'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),  # length 3
+            'region': pd.Index(['X', 'Y', 'Z'], name='region'),  # length 3 - same as scenario
+            'product': pd.Index(['P1', 'P2'], name='product'),  # length 2 - unique
+        }
+
+        # 5x3 array - first dimension clearly maps to time (unique length 5)
+        # but second dimension could be scenario or region (both length 3)
+        arr_5x3 = np.random.rand(5, 3)
+
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(arr_5x3, coords=coords_mixed)
+
+        # 5x2 array should work - dimensions are unambiguous
+        arr_5x2 = np.random.rand(5, 2)
+        result = DataConverter.to_dataarray(
+            arr_5x2, coords={'time': coords_mixed['time'], 'product': coords_mixed['product']}
+        )
+        assert result.shape == (5, 2)
+        assert result.dims == ('time', 'product')
+
+    def test_3d_array_all_dimensions_ambiguous(self):
+        """Test 3D array where all dimension lengths are ambiguous."""
+        # All dimensions have length 2
+        coords_2x2x2x2 = {
+            'scenario': pd.Index(['A', 'B'], name='scenario'),  # length 2
+            'region': pd.Index(['north', 'south'], name='region'),  # length 2
+            'technology': pd.Index(['solar', 'wind'], name='technology'),  # length 2
+            'product': pd.Index(['X', 'Y'], name='product'),  # length 2
+        }
+
+        # 2x2x2 array - could be any combination of 3 dimensions from the 4 available
+        arr_3d = np.random.rand(2, 2, 2)
+
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(arr_3d, coords=coords_2x2x2x2)
+
+    def test_3d_array_partial_ambiguity(self):
+        """Test 3D array with partial dimension ambiguity."""
+        coords_partial = {
+            'time': pd.date_range('2024-01-01', periods=4, freq='D', name='time'),  # length 4 - unique
+            'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),  # length 3
+            'region': pd.Index(['X', 'Y', 'Z'], name='region'),  # length 3 - same as scenario
+            'technology': pd.Index(['solar', 'wind'], name='technology'),  # length 2 - unique
+        }
+
+        # 4x3x2 array - first and third dimensions are unique, middle is ambiguous
+        # This should still fail because middle dimension (length 3) could be scenario or region
+        arr_4x3x2 = np.random.rand(4, 3, 2)
+
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(arr_4x3x2, coords=coords_partial)
+
+    def test_pandas_series_ambiguous_dimensions(self):
+        """Test pandas Series with ambiguous dimension lengths."""
+        coords_ambiguous = {
+            'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),  # length 3
+            'region': pd.Index(['north', 'south', 'east'], name='region'),  # length 3
+        }
+
+        # Series with length 3 but index that doesn't match either coordinate exactly
+        generic_series = pd.Series([10, 20, 30], index=[0, 1, 2])
+
+        # Should fail because length matches multiple dimensions and index doesn't match any
+        with pytest.raises(ConversionError, match='Series index does not match any target dimension coordinates'):
+            DataConverter.to_dataarray(generic_series, coords=coords_ambiguous)
+
+        # Series with index that matches one of the ambiguous coordinates should work
+        # (the matching index disambiguates the dimension).
+        scenario_series = pd.Series([10, 20, 30], index=coords_ambiguous['scenario'])
+        result = DataConverter.to_dataarray(scenario_series, coords=coords_ambiguous)
+        assert result.shape == (3, 3)  # should broadcast to both dimensions
+        assert result.dims == ('scenario', 'region')
+
+    def test_edge_case_many_same_lengths(self):
+        """Test edge case with many dimensions having the same length."""
+        # Five dimensions all have length 2
+        coords_many = {
+            'dim1': pd.Index(['A', 'B'], name='dim1'),
+            'dim2': pd.Index(['X', 'Y'], name='dim2'),
+            'dim3': pd.Index(['P', 'Q'], name='dim3'),
+            'dim4': pd.Index(['M', 'N'], name='dim4'),
+            'dim5': pd.Index(['U', 'V'], name='dim5'),
+        }
+
+        # 1D array
+        arr_1d = np.array([1, 2])
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(arr_1d, coords=coords_many)
+
+        # 2D array
+        arr_2d = np.random.rand(2, 2)
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(arr_2d, coords=coords_many)
+
+        # 3D array
+        arr_3d = np.random.rand(2, 2, 2)
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(arr_3d, coords=coords_many)
+
+    def test_mixed_lengths_with_duplicates(self):
+        """Test mixed scenario with some duplicate and some unique lengths."""
+        coords_mixed = {
+            'time': pd.date_range('2024-01-01', periods=8, freq='D', name='time'),  # length 8 - unique
+            'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),  # length 3
+            'region': pd.Index(['X', 'Y', 'Z'], name='region'),  # length 3 - same as scenario
+            'technology': pd.Index(['solar'], name='technology'),  # length 1 - unique
+            'product': pd.Index(['P1', 'P2', 'P3', 'P4', 'P5'], name='product'),  # length 5 - unique
+        }
+
+        # Arrays with unique lengths should work
+        arr_8 = np.arange(8)
+        result_8 = DataConverter.to_dataarray(arr_8, coords=coords_mixed)
+        assert result_8.dims == ('time', 'scenario', 'region', 'technology', 'product')
+
+        arr_1 = np.array([42])
+        result_1 = DataConverter.to_dataarray(arr_1, coords={'technology': coords_mixed['technology']})
+        assert result_1.shape == (1,)
+
+        arr_5 = np.arange(5)
+        result_5 = DataConverter.to_dataarray(arr_5, coords={'product': coords_mixed['product']})
+        assert result_5.shape == (5,)
+
+        # Arrays with ambiguous length should fail
+        arr_3 = np.array([1, 2, 3])  # matches both scenario and region
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(arr_3, coords=coords_mixed)
+
+    def test_dataframe_with_ambiguous_dimensions(self):
+        """Test DataFrame handling with ambiguous dimensions."""
+        coords_ambiguous = {
+            'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),  # length 3
+            'region': pd.Index(['X', 'Y', 'Z'], name='region'),  # length 3
+        }
+
+        # Multi-column DataFrame with ambiguous dimensions
+        df = pd.DataFrame({'col1': [1, 2, 3], 'col2': [4, 5, 6], 'col3': [7, 8, 9]})  # 3x3 DataFrame
+
+        # Should fail due to ambiguous dimensions
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(df, coords=coords_ambiguous)
+
+    def test_error_message_quality_for_ambiguous_dimensions(self):
+        """Test that error messages for ambiguous dimensions are helpful."""
+        coords_ambiguous = {
+            'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),
+            'region': pd.Index(['north', 'south', 'east'], name='region'),
+            'technology': pd.Index(['solar', 'wind', 'gas'], name='technology'),
+        }
+
+        # 1D array case
+        # try/except (instead of pytest.raises) so several substrings of the same
+        # message can be asserted on one exception instance.
+        arr_1d = np.array([1, 2, 3])
+        try:
+            DataConverter.to_dataarray(arr_1d, coords=coords_ambiguous)
+            raise AssertionError('Should have raised ConversionError')
+        except ConversionError as e:
+            error_msg = str(e)
+            assert 'matches multiple dimension' in error_msg
+            assert 'scenario' in error_msg
+            assert 'region' in error_msg
+            assert 'technology' in error_msg
+
+        # 2D array case
+        arr_2d = np.random.rand(3, 3)
+        try:
+            DataConverter.to_dataarray(arr_2d, coords=coords_ambiguous)
+            raise AssertionError('Should have raised ConversionError')
+        except ConversionError as e:
+            error_msg = str(e)
+            assert 'matches multiple dimension combinations' in error_msg
+            assert '(3, 3)' in error_msg
+
+    def test_ambiguous_with_broadcasting_target(self):
+        """Test ambiguous dimensions when target includes broadcasting."""
+        coords_ambiguous_plus = {
+            'time': pd.date_range('2024-01-01', periods=5, freq='D', name='time'),  # length 5
+            'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),  # length 3
+            'region': pd.Index(['X', 'Y', 'Z'], name='region'),  # length 3 - same as scenario
+            'technology': pd.Index(['solar', 'wind'], name='technology'),  # length 2
+        }
+
+        # 1D array with ambiguous length, but targeting broadcast scenario
+        arr_3 = np.array([10, 20, 30])  # length 3, matches scenario and region
+
+        # Should fail even though it would broadcast to other dimensions
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(arr_3, coords=coords_ambiguous_plus)
+
+        # 2D array with one ambiguous dimension
+        arr_5x3 = np.random.rand(5, 3)  # 5 is unique (time), 3 is ambiguous (scenario/region)
+
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(arr_5x3, coords=coords_ambiguous_plus)
+
+    def test_time_dimension_ambiguity(self):
+        """Test ambiguity specifically involving time dimension."""
+        # Create scenario where time has same length as another dimension
+        coords_time_ambiguous = {
+            'time': pd.date_range('2024-01-01', periods=3, freq='D', name='time'),  # length 3
+            'scenario': pd.Index(['base', 'high', 'low'], name='scenario'),  # length 3 - same as time
+            'region': pd.Index(['north', 'south'], name='region'),  # length 2 - unique
+        }
+
+        # Time-indexed series should work even with ambiguous lengths (index matching takes precedence)
+        time_series = pd.Series([100, 200, 300], index=coords_time_ambiguous['time'])
+        result = DataConverter.to_dataarray(time_series, coords=coords_time_ambiguous)
+        assert result.shape == (3, 3, 2)
+        assert result.dims == ('time', 'scenario', 'region')
+
+        # But generic array with length 3 should still fail
+        generic_array = np.array([100, 200, 300])
+        with pytest.raises(ConversionError, match='matches multiple dimension'):
+            DataConverter.to_dataarray(generic_array, coords=coords_time_ambiguous)
+
+
+# Allow running this test module directly (e.g. `python <file>`) outside a pytest invocation.
+if __name__ == '__main__':
+    pytest.main()
diff --git a/tests/deprecated/test_effect.py b/tests/deprecated/test_effect.py
new file mode 100644
index 000000000..b3bb278f0
--- /dev/null
+++ b/tests/deprecated/test_effect.py
@@ -0,0 +1,371 @@
+import numpy as np
+import pytest
+import xarray as xr
+
+import flixopt as fx
+
+from .conftest import (
+ assert_conequal,
+ assert_sets_equal,
+ assert_var_equal,
+ create_linopy_model,
+ create_optimization_and_solve,
+)
+
+
+class TestEffectModel:
+    """Test the Effect model: created variables/constraints, bounds and inter-effect shares.
+
+    NOTE(review): the original docstring said "FlowModel", but every test here
+    exercises fx.Effect submodels.
+    """
+
+    def test_minimal(self, basic_flow_system_linopy_coords, coords_config):
+        # A bare Effect with no bounds and no contributing components.
+        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+        effect = fx.Effect('Effect1', '€', 'Testing Effect')
+
+        flow_system.add_elements(effect)
+        model = create_linopy_model(flow_system)
+
+        assert_sets_equal(
+            set(effect.submodel.variables),
+            {
+                'Effect1(periodic)',
+                'Effect1(temporal)',
+                'Effect1(temporal)|per_timestep',
+                'Effect1',
+            },
+            msg='Incorrect variables',
+        )
+
+        assert_sets_equal(
+            set(effect.submodel.constraints),
+            {
+                'Effect1(periodic)',
+                'Effect1(temporal)',
+                'Effect1(temporal)|per_timestep',
+                'Effect1',
+            },
+            msg='Incorrect constraints',
+        )
+
+        # Without bounds, all totals are unbounded variables over (period, scenario);
+        # the per-timestep variable additionally carries the time dimension.
+        assert_var_equal(
+            model.variables['Effect1'], model.add_variables(coords=model.get_coords(['period', 'scenario']))
+        )
+        assert_var_equal(
+            model.variables['Effect1(periodic)'], model.add_variables(coords=model.get_coords(['period', 'scenario']))
+        )
+        assert_var_equal(
+            model.variables['Effect1(temporal)'],
+            model.add_variables(coords=model.get_coords(['period', 'scenario'])),
+        )
+        assert_var_equal(
+            model.variables['Effect1(temporal)|per_timestep'], model.add_variables(coords=model.get_coords())
+        )
+
+        # Total = temporal + periodic; temporal is the time-sum of the per-timestep variable.
+        assert_conequal(
+            model.constraints['Effect1'],
+            model.variables['Effect1'] == model.variables['Effect1(temporal)'] + model.variables['Effect1(periodic)'],
+        )
+        # In minimal/bounds tests with no contributing components, periodic totals should be zero
+        assert_conequal(model.constraints['Effect1(periodic)'], model.variables['Effect1(periodic)'] == 0)
+        assert_conequal(
+            model.constraints['Effect1(temporal)'],
+            model.variables['Effect1(temporal)'] == model.variables['Effect1(temporal)|per_timestep'].sum('time'),
+        )
+        assert_conequal(
+            model.constraints['Effect1(temporal)|per_timestep'],
+            model.variables['Effect1(temporal)|per_timestep'] == 0,
+        )
+
+    def test_bounds(self, basic_flow_system_linopy_coords, coords_config):
+        # Same structure as test_minimal, but every bound pair is set and must be
+        # reflected in the corresponding variable's lower/upper bounds.
+        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+        effect = fx.Effect(
+            'Effect1',
+            '€',
+            'Testing Effect',
+            minimum_temporal=1.0,
+            maximum_temporal=1.1,
+            minimum_periodic=2.0,
+            maximum_periodic=2.1,
+            minimum_total=3.0,
+            maximum_total=3.1,
+            minimum_per_hour=4.0,
+            maximum_per_hour=4.1,
+        )
+
+        flow_system.add_elements(effect)
+        model = create_linopy_model(flow_system)
+
+        assert_sets_equal(
+            set(effect.submodel.variables),
+            {
+                'Effect1(periodic)',
+                'Effect1(temporal)',
+                'Effect1(temporal)|per_timestep',
+                'Effect1',
+            },
+            msg='Incorrect variables',
+        )
+
+        assert_sets_equal(
+            set(effect.submodel.constraints),
+            {
+                'Effect1(periodic)',
+                'Effect1(temporal)',
+                'Effect1(temporal)|per_timestep',
+                'Effect1',
+            },
+            msg='Incorrect constraints',
+        )
+
+        assert_var_equal(
+            model.variables['Effect1'],
+            model.add_variables(lower=3.0, upper=3.1, coords=model.get_coords(['period', 'scenario'])),
+        )
+        assert_var_equal(
+            model.variables['Effect1(periodic)'],
+            model.add_variables(lower=2.0, upper=2.1, coords=model.get_coords(['period', 'scenario'])),
+        )
+        assert_var_equal(
+            model.variables['Effect1(temporal)'],
+            model.add_variables(lower=1.0, upper=1.1, coords=model.get_coords(['period', 'scenario'])),
+        )
+        # Per-hour bounds are scaled by the step duration to get per-timestep bounds.
+        assert_var_equal(
+            model.variables['Effect1(temporal)|per_timestep'],
+            model.add_variables(
+                lower=4.0 * model.hours_per_step,
+                upper=4.1 * model.hours_per_step,
+                coords=model.get_coords(['time', 'period', 'scenario']),
+            ),
+        )
+
+        assert_conequal(
+            model.constraints['Effect1'],
+            model.variables['Effect1'] == model.variables['Effect1(temporal)'] + model.variables['Effect1(periodic)'],
+        )
+        # In minimal/bounds tests with no contributing components, periodic totals should be zero
+        assert_conequal(model.constraints['Effect1(periodic)'], model.variables['Effect1(periodic)'] == 0)
+        assert_conequal(
+            model.constraints['Effect1(temporal)'],
+            model.variables['Effect1(temporal)'] == model.variables['Effect1(temporal)|per_timestep'].sum('time'),
+        )
+        assert_conequal(
+            model.constraints['Effect1(temporal)|per_timestep'],
+            model.variables['Effect1(temporal)|per_timestep'] == 0,
+        )
+
+    def test_shares(self, basic_flow_system_linopy_coords, coords_config):
+        # Effect2 and Effect3 both receive temporal and periodic shares from Effect1;
+        # each share becomes an extra variable and constraint on the receiving effect.
+        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+        effect1 = fx.Effect(
+            'Effect1',
+            '€',
+            'Testing Effect',
+        )
+        effect2 = fx.Effect(
+            'Effect2',
+            '€',
+            'Testing Effect',
+            share_from_temporal={'Effect1': 1.1},
+            share_from_periodic={'Effect1': 2.1},
+        )
+        effect3 = fx.Effect(
+            'Effect3',
+            '€',
+            'Testing Effect',
+            share_from_temporal={'Effect1': 1.2},
+            share_from_periodic={'Effect1': 2.2},
+        )
+        flow_system.add_elements(effect1, effect2, effect3)
+        model = create_linopy_model(flow_system)
+
+        assert_sets_equal(
+            set(effect2.submodel.variables),
+            {
+                'Effect2(periodic)',
+                'Effect2(temporal)',
+                'Effect2(temporal)|per_timestep',
+                'Effect2',
+                'Effect1(periodic)->Effect2(periodic)',
+                'Effect1(temporal)->Effect2(temporal)',
+            },
+            msg='Incorrect variables for effect2',
+        )
+
+        assert_sets_equal(
+            set(effect2.submodel.constraints),
+            {
+                'Effect2(periodic)',
+                'Effect2(temporal)',
+                'Effect2(temporal)|per_timestep',
+                'Effect2',
+                'Effect1(periodic)->Effect2(periodic)',
+                'Effect1(temporal)->Effect2(temporal)',
+            },
+            msg='Incorrect constraints for effect2',
+        )
+
+        # Effect2's totals are fed entirely by the share variables from Effect1 ...
+        assert_conequal(
+            model.constraints['Effect2(periodic)'],
+            model.variables['Effect2(periodic)'] == model.variables['Effect1(periodic)->Effect2(periodic)'],
+        )
+
+        assert_conequal(
+            model.constraints['Effect2(temporal)|per_timestep'],
+            model.variables['Effect2(temporal)|per_timestep']
+            == model.variables['Effect1(temporal)->Effect2(temporal)'],
+        )
+
+        # ... and each share variable equals Effect1's contribution times the share factor.
+        assert_conequal(
+            model.constraints['Effect1(temporal)->Effect2(temporal)'],
+            model.variables['Effect1(temporal)->Effect2(temporal)']
+            == model.variables['Effect1(temporal)|per_timestep'] * 1.1,
+        )
+
+        assert_conequal(
+            model.constraints['Effect1(periodic)->Effect2(periodic)'],
+            model.variables['Effect1(periodic)->Effect2(periodic)'] == model.variables['Effect1(periodic)'] * 2.1,
+        )
+
+
+class TestEffectResults:
+ @pytest.mark.filterwarnings('ignore::DeprecationWarning')
+ def test_shares(self, basic_flow_system_linopy_coords, coords_config):
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+ effect1 = fx.Effect('Effect1', '€', 'Testing Effect', share_from_temporal={'costs': 0.5})
+ effect2 = fx.Effect(
+ 'Effect2',
+ '€',
+ 'Testing Effect',
+ share_from_temporal={'Effect1': 1.1},
+ share_from_periodic={'Effect1': 2.1},
+ )
+ effect3 = fx.Effect(
+ 'Effect3',
+ '€',
+ 'Testing Effect',
+ share_from_temporal={'Effect1': 1.2, 'Effect2': 5},
+ share_from_periodic={'Effect1': 2.2},
+ )
+ flow_system.add_elements(
+ effect1,
+ effect2,
+ effect3,
+ fx.linear_converters.Boiler(
+ 'Boiler',
+ thermal_efficiency=0.5,
+ thermal_flow=fx.Flow(
+ 'Q_th',
+ bus='Fernwärme',
+ size=fx.InvestParameters(
+ effects_of_investment_per_size=10, minimum_size=20, maximum_size=200, mandatory=True
+ ),
+ ),
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ ),
+ )
+
+ results = create_optimization_and_solve(flow_system, fx.solvers.HighsSolver(0.01, 60), 'Sim1').results
+
+ effect_share_factors = {
+ 'temporal': {
+ ('costs', 'Effect1'): 0.5,
+ ('costs', 'Effect2'): 0.5 * 1.1,
+                ('costs', 'Effect3'): 0.5 * 1.1 * 5 + 0.5 * 1.2,  # two paths: costs->E1->E2->E3 (0.5*1.1*5) + costs->E1->E3 (0.5*1.2)
+ ('Effect1', 'Effect2'): 1.1,
+ ('Effect1', 'Effect3'): 1.2 + 1.1 * 5,
+ ('Effect2', 'Effect3'): 5,
+ },
+ 'periodic': {
+ ('Effect1', 'Effect2'): 2.1,
+ ('Effect1', 'Effect3'): 2.2,
+ },
+ }
+ for key, value in effect_share_factors['temporal'].items():
+ np.testing.assert_allclose(results.effect_share_factors['temporal'][key].values, value)
+
+ for key, value in effect_share_factors['periodic'].items():
+ np.testing.assert_allclose(results.effect_share_factors['periodic'][key].values, value)
+
+ xr.testing.assert_allclose(
+ results.effects_per_component['temporal'].sum('component').sel(effect='costs', drop=True),
+ results.solution['costs(temporal)|per_timestep'].fillna(0),
+ )
+
+ xr.testing.assert_allclose(
+ results.effects_per_component['temporal'].sum('component').sel(effect='Effect1', drop=True),
+ results.solution['Effect1(temporal)|per_timestep'].fillna(0),
+ )
+
+ xr.testing.assert_allclose(
+ results.effects_per_component['temporal'].sum('component').sel(effect='Effect2', drop=True),
+ results.solution['Effect2(temporal)|per_timestep'].fillna(0),
+ )
+
+ xr.testing.assert_allclose(
+ results.effects_per_component['temporal'].sum('component').sel(effect='Effect3', drop=True),
+ results.solution['Effect3(temporal)|per_timestep'].fillna(0),
+ )
+
+ # periodic mode checks
+ xr.testing.assert_allclose(
+ results.effects_per_component['periodic'].sum('component').sel(effect='costs', drop=True),
+ results.solution['costs(periodic)'],
+ )
+
+ xr.testing.assert_allclose(
+ results.effects_per_component['periodic'].sum('component').sel(effect='Effect1', drop=True),
+ results.solution['Effect1(periodic)'],
+ )
+
+ xr.testing.assert_allclose(
+ results.effects_per_component['periodic'].sum('component').sel(effect='Effect2', drop=True),
+ results.solution['Effect2(periodic)'],
+ )
+
+ xr.testing.assert_allclose(
+ results.effects_per_component['periodic'].sum('component').sel(effect='Effect3', drop=True),
+ results.solution['Effect3(periodic)'],
+ )
+
+ # Total mode checks
+ xr.testing.assert_allclose(
+ results.effects_per_component['total'].sum('component').sel(effect='costs', drop=True),
+ results.solution['costs'],
+ )
+
+ xr.testing.assert_allclose(
+ results.effects_per_component['total'].sum('component').sel(effect='Effect1', drop=True),
+ results.solution['Effect1'],
+ )
+
+ xr.testing.assert_allclose(
+ results.effects_per_component['total'].sum('component').sel(effect='Effect2', drop=True),
+ results.solution['Effect2'],
+ )
+
+ xr.testing.assert_allclose(
+ results.effects_per_component['total'].sum('component').sel(effect='Effect3', drop=True),
+ results.solution['Effect3'],
+ )
+
+
+class TestPenaltyAsObjective:
+ """Test that Penalty cannot be set as the objective effect."""
+
+ def test_penalty_cannot_be_created_as_objective(self):
+ """Test that creating a Penalty effect with is_objective=True raises ValueError."""
+ import pytest
+
+ with pytest.raises(ValueError, match='Penalty.*cannot be set as the objective'):
+ fx.Effect('Penalty', '€', 'Test Penalty', is_objective=True)
+
+ def test_penalty_cannot_be_set_as_objective_via_setter(self):
+ """Test that setting Penalty as objective via setter raises ValueError."""
+ import pandas as pd
+ import pytest
+
+ # Create a fresh flow system without pre-existing objective
+ flow_system = fx.FlowSystem(timesteps=pd.date_range('2020-01-01', periods=10, freq='h'))
+ penalty_effect = fx.Effect('Penalty', '€', 'Test Penalty', is_objective=False)
+
+ flow_system.add_elements(penalty_effect)
+
+ with pytest.raises(ValueError, match='Penalty.*cannot be set as the objective'):
+ flow_system.effects.objective_effect = penalty_effect
diff --git a/tests/deprecated/test_effects_shares_summation.py b/tests/deprecated/test_effects_shares_summation.py
new file mode 100644
index 000000000..312934732
--- /dev/null
+++ b/tests/deprecated/test_effects_shares_summation.py
@@ -0,0 +1,225 @@
+import pytest
+import xarray as xr
+
+from flixopt.effects import calculate_all_conversion_paths
+
+
+def test_direct_conversions():
+ """Test direct conversions with simple scalar values."""
+ conversion_dict = {'A': {'B': xr.DataArray(2.0)}, 'B': {'C': xr.DataArray(3.0)}}
+
+ result = calculate_all_conversion_paths(conversion_dict)
+
+ # Check direct conversions
+ assert ('A', 'B') in result
+ assert ('B', 'C') in result
+ assert result[('A', 'B')].item() == 2.0
+ assert result[('B', 'C')].item() == 3.0
+
+ # Check indirect conversion
+ assert ('A', 'C') in result
+ assert result[('A', 'C')].item() == 6.0 # 2.0 * 3.0
+
+
+def test_multiple_paths():
+ """Test multiple paths between nodes that should be summed."""
+ conversion_dict = {
+ 'A': {'B': xr.DataArray(2.0), 'C': xr.DataArray(3.0)},
+ 'B': {'D': xr.DataArray(4.0)},
+ 'C': {'D': xr.DataArray(5.0)},
+ }
+
+ result = calculate_all_conversion_paths(conversion_dict)
+
+ # A to D should sum two paths: A->B->D (2*4=8) and A->C->D (3*5=15)
+ assert ('A', 'D') in result
+ assert result[('A', 'D')].item() == 8.0 + 15.0
+
+
+def test_xarray_conversions():
+ """Test with xarray DataArrays that have dimensions."""
+ # Create DataArrays with a time dimension
+ time_points = [1, 2, 3]
+ a_to_b = xr.DataArray([2.0, 2.1, 2.2], dims='time', coords={'time': time_points})
+ b_to_c = xr.DataArray([3.0, 3.1, 3.2], dims='time', coords={'time': time_points})
+
+ conversion_dict = {'A': {'B': a_to_b}, 'B': {'C': b_to_c}}
+
+ result = calculate_all_conversion_paths(conversion_dict)
+
+ # Check indirect conversion preserves dimensions
+ assert ('A', 'C') in result
+ assert result[('A', 'C')].dims == ('time',)
+
+ # Check values at each time point
+ for i, t in enumerate(time_points):
+ expected = a_to_b.values[i] * b_to_c.values[i]
+ assert pytest.approx(result[('A', 'C')].sel(time=t).item()) == expected
+
+
+def test_long_paths():
+ """Test with longer paths (more than one intermediate node)."""
+ conversion_dict = {
+ 'A': {'B': xr.DataArray(2.0)},
+ 'B': {'C': xr.DataArray(3.0)},
+ 'C': {'D': xr.DataArray(4.0)},
+ 'D': {'E': xr.DataArray(5.0)},
+ }
+
+ result = calculate_all_conversion_paths(conversion_dict)
+
+ # Check the full path A->B->C->D->E
+ assert ('A', 'E') in result
+ expected = 2.0 * 3.0 * 4.0 * 5.0 # 120.0
+ assert result[('A', 'E')].item() == expected
+
+
+def test_diamond_paths():
+ """Test with a diamond shape graph with multiple paths to the same destination."""
+ conversion_dict = {
+ 'A': {'B': xr.DataArray(2.0), 'C': xr.DataArray(3.0)},
+ 'B': {'D': xr.DataArray(4.0)},
+ 'C': {'D': xr.DataArray(5.0)},
+ 'D': {'E': xr.DataArray(6.0)},
+ }
+
+ result = calculate_all_conversion_paths(conversion_dict)
+
+ # A to E should go through both paths:
+ # A->B->D->E (2*4*6=48) and A->C->D->E (3*5*6=90)
+ assert ('A', 'E') in result
+ expected = 48.0 + 90.0 # 138.0
+ assert result[('A', 'E')].item() == expected
+
+
+def test_effect_shares_example():
+ """Test the specific example from the effects share factors test."""
+ # Create the conversion dictionary based on test example
+ conversion_dict = {
+ 'costs': {'Effect1': xr.DataArray(0.5)},
+ 'Effect1': {'Effect2': xr.DataArray(1.1), 'Effect3': xr.DataArray(1.2)},
+ 'Effect2': {'Effect3': xr.DataArray(5.0)},
+ }
+
+ result = calculate_all_conversion_paths(conversion_dict)
+
+ # Test direct paths
+ assert result[('costs', 'Effect1')].item() == 0.5
+ assert result[('Effect1', 'Effect2')].item() == 1.1
+ assert result[('Effect2', 'Effect3')].item() == 5.0
+
+ # Test indirect paths
+ # costs -> Effect2 = costs -> Effect1 -> Effect2 = 0.5 * 1.1
+ assert result[('costs', 'Effect2')].item() == 0.5 * 1.1
+
+ # costs -> Effect3 has two paths:
+ # 1. costs -> Effect1 -> Effect3 = 0.5 * 1.2 = 0.6
+ # 2. costs -> Effect1 -> Effect2 -> Effect3 = 0.5 * 1.1 * 5 = 2.75
+ # Total = 0.6 + 2.75 = 3.35
+ assert result[('costs', 'Effect3')].item() == 0.5 * 1.2 + 0.5 * 1.1 * 5
+
+    # Effect1 -> Effect3 has two paths:
+    # 1. Effect1 -> Effect2 -> Effect3 = 1.1 * 5.0 = 5.5
+    # 2. Effect1 -> Effect3 = 1.2
+    # Total = 5.5 + 1.2 = 6.7
+    assert result[('Effect1', 'Effect3')].item() == 1.2 + 1.1 * 5.0
+
+
+def test_empty_conversion_dict():
+ """Test with an empty conversion dictionary."""
+ result = calculate_all_conversion_paths({})
+ assert len(result) == 0
+
+
+def test_no_indirect_paths():
+ """Test with a dictionary that has no indirect paths."""
+ conversion_dict = {'A': {'B': xr.DataArray(2.0)}, 'C': {'D': xr.DataArray(3.0)}}
+
+ result = calculate_all_conversion_paths(conversion_dict)
+
+ # Only direct paths should exist
+ assert len(result) == 2
+ assert ('A', 'B') in result
+ assert ('C', 'D') in result
+ assert result[('A', 'B')].item() == 2.0
+ assert result[('C', 'D')].item() == 3.0
+
+
+def test_complex_network():
+ """Test with a complex network of many nodes and multiple paths, without circular references."""
+ # Create a directed acyclic graph with many nodes
+ # Structure resembles a layered network with multiple paths
+ conversion_dict = {
+ 'A': {'B': xr.DataArray(1.5), 'C': xr.DataArray(2.0), 'D': xr.DataArray(0.5)},
+ 'B': {'E': xr.DataArray(3.0), 'F': xr.DataArray(1.2)},
+ 'C': {'E': xr.DataArray(0.8), 'G': xr.DataArray(2.5)},
+ 'D': {'G': xr.DataArray(1.8), 'H': xr.DataArray(3.2)},
+ 'E': {'I': xr.DataArray(0.7), 'J': xr.DataArray(1.4)},
+ 'F': {'J': xr.DataArray(2.2), 'K': xr.DataArray(0.9)},
+ 'G': {'K': xr.DataArray(1.6), 'L': xr.DataArray(2.8)},
+ 'H': {'L': xr.DataArray(0.4), 'M': xr.DataArray(1.1)},
+ 'I': {'N': xr.DataArray(2.3)},
+ 'J': {'N': xr.DataArray(1.9), 'O': xr.DataArray(0.6)},
+ 'K': {'O': xr.DataArray(3.5), 'P': xr.DataArray(1.3)},
+ 'L': {'P': xr.DataArray(2.7), 'Q': xr.DataArray(0.8)},
+ 'M': {'Q': xr.DataArray(2.1)},
+ 'N': {'R': xr.DataArray(1.7)},
+ 'O': {'R': xr.DataArray(2.9), 'S': xr.DataArray(1.0)},
+ 'P': {'S': xr.DataArray(2.4)},
+ 'Q': {'S': xr.DataArray(1.5)},
+ }
+
+ result = calculate_all_conversion_paths(conversion_dict)
+
+ # Check some direct paths
+ assert result[('A', 'B')].item() == 1.5
+ assert result[('D', 'H')].item() == 3.2
+ assert result[('G', 'L')].item() == 2.8
+
+ # Check some two-step paths
+ assert result[('A', 'E')].item() == 1.5 * 3.0 + 2.0 * 0.8 # A->B->E + A->C->E
+ assert result[('B', 'J')].item() == 3.0 * 1.4 + 1.2 * 2.2 # B->E->J + B->F->J
+
+ # Check some three-step paths
+ # A->B->E->I
+ # A->C->E->I
+ expected_a_to_i = 1.5 * 3.0 * 0.7 + 2.0 * 0.8 * 0.7
+ assert pytest.approx(result[('A', 'I')].item()) == expected_a_to_i
+
+ # Check some four-step paths
+ # A->B->E->I->N
+ # A->C->E->I->N
+ expected_a_to_n = 1.5 * 3.0 * 0.7 * 2.3 + 2.0 * 0.8 * 0.7 * 2.3
+ expected_a_to_n += 1.5 * 3.0 * 1.4 * 1.9 + 2.0 * 0.8 * 1.4 * 1.9 # A->B->E->J->N + A->C->E->J->N
+ expected_a_to_n += 1.5 * 1.2 * 2.2 * 1.9 # A->B->F->J->N
+ assert pytest.approx(result[('A', 'N')].item()) == expected_a_to_n
+
+ # Check a very long path from A to S
+ # This should include:
+ # A->B->E->J->O->S
+ # A->B->F->K->O->S
+ # A->C->E->J->O->S
+ # A->C->G->K->O->S
+ # A->D->G->K->O->S
+ # A->D->H->L->P->S
+ # A->D->H->M->Q->S
+ # And many more
+ assert ('A', 'S') in result
+
+ # There are many paths to R from A - check their existence
+ assert ('A', 'R') in result
+
+    # The input graph has no direct A->R edge (checked below), yet the
+    # transitive closure still contains the indirect A->R conversion
+    assert ('A', 'R') in result
+ assert 'A' not in conversion_dict.get('R', {})
+
+ # Count the number of paths calculated to verify algorithm explored all connections
+ # In a DAG with 19 nodes (A through S), the maximum number of pairs is 19*18 = 342
+ # But we won't have all possible connections due to the structure
+ # Just verify we have a reasonable number
+ assert len(result) > 50
+
+
+if __name__ == '__main__':
+ pytest.main()
diff --git a/tests/test_examples.py b/tests/deprecated/test_examples.py
similarity index 100%
rename from tests/test_examples.py
rename to tests/deprecated/test_examples.py
diff --git a/tests/deprecated/test_flow.py b/tests/deprecated/test_flow.py
new file mode 100644
index 000000000..594bc1fbb
--- /dev/null
+++ b/tests/deprecated/test_flow.py
@@ -0,0 +1,1339 @@
+import numpy as np
+import pytest
+import xarray as xr
+
+import flixopt as fx
+
+from .conftest import assert_conequal, assert_sets_equal, assert_var_equal, create_linopy_model
+
+
+class TestFlowModel:
+ """Test the FlowModel class."""
+
+ def test_flow_minimal(self, basic_flow_system_linopy_coords, coords_config):
+ """Test that flow model constraints are correctly generated."""
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+ flow = fx.Flow('Wärme', bus='Fernwärme', size=100)
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+
+ model = create_linopy_model(flow_system)
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|total_flow_hours'],
+ flow.submodel.variables['Sink(Wärme)|total_flow_hours']
+ == (flow.submodel.variables['Sink(Wärme)|flow_rate'] * model.hours_per_step).sum('time'),
+ )
+ assert_var_equal(flow.submodel.flow_rate, model.add_variables(lower=0, upper=100, coords=model.get_coords()))
+ assert_var_equal(
+ flow.submodel.total_flow_hours,
+ model.add_variables(lower=0, coords=model.get_coords(['period', 'scenario'])),
+ )
+
+ assert_sets_equal(
+ set(flow.submodel.variables),
+ {'Sink(Wärme)|total_flow_hours', 'Sink(Wärme)|flow_rate'},
+ msg='Incorrect variables',
+ )
+ assert_sets_equal(set(flow.submodel.constraints), {'Sink(Wärme)|total_flow_hours'}, msg='Incorrect constraints')
+
+ def test_flow(self, basic_flow_system_linopy_coords, coords_config):
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+ timesteps = flow_system.timesteps
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=100,
+ relative_minimum=np.linspace(0, 0.5, timesteps.size),
+ relative_maximum=np.linspace(0.5, 1, timesteps.size),
+ flow_hours_max=1000,
+ flow_hours_min=10,
+ load_factor_min=0.1,
+ load_factor_max=0.9,
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ # total_flow_hours
+ assert_conequal(
+ model.constraints['Sink(Wärme)|total_flow_hours'],
+ flow.submodel.variables['Sink(Wärme)|total_flow_hours']
+ == (flow.submodel.variables['Sink(Wärme)|flow_rate'] * model.hours_per_step).sum('time'),
+ )
+
+ assert_var_equal(
+ flow.submodel.total_flow_hours,
+ model.add_variables(lower=10, upper=1000, coords=model.get_coords(['period', 'scenario'])),
+ )
+
+ assert flow.relative_minimum.dims == tuple(model.get_coords())
+ assert flow.relative_maximum.dims == tuple(model.get_coords())
+
+ assert_var_equal(
+ flow.submodel.flow_rate,
+ model.add_variables(
+ lower=flow.relative_minimum * 100,
+ upper=flow.relative_maximum * 100,
+ coords=model.get_coords(),
+ ),
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|load_factor_min'],
+ flow.submodel.variables['Sink(Wärme)|total_flow_hours'] >= model.hours_per_step.sum('time') * 0.1 * 100,
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|load_factor_max'],
+ flow.submodel.variables['Sink(Wärme)|total_flow_hours'] <= model.hours_per_step.sum('time') * 0.9 * 100,
+ )
+
+ assert_sets_equal(
+ set(flow.submodel.variables),
+ {'Sink(Wärme)|total_flow_hours', 'Sink(Wärme)|flow_rate'},
+ msg='Incorrect variables',
+ )
+ assert_sets_equal(
+ set(flow.submodel.constraints),
+ {'Sink(Wärme)|total_flow_hours', 'Sink(Wärme)|load_factor_max', 'Sink(Wärme)|load_factor_min'},
+ msg='Incorrect constraints',
+ )
+
+ def test_effects_per_flow_hour(self, basic_flow_system_linopy_coords, coords_config):
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+ timesteps = flow_system.timesteps
+
+ costs_per_flow_hour = xr.DataArray(np.linspace(1, 2, timesteps.size), coords=(timesteps,))
+ co2_per_flow_hour = xr.DataArray(np.linspace(4, 5, timesteps.size), coords=(timesteps,))
+
+ flow = fx.Flow(
+ 'Wärme', bus='Fernwärme', effects_per_flow_hour={'costs': costs_per_flow_hour, 'CO2': co2_per_flow_hour}
+ )
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]), fx.Effect('CO2', 't', ''))
+ model = create_linopy_model(flow_system)
+ costs, co2 = flow_system.effects['costs'], flow_system.effects['CO2']
+
+ assert_sets_equal(
+ set(flow.submodel.variables),
+ {'Sink(Wärme)|total_flow_hours', 'Sink(Wärme)|flow_rate'},
+ msg='Incorrect variables',
+ )
+ assert_sets_equal(set(flow.submodel.constraints), {'Sink(Wärme)|total_flow_hours'}, msg='Incorrect constraints')
+
+ assert 'Sink(Wärme)->costs(temporal)' in set(costs.submodel.constraints)
+ assert 'Sink(Wärme)->CO2(temporal)' in set(co2.submodel.constraints)
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)->costs(temporal)'],
+ model.variables['Sink(Wärme)->costs(temporal)']
+ == flow.submodel.variables['Sink(Wärme)|flow_rate'] * model.hours_per_step * costs_per_flow_hour,
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)->CO2(temporal)'],
+ model.variables['Sink(Wärme)->CO2(temporal)']
+ == flow.submodel.variables['Sink(Wärme)|flow_rate'] * model.hours_per_step * co2_per_flow_hour,
+ )
+
+
+class TestFlowInvestModel:
+ """Test the FlowModel class."""
+
+ def test_flow_invest(self, basic_flow_system_linopy_coords, coords_config):
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+ timesteps = flow_system.timesteps
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=fx.InvestParameters(minimum_size=20, maximum_size=100, mandatory=True),
+ relative_minimum=np.linspace(0.1, 0.5, timesteps.size),
+ relative_maximum=np.linspace(0.5, 1, timesteps.size),
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ assert_sets_equal(
+ set(flow.submodel.variables),
+ {
+ 'Sink(Wärme)|total_flow_hours',
+ 'Sink(Wärme)|flow_rate',
+ 'Sink(Wärme)|size',
+ },
+ msg='Incorrect variables',
+ )
+ assert_sets_equal(
+ set(flow.submodel.constraints),
+ {
+ 'Sink(Wärme)|total_flow_hours',
+ 'Sink(Wärme)|flow_rate|ub',
+ 'Sink(Wärme)|flow_rate|lb',
+ },
+ msg='Incorrect constraints',
+ )
+
+ # size
+ assert_var_equal(
+ model['Sink(Wärme)|size'],
+ model.add_variables(lower=20, upper=100, coords=model.get_coords(['period', 'scenario'])),
+ )
+
+ assert flow.relative_minimum.dims == tuple(model.get_coords())
+ assert flow.relative_maximum.dims == tuple(model.get_coords())
+
+ # flow_rate
+ assert_var_equal(
+ flow.submodel.flow_rate,
+ model.add_variables(
+ lower=flow.relative_minimum * 20,
+ upper=flow.relative_maximum * 100,
+ coords=model.get_coords(),
+ ),
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|lb'],
+ flow.submodel.variables['Sink(Wärme)|flow_rate']
+ >= flow.submodel.variables['Sink(Wärme)|size'] * flow.relative_minimum,
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|ub'],
+ flow.submodel.variables['Sink(Wärme)|flow_rate']
+ <= flow.submodel.variables['Sink(Wärme)|size'] * flow.relative_maximum,
+ )
+
+ def test_flow_invest_optional(self, basic_flow_system_linopy_coords, coords_config):
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+ timesteps = flow_system.timesteps
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=fx.InvestParameters(minimum_size=20, maximum_size=100, mandatory=False),
+ relative_minimum=np.linspace(0.1, 0.5, timesteps.size),
+ relative_maximum=np.linspace(0.5, 1, timesteps.size),
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ assert_sets_equal(
+ set(flow.submodel.variables),
+ {'Sink(Wärme)|total_flow_hours', 'Sink(Wärme)|flow_rate', 'Sink(Wärme)|size', 'Sink(Wärme)|invested'},
+ msg='Incorrect variables',
+ )
+ assert_sets_equal(
+ set(flow.submodel.constraints),
+ {
+ 'Sink(Wärme)|total_flow_hours',
+ 'Sink(Wärme)|size|lb',
+ 'Sink(Wärme)|size|ub',
+ 'Sink(Wärme)|flow_rate|lb',
+ 'Sink(Wärme)|flow_rate|ub',
+ },
+ msg='Incorrect constraints',
+ )
+
+ assert_var_equal(
+ model['Sink(Wärme)|size'],
+ model.add_variables(lower=0, upper=100, coords=model.get_coords(['period', 'scenario'])),
+ )
+
+ assert_var_equal(
+ model['Sink(Wärme)|invested'],
+ model.add_variables(binary=True, coords=model.get_coords(['period', 'scenario'])),
+ )
+
+ assert flow.relative_minimum.dims == tuple(model.get_coords())
+ assert flow.relative_maximum.dims == tuple(model.get_coords())
+
+ # flow_rate
+ assert_var_equal(
+ flow.submodel.flow_rate,
+ model.add_variables(
+ lower=0, # Optional investment
+ upper=flow.relative_maximum * 100,
+ coords=model.get_coords(),
+ ),
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|lb'],
+ flow.submodel.variables['Sink(Wärme)|flow_rate']
+ >= flow.submodel.variables['Sink(Wärme)|size'] * flow.relative_minimum,
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|ub'],
+ flow.submodel.variables['Sink(Wärme)|flow_rate']
+ <= flow.submodel.variables['Sink(Wärme)|size'] * flow.relative_maximum,
+ )
+
+ # Is invested
+ assert_conequal(
+ model.constraints['Sink(Wärme)|size|ub'],
+ flow.submodel.variables['Sink(Wärme)|size'] <= flow.submodel.variables['Sink(Wärme)|invested'] * 100,
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|size|lb'],
+ flow.submodel.variables['Sink(Wärme)|size'] >= flow.submodel.variables['Sink(Wärme)|invested'] * 20,
+ )
+
+ def test_flow_invest_optional_wo_min_size(self, basic_flow_system_linopy_coords, coords_config):
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+ timesteps = flow_system.timesteps
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=fx.InvestParameters(maximum_size=100, mandatory=False),
+ relative_minimum=np.linspace(0.1, 0.5, timesteps.size),
+ relative_maximum=np.linspace(0.5, 1, timesteps.size),
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ assert_sets_equal(
+ set(flow.submodel.variables),
+ {'Sink(Wärme)|total_flow_hours', 'Sink(Wärme)|flow_rate', 'Sink(Wärme)|size', 'Sink(Wärme)|invested'},
+ msg='Incorrect variables',
+ )
+ assert_sets_equal(
+ set(flow.submodel.constraints),
+ {
+ 'Sink(Wärme)|total_flow_hours',
+ 'Sink(Wärme)|size|ub',
+ 'Sink(Wärme)|size|lb',
+ 'Sink(Wärme)|flow_rate|lb',
+ 'Sink(Wärme)|flow_rate|ub',
+ },
+ msg='Incorrect constraints',
+ )
+
+ assert_var_equal(
+ model['Sink(Wärme)|size'],
+ model.add_variables(lower=0, upper=100, coords=model.get_coords(['period', 'scenario'])),
+ )
+
+ assert_var_equal(
+ model['Sink(Wärme)|invested'],
+ model.add_variables(binary=True, coords=model.get_coords(['period', 'scenario'])),
+ )
+
+ assert flow.relative_minimum.dims == tuple(model.get_coords())
+ assert flow.relative_maximum.dims == tuple(model.get_coords())
+
+ # flow_rate
+ assert_var_equal(
+ flow.submodel.flow_rate,
+ model.add_variables(
+ lower=0, # Optional investment
+ upper=flow.relative_maximum * 100,
+ coords=model.get_coords(),
+ ),
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|lb'],
+ flow.submodel.variables['Sink(Wärme)|flow_rate']
+ >= flow.submodel.variables['Sink(Wärme)|size'] * flow.relative_minimum,
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|ub'],
+ flow.submodel.variables['Sink(Wärme)|flow_rate']
+ <= flow.submodel.variables['Sink(Wärme)|size'] * flow.relative_maximum,
+ )
+
+ # Is invested
+ assert_conequal(
+ model.constraints['Sink(Wärme)|size|ub'],
+ flow.submodel.variables['Sink(Wärme)|size'] <= flow.submodel.variables['Sink(Wärme)|invested'] * 100,
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|size|lb'],
+ flow.submodel.variables['Sink(Wärme)|size'] >= flow.submodel.variables['Sink(Wärme)|invested'] * 1e-5,
+ )
+
+ def test_flow_invest_wo_min_size_non_optional(self, basic_flow_system_linopy_coords, coords_config):
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+ timesteps = flow_system.timesteps
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=fx.InvestParameters(maximum_size=100, mandatory=True),
+ relative_minimum=np.linspace(0.1, 0.5, timesteps.size),
+ relative_maximum=np.linspace(0.5, 1, timesteps.size),
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ assert_sets_equal(
+ set(flow.submodel.variables),
+ {'Sink(Wärme)|total_flow_hours', 'Sink(Wärme)|flow_rate', 'Sink(Wärme)|size'},
+ msg='Incorrect variables',
+ )
+ assert_sets_equal(
+ set(flow.submodel.constraints),
+ {
+ 'Sink(Wärme)|total_flow_hours',
+ 'Sink(Wärme)|flow_rate|lb',
+ 'Sink(Wärme)|flow_rate|ub',
+ },
+ msg='Incorrect constraints',
+ )
+
+ assert_var_equal(
+ model['Sink(Wärme)|size'],
+ model.add_variables(lower=1e-5, upper=100, coords=model.get_coords(['period', 'scenario'])),
+ )
+
+ assert flow.relative_minimum.dims == tuple(model.get_coords())
+ assert flow.relative_maximum.dims == tuple(model.get_coords())
+
+ # flow_rate
+ assert_var_equal(
+ flow.submodel.flow_rate,
+ model.add_variables(
+ lower=flow.relative_minimum * 1e-5,
+ upper=flow.relative_maximum * 100,
+ coords=model.get_coords(),
+ ),
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|lb'],
+ flow.submodel.variables['Sink(Wärme)|flow_rate']
+ >= flow.submodel.variables['Sink(Wärme)|size'] * flow.relative_minimum,
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|ub'],
+ flow.submodel.variables['Sink(Wärme)|flow_rate']
+ <= flow.submodel.variables['Sink(Wärme)|size'] * flow.relative_maximum,
+ )
+
+ def test_flow_invest_fixed_size(self, basic_flow_system_linopy_coords, coords_config):
+ """Test flow with fixed size investment."""
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=fx.InvestParameters(fixed_size=75, mandatory=True),
+ relative_minimum=0.2,
+ relative_maximum=0.9,
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ assert_sets_equal(
+ set(flow.submodel.variables),
+ {'Sink(Wärme)|total_flow_hours', 'Sink(Wärme)|flow_rate', 'Sink(Wärme)|size'},
+ msg='Incorrect variables',
+ )
+
+ # Check that size is fixed to 75
+ assert_var_equal(
+ flow.submodel.variables['Sink(Wärme)|size'],
+ model.add_variables(lower=75, upper=75, coords=model.get_coords(['period', 'scenario'])),
+ )
+
+ # Check flow rate bounds
+ assert_var_equal(
+ flow.submodel.flow_rate, model.add_variables(lower=0.2 * 75, upper=0.9 * 75, coords=model.get_coords())
+ )
+
+ def test_flow_invest_with_effects(self, basic_flow_system_linopy_coords, coords_config):
+ """Test flow with investment effects."""
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+ # Create effects
+ co2 = fx.Effect(label='CO2', unit='ton', description='CO2 emissions')
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=fx.InvestParameters(
+ minimum_size=20,
+ maximum_size=100,
+ mandatory=False,
+ effects_of_investment={'costs': 1000, 'CO2': 5}, # Fixed investment effects
+ effects_of_investment_per_size={'costs': 500, 'CO2': 0.1}, # Specific investment effects
+ ),
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]), co2)
+ model = create_linopy_model(flow_system)
+
+ # Check investment effects
+ assert 'Sink(Wärme)->costs(periodic)' in model.variables
+ assert 'Sink(Wärme)->CO2(periodic)' in model.variables
+
+ # Check fix effects (applied only when invested=1)
+ assert_conequal(
+ model.constraints['Sink(Wärme)->costs(periodic)'],
+ model.variables['Sink(Wärme)->costs(periodic)']
+ == flow.submodel.variables['Sink(Wärme)|invested'] * 1000
+ + flow.submodel.variables['Sink(Wärme)|size'] * 500,
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)->CO2(periodic)'],
+ model.variables['Sink(Wärme)->CO2(periodic)']
+ == flow.submodel.variables['Sink(Wärme)|invested'] * 5 + flow.submodel.variables['Sink(Wärme)|size'] * 0.1,
+ )
+
+ def test_flow_invest_divest_effects(self, basic_flow_system_linopy_coords, coords_config):
+ """Test flow with divestment effects."""
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=fx.InvestParameters(
+ minimum_size=20,
+ maximum_size=100,
+ mandatory=False,
+ effects_of_retirement={'costs': 500}, # Cost incurred when NOT investing
+ ),
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ # Check divestment effects
+ assert 'Sink(Wärme)->costs(periodic)' in model.constraints
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)->costs(periodic)'],
+ model.variables['Sink(Wärme)->costs(periodic)'] + (model.variables['Sink(Wärme)|invested'] - 1) * 500 == 0,
+ )
+
+
+class TestFlowOnModel:
+ """Test the FlowModel class."""
+
+ def test_flow_on(self, basic_flow_system_linopy_coords, coords_config):
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=100,
+ relative_minimum=0.2,
+ relative_maximum=0.8,
+ status_parameters=fx.StatusParameters(),
+ )
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ assert_sets_equal(
+ set(flow.submodel.variables),
+ {'Sink(Wärme)|total_flow_hours', 'Sink(Wärme)|flow_rate', 'Sink(Wärme)|status', 'Sink(Wärme)|active_hours'},
+ msg='Incorrect variables',
+ )
+
+ assert_sets_equal(
+ set(flow.submodel.constraints),
+ {
+ 'Sink(Wärme)|total_flow_hours',
+ 'Sink(Wärme)|active_hours',
+ 'Sink(Wärme)|flow_rate|lb',
+ 'Sink(Wärme)|flow_rate|ub',
+ },
+ msg='Incorrect constraints',
+ )
+ # flow_rate
+ assert_var_equal(
+ flow.submodel.flow_rate,
+ model.add_variables(
+ lower=0,
+ upper=0.8 * 100,
+ coords=model.get_coords(),
+ ),
+ )
+
+ # Status
+ assert_var_equal(
+ flow.submodel.status.status,
+ model.add_variables(binary=True, coords=model.get_coords()),
+ )
+ # Upper bound is total hours when active_hours_max is not specified
+ total_hours = model.hours_per_step.sum('time')
+ assert_var_equal(
+ model.variables['Sink(Wärme)|active_hours'],
+ model.add_variables(lower=0, upper=total_hours, coords=model.get_coords(['period', 'scenario'])),
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|lb'],
+ flow.submodel.variables['Sink(Wärme)|flow_rate']
+ >= flow.submodel.variables['Sink(Wärme)|status'] * 0.2 * 100,
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|ub'],
+ flow.submodel.variables['Sink(Wärme)|flow_rate']
+ <= flow.submodel.variables['Sink(Wärme)|status'] * 0.8 * 100,
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|active_hours'],
+ flow.submodel.variables['Sink(Wärme)|active_hours']
+ == (flow.submodel.variables['Sink(Wärme)|status'] * model.hours_per_step).sum('time'),
+ )
+
+ def test_effects_per_active_hour(self, basic_flow_system_linopy_coords, coords_config):
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+ timesteps = flow_system.timesteps
+
+ costs_per_running_hour = np.linspace(1, 2, timesteps.size)
+ co2_per_running_hour = np.linspace(4, 5, timesteps.size)
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=100,
+ status_parameters=fx.StatusParameters(
+ effects_per_active_hour={'costs': costs_per_running_hour, 'CO2': co2_per_running_hour}
+ ),
+ )
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]), fx.Effect('CO2', 't', ''))
+ model = create_linopy_model(flow_system)
+ costs, co2 = flow_system.effects['costs'], flow_system.effects['CO2']
+
+ assert_sets_equal(
+ set(flow.submodel.variables),
+ {
+ 'Sink(Wärme)|total_flow_hours',
+ 'Sink(Wärme)|flow_rate',
+ 'Sink(Wärme)|status',
+ 'Sink(Wärme)|active_hours',
+ },
+ msg='Incorrect variables',
+ )
+ assert_sets_equal(
+ set(flow.submodel.constraints),
+ {
+ 'Sink(Wärme)|total_flow_hours',
+ 'Sink(Wärme)|flow_rate|lb',
+ 'Sink(Wärme)|flow_rate|ub',
+ 'Sink(Wärme)|active_hours',
+ },
+ msg='Incorrect constraints',
+ )
+
+ assert 'Sink(Wärme)->costs(temporal)' in set(costs.submodel.constraints)
+ assert 'Sink(Wärme)->CO2(temporal)' in set(co2.submodel.constraints)
+
+ costs_per_running_hour = flow.status_parameters.effects_per_active_hour['costs']
+ co2_per_running_hour = flow.status_parameters.effects_per_active_hour['CO2']
+
+ assert costs_per_running_hour.dims == tuple(model.get_coords())
+ assert co2_per_running_hour.dims == tuple(model.get_coords())
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)->costs(temporal)'],
+ model.variables['Sink(Wärme)->costs(temporal)']
+ == flow.submodel.variables['Sink(Wärme)|status'] * model.hours_per_step * costs_per_running_hour,
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)->CO2(temporal)'],
+ model.variables['Sink(Wärme)->CO2(temporal)']
+ == flow.submodel.variables['Sink(Wärme)|status'] * model.hours_per_step * co2_per_running_hour,
+ )
+
+ def test_consecutive_on_hours(self, basic_flow_system_linopy_coords, coords_config):
+ """Test flow with minimum and maximum consecutive on hours."""
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=100,
+ status_parameters=fx.StatusParameters(
+ min_uptime=2, # Must run for at least 2 hours when turned on
+ max_uptime=8, # Can't run more than 8 consecutive hours
+ ),
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ assert {'Sink(Wärme)|uptime', 'Sink(Wärme)|status'}.issubset(set(flow.submodel.variables))
+
+ assert_sets_equal(
+ {
+ 'Sink(Wärme)|uptime|ub',
+ 'Sink(Wärme)|uptime|forward',
+ 'Sink(Wärme)|uptime|backward',
+ 'Sink(Wärme)|uptime|initial',
+ 'Sink(Wärme)|uptime|lb',
+ }
+ & set(flow.submodel.constraints),
+ {
+ 'Sink(Wärme)|uptime|ub',
+ 'Sink(Wärme)|uptime|forward',
+ 'Sink(Wärme)|uptime|backward',
+ 'Sink(Wärme)|uptime|initial',
+ 'Sink(Wärme)|uptime|lb',
+ },
+ msg='Missing uptime constraints',
+ )
+
+ assert_var_equal(
+ model.variables['Sink(Wärme)|uptime'],
+ model.add_variables(lower=0, upper=8, coords=model.get_coords()),
+ )
+
+ mega = model.hours_per_step.sum('time')
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|uptime|ub'],
+ model.variables['Sink(Wärme)|uptime'] <= model.variables['Sink(Wärme)|status'] * mega,
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|uptime|forward'],
+ model.variables['Sink(Wärme)|uptime'].isel(time=slice(1, None))
+ <= model.variables['Sink(Wärme)|uptime'].isel(time=slice(None, -1))
+ + model.hours_per_step.isel(time=slice(None, -1)),
+ )
+
+ # eq: uptime(t) >= uptime(t - 1) + dt(t) + (status(t) - 1) * BIG
+ assert_conequal(
+ model.constraints['Sink(Wärme)|uptime|backward'],
+ model.variables['Sink(Wärme)|uptime'].isel(time=slice(1, None))
+ >= model.variables['Sink(Wärme)|uptime'].isel(time=slice(None, -1))
+ + model.hours_per_step.isel(time=slice(None, -1))
+ + (model.variables['Sink(Wärme)|status'].isel(time=slice(1, None)) - 1) * mega,
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|uptime|initial'],
+ model.variables['Sink(Wärme)|uptime'].isel(time=0)
+ == model.variables['Sink(Wärme)|status'].isel(time=0) * model.hours_per_step.isel(time=0),
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|uptime|lb'],
+ model.variables['Sink(Wärme)|uptime']
+ >= (
+ model.variables['Sink(Wärme)|status'].isel(time=slice(None, -1))
+ - model.variables['Sink(Wärme)|status'].isel(time=slice(1, None))
+ )
+ * 2,
+ )
+
+ def test_consecutive_on_hours_previous(self, basic_flow_system_linopy_coords, coords_config):
+ """Test flow with minimum and maximum uptime."""
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=100,
+ status_parameters=fx.StatusParameters(
+ min_uptime=2, # Must run for at least 2 hours when active
+ max_uptime=8, # Can't run more than 8 consecutive hours
+ ),
+ previous_flow_rate=np.array([10, 20, 30, 0, 20, 20, 30]), # Previously active for 3 steps
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ assert {'Sink(Wärme)|uptime', 'Sink(Wärme)|status'}.issubset(set(flow.submodel.variables))
+
+ assert_sets_equal(
+ {
+ 'Sink(Wärme)|uptime|lb',
+ 'Sink(Wärme)|uptime|forward',
+ 'Sink(Wärme)|uptime|backward',
+ 'Sink(Wärme)|uptime|initial',
+ }
+ & set(flow.submodel.constraints),
+ {
+ 'Sink(Wärme)|uptime|lb',
+ 'Sink(Wärme)|uptime|forward',
+ 'Sink(Wärme)|uptime|backward',
+ 'Sink(Wärme)|uptime|initial',
+ },
+ msg='Missing uptime constraints for previous states',
+ )
+
+ assert_var_equal(
+ model.variables['Sink(Wärme)|uptime'],
+ model.add_variables(lower=0, upper=8, coords=model.get_coords()),
+ )
+
+ mega = model.hours_per_step.sum('time') + model.hours_per_step.isel(time=0) * 3  # previously active for 3 steps
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|uptime|ub'],
+ model.variables['Sink(Wärme)|uptime'] <= model.variables['Sink(Wärme)|status'] * mega,
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|uptime|forward'],
+ model.variables['Sink(Wärme)|uptime'].isel(time=slice(1, None))
+ <= model.variables['Sink(Wärme)|uptime'].isel(time=slice(None, -1))
+ + model.hours_per_step.isel(time=slice(None, -1)),
+ )
+
+ # eq: uptime(t) >= uptime(t - 1) + dt(t) + (status(t) - 1) * BIG
+ assert_conequal(
+ model.constraints['Sink(Wärme)|uptime|backward'],
+ model.variables['Sink(Wärme)|uptime'].isel(time=slice(1, None))
+ >= model.variables['Sink(Wärme)|uptime'].isel(time=slice(None, -1))
+ + model.hours_per_step.isel(time=slice(None, -1))
+ + (model.variables['Sink(Wärme)|status'].isel(time=slice(1, None)) - 1) * mega,
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|uptime|initial'],
+ model.variables['Sink(Wärme)|uptime'].isel(time=0)
+ == model.variables['Sink(Wärme)|status'].isel(time=0) * (model.hours_per_step.isel(time=0) * (1 + 3)),
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|uptime|lb'],
+ model.variables['Sink(Wärme)|uptime']
+ >= (
+ model.variables['Sink(Wärme)|status'].isel(time=slice(None, -1))
+ - model.variables['Sink(Wärme)|status'].isel(time=slice(1, None))
+ )
+ * 2,
+ )
+
+ def test_consecutive_off_hours(self, basic_flow_system_linopy_coords, coords_config):
+ """Test flow with minimum and maximum consecutive inactive hours."""
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=100,
+ status_parameters=fx.StatusParameters(
+ min_downtime=4, # Must stay inactive for at least 4 hours when shut down
+ max_downtime=12, # Can't be inactive for more than 12 consecutive hours
+ ),
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ assert {'Sink(Wärme)|downtime', 'Sink(Wärme)|inactive'}.issubset(set(flow.submodel.variables))
+
+ assert_sets_equal(
+ {
+ 'Sink(Wärme)|downtime|ub',
+ 'Sink(Wärme)|downtime|forward',
+ 'Sink(Wärme)|downtime|backward',
+ 'Sink(Wärme)|downtime|initial',
+ 'Sink(Wärme)|downtime|lb',
+ }
+ & set(flow.submodel.constraints),
+ {
+ 'Sink(Wärme)|downtime|ub',
+ 'Sink(Wärme)|downtime|forward',
+ 'Sink(Wärme)|downtime|backward',
+ 'Sink(Wärme)|downtime|initial',
+ 'Sink(Wärme)|downtime|lb',
+ },
+ msg='Missing consecutive inactive hours constraints',
+ )
+
+ assert_var_equal(
+ model.variables['Sink(Wärme)|downtime'],
+ model.add_variables(lower=0, upper=12, coords=model.get_coords()),
+ )
+
+ mega = model.hours_per_step.sum('time') + model.hours_per_step.isel(time=0) * 1 # previously inactive for 1h
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|downtime|ub'],
+ model.variables['Sink(Wärme)|downtime'] <= model.variables['Sink(Wärme)|inactive'] * mega,
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|downtime|forward'],
+ model.variables['Sink(Wärme)|downtime'].isel(time=slice(1, None))
+ <= model.variables['Sink(Wärme)|downtime'].isel(time=slice(None, -1))
+ + model.hours_per_step.isel(time=slice(None, -1)),
+ )
+
+ # eq: downtime(t) >= downtime(t - 1) + dt(t) + (inactive(t) - 1) * BIG
+ assert_conequal(
+ model.constraints['Sink(Wärme)|downtime|backward'],
+ model.variables['Sink(Wärme)|downtime'].isel(time=slice(1, None))
+ >= model.variables['Sink(Wärme)|downtime'].isel(time=slice(None, -1))
+ + model.hours_per_step.isel(time=slice(None, -1))
+ + (model.variables['Sink(Wärme)|inactive'].isel(time=slice(1, None)) - 1) * mega,
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|downtime|initial'],
+ model.variables['Sink(Wärme)|downtime'].isel(time=0)
+ == model.variables['Sink(Wärme)|inactive'].isel(time=0) * (model.hours_per_step.isel(time=0) * (1 + 1)),
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|downtime|lb'],
+ model.variables['Sink(Wärme)|downtime']
+ >= (
+ model.variables['Sink(Wärme)|inactive'].isel(time=slice(None, -1))
+ - model.variables['Sink(Wärme)|inactive'].isel(time=slice(1, None))
+ )
+ * 4,
+ )
+
+ def test_consecutive_off_hours_previous(self, basic_flow_system_linopy_coords, coords_config):
+ """Test flow with minimum and maximum consecutive inactive hours."""
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=100,
+ status_parameters=fx.StatusParameters(
+ min_downtime=4, # Must stay inactive for at least 4 hours when shut down
+ max_downtime=12, # Can't be inactive for more than 12 consecutive hours
+ ),
+ previous_flow_rate=np.array([10, 20, 30, 0, 20, 0, 0]), # Previously inactive for 2 steps
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ assert {'Sink(Wärme)|downtime', 'Sink(Wärme)|inactive'}.issubset(set(flow.submodel.variables))
+
+ assert_sets_equal(
+ {
+ 'Sink(Wärme)|downtime|ub',
+ 'Sink(Wärme)|downtime|forward',
+ 'Sink(Wärme)|downtime|backward',
+ 'Sink(Wärme)|downtime|initial',
+ 'Sink(Wärme)|downtime|lb',
+ }
+ & set(flow.submodel.constraints),
+ {
+ 'Sink(Wärme)|downtime|ub',
+ 'Sink(Wärme)|downtime|forward',
+ 'Sink(Wärme)|downtime|backward',
+ 'Sink(Wärme)|downtime|initial',
+ 'Sink(Wärme)|downtime|lb',
+ },
+ msg='Missing consecutive inactive hours constraints for previous states',
+ )
+
+ assert_var_equal(
+ model.variables['Sink(Wärme)|downtime'],
+ model.add_variables(lower=0, upper=12, coords=model.get_coords()),
+ )
+
+ mega = model.hours_per_step.sum('time') + model.hours_per_step.isel(time=0) * 2  # previously inactive for 2 steps
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|downtime|ub'],
+ model.variables['Sink(Wärme)|downtime'] <= model.variables['Sink(Wärme)|inactive'] * mega,
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|downtime|forward'],
+ model.variables['Sink(Wärme)|downtime'].isel(time=slice(1, None))
+ <= model.variables['Sink(Wärme)|downtime'].isel(time=slice(None, -1))
+ + model.hours_per_step.isel(time=slice(None, -1)),
+ )
+
+ # eq: downtime(t) >= downtime(t - 1) + dt(t) + (inactive(t) - 1) * BIG
+ assert_conequal(
+ model.constraints['Sink(Wärme)|downtime|backward'],
+ model.variables['Sink(Wärme)|downtime'].isel(time=slice(1, None))
+ >= model.variables['Sink(Wärme)|downtime'].isel(time=slice(None, -1))
+ + model.hours_per_step.isel(time=slice(None, -1))
+ + (model.variables['Sink(Wärme)|inactive'].isel(time=slice(1, None)) - 1) * mega,
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|downtime|initial'],
+ model.variables['Sink(Wärme)|downtime'].isel(time=0)
+ == model.variables['Sink(Wärme)|inactive'].isel(time=0) * (model.hours_per_step.isel(time=0) * (1 + 2)),
+ )
+
+ assert_conequal(
+ model.constraints['Sink(Wärme)|downtime|lb'],
+ model.variables['Sink(Wärme)|downtime']
+ >= (
+ model.variables['Sink(Wärme)|inactive'].isel(time=slice(None, -1))
+ - model.variables['Sink(Wärme)|inactive'].isel(time=slice(1, None))
+ )
+ * 4,
+ )
+
+ def test_switch_on_constraints(self, basic_flow_system_linopy_coords, coords_config):
+ """Test flow with constraints on the number of startups."""
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=100,
+ status_parameters=fx.StatusParameters(
+ startup_limit=5, # Maximum 5 startups
+ effects_per_startup={'costs': 100}, # 100 EUR startup cost
+ ),
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ # Check that variables exist
+ assert {'Sink(Wärme)|startup', 'Sink(Wärme)|shutdown', 'Sink(Wärme)|startup_count'}.issubset(
+ set(flow.submodel.variables)
+ )
+
+ # Check that constraints exist
+ assert_sets_equal(
+ {
+ 'Sink(Wärme)|switch|transition',
+ 'Sink(Wärme)|switch|initial',
+ 'Sink(Wärme)|switch|mutex',
+ 'Sink(Wärme)|startup_count',
+ }
+ & set(flow.submodel.constraints),
+ {
+ 'Sink(Wärme)|switch|transition',
+ 'Sink(Wärme)|switch|initial',
+ 'Sink(Wärme)|switch|mutex',
+ 'Sink(Wärme)|startup_count',
+ },
+ msg='Missing switch constraints',
+ )
+
+ # Check startup_count variable bounds
+ assert_var_equal(
+ flow.submodel.variables['Sink(Wärme)|startup_count'],
+ model.add_variables(lower=0, upper=5, coords=model.get_coords(['period', 'scenario'])),
+ )
+
+ # Verify startup_count constraint (limits number of startups)
+ assert_conequal(
+ model.constraints['Sink(Wärme)|startup_count'],
+ flow.submodel.variables['Sink(Wärme)|startup_count']
+ == flow.submodel.variables['Sink(Wärme)|startup'].sum('time'),
+ )
+
+ # Check that startup cost effect constraint exists
+ assert 'Sink(Wärme)->costs(temporal)' in model.constraints
+
+ # Verify the startup cost effect constraint
+ assert_conequal(
+ model.constraints['Sink(Wärme)->costs(temporal)'],
+ model.variables['Sink(Wärme)->costs(temporal)'] == flow.submodel.variables['Sink(Wärme)|startup'] * 100,
+ )
+
+ def test_on_hours_limits(self, basic_flow_system_linopy_coords, coords_config):
+ """Test flow with limits on total active hours."""
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=100,
+ status_parameters=fx.StatusParameters(
+ active_hours_min=20, # Minimum 20 hours of operation
+ active_hours_max=100, # Maximum 100 hours of operation
+ ),
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ # Check that variables exist
+ assert {'Sink(Wärme)|status', 'Sink(Wärme)|active_hours'}.issubset(set(flow.submodel.variables))
+
+ # Check that constraints exist
+ assert 'Sink(Wärme)|active_hours' in model.constraints
+
+ # Check active_hours variable bounds
+ assert_var_equal(
+ flow.submodel.variables['Sink(Wärme)|active_hours'],
+ model.add_variables(lower=20, upper=100, coords=model.get_coords(['period', 'scenario'])),
+ )
+
+ # Check active_hours constraint
+ assert_conequal(
+ model.constraints['Sink(Wärme)|active_hours'],
+ flow.submodel.variables['Sink(Wärme)|active_hours']
+ == (flow.submodel.variables['Sink(Wärme)|status'] * model.hours_per_step).sum('time'),
+ )
+
+
+class TestFlowOnInvestModel:
+ """Test the FlowModel class."""
+
+ def test_flow_on_invest_optional(self, basic_flow_system_linopy_coords, coords_config):
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=fx.InvestParameters(minimum_size=20, maximum_size=200, mandatory=False),
+ relative_minimum=0.2,
+ relative_maximum=0.8,
+ status_parameters=fx.StatusParameters(),
+ )
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ assert_sets_equal(
+ set(flow.submodel.variables),
+ {
+ 'Sink(Wärme)|total_flow_hours',
+ 'Sink(Wärme)|flow_rate',
+ 'Sink(Wärme)|invested',
+ 'Sink(Wärme)|size',
+ 'Sink(Wärme)|status',
+ 'Sink(Wärme)|active_hours',
+ },
+ msg='Incorrect variables',
+ )
+
+ assert_sets_equal(
+ set(flow.submodel.constraints),
+ {
+ 'Sink(Wärme)|total_flow_hours',
+ 'Sink(Wärme)|active_hours',
+ 'Sink(Wärme)|flow_rate|lb1',
+ 'Sink(Wärme)|flow_rate|ub1',
+ 'Sink(Wärme)|size|lb',
+ 'Sink(Wärme)|size|ub',
+ 'Sink(Wärme)|flow_rate|lb2',
+ 'Sink(Wärme)|flow_rate|ub2',
+ },
+ msg='Incorrect constraints',
+ )
+
+ # flow_rate
+ assert_var_equal(
+ flow.submodel.flow_rate,
+ model.add_variables(
+ lower=0,
+ upper=0.8 * 200,
+ coords=model.get_coords(),
+ ),
+ )
+
+ # Status
+ assert_var_equal(
+ flow.submodel.status.status,
+ model.add_variables(binary=True, coords=model.get_coords()),
+ )
+ # Upper bound is total hours when active_hours_max is not specified
+ total_hours = model.hours_per_step.sum('time')
+ assert_var_equal(
+ model.variables['Sink(Wärme)|active_hours'],
+ model.add_variables(lower=0, upper=total_hours, coords=model.get_coords(['period', 'scenario'])),
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|size|lb'],
+ flow.submodel.variables['Sink(Wärme)|size'] >= flow.submodel.variables['Sink(Wärme)|invested'] * 20,
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|size|ub'],
+ flow.submodel.variables['Sink(Wärme)|size'] <= flow.submodel.variables['Sink(Wärme)|invested'] * 200,
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|lb1'],
+ flow.submodel.variables['Sink(Wärme)|status'] * 0.2 * 20
+ <= flow.submodel.variables['Sink(Wärme)|flow_rate'],
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|ub1'],
+ flow.submodel.variables['Sink(Wärme)|status'] * 0.8 * 200
+ >= flow.submodel.variables['Sink(Wärme)|flow_rate'],
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|active_hours'],
+ flow.submodel.variables['Sink(Wärme)|active_hours']
+ == (flow.submodel.variables['Sink(Wärme)|status'] * model.hours_per_step).sum('time'),
+ )
+
+ # Investment
+ assert_var_equal(
+ model['Sink(Wärme)|size'],
+ model.add_variables(lower=0, upper=200, coords=model.get_coords(['period', 'scenario'])),
+ )
+
+ mega = 0.2 * 200 # Relative minimum * maximum size
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|lb2'],
+ flow.submodel.variables['Sink(Wärme)|flow_rate']
+ >= flow.submodel.variables['Sink(Wärme)|status'] * mega
+ + flow.submodel.variables['Sink(Wärme)|size'] * 0.2
+ - mega,
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|ub2'],
+ flow.submodel.variables['Sink(Wärme)|flow_rate'] <= flow.submodel.variables['Sink(Wärme)|size'] * 0.8,
+ )
+
+ def test_flow_on_invest_non_optional(self, basic_flow_system_linopy_coords, coords_config):
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=fx.InvestParameters(minimum_size=20, maximum_size=200, mandatory=True),
+ relative_minimum=0.2,
+ relative_maximum=0.8,
+ status_parameters=fx.StatusParameters(),
+ )
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ assert_sets_equal(
+ set(flow.submodel.variables),
+ {
+ 'Sink(Wärme)|total_flow_hours',
+ 'Sink(Wärme)|flow_rate',
+ 'Sink(Wärme)|size',
+ 'Sink(Wärme)|status',
+ 'Sink(Wärme)|active_hours',
+ },
+ msg='Incorrect variables',
+ )
+
+ assert_sets_equal(
+ set(flow.submodel.constraints),
+ {
+ 'Sink(Wärme)|total_flow_hours',
+ 'Sink(Wärme)|active_hours',
+ 'Sink(Wärme)|flow_rate|lb1',
+ 'Sink(Wärme)|flow_rate|ub1',
+ 'Sink(Wärme)|flow_rate|lb2',
+ 'Sink(Wärme)|flow_rate|ub2',
+ },
+ msg='Incorrect constraints',
+ )
+
+ # flow_rate
+ assert_var_equal(
+ flow.submodel.flow_rate,
+ model.add_variables(
+ lower=0,
+ upper=0.8 * 200,
+ coords=model.get_coords(),
+ ),
+ )
+
+ # Status
+ assert_var_equal(
+ flow.submodel.status.status,
+ model.add_variables(binary=True, coords=model.get_coords()),
+ )
+ # Upper bound is total hours when active_hours_max is not specified
+ total_hours = model.hours_per_step.sum('time')
+ assert_var_equal(
+ model.variables['Sink(Wärme)|active_hours'],
+ model.add_variables(lower=0, upper=total_hours, coords=model.get_coords(['period', 'scenario'])),
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|lb1'],
+ flow.submodel.variables['Sink(Wärme)|status'] * 0.2 * 20
+ <= flow.submodel.variables['Sink(Wärme)|flow_rate'],
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|ub1'],
+ flow.submodel.variables['Sink(Wärme)|status'] * 0.8 * 200
+ >= flow.submodel.variables['Sink(Wärme)|flow_rate'],
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|active_hours'],
+ flow.submodel.variables['Sink(Wärme)|active_hours']
+ == (flow.submodel.variables['Sink(Wärme)|status'] * model.hours_per_step).sum('time'),
+ )
+
+ # Investment
+ assert_var_equal(
+ model['Sink(Wärme)|size'],
+ model.add_variables(lower=20, upper=200, coords=model.get_coords(['period', 'scenario'])),
+ )
+
+ mega = 0.2 * 200 # Relative minimum * maximum size
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|lb2'],
+ flow.submodel.variables['Sink(Wärme)|flow_rate']
+ >= flow.submodel.variables['Sink(Wärme)|status'] * mega
+ + flow.submodel.variables['Sink(Wärme)|size'] * 0.2
+ - mega,
+ )
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|ub2'],
+ flow.submodel.variables['Sink(Wärme)|flow_rate'] <= flow.submodel.variables['Sink(Wärme)|size'] * 0.8,
+ )
+
+
+class TestFlowWithFixedProfile:
+ """Test Flow with fixed relative profile."""
+
+ def test_fixed_relative_profile(self, basic_flow_system_linopy_coords, coords_config):
+ """Test flow with a fixed relative profile."""
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+ timesteps = flow_system.timesteps
+
+ # Create a time-varying profile (e.g., for a load or renewable generation)
+ profile = np.sin(np.linspace(0, 2 * np.pi, len(timesteps))) * 0.5 + 0.5 # Values between 0 and 1
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=100,
+ fixed_relative_profile=profile,
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ assert_var_equal(
+ flow.submodel.variables['Sink(Wärme)|flow_rate'],
+ model.add_variables(
+ lower=flow.fixed_relative_profile * 100,
+ upper=flow.fixed_relative_profile * 100,
+ coords=model.get_coords(),
+ ),
+ )
+
+ def test_fixed_profile_with_investment(self, basic_flow_system_linopy_coords, coords_config):
+ """Test flow with fixed profile and investment."""
+ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+ timesteps = flow_system.timesteps
+
+ # Create a fixed profile
+ profile = np.sin(np.linspace(0, 2 * np.pi, len(timesteps))) * 0.5 + 0.5
+
+ flow = fx.Flow(
+ 'Wärme',
+ bus='Fernwärme',
+ size=fx.InvestParameters(minimum_size=50, maximum_size=200, mandatory=False),
+ fixed_relative_profile=profile,
+ )
+
+ flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
+ model = create_linopy_model(flow_system)
+
+ assert_var_equal(
+ flow.submodel.variables['Sink(Wärme)|flow_rate'],
+ model.add_variables(lower=0, upper=flow.fixed_relative_profile * 200, coords=model.get_coords()),
+ )
+
+ # The constraint should link flow_rate to size * profile
+ assert_conequal(
+ model.constraints['Sink(Wärme)|flow_rate|fixed'],
+ flow.submodel.variables['Sink(Wärme)|flow_rate']
+ == flow.submodel.variables['Sink(Wärme)|size'] * flow.fixed_relative_profile,
+ )
+
+
+if __name__ == '__main__':
+ pytest.main()
diff --git a/tests/deprecated/test_flow_system_resample.py b/tests/deprecated/test_flow_system_resample.py
new file mode 100644
index 000000000..c76946f80
--- /dev/null
+++ b/tests/deprecated/test_flow_system_resample.py
@@ -0,0 +1,313 @@
+"""Integration tests for FlowSystem.resample() - verifies correct data resampling and structure preservation."""
+
+import numpy as np
+import pandas as pd
+import pytest
+from numpy.testing import assert_allclose
+
+import flixopt as fx
+
+
+@pytest.fixture
+def simple_fs():
+ """Simple FlowSystem with basic components."""
+ timesteps = pd.date_range('2023-01-01', periods=24, freq='h')
+ fs = fx.FlowSystem(timesteps)
+ fs.add_elements(
+ fx.Bus('heat'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)
+ )
+ fs.add_elements(
+ fx.Sink(
+ label='demand',
+ inputs=[fx.Flow(label='in', bus='heat', fixed_relative_profile=np.linspace(10, 20, 24), size=1)],
+ ),
+ fx.Source(
+ label='source', outputs=[fx.Flow(label='out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]
+ ),
+ )
+ return fs
+
+
+@pytest.fixture
+def complex_fs():
+ """FlowSystem with complex elements (storage, piecewise, invest)."""
+ timesteps = pd.date_range('2023-01-01', periods=48, freq='h')
+ fs = fx.FlowSystem(timesteps)
+
+ fs.add_elements(
+ fx.Bus('heat'),
+ fx.Bus('elec'),
+ fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True),
+ )
+
+ # Storage
+ fs.add_elements(
+ fx.Storage(
+ label='battery',
+ charging=fx.Flow('charge', bus='elec', size=10),
+ discharging=fx.Flow('discharge', bus='elec', size=10),
+ capacity_in_flow_hours=fx.InvestParameters(fixed_size=100),
+ )
+ )
+
+ # Piecewise converter
+ converter = fx.linear_converters.Boiler(
+ 'boiler', thermal_efficiency=0.9, fuel_flow=fx.Flow('gas', bus='elec'), thermal_flow=fx.Flow('heat', bus='heat')
+ )
+ converter.thermal_flow.size = 100
+ fs.add_elements(converter)
+
+ # Component with investment
+ fs.add_elements(
+ fx.Source(
+ label='pv',
+ outputs=[
+ fx.Flow(
+ 'gen',
+ bus='elec',
+ size=fx.InvestParameters(maximum_size=1000, effects_of_investment_per_size={'costs': 100}),
+ )
+ ],
+ )
+ )
+
+ return fs
+
+
+# === Basic Functionality ===
+
+
+@pytest.mark.parametrize('freq,method', [('2h', 'mean'), ('4h', 'sum'), ('6h', 'first')])
+def test_basic_resample(simple_fs, freq, method):
+ """Test basic resampling preserves structure."""
+ fs_r = simple_fs.resample(freq, method=method)
+ assert len(fs_r.components) == len(simple_fs.components)
+ assert len(fs_r.buses) == len(simple_fs.buses)
+ assert len(fs_r.timesteps) < len(simple_fs.timesteps)
+
+
+@pytest.mark.parametrize(
+ 'method,expected',
+ [
+ ('mean', [15.0, 35.0]),
+ ('sum', [30.0, 70.0]),
+ ('first', [10.0, 30.0]),
+ ('last', [20.0, 40.0]),
+ ],
+)
+def test_resample_methods(method, expected):
+ """Test different resampling methods."""
+ ts = pd.date_range('2023-01-01', periods=4, freq='h')
+ fs = fx.FlowSystem(ts)
+ fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add_elements(
+ fx.Sink(
+ label='s',
+ inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.array([10.0, 20.0, 30.0, 40.0]), size=1)],
+ )
+ )
+
+ fs_r = fs.resample('2h', method=method)
+ assert_allclose(fs_r.flows['s(in)'].fixed_relative_profile.values, expected, rtol=1e-10)
+
+
+def test_structure_preserved(simple_fs):
+ """Test all structural elements preserved."""
+ fs_r = simple_fs.resample('2h', method='mean')
+ assert set(simple_fs.components.keys()) == set(fs_r.components.keys())
+ assert set(simple_fs.buses.keys()) == set(fs_r.buses.keys())
+ assert set(simple_fs.effects.keys()) == set(fs_r.effects.keys())
+
+ # Flow connections preserved
+ for label in simple_fs.flows.keys():
+ assert simple_fs.flows[label].bus == fs_r.flows[label].bus
+ assert simple_fs.flows[label].component == fs_r.flows[label].component
+
+
+def test_time_metadata_updated(simple_fs):
+ """Test time metadata correctly updated."""
+ fs_r = simple_fs.resample('3h', method='mean')
+ assert len(fs_r.timesteps) == 8
+ assert_allclose(fs_r.hours_per_timestep.values, 3.0)
+ assert fs_r.hours_of_last_timestep == 3.0
+
+
+# === Advanced Dimensions ===
+
+
+@pytest.mark.parametrize(
+ 'dim_name,dim_value',
+ [
+ ('periods', pd.Index([2023, 2024], name='period')),
+ ('scenarios', pd.Index(['base', 'high'], name='scenario')),
+ ],
+)
+def test_with_dimensions(simple_fs, dim_name, dim_value):
+ """Test resampling preserves period/scenario dimensions."""
+ fs = fx.FlowSystem(simple_fs.timesteps, **{dim_name: dim_value})
+ fs.add_elements(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add_elements(
+ fx.Sink(label='d', inputs=[fx.Flow(label='in', bus='h', fixed_relative_profile=np.ones(24), size=1)])
+ )
+
+ fs_r = fs.resample('2h', method='mean')
+ assert getattr(fs_r, dim_name) is not None
+ pd.testing.assert_index_equal(getattr(fs_r, dim_name), dim_value)
+
+
+# === Complex Elements ===
+
+
+def test_storage_resample(complex_fs):
+ """Test storage component resampling."""
+ fs_r = complex_fs.resample('4h', method='mean')
+ assert 'battery' in fs_r.components
+ storage = fs_r.components['battery']
+ assert storage.charging.label == 'charge'
+ assert storage.discharging.label == 'discharge'
+
+
+def test_converter_resample(complex_fs):
+ """Test converter component resampling."""
+ fs_r = complex_fs.resample('4h', method='mean')
+ assert 'boiler' in fs_r.components
+ boiler = fs_r.components['boiler']
+ assert hasattr(boiler, 'thermal_efficiency')
+
+
+def test_invest_resample(complex_fs):
+ """Test investment parameters preserved."""
+ fs_r = complex_fs.resample('4h', method='mean')
+ pv_flow = fs_r.flows['pv(gen)']
+ assert isinstance(pv_flow.size, fx.InvestParameters)
+ assert pv_flow.size.maximum_size == 1000
+
+
+# === Modeling Integration ===
+
+
+@pytest.mark.filterwarnings('ignore::DeprecationWarning')
+@pytest.mark.parametrize('with_dim', [None, 'periods', 'scenarios'])
+def test_modeling(with_dim):
+ """Test resampled FlowSystem can be modeled."""
+ ts = pd.date_range('2023-01-01', periods=48, freq='h')
+ kwargs = {}
+ if with_dim == 'periods':
+ kwargs['periods'] = pd.Index([2023, 2024], name='period')
+ elif with_dim == 'scenarios':
+ kwargs['scenarios'] = pd.Index(['base', 'high'], name='scenario')
+
+ fs = fx.FlowSystem(ts, **kwargs)
+ fs.add_elements(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add_elements(
+ fx.Sink(
+ label='d', inputs=[fx.Flow(label='in', bus='h', fixed_relative_profile=np.linspace(10, 30, 48), size=1)]
+ ),
+ fx.Source(label='s', outputs=[fx.Flow(label='out', bus='h', size=100, effects_per_flow_hour={'costs': 0.05})]),
+ )
+
+ fs_r = fs.resample('4h', method='mean')
+ calc = fx.Optimization('test', fs_r)
+ calc.do_modeling()
+
+ assert calc.model is not None
+ assert len(calc.model.variables) > 0
+
+
+@pytest.mark.filterwarnings('ignore::DeprecationWarning')
+def test_model_structure_preserved():
+ """Test model structure (var/constraint types) preserved."""
+ ts = pd.date_range('2023-01-01', periods=48, freq='h')
+ fs = fx.FlowSystem(ts)
+ fs.add_elements(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add_elements(
+ fx.Sink(
+ label='d', inputs=[fx.Flow(label='in', bus='h', fixed_relative_profile=np.linspace(10, 30, 48), size=1)]
+ ),
+ fx.Source(label='s', outputs=[fx.Flow(label='out', bus='h', size=100, effects_per_flow_hour={'costs': 0.05})]),
+ )
+
+ calc_orig = fx.Optimization('orig', fs)
+ calc_orig.do_modeling()
+
+ fs_r = fs.resample('4h', method='mean')
+ calc_r = fx.Optimization('resamp', fs_r)
+ calc_r.do_modeling()
+
+ # Same number of variable/constraint types
+ assert len(calc_orig.model.variables) == len(calc_r.model.variables)
+ assert len(calc_orig.model.constraints) == len(calc_r.model.constraints)
+
+ # Same names
+ assert set(calc_orig.model.variables.labels.data_vars.keys()) == set(calc_r.model.variables.labels.data_vars.keys())
+ assert set(calc_orig.model.constraints.labels.data_vars.keys()) == set(
+ calc_r.model.constraints.labels.data_vars.keys()
+ )
+
+
+# === Advanced Features ===
+
+
+def test_dataset_roundtrip(simple_fs):
+ """Test dataset serialization."""
+ fs_r = simple_fs.resample('2h', method='mean')
+ assert fx.FlowSystem.from_dataset(fs_r.to_dataset()) == fs_r
+
+
+def test_dataset_chaining(simple_fs):
+ """Test power user pattern."""
+ ds = simple_fs.to_dataset()
+ ds = fx.FlowSystem._dataset_sel(ds, time='2023-01-01')
+ ds = fx.FlowSystem._dataset_resample(ds, freq='2h', method='mean')
+ fs_result = fx.FlowSystem.from_dataset(ds)
+
+ fs_simple = simple_fs.sel(time='2023-01-01').resample('2h', method='mean')
+ assert fs_result == fs_simple
+
+
+@pytest.mark.parametrize('freq,exp_len', [('2h', 84), ('6h', 28), ('1D', 7)])
+def test_frequencies(freq, exp_len):
+ """Test various frequencies."""
+ ts = pd.date_range('2023-01-01', periods=168, freq='h')
+ fs = fx.FlowSystem(ts)
+ fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add_elements(
+ fx.Sink(label='s', inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.ones(168), size=1)])
+ )
+
+ assert len(fs.resample(freq, method='mean').timesteps) == exp_len
+
+
+def test_irregular_timesteps_error():
+ """Test that resampling irregular timesteps to finer resolution raises error without fill_gaps."""
+ ts = pd.DatetimeIndex(['2023-01-01 00:00', '2023-01-01 01:00', '2023-01-01 03:00'], name='time')
+ fs = fx.FlowSystem(ts)
+ fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add_elements(
+ fx.Sink(label='s', inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.ones(3), size=1)])
+ )
+
+ with pytest.raises(ValueError, match='Resampling created gaps'):
+ fs.resample('1h', method='mean')
+
+
+def test_irregular_timesteps_with_fill_gaps():
+ """Test that resampling irregular timesteps works with explicit fill_gaps strategy."""
+ ts = pd.DatetimeIndex(['2023-01-01 00:00', '2023-01-01 01:00', '2023-01-01 03:00'], name='time')
+ fs = fx.FlowSystem(ts)
+ fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add_elements(
+ fx.Sink(
+ label='s', inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.array([1.0, 2.0, 4.0]), size=1)]
+ )
+ )
+
+ # Test with ffill (using deprecated method)
+ fs_r = fs.resample('1h', method='mean', fill_gaps='ffill')
+ assert len(fs_r.timesteps) == 4
+ # Gap at 02:00 should be filled with previous value (2.0)
+ assert_allclose(fs_r.flows['s(in)'].fixed_relative_profile.values, [1.0, 2.0, 2.0, 4.0])
+
+
+if __name__ == '__main__':
+ pytest.main(['-v', __file__])
diff --git a/tests/deprecated/test_functional.py b/tests/deprecated/test_functional.py
new file mode 100644
index 000000000..409e20a5f
--- /dev/null
+++ b/tests/deprecated/test_functional.py
@@ -0,0 +1,746 @@
+"""
+Unit tests for the flixopt framework.
+
+This module defines a set of unit tests for testing the functionality of the `flixopt` framework.
+The tests focus on verifying the correct behavior of flow systems, including component modeling,
+investment optimization, and operational constraints like status behavior.
+
+### Approach:
+1. **Setup**: Each test initializes a flow system with a set of predefined elements and parameters.
+2. **Model Creation**: Test-specific flow systems are constructed using `create_model` with datetime arrays.
+3. **Solution**: The models are solved using the `solve_and_load` method, which performs modeling, solves the optimization problem, and loads the results.
+4. **Validation**: Results are validated using assertions, primarily `assert_allclose`, to ensure model outputs match expected values with a specified tolerance.
+
+Tests group related cases by their functional focus:
+- Minimal modeling setup (`TestMinimal` class)
+- Investment behavior (`TestInvestment` class)
+- Status operational constraints (functions: `test_startup_shutdown`, `test_consecutive_uptime_downtime`, etc.)
+"""
+
+import numpy as np
+import pandas as pd
+import pytest
+from numpy.testing import assert_allclose
+
+import flixopt as fx
+from tests.deprecated.conftest import assert_almost_equal_numeric
+
+np.random.seed(45)
+
+
+class Data:
+ """
+ Generates time series data for testing.
+
+ Attributes:
+ length (int): The desired length of the data.
+ thermal_demand (np.ndarray): Thermal demand time series data.
+ electricity_demand (np.ndarray): Electricity demand time series data.
+ """
+
+ def __init__(self, length: int):
+ """
+ Initialize the data generator with a specified length.
+
+ Args:
+ length (int): Length of the time series data to generate.
+ """
+ self.length = length
+
+ self.thermal_demand = np.arange(0, 30, 10)
+ self.electricity_demand = np.arange(1, 10.1, 1)
+
+ self.thermal_demand = self._adjust_length(self.thermal_demand, length)
+ self.electricity_demand = self._adjust_length(self.electricity_demand, length)
+
+ def _adjust_length(self, array, new_length: int):
+ if len(array) >= new_length:
+ return array[:new_length]
+ else:
+ repeats = (new_length + len(array) - 1) // len(array) # Calculate how many times to repeat
+ extended_array = np.tile(array, repeats) # Repeat the array
+ return extended_array[:new_length] # Truncate to exact length
+
+
+def flow_system_base(timesteps: pd.DatetimeIndex) -> fx.FlowSystem:
+ data = Data(len(timesteps))
+
+ flow_system = fx.FlowSystem(timesteps)
+ flow_system.add_elements(
+ fx.Bus('Fernwärme', imbalance_penalty_per_flow_hour=None),
+ fx.Bus('Gas', imbalance_penalty_per_flow_hour=None),
+ )
+ flow_system.add_elements(fx.Effect('costs', '€', 'Kosten', is_standard=True, is_objective=True))
+ flow_system.add_elements(
+ fx.Sink(
+ label='Wärmelast',
+ inputs=[fx.Flow(label='Wärme', bus='Fernwärme', fixed_relative_profile=data.thermal_demand, size=1)],
+ ),
+ fx.Source(label='Gastarif', outputs=[fx.Flow(label='Gas', bus='Gas', effects_per_flow_hour=1)]),
+ )
+ return flow_system
+
+
+def flow_system_minimal(timesteps) -> fx.FlowSystem:
+ flow_system = flow_system_base(timesteps)
+ flow_system.add_elements(
+ fx.linear_converters.Boiler(
+ 'Boiler',
+ thermal_efficiency=0.5,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow('Q_th', bus='Fernwärme'),
+ )
+ )
+ return flow_system
+
+
+def solve_and_load(flow_system: fx.FlowSystem, solver) -> fx.FlowSystem:
+ """Optimize the flow system and return it with the solution."""
+ flow_system.optimize(solver)
+ return flow_system
+
+
+@pytest.fixture
+def time_steps_fixture(request):
+ return pd.date_range('2020-01-01', periods=5, freq='h')
+
+
+def test_solve_and_load(solver_fixture, time_steps_fixture):
+ flow_system = solve_and_load(flow_system_minimal(time_steps_fixture), solver_fixture)
+ assert flow_system.solution is not None
+
+
+def test_minimal_model(solver_fixture, time_steps_fixture):
+ flow_system = solve_and_load(flow_system_minimal(time_steps_fixture), solver_fixture)
+
+ assert_allclose(flow_system.solution['costs'].values, 80, rtol=1e-5, atol=1e-10)
+
+ # Use assert_almost_equal_numeric to handle extra timestep with NaN
+ assert_almost_equal_numeric(
+ flow_system.solution['Boiler(Q_th)|flow_rate'].values,
+ [-0.0, 10.0, 20.0, -0.0, 10.0],
+ 'Boiler flow_rate doesnt match expected value',
+ )
+
+ assert_almost_equal_numeric(
+ flow_system.solution['costs(temporal)|per_timestep'].values,
+ [-0.0, 20.0, 40.0, -0.0, 20.0],
+ 'costs per_timestep doesnt match expected value',
+ )
+
+ assert_almost_equal_numeric(
+ flow_system.solution['Gastarif(Gas)->costs(temporal)'].values,
+ [-0.0, 20.0, 40.0, -0.0, 20.0],
+ 'Gastarif costs doesnt match expected value',
+ )
+
+
+def test_fixed_size(solver_fixture, time_steps_fixture):
+ flow_system = flow_system_base(time_steps_fixture)
+ flow_system.add_elements(
+ fx.linear_converters.Boiler(
+ 'Boiler',
+ thermal_efficiency=0.5,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(
+ 'Q_th',
+ bus='Fernwärme',
+ size=fx.InvestParameters(fixed_size=1000, effects_of_investment=10, effects_of_investment_per_size=1),
+ ),
+ )
+ )
+
+ solve_and_load(flow_system, solver_fixture)
+ boiler = flow_system['Boiler']
+ costs = flow_system.effects['costs']
+ assert_allclose(
+ costs.submodel.total.solution.item(),
+ 80 + 1000 * 1 + 10,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='The total costs does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.investment.size.solution.item(),
+ 1000,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__Investment_size" does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.investment.invested.solution.item(),
+ 1,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__invested" does not have the right value',
+ )
+
+
+def test_optimize_size(solver_fixture, time_steps_fixture):
+ flow_system = flow_system_base(time_steps_fixture)
+ flow_system.add_elements(
+ fx.linear_converters.Boiler(
+ 'Boiler',
+ thermal_efficiency=0.5,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(
+ 'Q_th',
+ bus='Fernwärme',
+ size=fx.InvestParameters(effects_of_investment=10, effects_of_investment_per_size=1, maximum_size=100),
+ ),
+ )
+ )
+
+ solve_and_load(flow_system, solver_fixture)
+ boiler = flow_system['Boiler']
+ costs = flow_system.effects['costs']
+ assert_allclose(
+ costs.submodel.total.solution.item(),
+ 80 + 20 * 1 + 10,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='The total costs does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.investment.size.solution.item(),
+ 20,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__Investment_size" does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.investment.invested.solution.item(),
+ 1,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__IsInvested" does not have the right value',
+ )
+
+
+def test_size_bounds(solver_fixture, time_steps_fixture):
+ flow_system = flow_system_base(time_steps_fixture)
+ flow_system.add_elements(
+ fx.linear_converters.Boiler(
+ 'Boiler',
+ thermal_efficiency=0.5,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(
+ 'Q_th',
+ bus='Fernwärme',
+ size=fx.InvestParameters(
+ minimum_size=40, maximum_size=100, effects_of_investment=10, effects_of_investment_per_size=1
+ ),
+ ),
+ )
+ )
+
+ solve_and_load(flow_system, solver_fixture)
+ boiler = flow_system['Boiler']
+ costs = flow_system.effects['costs']
+ assert_allclose(
+ costs.submodel.total.solution.item(),
+ 80 + 40 * 1 + 10,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='The total costs does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.investment.size.solution.item(),
+ 40,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__Investment_size" does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.investment.invested.solution.item(),
+ 1,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__IsInvested" does not have the right value',
+ )
+
+
+def test_optional_invest(solver_fixture, time_steps_fixture):
+ flow_system = flow_system_base(time_steps_fixture)
+ flow_system.add_elements(
+ fx.linear_converters.Boiler(
+ 'Boiler',
+ thermal_efficiency=0.5,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(
+ 'Q_th',
+ bus='Fernwärme',
+ size=fx.InvestParameters(
+ mandatory=False,
+ minimum_size=40,
+ maximum_size=100,
+ effects_of_investment=10,
+ effects_of_investment_per_size=1,
+ ),
+ ),
+ ),
+ fx.linear_converters.Boiler(
+ 'Boiler_optional',
+ thermal_efficiency=0.5,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(
+ 'Q_th',
+ bus='Fernwärme',
+ size=fx.InvestParameters(
+ mandatory=False,
+ minimum_size=50,
+ maximum_size=100,
+ effects_of_investment=10,
+ effects_of_investment_per_size=1,
+ ),
+ ),
+ ),
+ )
+
+ solve_and_load(flow_system, solver_fixture)
+ boiler = flow_system['Boiler']
+ boiler_optional = flow_system['Boiler_optional']
+ costs = flow_system.effects['costs']
+ assert_allclose(
+ costs.submodel.total.solution.item(),
+ 80 + 40 * 1 + 10,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='The total costs does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.investment.size.solution.item(),
+ 40,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__Investment_size" does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.investment.invested.solution.item(),
+ 1,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__IsInvested" does not have the right value',
+ )
+
+ assert_allclose(
+ boiler_optional.thermal_flow.submodel.investment.size.solution.item(),
+ 0,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__Investment_size" does not have the right value',
+ )
+ assert_allclose(
+ boiler_optional.thermal_flow.submodel.investment.invested.solution.item(),
+ 0,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__IsInvested" does not have the right value',
+ )
+
+
+def test_on(solver_fixture, time_steps_fixture):
+ """Tests if the On Variable is correctly created and calculated in a Flow"""
+ flow_system = flow_system_base(time_steps_fixture)
+ flow_system.add_elements(
+ fx.linear_converters.Boiler(
+ 'Boiler',
+ thermal_efficiency=0.5,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=100, status_parameters=fx.StatusParameters()),
+ )
+ )
+
+ solve_and_load(flow_system, solver_fixture)
+ boiler = flow_system['Boiler']
+ costs = flow_system.effects['costs']
+ assert_allclose(
+ costs.submodel.total.solution.item(),
+ 80,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='The total costs does not have the right value',
+ )
+
+ assert_allclose(
+ boiler.thermal_flow.submodel.status.status.solution.values,
+ [0, 1, 1, 0, 1],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__on" does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.flow_rate.solution.values,
+ [0, 10, 20, 0, 10],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__flow_rate" does not have the right value',
+ )
+
+
+def test_off(solver_fixture, time_steps_fixture):
+ """Tests if the Off Variable is correctly created and calculated in a Flow"""
+ flow_system = flow_system_base(time_steps_fixture)
+ flow_system.add_elements(
+ fx.linear_converters.Boiler(
+ 'Boiler',
+ thermal_efficiency=0.5,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(
+ 'Q_th',
+ bus='Fernwärme',
+ size=100,
+ status_parameters=fx.StatusParameters(max_downtime=100),
+ ),
+ )
+ )
+
+ solve_and_load(flow_system, solver_fixture)
+ boiler = flow_system['Boiler']
+ costs = flow_system.effects['costs']
+ assert_allclose(
+ costs.submodel.total.solution.item(),
+ 80,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='The total costs does not have the right value',
+ )
+
+ assert_allclose(
+ boiler.thermal_flow.submodel.status.status.solution.values,
+ [0, 1, 1, 0, 1],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__on" does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.status.inactive.solution.values,
+ 1 - boiler.thermal_flow.submodel.status.status.solution.values,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__off" does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.flow_rate.solution.values,
+ [0, 10, 20, 0, 10],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__flow_rate" does not have the right value',
+ )
+
+
+def test_startup_shutdown(solver_fixture, time_steps_fixture):
+ """Tests if the startup/shutdown Variable is correctly created and calculated in a Flow"""
+ flow_system = flow_system_base(time_steps_fixture)
+ flow_system.add_elements(
+ fx.linear_converters.Boiler(
+ 'Boiler',
+ thermal_efficiency=0.5,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(
+ 'Q_th',
+ bus='Fernwärme',
+ size=100,
+ status_parameters=fx.StatusParameters(force_startup_tracking=True),
+ ),
+ )
+ )
+
+ solve_and_load(flow_system, solver_fixture)
+ boiler = flow_system['Boiler']
+ costs = flow_system.effects['costs']
+ assert_allclose(
+ costs.submodel.total.solution.item(),
+ 80,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='The total costs does not have the right value',
+ )
+
+ assert_allclose(
+ boiler.thermal_flow.submodel.status.status.solution.values,
+ [0, 1, 1, 0, 1],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__on" does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.status.startup.solution.values,
+ [0, 1, 0, 0, 1],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__switch_on" does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.status.shutdown.solution.values,
+ [0, 0, 0, 1, 0],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__switch_on" does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.flow_rate.solution.values,
+ [0, 10, 20, 0, 10],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__flow_rate" does not have the right value',
+ )
+
+
+def test_on_total_max(solver_fixture, time_steps_fixture):
+ """Tests if the On Total Max Variable is correctly created and calculated in a Flow"""
+ flow_system = flow_system_base(time_steps_fixture)
+ flow_system.add_elements(
+ fx.linear_converters.Boiler(
+ 'Boiler',
+ thermal_efficiency=0.5,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(
+ 'Q_th',
+ bus='Fernwärme',
+ size=100,
+ status_parameters=fx.StatusParameters(active_hours_max=1),
+ ),
+ ),
+ fx.linear_converters.Boiler(
+ 'Boiler_backup',
+ thermal_efficiency=0.2,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=100),
+ ),
+ )
+
+ solve_and_load(flow_system, solver_fixture)
+ boiler = flow_system['Boiler']
+ costs = flow_system.effects['costs']
+ assert_allclose(
+ costs.submodel.total.solution.item(),
+ 140,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='The total costs does not have the right value',
+ )
+
+ assert_allclose(
+ boiler.thermal_flow.submodel.status.status.solution.values,
+ [0, 0, 1, 0, 0],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__on" does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.flow_rate.solution.values,
+ [0, 0, 20, 0, 0],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__flow_rate" does not have the right value',
+ )
+
+
+def test_on_total_bounds(solver_fixture, time_steps_fixture):
+ """Tests if the On Hours min and max are correctly created and calculated in a Flow"""
+ flow_system = flow_system_base(time_steps_fixture)
+ flow_system.add_elements(
+ fx.linear_converters.Boiler(
+ 'Boiler',
+ thermal_efficiency=0.5,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(
+ 'Q_th',
+ bus='Fernwärme',
+ size=100,
+ status_parameters=fx.StatusParameters(active_hours_max=2),
+ ),
+ ),
+ fx.linear_converters.Boiler(
+ 'Boiler_backup',
+ thermal_efficiency=0.2,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(
+ 'Q_th',
+ bus='Fernwärme',
+ size=100,
+ status_parameters=fx.StatusParameters(active_hours_min=3),
+ ),
+ ),
+ )
+ flow_system['Wärmelast'].inputs[0].fixed_relative_profile = np.array(
+ [0, 10, 20, 0, 12]
+ ) # Else its non deterministic
+
+ solve_and_load(flow_system, solver_fixture)
+ boiler = flow_system['Boiler']
+ boiler_backup = flow_system['Boiler_backup']
+ costs = flow_system.effects['costs']
+ assert_allclose(
+ costs.submodel.total.solution.item(),
+ 114,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='The total costs does not have the right value',
+ )
+
+ assert_allclose(
+ boiler.thermal_flow.submodel.status.status.solution.values,
+ [0, 0, 1, 0, 1],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__on" does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.flow_rate.solution.values,
+ [0, 0, 20, 0, 12 - 1e-5],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__flow_rate" does not have the right value',
+ )
+
+ assert_allclose(
+ sum(boiler_backup.thermal_flow.submodel.status.status.solution.values),
+ 3,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler_backup__Q_th__on" does not have the right value',
+ )
+ assert_allclose(
+ boiler_backup.thermal_flow.submodel.flow_rate.solution.values,
+ [0, 10, 1.0e-05, 0, 1.0e-05],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__flow_rate" does not have the right value',
+ )
+
+
+def test_consecutive_uptime_downtime(solver_fixture, time_steps_fixture):
+ """Tests if the consecutive uptime/downtime are correctly created and calculated in a Flow"""
+ flow_system = flow_system_base(time_steps_fixture)
+ flow_system.add_elements(
+ fx.linear_converters.Boiler(
+ 'Boiler',
+ thermal_efficiency=0.5,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(
+ 'Q_th',
+ bus='Fernwärme',
+ size=100,
+ status_parameters=fx.StatusParameters(max_uptime=2, min_uptime=2),
+ ),
+ ),
+ fx.linear_converters.Boiler(
+ 'Boiler_backup',
+ thermal_efficiency=0.2,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=100),
+ ),
+ )
+ flow_system['Wärmelast'].inputs[0].fixed_relative_profile = np.array([5, 10, 20, 18, 12])
+ # Else its non deterministic
+
+ solve_and_load(flow_system, solver_fixture)
+ boiler = flow_system['Boiler']
+ boiler_backup = flow_system['Boiler_backup']
+ costs = flow_system.effects['costs']
+ assert_allclose(
+ costs.submodel.total.solution.item(),
+ 190,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='The total costs does not have the right value',
+ )
+
+ assert_allclose(
+ boiler.thermal_flow.submodel.status.status.solution.values,
+ [1, 1, 0, 1, 1],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__on" does not have the right value',
+ )
+ assert_allclose(
+ boiler.thermal_flow.submodel.flow_rate.solution.values,
+ [5, 10, 0, 18, 12],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__flow_rate" does not have the right value',
+ )
+
+ assert_allclose(
+ boiler_backup.thermal_flow.submodel.flow_rate.solution.values,
+ [0, 0, 20, 0, 0],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__flow_rate" does not have the right value',
+ )
+
+
+def test_consecutive_off(solver_fixture, time_steps_fixture):
+ """Tests if the consecutive on hours are correctly created and calculated in a Flow"""
+ flow_system = flow_system_base(time_steps_fixture)
+ flow_system.add_elements(
+ fx.linear_converters.Boiler(
+ 'Boiler',
+ thermal_efficiency=0.5,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow('Q_th', bus='Fernwärme'),
+ ),
+ fx.linear_converters.Boiler(
+ 'Boiler_backup',
+ thermal_efficiency=0.2,
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(
+ 'Q_th',
+ bus='Fernwärme',
+ size=100,
+ previous_flow_rate=np.array([20]), # Otherwise its Off before the start
+ status_parameters=fx.StatusParameters(max_downtime=2, min_downtime=2),
+ ),
+ ),
+ )
+ flow_system['Wärmelast'].inputs[0].fixed_relative_profile = np.array(
+ [5, 0, 20, 18, 12]
+ ) # Else its non deterministic
+
+ solve_and_load(flow_system, solver_fixture)
+ boiler = flow_system['Boiler']
+ boiler_backup = flow_system['Boiler_backup']
+ costs = flow_system.effects['costs']
+ assert_allclose(
+ costs.submodel.total.solution.item(),
+ 110,
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='The total costs does not have the right value',
+ )
+
+ assert_allclose(
+ boiler_backup.thermal_flow.submodel.status.status.solution.values,
+ [0, 0, 1, 0, 0],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler_backup__Q_th__on" does not have the right value',
+ )
+ assert_allclose(
+ boiler_backup.thermal_flow.submodel.status.inactive.solution.values,
+ [1, 1, 0, 1, 1],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler_backup__Q_th__off" does not have the right value',
+ )
+ assert_allclose(
+ boiler_backup.thermal_flow.submodel.flow_rate.solution.values,
+ [0, 0, 1e-5, 0, 0],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler_backup__Q_th__flow_rate" does not have the right value',
+ )
+
+ assert_allclose(
+ boiler.thermal_flow.submodel.flow_rate.solution.values,
+ [5, 0, 20 - 1e-5, 18, 12],
+ rtol=1e-5,
+ atol=1e-10,
+ err_msg='"Boiler__Q_th__flow_rate" does not have the right value',
+ )
+
+
+if __name__ == '__main__':
+ pytest.main(['-v', '--disable-warnings'])
diff --git a/tests/deprecated/test_heatmap_reshape.py b/tests/deprecated/test_heatmap_reshape.py
new file mode 100644
index 000000000..092adff4e
--- /dev/null
+++ b/tests/deprecated/test_heatmap_reshape.py
@@ -0,0 +1,91 @@
+"""Test reshape_data_for_heatmap() for common use cases."""
+
+import numpy as np
+import pandas as pd
+import pytest
+import xarray as xr
+
+from flixopt.plotting import reshape_data_for_heatmap
+
+# Set random seed for reproducible tests
+np.random.seed(42)
+
+
+@pytest.fixture
+def hourly_week_data():
+ """Typical use case: hourly data for a week."""
+ time = pd.date_range('2024-01-01', periods=168, freq='h')
+ data = np.random.rand(168) * 100
+ return xr.DataArray(data, dims=['time'], coords={'time': time}, name='power')
+
+
+def test_daily_hourly_pattern():
+ """Most common use case: reshape hourly data into days × hours for daily patterns."""
+ time = pd.date_range('2024-01-01', periods=72, freq='h')
+ data = np.random.rand(72) * 100
+ da = xr.DataArray(data, dims=['time'], coords={'time': time})
+
+ result = reshape_data_for_heatmap(da, reshape_time=('D', 'h'))
+
+ assert 'timeframe' in result.dims and 'timestep' in result.dims
+ assert result.sizes['timeframe'] == 3 # 3 days
+ assert result.sizes['timestep'] == 24 # 24 hours
+
+
+def test_weekly_daily_pattern(hourly_week_data):
+ """Common use case: reshape hourly data into weeks × days."""
+ result = reshape_data_for_heatmap(hourly_week_data, reshape_time=('W', 'D'))
+
+ assert 'timeframe' in result.dims and 'timestep' in result.dims
+ # 168 hours = 7 days = 1 week
+ assert result.sizes['timeframe'] == 1 # 1 week
+ assert result.sizes['timestep'] == 7 # 7 days
+
+
+def test_with_irregular_data():
+ """Real-world use case: data with missing timestamps needs filling."""
+ time = pd.date_range('2024-01-01', periods=100, freq='15min')
+ data = np.random.rand(100)
+ # Randomly drop 30% to simulate real data gaps
+ keep = np.sort(np.random.choice(100, 70, replace=False)) # Must be sorted
+ da = xr.DataArray(data[keep], dims=['time'], coords={'time': time[keep]})
+
+ result = reshape_data_for_heatmap(da, reshape_time=('h', 'min'), fill='ffill')
+
+ assert 'timeframe' in result.dims and 'timestep' in result.dims
+ # 100 * 15min = 1500min = 25h; reshaped to hours × minutes
+ assert result.sizes['timeframe'] == 25 # 25 hours
+ assert result.sizes['timestep'] == 60 # 60 minutes per hour
+ # Should handle irregular data without errors
+
+
+def test_multidimensional_scenarios():
+ """Use case: data with scenarios/periods that need to be preserved."""
+ time = pd.date_range('2024-01-01', periods=48, freq='h')
+ scenarios = ['base', 'high']
+ data = np.random.rand(48, 2) * 100
+
+ da = xr.DataArray(data, dims=['time', 'scenario'], coords={'time': time, 'scenario': scenarios}, name='demand')
+
+ result = reshape_data_for_heatmap(da, reshape_time=('D', 'h'))
+
+ # Should preserve scenario dimension
+ assert 'scenario' in result.dims
+ assert result.sizes['scenario'] == 2
+ # 48 hours = 2 days × 24 hours
+ assert result.sizes['timeframe'] == 2 # 2 days
+ assert result.sizes['timestep'] == 24 # 24 hours
+
+
+def test_no_reshape_returns_unchanged():
+ """Use case: when reshape_time=None, return data as-is."""
+ time = pd.date_range('2024-01-01', periods=24, freq='h')
+ da = xr.DataArray(np.random.rand(24), dims=['time'], coords={'time': time})
+
+ result = reshape_data_for_heatmap(da, reshape_time=None)
+
+ xr.testing.assert_equal(result, da)
+
+
+if __name__ == '__main__':
+ pytest.main([__file__, '-v'])
diff --git a/tests/deprecated/test_integration.py b/tests/deprecated/test_integration.py
new file mode 100644
index 000000000..2f083b4fb
--- /dev/null
+++ b/tests/deprecated/test_integration.py
@@ -0,0 +1,333 @@
+"""Tests for deprecated Optimization/Results API - ported from feature/v5.
+
+This module contains the original integration tests from feature/v5 that use the
+deprecated Optimization class. These tests will be removed in v6.0.0.
+
+For new tests, use FlowSystem.optimize(solver) instead.
+"""
+
+import pytest
+
+import flixopt as fx
+
+from ..conftest import (
+ assert_almost_equal_numeric,
+ create_optimization_and_solve,
+)
+
+
+class TestFlowSystem:
+ def test_simple_flow_system(self, simple_flow_system, highs_solver):
+ """
+ Test the effects of the simple energy system model
+ """
+ optimization = create_optimization_and_solve(simple_flow_system, highs_solver, 'test_simple_flow_system')
+
+ effects = optimization.flow_system.effects
+
+ # Cost assertions
+ assert_almost_equal_numeric(
+ effects['costs'].submodel.total.solution.item(), 81.88394666666667, 'costs doesnt match expected value'
+ )
+
+ # CO2 assertions
+ assert_almost_equal_numeric(
+ effects['CO2'].submodel.total.solution.item(), 255.09184, 'CO2 doesnt match expected value'
+ )
+
+ def test_model_components(self, simple_flow_system, highs_solver):
+ """
+ Test the component flows of the simple energy system model
+ """
+ optimization = create_optimization_and_solve(simple_flow_system, highs_solver, 'test_model_components')
+ comps = optimization.flow_system.components
+
+ # Boiler assertions
+ assert_almost_equal_numeric(
+ comps['Boiler'].thermal_flow.submodel.flow_rate.solution.values,
+ [0, 0, 0, 28.4864, 35, 0, 0, 0, 0],
+ 'Q_th doesnt match expected value',
+ )
+
+ # CHP unit assertions
+ assert_almost_equal_numeric(
+ comps['CHP_unit'].thermal_flow.submodel.flow_rate.solution.values,
+ [30.0, 26.66666667, 75.0, 75.0, 75.0, 20.0, 20.0, 20.0, 20.0],
+ 'Q_th doesnt match expected value',
+ )
+
+ def test_results_persistence(self, simple_flow_system, highs_solver):
+ """
+ Test saving and loading results
+ """
+ # Save results to file
+ optimization = create_optimization_and_solve(simple_flow_system, highs_solver, 'test_model_components')
+
+ optimization.results.to_file(overwrite=True)
+
+ # Load results from file
+ results = fx.results.Results.from_file(optimization.folder, optimization.name)
+
+ # Verify key variables from loaded results
+ assert_almost_equal_numeric(
+ results.solution['costs'].values,
+ 81.88394666666667,
+ 'costs doesnt match expected value',
+ )
+ assert_almost_equal_numeric(results.solution['CO2'].values, 255.09184, 'CO2 doesnt match expected value')
+
+
+class TestComplex:
+ def test_basic_flow_system(self, flow_system_base, highs_solver):
+ optimization = create_optimization_and_solve(flow_system_base, highs_solver, 'test_basic_flow_system')
+
+ # Assertions
+ assert_almost_equal_numeric(
+ optimization.results.model['costs'].solution.item(),
+ -11597.873624489237,
+ 'costs doesnt match expected value',
+ )
+
+ assert_almost_equal_numeric(
+ optimization.results.model['costs(temporal)|per_timestep'].solution.values,
+ [
+ -2.38500000e03,
+ -2.21681333e03,
+ -2.38500000e03,
+ -2.17599000e03,
+ -2.35107029e03,
+ -2.38500000e03,
+ 0.00000000e00,
+ -1.68897826e-10,
+ -2.16914486e-12,
+ ],
+ 'costs doesnt match expected value',
+ )
+
+ assert_almost_equal_numeric(
+ sum(optimization.results.model['CO2(temporal)->costs(temporal)'].solution.values),
+ 258.63729669618675,
+ 'costs doesnt match expected value',
+ )
+ assert_almost_equal_numeric(
+ sum(optimization.results.model['Kessel(Q_th)->costs(temporal)'].solution.values),
+ 0.01,
+ 'costs doesnt match expected value',
+ )
+ assert_almost_equal_numeric(
+ sum(optimization.results.model['Kessel->costs(temporal)'].solution.values),
+ -0.0,
+ 'costs doesnt match expected value',
+ )
+ assert_almost_equal_numeric(
+ sum(optimization.results.model['Gastarif(Q_Gas)->costs(temporal)'].solution.values),
+ 39.09153113079115,
+ 'costs doesnt match expected value',
+ )
+ assert_almost_equal_numeric(
+ sum(optimization.results.model['Einspeisung(P_el)->costs(temporal)'].solution.values),
+ -14196.61245231646,
+ 'costs doesnt match expected value',
+ )
+ assert_almost_equal_numeric(
+ sum(optimization.results.model['KWK->costs(temporal)'].solution.values),
+ 0.0,
+ 'costs doesnt match expected value',
+ )
+
+ assert_almost_equal_numeric(
+ optimization.results.model['Kessel(Q_th)->costs(periodic)'].solution.values,
+ 1000 + 500,
+ 'costs doesnt match expected value',
+ )
+
+ assert_almost_equal_numeric(
+ optimization.results.model['Speicher->costs(periodic)'].solution.values,
+ 800 + 1,
+ 'costs doesnt match expected value',
+ )
+
+ assert_almost_equal_numeric(
+ optimization.results.model['CO2(temporal)'].solution.values,
+ 1293.1864834809337,
+ 'CO2 doesnt match expected value',
+ )
+ assert_almost_equal_numeric(
+ optimization.results.model['CO2(periodic)'].solution.values,
+ 0.9999999999999994,
+ 'CO2 doesnt match expected value',
+ )
+ assert_almost_equal_numeric(
+ optimization.results.model['Kessel(Q_th)|flow_rate'].solution.values,
+ [0, 0, 0, 45, 0, 0, 0, 0, 0],
+ 'Kessel doesnt match expected value',
+ )
+
+ assert_almost_equal_numeric(
+ optimization.results.model['KWK(Q_th)|flow_rate'].solution.values,
+ [
+ 7.50000000e01,
+ 6.97111111e01,
+ 7.50000000e01,
+ 7.50000000e01,
+ 7.39330280e01,
+ 7.50000000e01,
+ 0.00000000e00,
+ 3.12638804e-14,
+ 3.83693077e-14,
+ ],
+ 'KWK Q_th doesnt match expected value',
+ )
+ assert_almost_equal_numeric(
+ optimization.results.model['KWK(P_el)|flow_rate'].solution.values,
+ [
+ 6.00000000e01,
+ 5.57688889e01,
+ 6.00000000e01,
+ 6.00000000e01,
+ 5.91464224e01,
+ 6.00000000e01,
+ 0.00000000e00,
+ 2.50111043e-14,
+ 3.06954462e-14,
+ ],
+ 'KWK P_el doesnt match expected value',
+ )
+
+ assert_almost_equal_numeric(
+ optimization.results.model['Speicher|netto_discharge'].solution.values,
+ [-45.0, -69.71111111, 15.0, -10.0, 36.06697198, -55.0, 20.0, 20.0, 20.0],
+ 'Speicher nettoFlow doesnt match expected value',
+ )
+ assert_almost_equal_numeric(
+ optimization.results.model['Speicher|charge_state'].solution.values,
+ [0.0, 40.5, 100.0, 77.0, 79.84, 37.38582802, 83.89496178, 57.18336484, 32.60869565, 10.0],
+ 'Speicher nettoFlow doesnt match expected value',
+ )
+
+ assert_almost_equal_numeric(
+ optimization.results.model['Speicher|PiecewiseEffects|costs'].solution.values,
+ 800,
+ 'Speicher|PiecewiseEffects|costs doesnt match expected value',
+ )
+
+ def test_piecewise_conversion(self, flow_system_piecewise_conversion, highs_solver):
+ optimization = create_optimization_and_solve(
+ flow_system_piecewise_conversion, highs_solver, 'test_piecewise_conversion'
+ )
+
+ effects = optimization.flow_system.effects
+ comps = optimization.flow_system.components
+
+ # Compare expected values with actual values
+ assert_almost_equal_numeric(
+ effects['costs'].submodel.total.solution.item(), -10710.997365760755, 'costs doesnt match expected value'
+ )
+ assert_almost_equal_numeric(
+ effects['CO2'].submodel.total.solution.item(), 1278.7939026086956, 'CO2 doesnt match expected value'
+ )
+ assert_almost_equal_numeric(
+ comps['Kessel'].thermal_flow.submodel.flow_rate.solution.values,
+ [0, 0, 0, 45, 0, 0, 0, 0, 0],
+ 'Kessel doesnt match expected value',
+ )
+ kwk_flows = {flow.label: flow for flow in comps['KWK'].inputs + comps['KWK'].outputs}
+ assert_almost_equal_numeric(
+ kwk_flows['Q_th'].submodel.flow_rate.solution.values,
+ [45.0, 45.0, 64.5962087, 100.0, 61.3136, 45.0, 45.0, 12.86469565, 0.0],
+ 'KWK Q_th doesnt match expected value',
+ )
+ assert_almost_equal_numeric(
+ kwk_flows['P_el'].submodel.flow_rate.solution.values,
+ [40.0, 40.0, 47.12589407, 60.0, 45.93221818, 40.0, 40.0, 10.91784108, -0.0],
+ 'KWK P_el doesnt match expected value',
+ )
+
+ assert_almost_equal_numeric(
+ comps['Speicher'].submodel.netto_discharge.solution.values,
+ [-15.0, -45.0, 25.4037913, -35.0, 48.6864, -25.0, -25.0, 7.13530435, 20.0],
+ 'Speicher nettoFlow doesnt match expected value',
+ )
+
+ assert_almost_equal_numeric(
+ comps['Speicher'].submodel.variables['Speicher|PiecewiseEffects|costs'].solution.values,
+ 454.74666666666667,
+ 'Speicher investcosts_segmented_costs doesnt match expected value',
+ )
+
+
+@pytest.mark.slow
+class TestModelingTypes:
+ @pytest.fixture(params=['full', 'segmented', 'aggregated'])
+ def modeling_calculation(self, request, flow_system_long, highs_solver):
+ """
+ Fixture to run optimizations with different modeling types
+ """
+ # Extract flow system and data from the fixture
+ flow_system = flow_system_long[0]
+ thermal_load_ts = flow_system_long[1]['thermal_load_ts']
+ electrical_load_ts = flow_system_long[1]['electrical_load_ts']
+
+ # Create calculation based on modeling type
+ modeling_type = request.param
+ if modeling_type == 'full':
+ calc = fx.Optimization('fullModel', flow_system)
+ calc.do_modeling()
+ calc.solve(highs_solver)
+ elif modeling_type == 'segmented':
+ calc = fx.SegmentedOptimization('segModel', flow_system, timesteps_per_segment=96, overlap_timesteps=1)
+ calc.do_modeling_and_solve(highs_solver)
+ elif modeling_type == 'aggregated':
+ calc = fx.ClusteredOptimization(
+ 'aggModel',
+ flow_system,
+ fx.ClusteringParameters(
+ hours_per_period=6,
+ nr_of_periods=4,
+ fix_storage_flows=False,
+ aggregate_data_and_fix_non_binary_vars=True,
+ percentage_of_period_freedom=0,
+ penalty_of_period_freedom=0,
+ time_series_for_low_peaks=[electrical_load_ts, thermal_load_ts],
+ time_series_for_high_peaks=[thermal_load_ts],
+ ),
+ )
+ calc.do_modeling()
+ calc.solve(highs_solver)
+
+ return calc, modeling_type
+
+ def test_modeling_types_costs(self, modeling_calculation):
+ """
+ Test total costs for different modeling types
+ """
+ calc, modeling_type = modeling_calculation
+
+ expected_costs = {
+ 'full': 343613,
+ 'segmented': 343613, # Approximate value
+ 'aggregated': 342967.0,
+ }
+
+ if modeling_type in ['full', 'aggregated']:
+ assert_almost_equal_numeric(
+ calc.results.model['costs'].solution.item(),
+ expected_costs[modeling_type],
+ f'costs do not match for {modeling_type} modeling type',
+ )
+ else:
+ assert_almost_equal_numeric(
+ calc.results.solution_without_overlap('costs(temporal)|per_timestep').sum(),
+ expected_costs[modeling_type],
+ f'costs do not match for {modeling_type} modeling type',
+ )
+
+ def test_segmented_io(self, modeling_calculation):
+ calc, modeling_type = modeling_calculation
+ if modeling_type == 'segmented':
+ calc.results.to_file(overwrite=True)
+ _ = fx.results.SegmentedResults.from_file(calc.folder, calc.name)
+
+
+if __name__ == '__main__':
+ pytest.main(['-v'])
diff --git a/tests/deprecated/test_io.py b/tests/deprecated/test_io.py
new file mode 100644
index 000000000..9a00549d7
--- /dev/null
+++ b/tests/deprecated/test_io.py
@@ -0,0 +1,193 @@
+"""Tests for I/O functionality.
+
+Tests for deprecated Results.to_file() and Results.from_file() API
+have been moved to tests/deprecated/test_results_io.py.
+"""
+
+import pytest
+
+import flixopt as fx
+
+from .conftest import (
+ flow_system_base,
+ flow_system_long,
+ flow_system_segments_of_flows_2,
+ simple_flow_system,
+ simple_flow_system_scenarios,
+)
+
+
+@pytest.fixture(
+ params=[
+ flow_system_base,
+ simple_flow_system_scenarios,
+ flow_system_segments_of_flows_2,
+ simple_flow_system,
+ flow_system_long,
+ ]
+)
+def flow_system(request):
+ fs = request.getfixturevalue(request.param.__name__)
+ if isinstance(fs, fx.FlowSystem):
+ return fs
+ else:
+ return fs[0]
+
+
+def test_flow_system_io(flow_system):
+ flow_system.to_json('fs.json')
+
+ ds = flow_system.to_dataset()
+ new_fs = fx.FlowSystem.from_dataset(ds)
+
+ assert flow_system == new_fs
+
+ print(flow_system)
+ flow_system.__repr__()
+ flow_system.__str__()
+
+
+def test_suppress_output_file_descriptors(tmp_path):
+    """Test that suppress_output() redirects file descriptors to /dev/null."""
+    import os
+
+    from flixopt.io import suppress_output
+
+    # Create temporary files to capture output
+    test_file = tmp_path / 'test_output.txt'
+
+    # Test that FD 1 (stdout) is redirected during suppression
+    with open(test_file, 'w') as f:
+        original_stdout_fd = os.dup(1)  # Save original stdout FD
+        try:
+            # Redirect FD 1 to our test file
+            os.dup2(f.fileno(), 1)
+            os.write(1, b'before suppression\n')
+
+            with suppress_output():
+                # Inside suppress_output, writes should go to /dev/null, not our file
+                os.write(1, b'during suppression\n')
+
+            # After suppress_output, writes should go to our file again
+            os.write(1, b'after suppression\n')
+        finally:
+            # Restore original stdout even if an assertion above fails, so
+            # later tests (and pytest's own capture) see an intact FD 1.
+            os.dup2(original_stdout_fd, 1)
+            os.close(original_stdout_fd)
+
+    # Read the file and verify content
+    content = test_file.read_text()
+    assert 'before suppression' in content
+    assert 'during suppression' not in content  # This should NOT be in the file
+    assert 'after suppression' in content
+
+
+def test_suppress_output_python_level():
+ """Test that Python-level stdout/stderr continue to work after suppress_output()."""
+ import io
+ import sys
+
+ from flixopt.io import suppress_output
+
+ # Create a StringIO to capture Python-level output
+ captured_output = io.StringIO()
+
+ # After suppress_output exits, Python streams should be functional
+ with suppress_output():
+ pass # Just enter and exit the context
+
+ # Redirect sys.stdout to our StringIO
+ old_stdout = sys.stdout
+ try:
+ sys.stdout = captured_output
+ print('test message')
+ finally:
+ sys.stdout = old_stdout
+
+ # Verify Python-level stdout works
+ assert 'test message' in captured_output.getvalue()
+
+
+def test_suppress_output_exception_handling():
+ """Test that suppress_output() properly restores streams even on exception."""
+ import sys
+
+ from flixopt.io import suppress_output
+
+ # Save original file descriptors
+ original_stdout_fd = sys.stdout.fileno()
+ original_stderr_fd = sys.stderr.fileno()
+
+ try:
+ with suppress_output():
+ raise ValueError('Test exception')
+ except ValueError:
+ pass
+
+ # Verify streams are restored after exception
+ assert sys.stdout.fileno() == original_stdout_fd
+ assert sys.stderr.fileno() == original_stderr_fd
+
+ # Verify we can still write to stdout/stderr
+ sys.stdout.write('test after exception\n')
+ sys.stdout.flush()
+
+
+def test_suppress_output_c_level():
+ """Test that suppress_output() suppresses C-level output (file descriptor level)."""
+ import os
+ import sys
+
+ from flixopt.io import suppress_output
+
+ # This test verifies that even low-level C writes are suppressed
+ # by writing directly to file descriptor 1 (stdout)
+ with suppress_output():
+ # Try to write directly to FD 1 (stdout) - should be suppressed
+ os.write(1, b'C-level stdout write\n')
+ # Try to write directly to FD 2 (stderr) - should be suppressed
+ os.write(2, b'C-level stderr write\n')
+
+ # After exiting context, ensure streams work
+ sys.stdout.write('After C-level test\n')
+ sys.stdout.flush()
+
+
+def test_tqdm_cleanup_on_exception():
+ """Test that tqdm progress bar is properly cleaned up even when exceptions occur.
+
+ This test verifies the pattern used in SegmentedCalculation where a try/finally
+ block ensures progress_bar.close() is called even if an exception occurs.
+ """
+ from tqdm import tqdm
+
+ # Create a progress bar (disabled to avoid output during tests)
+ items = enumerate(range(5))
+ progress_bar = tqdm(items, total=5, desc='Test progress', disable=True)
+
+ # Track whether cleanup was called
+ cleanup_called = False
+ exception_raised = False
+
+ try:
+ try:
+ for idx, _ in progress_bar:
+ if idx == 2:
+ raise ValueError('Test exception')
+ finally:
+ # This should always execute, even with exception
+ progress_bar.close()
+ cleanup_called = True
+ except ValueError:
+ exception_raised = True
+
+ # Verify both that the exception was raised AND cleanup happened
+ assert exception_raised, 'Test exception should have been raised'
+ assert cleanup_called, 'Cleanup should have been called even with exception'
+
+ # Verify that close() is idempotent - calling it again should not raise
+ progress_bar.close() # Should not raise even if already closed
+
+
+# Allow running this module directly (outside the pytest collector).
+if __name__ == '__main__':
+    pytest.main(['-v', '--disable-warnings'])
diff --git a/tests/deprecated/test_linear_converter.py b/tests/deprecated/test_linear_converter.py
new file mode 100644
index 000000000..57b911d64
--- /dev/null
+++ b/tests/deprecated/test_linear_converter.py
@@ -0,0 +1,501 @@
+import numpy as np
+import pytest
+import xarray as xr
+
+import flixopt as fx
+
+from .conftest import assert_conequal, assert_var_equal, create_linopy_model
+
+
+class TestLinearConverterModel:
+    """Test the LinearConverterModel class.
+
+    Each test builds a small FlowSystem around a LinearConverter, constructs the
+    linopy model via create_linopy_model, and checks the generated variables and
+    constraints both by name and by symbolic equality (assert_conequal).
+    """
+
+    def test_basic_linear_converter(self, basic_flow_system_linopy_coords, coords_config):
+        """Test basic initialization and modeling of a LinearConverter."""
+        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+        # Create input and output flows
+        input_flow = fx.Flow('input', bus='input_bus', size=100)
+        output_flow = fx.Flow('output', bus='output_bus', size=100)
+
+        # Create a simple linear converter with constant conversion factor
+        converter = fx.LinearConverter(
+            label='Converter',
+            inputs=[input_flow],
+            outputs=[output_flow],
+            conversion_factors=[{input_flow.label: 0.8, output_flow.label: 1.0}],
+        )
+
+        # Add to flow system
+        flow_system.add_elements(fx.Bus('input_bus'), fx.Bus('output_bus'), converter)
+
+        # Create model
+        model = create_linopy_model(flow_system)
+
+        # Check variables and constraints
+        assert 'Converter(input)|flow_rate' in model.variables
+        assert 'Converter(output)|flow_rate' in model.variables
+        assert 'Converter|conversion_0' in model.constraints
+
+        # Check conversion constraint (input * 0.8 == output * 1.0)
+        assert_conequal(
+            model.constraints['Converter|conversion_0'],
+            input_flow.submodel.flow_rate * 0.8 == output_flow.submodel.flow_rate * 1.0,
+        )
+
+    def test_linear_converter_time_varying(self, basic_flow_system_linopy_coords, coords_config):
+        """Test a LinearConverter with time-varying conversion factors."""
+        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+        timesteps = flow_system.timesteps
+
+        # Create time-varying efficiency (e.g., temperature-dependent)
+        varying_efficiency = np.linspace(0.7, 0.9, len(timesteps))
+        efficiency_series = xr.DataArray(varying_efficiency, coords=(timesteps,))
+
+        # Create input and output flows
+        input_flow = fx.Flow('input', bus='input_bus', size=100)
+        output_flow = fx.Flow('output', bus='output_bus', size=100)
+
+        # Create a linear converter with time-varying conversion factor
+        converter = fx.LinearConverter(
+            label='Converter',
+            inputs=[input_flow],
+            outputs=[output_flow],
+            conversion_factors=[{input_flow.label: efficiency_series, output_flow.label: 1.0}],
+        )
+
+        # Add to flow system
+        flow_system.add_elements(fx.Bus('input_bus'), fx.Bus('output_bus'), converter)
+
+        # Create model
+        model = create_linopy_model(flow_system)
+
+        # Check variables and constraints
+        assert 'Converter(input)|flow_rate' in model.variables
+        assert 'Converter(output)|flow_rate' in model.variables
+        assert 'Converter|conversion_0' in model.constraints
+
+        # Check conversion constraint (input * efficiency_series == output * 1.0)
+        assert_conequal(
+            model.constraints['Converter|conversion_0'],
+            input_flow.submodel.flow_rate * efficiency_series == output_flow.submodel.flow_rate * 1.0,
+        )
+
+    def test_linear_converter_multiple_factors(self, basic_flow_system_linopy_coords, coords_config):
+        """Test a LinearConverter with multiple conversion factors."""
+        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+        # Create flows
+        input_flow1 = fx.Flow('input1', bus='input_bus1', size=100)
+        input_flow2 = fx.Flow('input2', bus='input_bus2', size=100)
+        output_flow1 = fx.Flow('output1', bus='output_bus1', size=100)
+        output_flow2 = fx.Flow('output2', bus='output_bus2', size=100)
+
+        # Create a linear converter with multiple inputs/outputs and conversion factors.
+        # Each dict in conversion_factors becomes one independent constraint.
+        converter = fx.LinearConverter(
+            label='Converter',
+            inputs=[input_flow1, input_flow2],
+            outputs=[output_flow1, output_flow2],
+            conversion_factors=[
+                {input_flow1.label: 0.8, output_flow1.label: 1.0},  # input1 -> output1
+                {input_flow2.label: 0.5, output_flow2.label: 1.0},  # input2 -> output2
+                {input_flow1.label: 0.2, output_flow2.label: 0.3},  # input1 contributes to output2
+            ],
+        )
+
+        # Add to flow system
+        flow_system.add_elements(
+            fx.Bus('input_bus1'), fx.Bus('input_bus2'), fx.Bus('output_bus1'), fx.Bus('output_bus2'), converter
+        )
+
+        # Create model
+        model = create_linopy_model(flow_system)
+
+        # Check constraints for each conversion factor
+        assert 'Converter|conversion_0' in model.constraints
+        assert 'Converter|conversion_1' in model.constraints
+        assert 'Converter|conversion_2' in model.constraints
+
+        # Check conversion constraint 1 (input1 * 0.8 == output1 * 1.0)
+        assert_conequal(
+            model.constraints['Converter|conversion_0'],
+            input_flow1.submodel.flow_rate * 0.8 == output_flow1.submodel.flow_rate * 1.0,
+        )
+
+        # Check conversion constraint 2 (input2 * 0.5 == output2 * 1.0)
+        assert_conequal(
+            model.constraints['Converter|conversion_1'],
+            input_flow2.submodel.flow_rate * 0.5 == output_flow2.submodel.flow_rate * 1.0,
+        )
+
+        # Check conversion constraint 3 (input1 * 0.2 == output2 * 0.3)
+        assert_conequal(
+            model.constraints['Converter|conversion_2'],
+            input_flow1.submodel.flow_rate * 0.2 == output_flow2.submodel.flow_rate * 0.3,
+        )
+
+    def test_linear_converter_with_status(self, basic_flow_system_linopy_coords, coords_config):
+        """Test a LinearConverter with StatusParameters."""
+        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+        # Create input and output flows
+        input_flow = fx.Flow('input', bus='input_bus', size=100)
+        output_flow = fx.Flow('output', bus='output_bus', size=100)
+
+        # Create StatusParameters
+        status_params = fx.StatusParameters(
+            active_hours_min=10, active_hours_max=40, effects_per_active_hour={'costs': 5}
+        )
+
+        # Create a linear converter with StatusParameters
+        converter = fx.LinearConverter(
+            label='Converter',
+            inputs=[input_flow],
+            outputs=[output_flow],
+            conversion_factors=[{input_flow.label: 0.8, output_flow.label: 1.0}],
+            status_parameters=status_params,
+        )
+
+        # Add to flow system
+        flow_system.add_elements(
+            fx.Bus('input_bus'),
+            fx.Bus('output_bus'),
+            converter,
+        )
+
+        # Create model
+        model = create_linopy_model(flow_system)
+
+        # Verify Status variables and constraints
+        assert 'Converter|status' in model.variables
+        assert 'Converter|active_hours' in model.variables
+
+        # Check active_hours constraint: active_hours == sum(status * hours_per_step)
+        assert_conequal(
+            model.constraints['Converter|active_hours'],
+            model.variables['Converter|active_hours']
+            == (model.variables['Converter|status'] * model.hours_per_step).sum('time'),
+        )
+
+        # Check conversion constraint
+        assert_conequal(
+            model.constraints['Converter|conversion_0'],
+            input_flow.submodel.flow_rate * 0.8 == output_flow.submodel.flow_rate * 1.0,
+        )
+
+        # Check status effects (5 cost units per active hour)
+        assert 'Converter->costs(temporal)' in model.constraints
+        assert_conequal(
+            model.constraints['Converter->costs(temporal)'],
+            model.variables['Converter->costs(temporal)']
+            == model.variables['Converter|status'] * model.hours_per_step * 5,
+        )
+
+    def test_linear_converter_multidimensional(self, basic_flow_system_linopy_coords, coords_config):
+        """Test LinearConverter with multiple inputs, outputs, and connections between them."""
+        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+        # Create a more complex setup with multiple flows
+        input_flow1 = fx.Flow('fuel', bus='fuel_bus', size=100)
+        input_flow2 = fx.Flow('electricity', bus='electricity_bus', size=50)
+        output_flow1 = fx.Flow('heat', bus='heat_bus', size=70)
+        output_flow2 = fx.Flow('cooling', bus='cooling_bus', size=30)
+
+        # Create a CHP-like converter with more complex connections
+        converter = fx.LinearConverter(
+            label='MultiConverter',
+            inputs=[input_flow1, input_flow2],
+            outputs=[output_flow1, output_flow2],
+            conversion_factors=[
+                # Fuel to heat (primary)
+                {input_flow1.label: 0.7, output_flow1.label: 1.0},
+                # Electricity to cooling
+                {input_flow2.label: 0.3, output_flow2.label: 1.0},
+                # Fuel also contributes to cooling
+                {input_flow1.label: 0.1, output_flow2.label: 0.5},
+            ],
+        )
+
+        # Add to flow system
+        flow_system.add_elements(
+            fx.Bus('fuel_bus'), fx.Bus('electricity_bus'), fx.Bus('heat_bus'), fx.Bus('cooling_bus'), converter
+        )
+
+        # Create model
+        model = create_linopy_model(flow_system)
+
+        # Check all expected constraints
+        assert 'MultiConverter|conversion_0' in model.constraints
+        assert 'MultiConverter|conversion_1' in model.constraints
+        assert 'MultiConverter|conversion_2' in model.constraints
+
+        # Check the conversion equations
+        assert_conequal(
+            model.constraints['MultiConverter|conversion_0'],
+            input_flow1.submodel.flow_rate * 0.7 == output_flow1.submodel.flow_rate * 1.0,
+        )
+
+        assert_conequal(
+            model.constraints['MultiConverter|conversion_1'],
+            input_flow2.submodel.flow_rate * 0.3 == output_flow2.submodel.flow_rate * 1.0,
+        )
+
+        assert_conequal(
+            model.constraints['MultiConverter|conversion_2'],
+            input_flow1.submodel.flow_rate * 0.1 == output_flow2.submodel.flow_rate * 0.5,
+        )
+
+    def test_edge_case_time_varying_conversion(self, basic_flow_system_linopy_coords, coords_config):
+        """Test edge case with extreme time-varying conversion factors."""
+        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+        timesteps = flow_system.timesteps
+
+        # Create fluctuating conversion efficiency (e.g., for a heat pump)
+        # Values range from very low (0.1) to very high (5.0).
+        # The third segment absorbs the remainder so the lengths sum to len(timesteps).
+        fluctuating_cop = np.concatenate(
+            [
+                np.linspace(0.1, 1.0, len(timesteps) // 3),
+                np.linspace(1.0, 5.0, len(timesteps) // 3),
+                np.linspace(5.0, 0.1, len(timesteps) // 3 + len(timesteps) % 3),
+            ]
+        )
+
+        # Create input and output flows
+        input_flow = fx.Flow('electricity', bus='electricity_bus', size=100)
+        output_flow = fx.Flow('heat', bus='heat_bus', size=500)  # Higher maximum to allow for COP of 5
+
+        conversion_factors = [{input_flow.label: fluctuating_cop, output_flow.label: np.ones(len(timesteps))}]
+
+        # Create the converter
+        converter = fx.LinearConverter(
+            label='VariableConverter', inputs=[input_flow], outputs=[output_flow], conversion_factors=conversion_factors
+        )
+
+        # Add to flow system
+        flow_system.add_elements(fx.Bus('electricity_bus'), fx.Bus('heat_bus'), converter)
+
+        # Create model
+        model = create_linopy_model(flow_system)
+
+        # Check that the correct constraint was created
+        assert 'VariableConverter|conversion_0' in model.constraints
+
+        factor = converter.conversion_factors[0]['electricity']
+
+        # The raw numpy factor should have been broadcast onto the model coords.
+        assert factor.dims == tuple(model.get_coords())
+
+        # Verify the constraint has the time-varying coefficient
+        assert_conequal(
+            model.constraints['VariableConverter|conversion_0'],
+            input_flow.submodel.flow_rate * factor == output_flow.submodel.flow_rate * 1.0,
+        )
+
+    def test_piecewise_conversion(self, basic_flow_system_linopy_coords, coords_config):
+        """Test a LinearConverter with PiecewiseConversion."""
+        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+        # Create input and output flows
+        input_flow = fx.Flow('input', bus='input_bus', size=100)
+        output_flow = fx.Flow('output', bus='output_bus', size=100)
+
+        # Create pieces for piecewise conversion
+        # For input flow: two pieces from 0-50 and 50-100
+        input_pieces = [fx.Piece(start=0, end=50), fx.Piece(start=50, end=100)]
+
+        # For output flow: two pieces from 0-30 and 30-90
+        output_pieces = [fx.Piece(start=0, end=30), fx.Piece(start=30, end=90)]
+
+        # Create piecewise conversion
+        piecewise_conversion = fx.PiecewiseConversion(
+            {input_flow.label: fx.Piecewise(input_pieces), output_flow.label: fx.Piecewise(output_pieces)}
+        )
+
+        # Create a linear converter with piecewise conversion
+        converter = fx.LinearConverter(
+            label='Converter', inputs=[input_flow], outputs=[output_flow], piecewise_conversion=piecewise_conversion
+        )
+
+        # Add to flow system
+        flow_system.add_elements(fx.Bus('input_bus'), fx.Bus('output_bus'), converter)
+
+        # Create model with the piecewise conversion
+        model = create_linopy_model(flow_system)
+
+        # Verify that PiecewiseModel was created and added as a submodel
+        assert converter.submodel.piecewise_conversion is not None
+
+        # Get the PiecewiseModel instance
+        piecewise_model = converter.submodel.piecewise_conversion
+
+        # Check that we have the expected pieces (2 in this case)
+        assert len(piecewise_model.pieces) == 2
+
+        # Verify that variables were created for each piece
+        for i, _ in enumerate(piecewise_model.pieces):
+            # Each piece should have lambda0, lambda1, and inside_piece variables
+            assert f'Converter|Piece_{i}|lambda0' in model.variables
+            assert f'Converter|Piece_{i}|lambda1' in model.variables
+            assert f'Converter|Piece_{i}|inside_piece' in model.variables
+            lambda0 = model.variables[f'Converter|Piece_{i}|lambda0']
+            lambda1 = model.variables[f'Converter|Piece_{i}|lambda1']
+            inside_piece = model.variables[f'Converter|Piece_{i}|inside_piece']
+
+            # inside_piece is binary; the lambdas are continuous in [0, 1]
+            assert_var_equal(inside_piece, model.add_variables(binary=True, coords=model.get_coords()))
+            assert_var_equal(lambda0, model.add_variables(lower=0, upper=1, coords=model.get_coords()))
+            assert_var_equal(lambda1, model.add_variables(lower=0, upper=1, coords=model.get_coords()))
+
+            # Check that the inside_piece constraint exists
+            assert f'Converter|Piece_{i}|inside_piece' in model.constraints
+            # Check the relationship between inside_piece and lambdas
+            assert_conequal(model.constraints[f'Converter|Piece_{i}|inside_piece'], inside_piece == lambda0 + lambda1)
+
+        # Flow rates are convex combinations of the piece breakpoints.
+        assert_conequal(
+            model.constraints['Converter|Converter(input)|flow_rate|lambda'],
+            model.variables['Converter(input)|flow_rate']
+            == model.variables['Converter|Piece_0|lambda0'] * 0
+            + model.variables['Converter|Piece_0|lambda1'] * 50
+            + model.variables['Converter|Piece_1|lambda0'] * 50
+            + model.variables['Converter|Piece_1|lambda1'] * 100,
+        )
+
+        assert_conequal(
+            model.constraints['Converter|Converter(output)|flow_rate|lambda'],
+            model.variables['Converter(output)|flow_rate']
+            == model.variables['Converter|Piece_0|lambda0'] * 0
+            + model.variables['Converter|Piece_0|lambda1'] * 30
+            + model.variables['Converter|Piece_1|lambda0'] * 30
+            + model.variables['Converter|Piece_1|lambda1'] * 90,
+        )
+
+        # Check that we enforce the constraint that only one segment can be active
+        assert 'Converter|Converter(input)|flow_rate|single_segment' in model.constraints
+
+        # The constraint should enforce that the sum of inside_piece variables is limited
+        # If there's no status parameter, the right-hand side should be 1
+        assert_conequal(
+            model.constraints['Converter|Converter(input)|flow_rate|single_segment'],
+            sum([model.variables[f'Converter|Piece_{i}|inside_piece'] for i in range(len(piecewise_model.pieces))])
+            <= 1,
+        )
+
+    def test_piecewise_conversion_with_status(self, basic_flow_system_linopy_coords, coords_config):
+        """Test a LinearConverter with PiecewiseConversion and StatusParameters."""
+        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+
+        # Create input and output flows
+        input_flow = fx.Flow('input', bus='input_bus', size=100)
+        output_flow = fx.Flow('output', bus='output_bus', size=100)
+
+        # Create pieces for piecewise conversion
+        input_pieces = [fx.Piece(start=0, end=50), fx.Piece(start=50, end=100)]
+
+        output_pieces = [fx.Piece(start=0, end=30), fx.Piece(start=30, end=90)]
+
+        # Create piecewise conversion
+        piecewise_conversion = fx.PiecewiseConversion(
+            {input_flow.label: fx.Piecewise(input_pieces), output_flow.label: fx.Piecewise(output_pieces)}
+        )
+
+        # Create StatusParameters
+        status_params = fx.StatusParameters(
+            active_hours_min=10, active_hours_max=40, effects_per_active_hour={'costs': 5}
+        )
+
+        # Create a linear converter with piecewise conversion and status parameters
+        converter = fx.LinearConverter(
+            label='Converter',
+            inputs=[input_flow],
+            outputs=[output_flow],
+            piecewise_conversion=piecewise_conversion,
+            status_parameters=status_params,
+        )
+
+        # Add to flow system
+        flow_system.add_elements(
+            fx.Bus('input_bus'),
+            fx.Bus('output_bus'),
+            converter,
+        )
+
+        # Create model with the piecewise conversion
+        model = create_linopy_model(flow_system)
+
+        # Verify that PiecewiseModel was created and added as a submodel
+        assert converter.submodel.piecewise_conversion is not None
+
+        # Get the PiecewiseModel instance
+        piecewise_model = converter.submodel.piecewise_conversion
+
+        # Check that we have the expected pieces (2 in this case)
+        assert len(piecewise_model.pieces) == 2
+
+        # Verify that the status variable was used as the zero_point for the piecewise model
+        # When using StatusParameters, the zero_point should be the status variable
+        assert 'Converter|status' in model.variables
+        assert piecewise_model.zero_point is not None  # Should be a variable
+
+        # Verify that variables were created for each piece
+        for i, _ in enumerate(piecewise_model.pieces):
+            # Each piece should have lambda0, lambda1, and inside_piece variables
+            assert f'Converter|Piece_{i}|lambda0' in model.variables
+            assert f'Converter|Piece_{i}|lambda1' in model.variables
+            assert f'Converter|Piece_{i}|inside_piece' in model.variables
+            lambda0 = model.variables[f'Converter|Piece_{i}|lambda0']
+            lambda1 = model.variables[f'Converter|Piece_{i}|lambda1']
+            inside_piece = model.variables[f'Converter|Piece_{i}|inside_piece']
+
+            assert_var_equal(inside_piece, model.add_variables(binary=True, coords=model.get_coords()))
+            assert_var_equal(lambda0, model.add_variables(lower=0, upper=1, coords=model.get_coords()))
+            assert_var_equal(lambda1, model.add_variables(lower=0, upper=1, coords=model.get_coords()))
+
+            # Check that the inside_piece constraint exists
+            assert f'Converter|Piece_{i}|inside_piece' in model.constraints
+            # Check the relationship between inside_piece and lambdas
+            assert_conequal(model.constraints[f'Converter|Piece_{i}|inside_piece'], inside_piece == lambda0 + lambda1)
+
+        assert_conequal(
+            model.constraints['Converter|Converter(input)|flow_rate|lambda'],
+            model.variables['Converter(input)|flow_rate']
+            == model.variables['Converter|Piece_0|lambda0'] * 0
+            + model.variables['Converter|Piece_0|lambda1'] * 50
+            + model.variables['Converter|Piece_1|lambda0'] * 50
+            + model.variables['Converter|Piece_1|lambda1'] * 100,
+        )
+
+        assert_conequal(
+            model.constraints['Converter|Converter(output)|flow_rate|lambda'],
+            model.variables['Converter(output)|flow_rate']
+            == model.variables['Converter|Piece_0|lambda0'] * 0
+            + model.variables['Converter|Piece_0|lambda1'] * 30
+            + model.variables['Converter|Piece_1|lambda0'] * 30
+            + model.variables['Converter|Piece_1|lambda1'] * 90,
+        )
+
+        # Check that we enforce the constraint that only one segment can be active
+        assert 'Converter|Converter(input)|flow_rate|single_segment' in model.constraints
+
+        # The constraint should enforce that the sum of inside_piece variables is limited.
+        # With StatusParameters present, the bound is the status variable instead of 1.
+        assert_conequal(
+            model.constraints['Converter|Converter(input)|flow_rate|single_segment'],
+            sum([model.variables[f'Converter|Piece_{i}|inside_piece'] for i in range(len(piecewise_model.pieces))])
+            <= model.variables['Converter|status'],
+        )
+
+        # Also check that the Status model is working correctly
+        assert 'Converter|active_hours' in model.constraints
+        assert_conequal(
+            model.constraints['Converter|active_hours'],
+            model['Converter|active_hours'] == (model['Converter|status'] * model.hours_per_step).sum('time'),
+        )
+
+        # Verify that the costs effect is applied
+        assert 'Converter->costs(temporal)' in model.constraints
+        assert_conequal(
+            model.constraints['Converter->costs(temporal)'],
+            model.variables['Converter->costs(temporal)']
+            == model.variables['Converter|status'] * model.hours_per_step * 5,
+        )
+
+
+# Allow running this module directly (outside the pytest collector).
+if __name__ == '__main__':
+    pytest.main()
diff --git a/tests/deprecated/test_network_app.py b/tests/deprecated/test_network_app.py
new file mode 100644
index 000000000..f3f250797
--- /dev/null
+++ b/tests/deprecated/test_network_app.py
@@ -0,0 +1,24 @@
+import pytest
+
+import flixopt as fx
+
+from .conftest import (
+ flow_system_long,
+ flow_system_segments_of_flows_2,
+ simple_flow_system,
+)
+
+
+@pytest.fixture(params=[simple_flow_system, flow_system_segments_of_flows_2, flow_system_long])
+def flow_system(request):
+ fs = request.getfixturevalue(request.param.__name__)
+ if isinstance(fs, fx.FlowSystem):
+ return fs
+ else:
+ return fs[0]
+
+
+def test_network_app(flow_system):
+    """Smoke-test that the network visualization app can be started and stopped."""
+    flow_system.start_network_app()
+    flow_system.stop_network_app()
diff --git a/tests/deprecated/test_on_hours_computation.py b/tests/deprecated/test_on_hours_computation.py
new file mode 100644
index 000000000..578fd7792
--- /dev/null
+++ b/tests/deprecated/test_on_hours_computation.py
@@ -0,0 +1,99 @@
+import numpy as np
+import pytest
+import xarray as xr
+
+from flixopt.modeling import ModelingUtilities
+
+
+class TestComputeConsecutiveDuration:
+    """Tests for the compute_consecutive_hours_in_state static method."""
+
+    @pytest.mark.parametrize(
+        'binary_values, hours_per_timestep, expected',
+        [
+            # Case 1: Single timestep DataArrays
+            (xr.DataArray([1], dims=['time']), 5, 5),
+            (xr.DataArray([0], dims=['time']), 3, 0),
+            # Case 2: Array binary, scalar hours
+            (xr.DataArray([0, 0, 1, 1, 1, 0], dims=['time']), 2, 0),  # trailing 0 -> duration 0
+            (xr.DataArray([0, 1, 1, 0, 1, 1], dims=['time']), 1, 2),  # 2 consecutive at end * 1 hour
+            (xr.DataArray([1, 1, 1], dims=['time']), 2, 6),
+            # Case 3: Edge cases
+            (xr.DataArray([1], dims=['time']), 4, 4),
+            (xr.DataArray([0], dims=['time']), 3, 0),
+            # Case 4: More complex patterns
+            (xr.DataArray([1, 0, 0, 1, 1, 1], dims=['time']), 2, 6),  # 3 consecutive at end * 2 hours
+            (xr.DataArray([0, 1, 1, 1, 0, 0], dims=['time']), 1, 0),  # ends with 0
+        ],
+    )
+    def test_compute_duration(self, binary_values, hours_per_timestep, expected):
+        """Test compute_consecutive_hours_in_state with various inputs."""
+        result = ModelingUtilities.compute_consecutive_hours_in_state(binary_values, hours_per_timestep)
+        assert np.isclose(result, expected)
+
+    @pytest.mark.parametrize(
+        'binary_values, hours_per_timestep',
+        [
+            # Case: hours_per_timestep must be scalar
+            (xr.DataArray([1, 1, 1, 1, 1], dims=['time']), np.array([1, 2])),
+        ],
+    )
+    def test_compute_duration_raises_error(self, binary_values, hours_per_timestep):
+        """Test error conditions."""
+        with pytest.raises(TypeError):
+            ModelingUtilities.compute_consecutive_hours_in_state(binary_values, hours_per_timestep)
+
+
+class TestComputePreviousOnStates:
+    """Tests for the compute_previous_states static method."""
+
+    @pytest.mark.parametrize(
+        'previous_values, expected',
+        [
+            # Case 1: Single value DataArrays
+            (xr.DataArray([0], dims=['time']), xr.DataArray([0], dims=['time'])),
+            (xr.DataArray([1], dims=['time']), xr.DataArray([1], dims=['time'])),
+            (xr.DataArray([0.001], dims=['time']), xr.DataArray([1], dims=['time'])),  # Using default epsilon
+            (xr.DataArray([1e-4], dims=['time']), xr.DataArray([1], dims=['time'])),
+            (xr.DataArray([1e-8], dims=['time']), xr.DataArray([0], dims=['time'])),
+            # Case 2: Multiple timestep DataArrays
+            (xr.DataArray([0, 5, 0], dims=['time']), xr.DataArray([0, 1, 0], dims=['time'])),
+            (xr.DataArray([0.1, 0, 0.3], dims=['time']), xr.DataArray([1, 0, 1], dims=['time'])),
+            (xr.DataArray([0, 0, 0], dims=['time']), xr.DataArray([0, 0, 0], dims=['time'])),
+            (xr.DataArray([0.1, 0, 0.2], dims=['time']), xr.DataArray([1, 0, 1], dims=['time'])),
+        ],
+    )
+    def test_compute_previous_on_states(self, previous_values, expected):
+        """Test compute_previous_states with various inputs."""
+        result = ModelingUtilities.compute_previous_states(previous_values)
+        xr.testing.assert_equal(result, expected)
+
+    @pytest.mark.parametrize(
+        'previous_values, epsilon, expected',
+        [
+            # Testing with different epsilon values: values > epsilon map to 1
+            (xr.DataArray([1e-6, 1e-4, 1e-2], dims=['time']), 1e-3, xr.DataArray([0, 0, 1], dims=['time'])),
+            (xr.DataArray([1e-6, 1e-4, 1e-2], dims=['time']), 1e-5, xr.DataArray([0, 1, 1], dims=['time'])),
+            (xr.DataArray([1e-6, 1e-4, 1e-2], dims=['time']), 1e-1, xr.DataArray([0, 0, 0], dims=['time'])),
+            # Mixed case with custom epsilon
+            (xr.DataArray([0.05, 0.005, 0.0005], dims=['time']), 0.01, xr.DataArray([1, 0, 0], dims=['time'])),
+        ],
+    )
+    def test_compute_previous_on_states_with_epsilon(self, previous_values, epsilon, expected):
+        """Test compute_previous_states with custom epsilon values."""
+        result = ModelingUtilities.compute_previous_states(previous_values, epsilon)
+        xr.testing.assert_equal(result, expected)
+
+    @pytest.mark.parametrize(
+        'previous_values, expected_shape',
+        [
+            # Check that output shapes match expected dimensions
+            (xr.DataArray([0, 1, 0, 1], dims=['time']), (4,)),
+            (xr.DataArray([0, 1], dims=['time']), (2,)),
+            (xr.DataArray([1, 0], dims=['time']), (2,)),
+        ],
+    )
+    def test_output_shapes(self, previous_values, expected_shape):
+        """Test that output array has the correct shape."""
+        result = ModelingUtilities.compute_previous_states(previous_values)
+        assert result.shape == expected_shape
diff --git a/tests/deprecated/test_plotting_api.py b/tests/deprecated/test_plotting_api.py
new file mode 100644
index 000000000..141623cae
--- /dev/null
+++ b/tests/deprecated/test_plotting_api.py
@@ -0,0 +1,138 @@
+"""Smoke tests for plotting API robustness improvements."""
+
+import numpy as np
+import pandas as pd
+import pytest
+import xarray as xr
+
+from flixopt import plotting
+
+
+@pytest.fixture
+def sample_dataset():
+ """Create a sample xarray Dataset for testing."""
+ rng = np.random.default_rng(0)
+ time = np.arange(10)
+ data = xr.Dataset(
+ {
+ 'var1': (['time'], rng.random(10)),
+ 'var2': (['time'], rng.random(10)),
+ 'var3': (['time'], rng.random(10)),
+ },
+ coords={'time': time},
+ )
+ return data
+
+
+@pytest.fixture
+def sample_dataframe():
+ """Create a sample pandas DataFrame for testing."""
+ rng = np.random.default_rng(1)
+ time = np.arange(10)
+ df = pd.DataFrame({'var1': rng.random(10), 'var2': rng.random(10), 'var3': rng.random(10)}, index=time)
+ df.index.name = 'time'
+ return df
+
+
+def test_kwargs_passthrough_plotly(sample_dataset):
+ """Test that px_kwargs are passed through and figure can be customized after creation."""
+ # Test that px_kwargs are passed through
+ fig = plotting.with_plotly(
+ sample_dataset,
+ mode='line',
+ range_y=[0, 100],
+ )
+ assert list(fig.layout.yaxis.range) == [0, 100]
+
+ # Test that figure can be customized after creation
+ fig.update_traces(line={'width': 5})
+ fig.update_layout(width=1200, height=600)
+ assert fig.layout.width == 1200
+ assert fig.layout.height == 600
+ assert all(getattr(t, 'line', None) and t.line.width == 5 for t in fig.data)
+
+
+def test_dataframe_support_plotly(sample_dataframe):
+ """Test that DataFrames are accepted by plotting functions."""
+ fig = plotting.with_plotly(sample_dataframe, mode='line')
+ assert fig is not None
+
+
+def test_data_validation_non_numeric():
+ """Test that validation catches non-numeric data."""
+ data = xr.Dataset({'var1': (['time'], ['a', 'b', 'c'])}, coords={'time': [0, 1, 2]})
+
+ with pytest.raises(TypeError, match='non-?numeric'):
+ plotting.with_plotly(data)
+
+
+def test_ensure_dataset_invalid_type():
+ """Test that invalid types raise error via the public API."""
+ with pytest.raises(TypeError, match='xr\\.Dataset|pd\\.DataFrame'):
+ plotting.with_plotly([1, 2, 3], mode='line')
+
+
+@pytest.mark.parametrize(
+ 'engine,mode,data_type',
+ [
+ *[
+ (e, m, dt)
+ for e in ['plotly', 'matplotlib']
+ for m in ['stacked_bar', 'line', 'area', 'grouped_bar']
+ for dt in ['dataset', 'dataframe', 'series']
+ if not (e == 'matplotlib' and m in ['area', 'grouped_bar'])
+ ],
+ ],
+)
+def test_all_data_types_and_modes(engine, mode, data_type):
+ """Test that Dataset, DataFrame, and Series work with all plotting modes."""
+ time = pd.date_range('2020-01-01', periods=5, freq='h')
+
+ data = {
+ 'dataset': xr.Dataset(
+ {'A': (['time'], [1, 2, 3, 4, 5]), 'B': (['time'], [5, 4, 3, 2, 1])}, coords={'time': time}
+ ),
+ 'dataframe': pd.DataFrame({'A': [1, 2, 3, 4, 5], 'B': [5, 4, 3, 2, 1]}, index=time),
+ 'series': pd.Series([1, 2, 3, 4, 5], index=time, name='A'),
+ }[data_type]
+
+ if engine == 'plotly':
+ fig = plotting.with_plotly(data, mode=mode)
+ assert fig is not None and len(fig.data) > 0
+ else:
+ fig, ax = plotting.with_matplotlib(data, mode=mode)
+ assert fig is not None and ax is not None
+
+
+@pytest.mark.parametrize(
+ 'engine,data_type', [(e, dt) for e in ['plotly', 'matplotlib'] for dt in ['dataset', 'dataframe', 'series']]
+)
+def test_pie_plots(engine, data_type):
+ """Test pie charts with all data types, including automatic summing."""
+ time = pd.date_range('2020-01-01', periods=5, freq='h')
+
+ # Single-value data
+ single_data = {
+ 'dataset': xr.Dataset({'A': xr.DataArray(10), 'B': xr.DataArray(20), 'C': xr.DataArray(30)}),
+ 'dataframe': pd.DataFrame({'A': [10], 'B': [20], 'C': [30]}),
+ 'series': pd.Series({'A': 10, 'B': 20, 'C': 30}),
+ }[data_type]
+
+ # Multi-dimensional data (for summing test)
+ multi_data = {
+ 'dataset': xr.Dataset(
+ {'A': (['time'], [1, 2, 3, 4, 5]), 'B': (['time'], [5, 5, 5, 5, 5])}, coords={'time': time}
+ ),
+ 'dataframe': pd.DataFrame({'A': [1, 2, 3, 4, 5], 'B': [5, 5, 5, 5, 5]}, index=time),
+ 'series': pd.Series([1, 2, 3, 4, 5], index=time, name='A'),
+ }[data_type]
+
+ for data in [single_data, multi_data]:
+ if engine == 'plotly':
+ fig = plotting.dual_pie_with_plotly(data, data)
+ assert fig is not None and len(fig.data) >= 2
+ if data is multi_data and data_type != 'series':
+ assert sum(fig.data[0].values) == pytest.approx(40)
+ else:
+ fig, axes = plotting.dual_pie_with_matplotlib(data, data)
+ assert fig is not None and len(axes) == 2
diff --git a/tests/deprecated/test_resample_equivalence.py b/tests/deprecated/test_resample_equivalence.py
new file mode 100644
index 000000000..19144b6a1
--- /dev/null
+++ b/tests/deprecated/test_resample_equivalence.py
@@ -0,0 +1,310 @@
+"""
+Tests to ensure the dimension grouping optimization in _resample_by_dimension_groups
+is equivalent to naive Dataset resampling.
+
+These tests verify that the optimization (grouping variables by dimensions before
+resampling) produces identical results to simply calling Dataset.resample() directly.
+"""
+
+import numpy as np
+import pandas as pd
+import pytest
+import xarray as xr
+
+import flixopt as fx
+
+
+def naive_dataset_resample(dataset: xr.Dataset, freq: str, method: str) -> xr.Dataset:
+ """
+ Naive resampling: simply call Dataset.resample().method() directly.
+
+ This is the straightforward approach without dimension grouping optimization.
+ """
+ return getattr(dataset.resample(time=freq), method)()
+
+
+def create_dataset_with_mixed_dimensions(n_timesteps=48, seed=42):
+ """
+ Create a dataset with variables having different dimension structures.
+
+ This mimics realistic data with:
+ - Variables with only time dimension
+ - Variables with time + one other dimension
+ - Variables with time + multiple dimensions
+ """
+ np.random.seed(seed)
+ timesteps = pd.date_range('2020-01-01', periods=n_timesteps, freq='h')
+
+ ds = xr.Dataset(
+ coords={
+ 'time': timesteps,
+ 'component': ['comp1', 'comp2'],
+ 'bus': ['bus1', 'bus2'],
+ 'scenario': ['base', 'alt'],
+ }
+ )
+
+ # Variable with only time dimension
+ ds['total_demand'] = xr.DataArray(
+ np.random.randn(n_timesteps),
+ dims=['time'],
+ coords={'time': ds.time},
+ )
+
+ # Variable with time + component
+ ds['component_flow'] = xr.DataArray(
+ np.random.randn(n_timesteps, 2),
+ dims=['time', 'component'],
+ coords={'time': ds.time, 'component': ds.component},
+ )
+
+ # Variable with time + bus
+ ds['bus_balance'] = xr.DataArray(
+ np.random.randn(n_timesteps, 2),
+ dims=['time', 'bus'],
+ coords={'time': ds.time, 'bus': ds.bus},
+ )
+
+ # Variable with time + component + bus
+ ds['flow_on_bus'] = xr.DataArray(
+ np.random.randn(n_timesteps, 2, 2),
+ dims=['time', 'component', 'bus'],
+ coords={'time': ds.time, 'component': ds.component, 'bus': ds.bus},
+ )
+
+ # Variable with time + scenario
+ ds['scenario_demand'] = xr.DataArray(
+ np.random.randn(n_timesteps, 2),
+ dims=['time', 'scenario'],
+ coords={'time': ds.time, 'scenario': ds.scenario},
+ )
+
+ # Variable with time + component + scenario
+ ds['component_scenario_flow'] = xr.DataArray(
+ np.random.randn(n_timesteps, 2, 2),
+ dims=['time', 'component', 'scenario'],
+ coords={'time': ds.time, 'component': ds.component, 'scenario': ds.scenario},
+ )
+
+ return ds
+
+
+@pytest.mark.parametrize('method', ['mean', 'sum', 'max', 'min', 'first', 'last'])
+@pytest.mark.parametrize('freq', ['2h', '4h', '1D'])
+def test_resample_equivalence_mixed_dimensions(method, freq):
+ """
+ Test that _resample_by_dimension_groups produces same results as naive resampling.
+
+ Uses a dataset with variables having different dimension structures.
+ """
+ ds = create_dataset_with_mixed_dimensions(n_timesteps=100)
+
+ # Method 1: Optimized approach (with dimension grouping)
+ result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, freq, method)
+
+ # Method 2: Naive approach (direct Dataset resampling)
+ result_naive = naive_dataset_resample(ds, freq, method)
+
+ # Compare results
+ xr.testing.assert_allclose(result_optimized, result_naive)
+
+
+@pytest.mark.parametrize('method', ['mean', 'sum', 'max', 'min', 'first', 'last', 'std', 'var', 'median'])
+def test_resample_equivalence_single_dimension(method):
+ """
+ Test with variables having only time dimension.
+ """
+ timesteps = pd.date_range('2020-01-01', periods=48, freq='h')
+
+ ds = xr.Dataset(coords={'time': timesteps})
+ ds['var1'] = xr.DataArray(np.random.randn(48), dims=['time'], coords={'time': ds.time})
+ ds['var2'] = xr.DataArray(np.random.randn(48) * 10, dims=['time'], coords={'time': ds.time})
+ ds['var3'] = xr.DataArray(np.random.randn(48) / 5, dims=['time'], coords={'time': ds.time})
+
+ # Optimized approach
+ result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '2h', method)
+
+ # Naive approach
+ result_naive = naive_dataset_resample(ds, '2h', method)
+
+ # Compare results
+ xr.testing.assert_allclose(result_optimized, result_naive)
+
+
+def test_resample_equivalence_empty_dataset():
+ """
+ Test with an empty dataset (edge case).
+ """
+ timesteps = pd.date_range('2020-01-01', periods=48, freq='h')
+ ds = xr.Dataset(coords={'time': timesteps})
+
+ # Both should handle empty dataset gracefully
+ result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '2h', 'mean')
+ result_naive = naive_dataset_resample(ds, '2h', 'mean')
+
+ xr.testing.assert_allclose(result_optimized, result_naive)
+
+
+def test_resample_equivalence_single_variable():
+ """
+ Test with a single variable.
+ """
+ timesteps = pd.date_range('2020-01-01', periods=48, freq='h')
+ ds = xr.Dataset(coords={'time': timesteps})
+ ds['single_var'] = xr.DataArray(np.random.randn(48), dims=['time'], coords={'time': ds.time})
+
+ # Test multiple methods
+ for method in ['mean', 'sum', 'max', 'min']:
+ result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '3h', method)
+ result_naive = naive_dataset_resample(ds, '3h', method)
+
+ xr.testing.assert_allclose(result_optimized, result_naive)
+
+
+def test_resample_equivalence_with_nans():
+ """
+ Test with NaN values to ensure they're handled consistently.
+ """
+ timesteps = pd.date_range('2020-01-01', periods=48, freq='h')
+
+ ds = xr.Dataset(coords={'time': timesteps, 'component': ['a', 'b']})
+
+ # Create variable with some NaN values
+ data = np.random.randn(48, 2)
+ data[5:10, 0] = np.nan
+ data[20:25, 1] = np.nan
+
+ ds['var_with_nans'] = xr.DataArray(
+ data, dims=['time', 'component'], coords={'time': ds.time, 'component': ds.component}
+ )
+
+ # Test with methods that handle NaNs
+ for method in ['mean', 'sum', 'max', 'min', 'first', 'last']:
+ result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '2h', method)
+ result_naive = naive_dataset_resample(ds, '2h', method)
+
+ xr.testing.assert_allclose(result_optimized, result_naive)
+
+
+def test_resample_equivalence_different_dimension_orders():
+ """
+ Test that dimension order doesn't affect the equivalence.
+ """
+ timesteps = pd.date_range('2020-01-01', periods=48, freq='h')
+
+ ds = xr.Dataset(
+ coords={
+ 'time': timesteps,
+ 'x': ['x1', 'x2'],
+ 'y': ['y1', 'y2'],
+ }
+ )
+
+ # Variable with time first
+ ds['var_time_first'] = xr.DataArray(
+ np.random.randn(48, 2, 2),
+ dims=['time', 'x', 'y'],
+ coords={'time': ds.time, 'x': ds.x, 'y': ds.y},
+ )
+
+ # Variable with time in middle
+ ds['var_time_middle'] = xr.DataArray(
+ np.random.randn(2, 48, 2),
+ dims=['x', 'time', 'y'],
+ coords={'x': ds.x, 'time': ds.time, 'y': ds.y},
+ )
+
+ # Variable with time last
+ ds['var_time_last'] = xr.DataArray(
+ np.random.randn(2, 2, 48),
+ dims=['x', 'y', 'time'],
+ coords={'x': ds.x, 'y': ds.y, 'time': ds.time},
+ )
+
+ for method in ['mean', 'sum', 'max', 'min']:
+ result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '2h', method)
+ result_naive = naive_dataset_resample(ds, '2h', method)
+
+ xr.testing.assert_allclose(result_optimized, result_naive)
+
+
+def test_resample_equivalence_multiple_variables_same_dims():
+ """
+ Test with multiple variables sharing the same dimensions.
+
+ This is the key optimization case - variables with same dims should be
+ grouped and resampled together.
+ """
+ timesteps = pd.date_range('2020-01-01', periods=48, freq='h')
+
+ ds = xr.Dataset(coords={'time': timesteps, 'location': ['A', 'B', 'C']})
+
+ # Multiple variables with same dimensions (time, location)
+ for i in range(3):
+ ds[f'var_{i}'] = xr.DataArray(
+ np.random.randn(48, 3),
+ dims=['time', 'location'],
+ coords={'time': ds.time, 'location': ds.location},
+ )
+
+ for method in ['mean', 'sum', 'max', 'min']:
+ result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '2h', method)
+ result_naive = naive_dataset_resample(ds, '2h', method)
+
+ xr.testing.assert_allclose(result_optimized, result_naive)
+
+
+def test_resample_equivalence_large_dataset():
+ """
+ Test with a larger, more realistic dataset.
+ """
+ timesteps = pd.date_range('2020-01-01', periods=168, freq='h') # One week
+
+ ds = xr.Dataset(
+ coords={
+ 'time': timesteps,
+ 'component': [f'comp_{i}' for i in range(5)],
+ 'bus': [f'bus_{i}' for i in range(3)],
+ }
+ )
+
+ # Various variable types
+ ds['simple_var'] = xr.DataArray(np.random.randn(168), dims=['time'], coords={'time': ds.time})
+ ds['component_var'] = xr.DataArray(
+ np.random.randn(168, 5), dims=['time', 'component'], coords={'time': ds.time, 'component': ds.component}
+ )
+ ds['bus_var'] = xr.DataArray(np.random.randn(168, 3), dims=['time', 'bus'], coords={'time': ds.time, 'bus': ds.bus})
+ ds['complex_var'] = xr.DataArray(
+ np.random.randn(168, 5, 3),
+ dims=['time', 'component', 'bus'],
+ coords={'time': ds.time, 'component': ds.component, 'bus': ds.bus},
+ )
+
+ # Test with a subset of methods (to keep test time reasonable)
+ for method in ['mean', 'sum', 'first']:
+ result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '1D', method)
+ result_naive = naive_dataset_resample(ds, '1D', method)
+
+ xr.testing.assert_allclose(result_optimized, result_naive)
+
+
+def test_resample_equivalence_with_kwargs():
+ """
+ Test that kwargs are properly forwarded to resample().
+
+ Verifies that additional arguments like label and closed are correctly
+ passed through the optimization path.
+ """
+ timesteps = pd.date_range('2020-01-01', periods=48, freq='h')
+ ds = xr.Dataset(coords={'time': timesteps})
+ ds['var'] = xr.DataArray(np.random.randn(48), dims=['time'], coords={'time': ds.time})
+
+ kwargs = {'label': 'right', 'closed': 'right'}
+ result_optimized = fx.FlowSystem._resample_by_dimension_groups(ds, '2h', 'mean', **kwargs)
+ result_naive = ds.resample(time='2h', **kwargs).mean()
+
+ xr.testing.assert_allclose(result_optimized, result_naive)
+
+
+if __name__ == '__main__':
+ pytest.main(['-v', __file__])
diff --git a/tests/deprecated/test_results_io.py b/tests/deprecated/test_results_io.py
new file mode 100644
index 000000000..a42ca542b
--- /dev/null
+++ b/tests/deprecated/test_results_io.py
@@ -0,0 +1,74 @@
+"""Tests for deprecated Results I/O functionality - ported from feature/v5.
+
+This module contains the original test_flow_system_file_io test from feature/v5
+that uses the deprecated Optimization/Results API. This test will be removed in v6.0.0.
+
+For new tests, use FlowSystem.solution.to_netcdf() instead.
+"""
+
+import uuid
+
+import pytest
+
+import flixopt as fx
+from flixopt.io import ResultsPaths
+
+from ..conftest import (
+ assert_almost_equal_numeric,
+ flow_system_base,
+ flow_system_long,
+ flow_system_segments_of_flows_2,
+ simple_flow_system,
+ simple_flow_system_scenarios,
+)
+
+
+@pytest.fixture(
+ params=[
+ flow_system_base,
+ simple_flow_system_scenarios,
+ flow_system_segments_of_flows_2,
+ simple_flow_system,
+ flow_system_long,
+ ]
+)
+def flow_system(request):
+ fs = request.getfixturevalue(request.param.__name__)
+ if isinstance(fs, fx.FlowSystem):
+ return fs
+ else:
+ return fs[0]
+
+
+@pytest.mark.slow
+def test_flow_system_file_io(flow_system, highs_solver, request):
+ # Use UUID to ensure unique names across parallel test workers
+ unique_id = uuid.uuid4().hex[:12]
+ worker_id = getattr(request.config, 'workerinput', {}).get('workerid', 'main')
+ test_id = f'{worker_id}-{unique_id}'
+
+ calculation_0 = fx.Optimization(f'IO-{test_id}', flow_system=flow_system)
+ calculation_0.do_modeling()
+ calculation_0.solve(highs_solver)
+ calculation_0.flow_system.plot_network()
+
+ calculation_0.results.to_file()
+ paths = ResultsPaths(calculation_0.folder, calculation_0.name)
+ flow_system_1 = fx.FlowSystem.from_netcdf(paths.flow_system)
+
+ calculation_1 = fx.Optimization(f'Loaded_IO-{test_id}', flow_system=flow_system_1)
+ calculation_1.do_modeling()
+ calculation_1.solve(highs_solver)
+ calculation_1.flow_system.plot_network()
+
+ assert_almost_equal_numeric(
+ calculation_0.results.model.objective.value,
+ calculation_1.results.model.objective.value,
+ 'objective of loaded flow_system does not match the original',
+ )
+
+ assert_almost_equal_numeric(
+ calculation_0.results.solution['costs'].values,
+ calculation_1.results.solution['costs'].values,
+ 'costs do not match expected value',
+ )
diff --git a/tests/test_overwrite_protection.py b/tests/deprecated/test_results_overwrite.py
similarity index 89%
rename from tests/test_overwrite_protection.py
rename to tests/deprecated/test_results_overwrite.py
index 4651f1a68..731368e78 100644
--- a/tests/test_overwrite_protection.py
+++ b/tests/deprecated/test_results_overwrite.py
@@ -1,4 +1,10 @@
-"""Tests for Results.to_file() overwrite protection."""
+"""Tests for deprecated Results.to_file() overwrite protection - ported from feature/v5.
+
+This module contains the original overwrite protection tests from feature/v5
+that use the deprecated Optimization/Results API. These tests will be removed in v6.0.0.
+
+For new tests, use FlowSystem.to_netcdf() instead.
+"""
import pathlib
import tempfile
diff --git a/tests/test_results_plots.py b/tests/deprecated/test_results_plots.py
similarity index 100%
rename from tests/test_results_plots.py
rename to tests/deprecated/test_results_plots.py
diff --git a/tests/deprecated/test_scenarios.py b/tests/deprecated/test_scenarios.py
new file mode 100644
index 000000000..b4a1cd161
--- /dev/null
+++ b/tests/deprecated/test_scenarios.py
@@ -0,0 +1,769 @@
+import numpy as np
+import pandas as pd
+import pytest
+import xarray as xr
+from linopy.testing import assert_linequal
+
+import flixopt as fx
+from flixopt import Effect, InvestParameters, Sink, Source, Storage
+from flixopt.elements import Bus, Flow
+from flixopt.flow_system import FlowSystem
+
+from .conftest import create_linopy_model
+
+
+@pytest.fixture
+def test_system():
+ """Create a basic test system with scenarios."""
+ # Create a two-day time index with hourly resolution
+ timesteps = pd.date_range('2023-01-01', periods=48, freq='h', name='time')
+
+ # Create two scenarios
+ scenarios = pd.Index(['Scenario A', 'Scenario B'], name='scenario')
+
+ # Create scenario weights
+ scenario_weights = np.array([0.7, 0.3])
+
+ # Create a flow system with scenarios
+ flow_system = FlowSystem(
+ timesteps=timesteps,
+ scenarios=scenarios,
+ scenario_weights=scenario_weights,
+ )
+
+ # Create demand profiles that differ between scenarios
+ # Scenario A: Higher demand in first day, lower in second day
+ # Scenario B: Lower demand in first day, higher in second day
+ demand_profile_a = np.concatenate(
+ [
+ np.sin(np.linspace(0, 2 * np.pi, 24)) * 5 + 10, # Day 1, max ~15
+ np.sin(np.linspace(0, 2 * np.pi, 24)) * 2 + 5, # Day 2, max ~7
+ ]
+ )
+
+ demand_profile_b = np.concatenate(
+ [
+ np.sin(np.linspace(0, 2 * np.pi, 24)) * 2 + 5, # Day 1, max ~7
+ np.sin(np.linspace(0, 2 * np.pi, 24)) * 5 + 10, # Day 2, max ~15
+ ]
+ )
+
+ # Stack the profiles into a 2D array (time, scenario)
+ demand_profiles = np.column_stack([demand_profile_a, demand_profile_b])
+
+ # Create the necessary model elements
+ # Create buses
+ electricity_bus = Bus('Electricity')
+
+ # Create a demand sink with scenario-dependent profiles
+ demand = Flow(label='Demand', bus=electricity_bus.label_full, fixed_relative_profile=demand_profiles)
+ demand_sink = Sink('Demand', inputs=[demand])
+
+ # Create a power source with investment option
+ power_gen = Flow(
+ label='Generation',
+ bus=electricity_bus.label_full,
+ size=InvestParameters(
+ minimum_size=0,
+ maximum_size=20,
+ effects_of_investment_per_size={'costs': 100}, # €/kW
+ ),
+ effects_per_flow_hour={'costs': 20}, # €/MWh
+ )
+ generator = Source('Generator', outputs=[power_gen])
+
+ # Create a storage for electricity
+ storage_charge = Flow(label='Charge', bus=electricity_bus.label_full, size=10)
+ storage_discharge = Flow(label='Discharge', bus=electricity_bus.label_full, size=10)
+ storage = Storage(
+ label='Battery',
+ charging=storage_charge,
+ discharging=storage_discharge,
+ capacity_in_flow_hours=InvestParameters(
+ minimum_size=0,
+ maximum_size=50,
+ effects_of_investment_per_size={'costs': 50}, # €/kWh
+ ),
+ eta_charge=0.95,
+ eta_discharge=0.95,
+ initial_charge_state='equals_final',
+ )
+
+ # Create effects and objective
+ cost_effect = Effect(label='costs', unit='€', description='Total costs', is_standard=True, is_objective=True)
+
+ # Add all elements to the flow system
+ flow_system.add_elements(electricity_bus, generator, demand_sink, storage, cost_effect)
+
+ # Return the created system and its components
+ return {
+ 'flow_system': flow_system,
+ 'timesteps': timesteps,
+ 'scenarios': scenarios,
+ 'electricity_bus': electricity_bus,
+ 'demand': demand,
+ 'demand_sink': demand_sink,
+ 'generator': generator,
+ 'power_gen': power_gen,
+ 'storage': storage,
+ 'storage_charge': storage_charge,
+ 'storage_discharge': storage_discharge,
+ 'cost_effect': cost_effect,
+ }
+
+
+@pytest.fixture
+def flow_system_complex_scenarios() -> fx.FlowSystem:
+ """
+ Helper method to create a base model with configurable parameters
+ """
+ thermal_load = np.array([30, 0, 90, 110, 110, 20, 20, 20, 20])
+ electrical_load = np.array([40, 40, 40, 40, 40, 40, 40, 40, 40])
+ flow_system = fx.FlowSystem(
+ pd.date_range('2020-01-01', periods=9, freq='h', name='time'),
+ scenarios=pd.Index(['A', 'B', 'C'], name='scenario'),
+ )
+ # Define the components and flow_system
+ flow_system.add_elements(
+ fx.Effect('costs', '€', 'Kosten', is_standard=True, is_objective=True, share_from_temporal={'CO2': 0.2}),
+ fx.Effect('CO2', 'kg', 'CO2_e-Emissionen'),
+ fx.Effect('PE', 'kWh_PE', 'Primärenergie', maximum_total=3.5e3),
+ fx.Bus('Strom'),
+ fx.Bus('Fernwärme'),
+ fx.Bus('Gas'),
+ fx.Sink('Wärmelast', inputs=[fx.Flow('Q_th_Last', 'Fernwärme', size=1, fixed_relative_profile=thermal_load)]),
+ fx.Source(
+ 'Gastarif', outputs=[fx.Flow('Q_Gas', 'Gas', size=1000, effects_per_flow_hour={'costs': 0.04, 'CO2': 0.3})]
+ ),
+ fx.Sink('Einspeisung', inputs=[fx.Flow('P_el', 'Strom', effects_per_flow_hour=-1 * electrical_load)]),
+ )
+
+ boiler = fx.linear_converters.Boiler(
+ 'Kessel',
+ thermal_efficiency=0.5,
+ status_parameters=fx.StatusParameters(effects_per_active_hour={'costs': 0, 'CO2': 1000}),
+ thermal_flow=fx.Flow(
+ 'Q_th',
+ bus='Fernwärme',
+ load_factor_max=1.0,
+ load_factor_min=0.1,
+ relative_minimum=5 / 50,
+ relative_maximum=1,
+ previous_flow_rate=50,
+ size=fx.InvestParameters(
+ effects_of_investment=1000,
+ fixed_size=50,
+ mandatory=True,
+ effects_of_investment_per_size={'costs': 10, 'PE': 2},
+ ),
+ status_parameters=fx.StatusParameters(
+ active_hours_min=0,
+ active_hours_max=1000,
+ max_uptime=10,
+ min_uptime=1,
+ max_downtime=10,
+ effects_per_startup=0.01,
+ startup_limit=1000,
+ ),
+ flow_hours_max=1e6,
+ ),
+ fuel_flow=fx.Flow('Q_fu', bus='Gas', size=200, relative_minimum=0, relative_maximum=1),
+ )
+
+ invest_speicher = fx.InvestParameters(
+ effects_of_investment=0,
+ piecewise_effects_of_investment=fx.PiecewiseEffects(
+ piecewise_origin=fx.Piecewise([fx.Piece(5, 25), fx.Piece(25, 100)]),
+ piecewise_shares={
+ 'costs': fx.Piecewise([fx.Piece(50, 250), fx.Piece(250, 800)]),
+ 'PE': fx.Piecewise([fx.Piece(5, 25), fx.Piece(25, 100)]),
+ },
+ ),
+ mandatory=True,
+ effects_of_investment_per_size={'costs': 0.01, 'CO2': 0.01},
+ minimum_size=0,
+ maximum_size=1000,
+ )
+ speicher = fx.Storage(
+ 'Speicher',
+ charging=fx.Flow('Q_th_load', bus='Fernwärme', size=1e4),
+ discharging=fx.Flow('Q_th_unload', bus='Fernwärme', size=1e4),
+ capacity_in_flow_hours=invest_speicher,
+ initial_charge_state=0,
+ maximal_final_charge_state=10,
+ eta_charge=0.9,
+ eta_discharge=1,
+ relative_loss_per_hour=0.08,
+ prevent_simultaneous_charge_and_discharge=True,
+ )
+
+ flow_system.add_elements(boiler, speicher)
+
+ return flow_system
+
+
+@pytest.fixture
+def flow_system_piecewise_conversion_scenarios(flow_system_complex_scenarios) -> fx.FlowSystem:
+ """
+ Use segments/Piecewise with numeric data
+ """
+ flow_system = flow_system_complex_scenarios
+
+ flow_system.add_elements(
+ fx.LinearConverter(
+ 'KWK',
+ inputs=[fx.Flow('Q_fu', bus='Gas', size=200)],
+ outputs=[
+ fx.Flow('P_el', bus='Strom', size=60, relative_maximum=55, previous_flow_rate=10),
+ fx.Flow('Q_th', bus='Fernwärme', size=100),
+ ],
+ piecewise_conversion=fx.PiecewiseConversion(
+ {
+ 'P_el': fx.Piecewise(
+ [
+ fx.Piece(np.linspace(5, 6, len(flow_system.timesteps)), 30),
+ fx.Piece(40, np.linspace(60, 70, len(flow_system.timesteps))),
+ ]
+ ),
+ 'Q_th': fx.Piecewise([fx.Piece(6, 35), fx.Piece(45, 100)]),
+ 'Q_fu': fx.Piecewise([fx.Piece(12, 70), fx.Piece(90, 200)]),
+ }
+ ),
+ status_parameters=fx.StatusParameters(effects_per_startup=0.01),
+ )
+ )
+
+ return flow_system
+
+
+def test_weights(flow_system_piecewise_conversion_scenarios):
+ """Test that scenario weights are correctly used in the model."""
+ scenarios = flow_system_piecewise_conversion_scenarios.scenarios
+ scenario_weights = np.linspace(0.5, 1, len(scenarios))
+ scenario_weights_da = xr.DataArray(
+ scenario_weights,
+ dims=['scenario'],
+ coords={'scenario': scenarios},
+ )
+ flow_system_piecewise_conversion_scenarios.scenario_weights = scenario_weights_da
+ model = create_linopy_model(flow_system_piecewise_conversion_scenarios)
+ normalized_weights = scenario_weights / sum(scenario_weights)
+ np.testing.assert_allclose(model.objective_weights.values, normalized_weights)
+ # Penalty is now an effect with temporal and periodic components
+ penalty_total = flow_system_piecewise_conversion_scenarios.effects.penalty_effect.submodel.total
+ assert_linequal(
+ model.objective.expression,
+ (model.variables['costs'] * normalized_weights).sum() + (penalty_total * normalized_weights).sum(),
+ )
+ assert np.isclose(model.objective_weights.sum().item(), 1)
+
+
+def test_weights_io(flow_system_piecewise_conversion_scenarios):
+ """Test that scenario weights are correctly used in the model."""
+ scenarios = flow_system_piecewise_conversion_scenarios.scenarios
+ scenario_weights = np.linspace(0.5, 1, len(scenarios))
+ scenario_weights_da = xr.DataArray(
+ scenario_weights,
+ dims=['scenario'],
+ coords={'scenario': scenarios},
+ )
+ normalized_scenario_weights_da = scenario_weights_da / scenario_weights_da.sum()
+ flow_system_piecewise_conversion_scenarios.scenario_weights = scenario_weights_da
+
+ model = create_linopy_model(flow_system_piecewise_conversion_scenarios)
+ np.testing.assert_allclose(model.objective_weights.values, normalized_scenario_weights_da)
+ # Penalty is now an effect with temporal and periodic components
+ penalty_total = flow_system_piecewise_conversion_scenarios.effects.penalty_effect.submodel.total
+ assert_linequal(
+ model.objective.expression,
+ (model.variables['costs'] * normalized_scenario_weights_da).sum()
+ + (penalty_total * normalized_scenario_weights_da).sum(),
+ )
+ assert np.isclose(model.objective_weights.sum().item(), 1.0)
+
+
+def test_scenario_dimensions_in_variables(flow_system_piecewise_conversion_scenarios):
+ """Test that all time variables are correctly broadcasted to scenario dimensions."""
+ model = create_linopy_model(flow_system_piecewise_conversion_scenarios)
+ for var in model.variables:
+ assert model.variables[var].dims in [('time', 'scenario'), ('scenario',), ()]
+
+
+def test_full_scenario_optimization(flow_system_piecewise_conversion_scenarios):
+ """Test a full optimization with scenarios and verify results."""
+ scenarios = flow_system_piecewise_conversion_scenarios.scenarios
+ weights = np.linspace(0.5, 1, len(scenarios)) / np.sum(np.linspace(0.5, 1, len(scenarios)))
+ flow_system_piecewise_conversion_scenarios.scenario_weights = weights
+
+ # Optimize using new API
+ flow_system_piecewise_conversion_scenarios.optimize(fx.solvers.GurobiSolver(mip_gap=0.01, time_limit_seconds=60))
+
+ # Verify solution exists and has scenario dimension
+ assert flow_system_piecewise_conversion_scenarios.solution is not None
+ assert 'scenario' in flow_system_piecewise_conversion_scenarios.solution.dims
+
+
+@pytest.mark.skip(reason='This test is taking too long with highs and is too big for gurobipy free')
+def test_io_persistence(flow_system_piecewise_conversion_scenarios, tmp_path):
+ """Test a full optimization with scenarios and verify results."""
+ scenarios = flow_system_piecewise_conversion_scenarios.scenarios
+ weights = np.linspace(0.5, 1, len(scenarios)) / np.sum(np.linspace(0.5, 1, len(scenarios)))
+ flow_system_piecewise_conversion_scenarios.scenario_weights = weights
+
+ # Optimize using new API
+ flow_system_piecewise_conversion_scenarios.optimize(fx.solvers.HighsSolver(mip_gap=0.001, time_limit_seconds=60))
+ original_objective = flow_system_piecewise_conversion_scenarios.solution['objective'].item()
+
+ # Save and restore
+ filepath = tmp_path / 'flow_system_scenarios.nc4'
+ flow_system_piecewise_conversion_scenarios.to_netcdf(filepath)
+ flow_system_2 = fx.FlowSystem.from_netcdf(filepath)
+
+ # Re-optimize restored flow system
+ flow_system_2.optimize(fx.solvers.HighsSolver(mip_gap=0.001, time_limit_seconds=60))
+
+ np.testing.assert_allclose(original_objective, flow_system_2.solution['objective'].item(), rtol=0.001)
+
+
+def test_scenarios_selection(flow_system_piecewise_conversion_scenarios):
+ """Test scenario selection/subsetting functionality."""
+ flow_system_full = flow_system_piecewise_conversion_scenarios
+ scenarios = flow_system_full.scenarios
+ scenario_weights = np.linspace(0.5, 1, len(scenarios)) / np.sum(np.linspace(0.5, 1, len(scenarios)))
+ flow_system_full.scenario_weights = scenario_weights
+ flow_system = flow_system_full.sel(scenario=scenarios[0:2])
+
+ assert flow_system.scenarios.equals(flow_system_full.scenarios[0:2])
+
+ np.testing.assert_allclose(flow_system.scenario_weights.values, flow_system_full.scenario_weights[0:2])
+
+ # Optimize using new API with normalize_weights=False
+ flow_system.optimize(
+ fx.solvers.GurobiSolver(mip_gap=0.01, time_limit_seconds=60),
+ normalize_weights=False,
+ )
+
+ # Penalty has same structure as other effects: 'Penalty' is the total, 'Penalty(temporal)' and 'Penalty(periodic)' are components
+ np.testing.assert_allclose(
+ flow_system.solution['objective'].item(),
+ (
+ (flow_system.solution['costs'] * flow_system.scenario_weights).sum()
+ + (flow_system.solution['Penalty'] * flow_system.scenario_weights).sum()
+ ).item(),
+ )  # account for rounding errors
+
+ assert flow_system.solution.indexes['scenario'].equals(flow_system_full.scenarios[0:2])
+
+
+def test_sizes_per_scenario_default():
+ """Test that scenario_independent_sizes defaults to True (sizes equalized) and flow_rates to False (vary)."""
+ timesteps = pd.date_range('2023-01-01', periods=24, freq='h')
+ scenarios = pd.Index(['base', 'high'], name='scenario')
+
+ fs = fx.FlowSystem(timesteps=timesteps, scenarios=scenarios)
+
+ assert fs.scenario_independent_sizes is True
+ assert fs.scenario_independent_flow_rates is False
+
+
def test_sizes_per_scenario_bool():
    """scenario_independent_sizes accepts plain booleans and stores them verbatim."""
    time_index = pd.date_range('2023-01-01', periods=24, freq='h')
    scenario_index = pd.Index(['base', 'high'], name='scenario')

    # False: each scenario gets its own size decision.
    system_varying = fx.FlowSystem(timesteps=time_index, scenarios=scenario_index, scenario_independent_sizes=False)
    assert system_varying.scenario_independent_sizes is False

    # True: one shared size decision across all scenarios.
    system_shared = fx.FlowSystem(timesteps=time_index, scenarios=scenario_index, scenario_independent_sizes=True)
    assert system_shared.scenario_independent_sizes is True
+
+
def test_sizes_per_scenario_list():
    """scenario_independent_sizes accepts a list of element labels and round-trips it unchanged."""
    equalized_labels = ['solar->grid', 'battery->grid']
    system = fx.FlowSystem(
        timesteps=pd.date_range('2023-01-01', periods=24, freq='h'),
        scenarios=pd.Index(['base', 'high'], name='scenario'),
        scenario_independent_sizes=equalized_labels,
    )

    assert system.scenario_independent_sizes == ['solar->grid', 'battery->grid']
+
+
def test_flow_rates_per_scenario_default():
    """By default, flow rates vary per scenario (scenario_independent_flow_rates is False)."""
    system = fx.FlowSystem(
        timesteps=pd.date_range('2023-01-01', periods=24, freq='h'),
        scenarios=pd.Index(['base', 'high'], name='scenario'),
    )

    assert system.scenario_independent_flow_rates is False
+
+
def test_flow_rates_per_scenario_bool():
    """scenario_independent_flow_rates accepts plain booleans and stores them verbatim."""
    time_index = pd.date_range('2023-01-01', periods=24, freq='h')
    scenario_index = pd.Index(['base', 'high'], name='scenario')

    # False: flow rates may differ between scenarios.
    system_varying = fx.FlowSystem(
        timesteps=time_index, scenarios=scenario_index, scenario_independent_flow_rates=False
    )
    assert system_varying.scenario_independent_flow_rates is False

    # True: flow rates are equalized across scenarios.
    system_shared = fx.FlowSystem(timesteps=time_index, scenarios=scenario_index, scenario_independent_flow_rates=True)
    assert system_shared.scenario_independent_flow_rates is True
+
+
def test_scenario_parameters_property_setters():
    """Scenario-independence parameters are mutable via their property setters."""
    system = fx.FlowSystem(
        timesteps=pd.date_range('2023-01-01', periods=24, freq='h'),
        scenarios=pd.Index(['base', 'high'], name='scenario'),
    )

    # scenario_independent_sizes: assign a bool, then a list of labels.
    system.scenario_independent_sizes = True
    assert system.scenario_independent_sizes is True
    system.scenario_independent_sizes = ['component1', 'component2']
    assert system.scenario_independent_sizes == ['component1', 'component2']

    # scenario_independent_flow_rates: assign a bool, then a list of labels.
    system.scenario_independent_flow_rates = True
    assert system.scenario_independent_flow_rates is True
    system.scenario_independent_flow_rates = ['flow1', 'flow2']
    assert system.scenario_independent_flow_rates == ['flow1', 'flow2']
+
+
def test_scenario_parameters_validation():
    """Invalid values for scenario parameters raise TypeError/ValueError with clear messages."""
    system = fx.FlowSystem(
        timesteps=pd.date_range('2023-01-01', periods=24, freq='h'),
        scenarios=pd.Index(['base', 'high'], name='scenario'),
    )

    # A plain string is neither bool nor list.
    with pytest.raises(TypeError, match='must be bool or list'):
        system.scenario_independent_sizes = 'invalid'

    # Lists must contain element labels (strings), not arbitrary objects.
    with pytest.raises(ValueError, match='must contain only strings'):
        system.scenario_independent_sizes = [1, 2, 3]
+
+
def test_size_equality_constraints():
    """Test that size equality constraints are created when scenario_independent_sizes=True.

    Builds a one-source system with an investment-sized flow and scans the model's
    constraint names for the 'scenario_independent' size-coupling constraints.
    """
    timesteps = pd.date_range('2023-01-01', periods=24, freq='h')
    scenarios = pd.Index(['base', 'high'], name='scenario')

    fs = fx.FlowSystem(
        timesteps=timesteps,
        scenarios=scenarios,
        scenario_independent_sizes=True,  # Sizes should be equalized
        scenario_independent_flow_rates=False,  # Flow rates can vary
    )

    bus = fx.Bus('grid')
    source = fx.Source(
        label='solar',
        outputs=[
            fx.Flow(
                label='out',
                bus='grid',
                # InvestParameters makes size a decision variable, so the
                # equalization constraint has something to couple.
                size=fx.InvestParameters(
                    minimum_size=10,
                    maximum_size=100,
                    effects_of_investment_per_size={'cost': 100},
                ),
            )
        ],
    )

    fs.add_elements(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))

    fs.build_model()

    # Check that size equality constraint exists
    constraint_names = [str(c) for c in fs.model.constraints]
    size_constraints = [c for c in constraint_names if 'scenario_independent' in c and 'size' in c]

    assert len(size_constraints) > 0, 'Size equality constraint should exist'
+
+
def test_flow_rate_equality_constraints():
    """Test that flow_rate equality constraints are created when scenario_independent_flow_rates=True.

    Mirror of test_size_equality_constraints with the two flags swapped: sizes
    may vary, flow rates must be coupled across scenarios.
    """
    timesteps = pd.date_range('2023-01-01', periods=24, freq='h')
    scenarios = pd.Index(['base', 'high'], name='scenario')

    fs = fx.FlowSystem(
        timesteps=timesteps,
        scenarios=scenarios,
        scenario_independent_sizes=False,  # Sizes can vary
        scenario_independent_flow_rates=True,  # Flow rates should be equalized
    )

    bus = fx.Bus('grid')
    source = fx.Source(
        label='solar',
        outputs=[
            fx.Flow(
                label='out',
                bus='grid',
                size=fx.InvestParameters(
                    minimum_size=10,
                    maximum_size=100,
                    effects_of_investment_per_size={'cost': 100},
                ),
            )
        ],
    )

    fs.add_elements(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))

    fs.build_model()

    # Check that flow_rate equality constraint exists
    constraint_names = [str(c) for c in fs.model.constraints]
    flow_rate_constraints = [c for c in constraint_names if 'scenario_independent' in c and 'flow_rate' in c]

    assert len(flow_rate_constraints) > 0, 'Flow rate equality constraint should exist'
+
+
def test_selective_scenario_independence():
    """Test selective scenario independence with specific element lists.

    Only 'solar(out)' sizes and 'demand(in)' flow rates are listed as
    scenario-independent; the test verifies constraints exist exactly for the
    listed element/quantity pairs and for nothing else.
    """
    timesteps = pd.date_range('2023-01-01', periods=24, freq='h')
    scenarios = pd.Index(['base', 'high'], name='scenario')

    fs = fx.FlowSystem(
        timesteps=timesteps,
        scenarios=scenarios,
        scenario_independent_sizes=['solar(out)'],  # Only solar size is equalized
        scenario_independent_flow_rates=['demand(in)'],  # Only demand flow_rate is equalized
    )

    bus = fx.Bus('grid')
    source = fx.Source(
        label='solar',
        outputs=[
            fx.Flow(
                label='out',
                bus='grid',
                size=fx.InvestParameters(
                    minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100}
                ),
            )
        ],
    )
    sink = fx.Sink(
        label='demand',
        inputs=[fx.Flow(label='in', bus='grid', size=50)],
    )

    fs.add_elements(bus, source, sink, fx.Effect('cost', 'Total cost', '€', is_objective=True))

    fs.build_model()

    constraint_names = [str(c) for c in fs.model.constraints]

    # Solar SHOULD have size constraints (it's in the list, so equalized)
    solar_size_constraints = [c for c in constraint_names if 'solar(out)|size' in c and 'scenario_independent' in c]
    assert len(solar_size_constraints) > 0

    # Solar should NOT have flow_rate constraints (not in the list, so varies per scenario)
    solar_flow_constraints = [
        c for c in constraint_names if 'solar(out)|flow_rate' in c and 'scenario_independent' in c
    ]
    assert len(solar_flow_constraints) == 0

    # Demand should NOT have size constraints (no InvestParameters, size is fixed)
    demand_size_constraints = [c for c in constraint_names if 'demand(in)|size' in c and 'scenario_independent' in c]
    assert len(demand_size_constraints) == 0

    # Demand SHOULD have flow_rate constraints (it's in the list, so equalized)
    demand_flow_constraints = [
        c for c in constraint_names if 'demand(in)|flow_rate' in c and 'scenario_independent' in c
    ]
    assert len(demand_flow_constraints) > 0
+
+
def test_scenario_parameters_io_persistence():
    """Test that scenario_independent_sizes and scenario_independent_flow_rates persist through IO operations.

    Round-trips the FlowSystem through to_dataset/from_dataset and compares
    both scenario-independence parameters before and after.
    """

    timesteps = pd.date_range('2023-01-01', periods=24, freq='h')
    scenarios = pd.Index(['base', 'high'], name='scenario')

    # Create FlowSystem with custom scenario parameters (one list, one bool,
    # so both representations are exercised by the round-trip).
    fs_original = fx.FlowSystem(
        timesteps=timesteps,
        scenarios=scenarios,
        scenario_independent_sizes=['solar(out)'],
        scenario_independent_flow_rates=True,
    )

    bus = fx.Bus('grid')
    source = fx.Source(
        label='solar',
        outputs=[
            fx.Flow(
                label='out',
                bus='grid',
                size=fx.InvestParameters(
                    minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100}
                ),
            )
        ],
    )

    fs_original.add_elements(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))

    # Save to dataset
    fs_original.connect_and_transform()
    ds = fs_original.to_dataset()

    # Load from dataset
    fs_loaded = fx.FlowSystem.from_dataset(ds)

    # Verify parameters persisted
    assert fs_loaded.scenario_independent_sizes == fs_original.scenario_independent_sizes
    assert fs_loaded.scenario_independent_flow_rates == fs_original.scenario_independent_flow_rates
+
+
def test_scenario_parameters_io_with_calculation(tmp_path):
    """Test that scenario parameters persist through full calculation IO.

    Solves a small system, round-trips it through netCDF, and checks that the
    scenario-independence parameters survive and that rebuilding the model
    reproduces the same number of size-coupling constraints.
    """
    timesteps = pd.date_range('2023-01-01', periods=24, freq='h')
    scenarios = pd.Index(['base', 'high'], name='scenario')

    fs = fx.FlowSystem(
        timesteps=timesteps,
        scenarios=scenarios,
        scenario_independent_sizes=True,
        scenario_independent_flow_rates=['demand(in)'],
    )

    bus = fx.Bus('grid')
    source = fx.Source(
        label='solar',
        outputs=[
            fx.Flow(
                label='out',
                bus='grid',
                size=fx.InvestParameters(
                    minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100}
                ),
            )
        ],
    )
    sink = fx.Sink(
        label='demand',
        inputs=[fx.Flow(label='in', bus='grid', size=50)],
    )

    fs.add_elements(bus, source, sink, fx.Effect('cost', 'Total cost', '€', is_objective=True))

    # Solve using new API
    fs.optimize(fx.solvers.HighsSolver(mip_gap=0.01, time_limit_seconds=60))
    # Keep a handle on the solved model before the file round-trip.
    original_model = fs.model

    # Save and restore
    filepath = tmp_path / 'flow_system_scenarios.nc4'
    fs.to_netcdf(filepath)
    fs_loaded = fx.FlowSystem.from_netcdf(filepath)

    # Verify parameters persisted
    assert fs_loaded.scenario_independent_sizes == fs.scenario_independent_sizes
    assert fs_loaded.scenario_independent_flow_rates == fs.scenario_independent_flow_rates

    # Verify constraints are recreated correctly when building model
    fs_loaded.build_model()

    constraint_names1 = [str(c) for c in original_model.constraints]
    constraint_names2 = [str(c) for c in fs_loaded.model.constraints]

    size_constraints1 = [c for c in constraint_names1 if 'scenario_independent' in c and 'size' in c]
    size_constraints2 = [c for c in constraint_names2 if 'scenario_independent' in c and 'size' in c]

    assert len(size_constraints1) == len(size_constraints2)
+
+
def test_weights_io_persistence():
    """Test that weights persist through IO operations (to_dataset/from_dataset)."""
    timesteps = pd.date_range('2023-01-01', periods=24, freq='h')
    scenarios = pd.Index(['base', 'mid', 'high'], name='scenario')
    # Non-uniform weights so a lossy round-trip would be detected.
    custom_scenario_weights = np.array([0.3, 0.5, 0.2])

    # Create FlowSystem with custom scenario weights
    fs_original = fx.FlowSystem(
        timesteps=timesteps,
        scenarios=scenarios,
        scenario_weights=custom_scenario_weights,
    )

    bus = fx.Bus('grid')
    source = fx.Source(
        label='solar',
        outputs=[
            fx.Flow(
                label='out',
                bus='grid',
                size=fx.InvestParameters(
                    minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100}
                ),
            )
        ],
    )

    fs_original.add_elements(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))

    # Save to dataset
    fs_original.connect_and_transform()
    ds = fs_original.to_dataset()

    # Load from dataset
    fs_loaded = fx.FlowSystem.from_dataset(ds)

    # Verify weights persisted correctly (values and dimensions)
    np.testing.assert_allclose(fs_loaded.scenario_weights.values, fs_original.scenario_weights.values)
    assert fs_loaded.scenario_weights.dims == fs_original.scenario_weights.dims
+
+
def test_weights_selection():
    """Test that weights are correctly sliced when using FlowSystem.sel()."""
    timesteps = pd.date_range('2023-01-01', periods=24, freq='h')
    scenarios = pd.Index(['base', 'mid', 'high'], name='scenario')
    custom_scenario_weights = np.array([0.3, 0.5, 0.2])

    # Create FlowSystem with custom scenario weights
    fs_full = fx.FlowSystem(
        timesteps=timesteps,
        scenarios=scenarios,
        scenario_weights=custom_scenario_weights,
    )

    bus = fx.Bus('grid')
    source = fx.Source(
        label='solar',
        outputs=[
            fx.Flow(
                label='out',
                bus='grid',
                size=10,
            )
        ],
    )

    fs_full.add_elements(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))

    # Select a non-contiguous subset of scenarios ('mid' is dropped).
    fs_subset = fs_full.sel(scenario=['base', 'high'])

    # Verify weights are correctly sliced
    assert fs_subset.scenarios.equals(pd.Index(['base', 'high'], name='scenario'))
    np.testing.assert_allclose(fs_subset.scenario_weights.values, custom_scenario_weights[[0, 2]])

    # Verify weights are 1D with just scenario dimension (no period dimension)
    assert fs_subset.scenario_weights.dims == ('scenario',)
diff --git a/tests/deprecated/test_storage.py b/tests/deprecated/test_storage.py
new file mode 100644
index 000000000..15170a321
--- /dev/null
+++ b/tests/deprecated/test_storage.py
@@ -0,0 +1,490 @@
+import numpy as np
+import pytest
+
+import flixopt as fx
+
+from .conftest import assert_conequal, assert_var_equal, create_linopy_model
+
+
class TestStorageModel:
    """Test that storage model variables and constraints are correctly generated."""

    def test_basic_storage(self, basic_flow_system_linopy_coords, coords_config):
        """Test that basic storage model variables and constraints are correctly generated.

        Lossless storage (no efficiencies, no self-discharge): checks variable
        bounds, the netto-discharge definition, the energy balance and the
        initial-state constraint.
        """
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config

        # Create a simple storage
        storage = fx.Storage(
            'TestStorage',
            charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
            discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
            capacity_in_flow_hours=30,  # 30 kWh storage capacity
            initial_charge_state=0,  # Start empty
            prevent_simultaneous_charge_and_discharge=True,
        )

        flow_system.add_elements(storage)
        model = create_linopy_model(flow_system)

        # Check that all expected variables exist - linopy model variables are accessed by indexing
        expected_variables = {
            'TestStorage(Q_th_in)|flow_rate',
            'TestStorage(Q_th_in)|total_flow_hours',
            'TestStorage(Q_th_out)|flow_rate',
            'TestStorage(Q_th_out)|total_flow_hours',
            'TestStorage|charge_state',
            'TestStorage|netto_discharge',
        }
        for var_name in expected_variables:
            assert var_name in model.variables, f'Missing variable: {var_name}'

        # Check that all expected constraints exist - linopy model constraints are accessed by indexing
        expected_constraints = {
            'TestStorage(Q_th_in)|total_flow_hours',
            'TestStorage(Q_th_out)|total_flow_hours',
            'TestStorage|netto_discharge',
            'TestStorage|charge_state',
            'TestStorage|initial_charge_state',
        }
        for con_name in expected_constraints:
            assert con_name in model.constraints, f'Missing constraint: {con_name}'

        # Check variable properties (flow rates bounded by size; charge state by capacity,
        # defined on the extended time axis with one extra timestep)
        assert_var_equal(
            model['TestStorage(Q_th_in)|flow_rate'], model.add_variables(lower=0, upper=20, coords=model.get_coords())
        )
        assert_var_equal(
            model['TestStorage(Q_th_out)|flow_rate'], model.add_variables(lower=0, upper=20, coords=model.get_coords())
        )
        assert_var_equal(
            model['TestStorage|charge_state'],
            model.add_variables(lower=0, upper=30, coords=model.get_coords(extra_timestep=True)),
        )

        # Check constraint formulations
        assert_conequal(
            model.constraints['TestStorage|netto_discharge'],
            model.variables['TestStorage|netto_discharge']
            == model.variables['TestStorage(Q_th_out)|flow_rate'] - model.variables['TestStorage(Q_th_in)|flow_rate'],
        )

        # Energy balance: next charge state = previous + charging - discharging (no losses here).
        charge_state = model.variables['TestStorage|charge_state']
        assert_conequal(
            model.constraints['TestStorage|charge_state'],
            charge_state.isel(time=slice(1, None))
            == charge_state.isel(time=slice(None, -1))
            + model.variables['TestStorage(Q_th_in)|flow_rate'] * model.hours_per_step
            - model.variables['TestStorage(Q_th_out)|flow_rate'] * model.hours_per_step,
        )
        # Check initial charge state constraint
        assert_conequal(
            model.constraints['TestStorage|initial_charge_state'],
            model.variables['TestStorage|charge_state'].isel(time=0) == 0,
        )
+
    def test_lossy_storage(self, basic_flow_system_linopy_coords, coords_config):
        """Test storage with charge/discharge efficiencies and self-discharge losses.

        Same structure as test_basic_storage, but the energy balance must apply
        eta_charge, eta_discharge and the per-hour relative loss.
        """
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config

        # Create a simple storage
        storage = fx.Storage(
            'TestStorage',
            charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
            discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
            capacity_in_flow_hours=30,  # 30 kWh storage capacity
            initial_charge_state=0,  # Start empty
            eta_charge=0.9,  # Charging efficiency
            eta_discharge=0.8,  # Discharging efficiency
            relative_loss_per_hour=0.05,  # 5% loss per hour
            prevent_simultaneous_charge_and_discharge=True,
        )

        flow_system.add_elements(storage)
        model = create_linopy_model(flow_system)

        # Check that all expected variables exist - linopy model variables are accessed by indexing
        expected_variables = {
            'TestStorage(Q_th_in)|flow_rate',
            'TestStorage(Q_th_in)|total_flow_hours',
            'TestStorage(Q_th_out)|flow_rate',
            'TestStorage(Q_th_out)|total_flow_hours',
            'TestStorage|charge_state',
            'TestStorage|netto_discharge',
        }
        for var_name in expected_variables:
            assert var_name in model.variables, f'Missing variable: {var_name}'

        # Check that all expected constraints exist - linopy model constraints are accessed by indexing
        expected_constraints = {
            'TestStorage(Q_th_in)|total_flow_hours',
            'TestStorage(Q_th_out)|total_flow_hours',
            'TestStorage|netto_discharge',
            'TestStorage|charge_state',
            'TestStorage|initial_charge_state',
        }
        for con_name in expected_constraints:
            assert con_name in model.constraints, f'Missing constraint: {con_name}'

        # Check variable properties
        assert_var_equal(
            model['TestStorage(Q_th_in)|flow_rate'], model.add_variables(lower=0, upper=20, coords=model.get_coords())
        )
        assert_var_equal(
            model['TestStorage(Q_th_out)|flow_rate'], model.add_variables(lower=0, upper=20, coords=model.get_coords())
        )
        assert_var_equal(
            model['TestStorage|charge_state'],
            model.add_variables(lower=0, upper=30, coords=model.get_coords(extra_timestep=True)),
        )

        # Check constraint formulations
        assert_conequal(
            model.constraints['TestStorage|netto_discharge'],
            model.variables['TestStorage|netto_discharge']
            == model.variables['TestStorage(Q_th_out)|flow_rate'] - model.variables['TestStorage(Q_th_in)|flow_rate'],
        )

        # Named locals mirror the storage parameters above for readability.
        charge_state = model.variables['TestStorage|charge_state']
        rel_loss = 0.05
        hours_per_step = model.hours_per_step
        charge_rate = model.variables['TestStorage(Q_th_in)|flow_rate']
        discharge_rate = model.variables['TestStorage(Q_th_out)|flow_rate']
        eff_charge = 0.9
        eff_discharge = 0.8

        # Energy balance with losses: self-discharge decays the previous state,
        # charging is scaled by eta_charge, discharging divided by eta_discharge.
        assert_conequal(
            model.constraints['TestStorage|charge_state'],
            charge_state.isel(time=slice(1, None))
            == charge_state.isel(time=slice(None, -1)) * (1 - rel_loss) ** hours_per_step
            + charge_rate * eff_charge * hours_per_step
            - discharge_rate / eff_discharge * hours_per_step,
        )

        # Check initial charge state constraint
        assert_conequal(
            model.constraints['TestStorage|initial_charge_state'],
            model.variables['TestStorage|charge_state'].isel(time=0) == 0,
        )
+
    def test_charge_state_bounds(self, basic_flow_system_linopy_coords, coords_config):
        """Test time-varying relative charge-state bounds.

        The charge_state variable's bounds must follow the relative min/max
        profiles scaled by capacity, forward-filled onto the extra timestep.
        """
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config

        # Create a simple storage
        storage = fx.Storage(
            'TestStorage',
            charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
            discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
            capacity_in_flow_hours=30,  # 30 kWh storage capacity
            initial_charge_state=3,
            prevent_simultaneous_charge_and_discharge=True,
            relative_maximum_charge_state=np.array([0.14, 0.22, 0.3, 0.38, 0.46, 0.54, 0.62, 0.7, 0.78, 0.86]),
            relative_minimum_charge_state=np.array([0.07, 0.11, 0.15, 0.19, 0.23, 0.27, 0.31, 0.35, 0.39, 0.43]),
        )

        flow_system.add_elements(storage)
        model = create_linopy_model(flow_system)

        # Check that all expected variables exist - linopy model variables are accessed by indexing
        expected_variables = {
            'TestStorage(Q_th_in)|flow_rate',
            'TestStorage(Q_th_in)|total_flow_hours',
            'TestStorage(Q_th_out)|flow_rate',
            'TestStorage(Q_th_out)|total_flow_hours',
            'TestStorage|charge_state',
            'TestStorage|netto_discharge',
        }
        for var_name in expected_variables:
            assert var_name in model.variables, f'Missing variable: {var_name}'

        # Check that all expected constraints exist - linopy model constraints are accessed by indexing
        expected_constraints = {
            'TestStorage(Q_th_in)|total_flow_hours',
            'TestStorage(Q_th_out)|total_flow_hours',
            'TestStorage|netto_discharge',
            'TestStorage|charge_state',
            'TestStorage|initial_charge_state',
        }
        for con_name in expected_constraints:
            assert con_name in model.constraints, f'Missing constraint: {con_name}'

        # Check variable properties
        assert_var_equal(
            model['TestStorage(Q_th_in)|flow_rate'], model.add_variables(lower=0, upper=20, coords=model.get_coords())
        )
        assert_var_equal(
            model['TestStorage(Q_th_out)|flow_rate'], model.add_variables(lower=0, upper=20, coords=model.get_coords())
        )
        # Bounds are relative profiles * capacity, reindexed to the extended time
        # axis and forward-filled for the extra final timestep.
        assert_var_equal(
            model['TestStorage|charge_state'],
            model.add_variables(
                lower=storage.relative_minimum_charge_state.reindex(
                    time=model.get_coords(extra_timestep=True)['time']
                ).ffill('time')
                * 30,
                upper=storage.relative_maximum_charge_state.reindex(
                    time=model.get_coords(extra_timestep=True)['time']
                ).ffill('time')
                * 30,
                coords=model.get_coords(extra_timestep=True),
            ),
        )

        # Check constraint formulations
        assert_conequal(
            model.constraints['TestStorage|netto_discharge'],
            model.variables['TestStorage|netto_discharge']
            == model.variables['TestStorage(Q_th_out)|flow_rate'] - model.variables['TestStorage(Q_th_in)|flow_rate'],
        )

        charge_state = model.variables['TestStorage|charge_state']
        assert_conequal(
            model.constraints['TestStorage|charge_state'],
            charge_state.isel(time=slice(1, None))
            == charge_state.isel(time=slice(None, -1))
            + model.variables['TestStorage(Q_th_in)|flow_rate'] * model.hours_per_step
            - model.variables['TestStorage(Q_th_out)|flow_rate'] * model.hours_per_step,
        )
        # Check initial charge state constraint
        assert_conequal(
            model.constraints['TestStorage|initial_charge_state'],
            model.variables['TestStorage|charge_state'].isel(time=0) == 3,
        )
+
    def test_storage_with_investment(self, basic_flow_system_linopy_coords, coords_config):
        """Test storage with investment parameters.

        Optional (non-mandatory) investment: expects a 'size' variable, a binary
        'invested' variable and linking lb/ub constraints between them.
        """
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config

        # Create storage with investment parameters
        storage = fx.Storage(
            'InvestStorage',
            charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
            discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
            capacity_in_flow_hours=fx.InvestParameters(
                effects_of_investment=100,
                effects_of_investment_per_size=10,
                minimum_size=20,
                maximum_size=100,
                mandatory=False,
            ),
            initial_charge_state=0,
            eta_charge=0.9,
            eta_discharge=0.9,
            relative_loss_per_hour=0.05,
            prevent_simultaneous_charge_and_discharge=True,
        )

        flow_system.add_elements(storage)
        model = create_linopy_model(flow_system)

        # Check investment variables exist
        for var_name in {
            'InvestStorage|charge_state',
            'InvestStorage|size',
            'InvestStorage|invested',
        }:
            assert var_name in model.variables, f'Missing investment variable: {var_name}'

        # Check investment constraints exist
        for con_name in {'InvestStorage|size|ub', 'InvestStorage|size|lb'}:
            assert con_name in model.constraints, f'Missing investment constraint: {con_name}'

        # Check variable properties: size/invested are per period+scenario, not per timestep.
        assert_var_equal(
            model['InvestStorage|size'],
            model.add_variables(lower=0, upper=100, coords=model.get_coords(['period', 'scenario'])),
        )
        assert_var_equal(
            model['InvestStorage|invested'],
            model.add_variables(binary=True, coords=model.get_coords(['period', 'scenario'])),
        )
        # size is forced to 0 when not invested, and into [20, 100] when invested.
        assert_conequal(
            model.constraints['InvestStorage|size|ub'],
            model.variables['InvestStorage|size'] <= model.variables['InvestStorage|invested'] * 100,
        )
        assert_conequal(
            model.constraints['InvestStorage|size|lb'],
            model.variables['InvestStorage|size'] >= model.variables['InvestStorage|invested'] * 20,
        )
+
    def test_storage_with_final_state_constraints(self, basic_flow_system_linopy_coords, coords_config):
        """Test storage with final state constraints.

        Checks that minimal/maximal final charge states become >=/<= constraints
        on the last timestep of the charge_state variable.
        """
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config

        # Create storage with final state constraints
        storage = fx.Storage(
            'FinalStateStorage',
            charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
            discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
            capacity_in_flow_hours=30,
            initial_charge_state=10,  # Start with 10 kWh
            minimal_final_charge_state=15,  # End with at least 15 kWh
            maximal_final_charge_state=25,  # End with at most 25 kWh
            eta_charge=0.9,
            eta_discharge=0.9,
            relative_loss_per_hour=0.05,
        )

        flow_system.add_elements(storage)
        model = create_linopy_model(flow_system)

        # Check final state constraints exist
        expected_constraints = {
            'FinalStateStorage|final_charge_min',
            'FinalStateStorage|final_charge_max',
        }

        for con_name in expected_constraints:
            assert con_name in model.constraints, f'Missing final state constraint: {con_name}'

        assert_conequal(
            model.constraints['FinalStateStorage|initial_charge_state'],
            model.variables['FinalStateStorage|charge_state'].isel(time=0) == 10,
        )

        # Check final state constraint formulations (applied at the last timestep)
        assert_conequal(
            model.constraints['FinalStateStorage|final_charge_min'],
            model.variables['FinalStateStorage|charge_state'].isel(time=-1) >= 15,
        )
        assert_conequal(
            model.constraints['FinalStateStorage|final_charge_max'],
            model.variables['FinalStateStorage|charge_state'].isel(time=-1) <= 25,
        )
+
    def test_storage_cyclic_initialization(self, basic_flow_system_linopy_coords, coords_config):
        """Test storage with cyclic initialization.

        initial_charge_state='equals_final' must produce a constraint tying the
        first charge state to the last one instead of fixing it to a number.
        """
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config

        # Create storage with cyclic initialization
        storage = fx.Storage(
            'CyclicStorage',
            charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
            discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
            capacity_in_flow_hours=30,
            initial_charge_state='equals_final',  # Cyclic initialization
            eta_charge=0.9,
            eta_discharge=0.9,
            relative_loss_per_hour=0.05,
        )

        flow_system.add_elements(storage)
        model = create_linopy_model(flow_system)

        # Check cyclic constraint exists
        assert 'CyclicStorage|initial_charge_state' in model.constraints, 'Missing cyclic initialization constraint'

        # Check cyclic constraint formulation: first state == last state
        assert_conequal(
            model.constraints['CyclicStorage|initial_charge_state'],
            model.variables['CyclicStorage|charge_state'].isel(time=0)
            == model.variables['CyclicStorage|charge_state'].isel(time=-1),
        )
+
    @pytest.mark.parametrize(
        'prevent_simultaneous',
        [True, False],
    )
    def test_simultaneous_charge_discharge(self, basic_flow_system_linopy_coords, coords_config, prevent_simultaneous):
        """Test prevent_simultaneous_charge_and_discharge parameter.

        With prevention enabled, binary status variables for both flows and a
        mutual-exclusion constraint (sum of statuses <= 1) must exist.
        """
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config

        # Create storage with or without simultaneous charge/discharge prevention
        storage = fx.Storage(
            'SimultaneousStorage',
            charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
            discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
            capacity_in_flow_hours=30,
            initial_charge_state=0,
            eta_charge=0.9,
            eta_discharge=0.9,
            relative_loss_per_hour=0.05,
            prevent_simultaneous_charge_and_discharge=prevent_simultaneous,
        )

        flow_system.add_elements(storage)
        model = create_linopy_model(flow_system)

        # Binary variables should exist when preventing simultaneous operation
        if prevent_simultaneous:
            binary_vars = {
                'SimultaneousStorage(Q_th_in)|status',
                'SimultaneousStorage(Q_th_out)|status',
            }
            for var_name in binary_vars:
                assert var_name in model.variables, f'Missing binary variable: {var_name}'

            # Check for constraints that enforce either charging or discharging
            constraint_name = 'SimultaneousStorage|prevent_simultaneous_use'
            assert constraint_name in model.constraints, 'Missing constraint to prevent simultaneous operation'

            assert_conequal(
                model.constraints['SimultaneousStorage|prevent_simultaneous_use'],
                model.variables['SimultaneousStorage(Q_th_in)|status']
                + model.variables['SimultaneousStorage(Q_th_out)|status']
                <= 1,
            )
+
    @pytest.mark.parametrize(
        'mandatory,minimum_size,expected_vars,expected_constraints',
        [
            # Optional investment: binary 'invested' plus a size lower bound.
            (False, None, {'InvestStorage|invested'}, {'InvestStorage|size|lb'}),
            (False, 20, {'InvestStorage|invested'}, {'InvestStorage|size|lb'}),
            # Mandatory investment: no optional-investment artifacts expected.
            (True, None, set(), set()),
            (True, 20, set(), set()),
        ],
    )
    def test_investment_parameters(
        self,
        basic_flow_system_linopy_coords,
        coords_config,
        mandatory,
        minimum_size,
        expected_vars,
        expected_constraints,
    ):
        """Test different investment parameter combinations."""
        flow_system, coords_config = basic_flow_system_linopy_coords, coords_config

        # Create investment parameters
        invest_params = {
            'effects_of_investment': 100,
            'effects_of_investment_per_size': 10,
            'mandatory': mandatory,
            'maximum_size': 100,
        }
        if minimum_size is not None:
            invest_params['minimum_size'] = minimum_size

        # Create storage with specified investment parameters
        storage = fx.Storage(
            'InvestStorage',
            charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
            discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
            capacity_in_flow_hours=fx.InvestParameters(**invest_params),
            initial_charge_state=0,
            eta_charge=0.9,
            eta_discharge=0.9,
            relative_loss_per_hour=0.05,
        )

        flow_system.add_elements(storage)
        model = create_linopy_model(flow_system)

        # Check that expected variables exist
        for var_name in expected_vars:
            if not mandatory:  # Optional investment (mandatory=False)
                assert var_name in model.variables, f'Expected variable {var_name} not found'

        # Check that expected constraints exist
        for constraint_name in expected_constraints:
            if not mandatory:  # Optional investment (mandatory=False)
                assert constraint_name in model.constraints, f'Expected constraint {constraint_name} not found'

        # If mandatory is True, invested should be fixed to 1
        if mandatory:
            # Check that the invested variable exists and is fixed to 1
            if 'InvestStorage|invested' in model.variables:
                var = model.variables['InvestStorage|invested']
                # Check if the lower and upper bounds are both 1
                assert var.upper == 1 and var.lower == 1, 'invested variable should be fixed to 1 when mandatory=True'
diff --git a/tests/deprecated/test_timeseries.py b/tests/deprecated/test_timeseries.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/ressources/v4-api/00_minimal--flow_system.nc4 b/tests/ressources/v4-api/00_minimal--flow_system.nc4
new file mode 100644
index 000000000..41755e930
Binary files /dev/null and b/tests/ressources/v4-api/00_minimal--flow_system.nc4 differ
diff --git a/tests/ressources/v4-api/00_minimal--model_documentation.yaml b/tests/ressources/v4-api/00_minimal--model_documentation.yaml
new file mode 100644
index 000000000..f398d484d
--- /dev/null
+++ b/tests/ressources/v4-api/00_minimal--model_documentation.yaml
@@ -0,0 +1,213 @@
+objective: |-
+ Objective:
+ ----------
+ LinearExpression: +1 Costs + 1 Penalty
+ Sense: min
+ Value: 4.0
+termination_condition: optimal
+status: ok
+nvars: 40
+nvarsbin: 0
+nvarscont: 40
+ncons: 25
+variables:
+ Costs(periodic): |-
+ Variable
+ --------
+ Costs(periodic) ∈ [-inf, inf]
+ Costs(temporal): |-
+ Variable
+ --------
+ Costs(temporal) ∈ [-inf, inf]
+ "Costs(temporal)|per_timestep": |-
+ Variable (time: 3)
+ ------------------
+ [2020-01-01 00:00:00]: Costs(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Costs(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Costs(temporal)|per_timestep[2020-01-01 02:00:00] ∈ [-inf, inf]
+ Costs: |-
+ Variable
+ --------
+ Costs ∈ [-inf, inf]
+ Penalty: |-
+ Variable
+ --------
+ Penalty ∈ [-inf, inf]
+ "Boiler(Gas)|flow_rate": |-
+ Variable (time: 3)
+ ------------------
+ [2020-01-01 00:00:00]: Boiler(Gas)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: Boiler(Gas)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: Boiler(Gas)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ "Boiler(Gas)|total_flow_hours": |-
+ Variable
+ --------
+ Boiler(Gas)|total_flow_hours ∈ [0, inf]
+ "Boiler(Heat)|flow_rate": |-
+ Variable (time: 3)
+ ------------------
+ [2020-01-01 00:00:00]: Boiler(Heat)|flow_rate[2020-01-01 00:00:00] ∈ [0, 50]
+ [2020-01-01 01:00:00]: Boiler(Heat)|flow_rate[2020-01-01 01:00:00] ∈ [0, 50]
+ [2020-01-01 02:00:00]: Boiler(Heat)|flow_rate[2020-01-01 02:00:00] ∈ [0, 50]
+ "Boiler(Heat)|total_flow_hours": |-
+ Variable
+ --------
+ Boiler(Heat)|total_flow_hours ∈ [0, inf]
+ "Sink(Demand)|flow_rate": |-
+ Variable (time: 3)
+ ------------------
+ [2020-01-01 00:00:00]: Sink(Demand)|flow_rate[2020-01-01 00:00:00] ∈ [30, 30]
+ [2020-01-01 01:00:00]: Sink(Demand)|flow_rate[2020-01-01 01:00:00] ∈ [0, 0]
+ [2020-01-01 02:00:00]: Sink(Demand)|flow_rate[2020-01-01 02:00:00] ∈ [20, 20]
+ "Sink(Demand)|total_flow_hours": |-
+ Variable
+ --------
+ Sink(Demand)|total_flow_hours ∈ [0, inf]
+ "Source(Gas)|flow_rate": |-
+ Variable (time: 3)
+ ------------------
+ [2020-01-01 00:00:00]: Source(Gas)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: Source(Gas)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 02:00:00]: Source(Gas)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1000]
+ "Source(Gas)|total_flow_hours": |-
+ Variable
+ --------
+ Source(Gas)|total_flow_hours ∈ [0, inf]
+ "Source(Gas)->Costs(temporal)": |-
+ Variable (time: 3)
+ ------------------
+ [2020-01-01 00:00:00]: Source(Gas)->Costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Source(Gas)->Costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Source(Gas)->Costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ "Heat|excess_input": |-
+ Variable (time: 3)
+ ------------------
+ [2020-01-01 00:00:00]: Heat|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Heat|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Heat|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ "Heat|excess_output": |-
+ Variable (time: 3)
+ ------------------
+ [2020-01-01 00:00:00]: Heat|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Heat|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Heat|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ "Heat->Penalty": |-
+ Variable
+ --------
+ Heat->Penalty ∈ [-inf, inf]
+ "Gas|excess_input": |-
+ Variable (time: 3)
+ ------------------
+ [2020-01-01 00:00:00]: Gas|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Gas|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Gas|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ "Gas|excess_output": |-
+ Variable (time: 3)
+ ------------------
+ [2020-01-01 00:00:00]: Gas|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Gas|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Gas|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ "Gas->Penalty": |-
+ Variable
+ --------
+ Gas->Penalty ∈ [-inf, inf]
+constraints:
+ Costs(periodic): |-
+ Constraint `Costs(periodic)`
+ ----------------------------
+ +1 Costs(periodic) = -0.0
+ Costs(temporal): |-
+ Constraint `Costs(temporal)`
+ ----------------------------
+ +1 Costs(temporal) - 1 Costs(temporal)|per_timestep[2020-01-01 00:00:00] - 1 Costs(temporal)|per_timestep[2020-01-01 01:00:00] - 1 Costs(temporal)|per_timestep[2020-01-01 02:00:00] = -0.0
+ "Costs(temporal)|per_timestep": |-
+ Constraint `Costs(temporal)|per_timestep`
+ [time: 3]:
+ ----------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Costs(temporal)|per_timestep[2020-01-01 00:00:00] - 1 Source(Gas)->Costs(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Costs(temporal)|per_timestep[2020-01-01 01:00:00] - 1 Source(Gas)->Costs(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Costs(temporal)|per_timestep[2020-01-01 02:00:00] - 1 Source(Gas)->Costs(temporal)[2020-01-01 02:00:00] = -0.0
+ Costs: |-
+ Constraint `Costs`
+ ------------------
+ +1 Costs - 1 Costs(temporal) - 1 Costs(periodic) = -0.0
+ Penalty: |-
+ Constraint `Penalty`
+ --------------------
+ +1 Penalty - 1 Heat->Penalty - 1 Gas->Penalty = -0.0
+ "Boiler(Gas)|total_flow_hours": |-
+ Constraint `Boiler(Gas)|total_flow_hours`
+ -----------------------------------------
+ +1 Boiler(Gas)|total_flow_hours - 1 Boiler(Gas)|flow_rate[2020-01-01 00:00:00] - 1 Boiler(Gas)|flow_rate[2020-01-01 01:00:00] - 1 Boiler(Gas)|flow_rate[2020-01-01 02:00:00] = -0.0
+ "Boiler(Heat)|total_flow_hours": |-
+ Constraint `Boiler(Heat)|total_flow_hours`
+ ------------------------------------------
+ +1 Boiler(Heat)|total_flow_hours - 1 Boiler(Heat)|flow_rate[2020-01-01 00:00:00] - 1 Boiler(Heat)|flow_rate[2020-01-01 01:00:00] - 1 Boiler(Heat)|flow_rate[2020-01-01 02:00:00] = -0.0
+ "Boiler|conversion_0": |-
+ Constraint `Boiler|conversion_0`
+ [time: 3]:
+ -------------------------------------------
+ [2020-01-01 00:00:00]: +0.5 Boiler(Gas)|flow_rate[2020-01-01 00:00:00] - 1 Boiler(Heat)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +0.5 Boiler(Gas)|flow_rate[2020-01-01 01:00:00] - 1 Boiler(Heat)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +0.5 Boiler(Gas)|flow_rate[2020-01-01 02:00:00] - 1 Boiler(Heat)|flow_rate[2020-01-01 02:00:00] = -0.0
+ "Sink(Demand)|total_flow_hours": |-
+ Constraint `Sink(Demand)|total_flow_hours`
+ ------------------------------------------
+ +1 Sink(Demand)|total_flow_hours - 1 Sink(Demand)|flow_rate[2020-01-01 00:00:00] - 1 Sink(Demand)|flow_rate[2020-01-01 01:00:00] - 1 Sink(Demand)|flow_rate[2020-01-01 02:00:00] = -0.0
+ "Source(Gas)|total_flow_hours": |-
+ Constraint `Source(Gas)|total_flow_hours`
+ -----------------------------------------
+ +1 Source(Gas)|total_flow_hours - 1 Source(Gas)|flow_rate[2020-01-01 00:00:00] - 1 Source(Gas)|flow_rate[2020-01-01 01:00:00] - 1 Source(Gas)|flow_rate[2020-01-01 02:00:00] = -0.0
+ "Source(Gas)->Costs(temporal)": |-
+ Constraint `Source(Gas)->Costs(temporal)`
+ [time: 3]:
+ ----------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Source(Gas)->Costs(temporal)[2020-01-01 00:00:00] - 0.04 Source(Gas)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Source(Gas)->Costs(temporal)[2020-01-01 01:00:00] - 0.04 Source(Gas)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Source(Gas)->Costs(temporal)[2020-01-01 02:00:00] - 0.04 Source(Gas)|flow_rate[2020-01-01 02:00:00] = -0.0
+ "Heat|balance": |-
+ Constraint `Heat|balance`
+ [time: 3]:
+ ------------------------------------
+ [2020-01-01 00:00:00]: +1 Boiler(Heat)|flow_rate[2020-01-01 00:00:00] - 1 Sink(Demand)|flow_rate[2020-01-01 00:00:00] + 1 Heat|excess_input[2020-01-01 00:00:00] - 1 Heat|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Boiler(Heat)|flow_rate[2020-01-01 01:00:00] - 1 Sink(Demand)|flow_rate[2020-01-01 01:00:00] + 1 Heat|excess_input[2020-01-01 01:00:00] - 1 Heat|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Boiler(Heat)|flow_rate[2020-01-01 02:00:00] - 1 Sink(Demand)|flow_rate[2020-01-01 02:00:00] + 1 Heat|excess_input[2020-01-01 02:00:00] - 1 Heat|excess_output[2020-01-01 02:00:00] = -0.0
+ "Heat->Penalty": |-
+ Constraint `Heat->Penalty`
+ --------------------------
+ +1 Heat->Penalty - 1e+05 Heat|excess_input[2020-01-01 00:00:00] - 1e+05 Heat|excess_input[2020-01-01 01:00:00]... -1e+05 Heat|excess_output[2020-01-01 00:00:00] - 1e+05 Heat|excess_output[2020-01-01 01:00:00] - 1e+05 Heat|excess_output[2020-01-01 02:00:00] = -0.0
+ "Gas|balance": |-
+ Constraint `Gas|balance`
+ [time: 3]:
+ -----------------------------------
+ [2020-01-01 00:00:00]: +1 Source(Gas)|flow_rate[2020-01-01 00:00:00] - 1 Boiler(Gas)|flow_rate[2020-01-01 00:00:00] + 1 Gas|excess_input[2020-01-01 00:00:00] - 1 Gas|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Source(Gas)|flow_rate[2020-01-01 01:00:00] - 1 Boiler(Gas)|flow_rate[2020-01-01 01:00:00] + 1 Gas|excess_input[2020-01-01 01:00:00] - 1 Gas|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Source(Gas)|flow_rate[2020-01-01 02:00:00] - 1 Boiler(Gas)|flow_rate[2020-01-01 02:00:00] + 1 Gas|excess_input[2020-01-01 02:00:00] - 1 Gas|excess_output[2020-01-01 02:00:00] = -0.0
+ "Gas->Penalty": |-
+ Constraint `Gas->Penalty`
+ -------------------------
+ +1 Gas->Penalty - 1e+05 Gas|excess_input[2020-01-01 00:00:00] - 1e+05 Gas|excess_input[2020-01-01 01:00:00]... -1e+05 Gas|excess_output[2020-01-01 00:00:00] - 1e+05 Gas|excess_output[2020-01-01 01:00:00] - 1e+05 Gas|excess_output[2020-01-01 02:00:00] = -0.0
+binaries: []
+integers: []
+continuous:
+ - Costs(periodic)
+ - Costs(temporal)
+ - "Costs(temporal)|per_timestep"
+ - Costs
+ - Penalty
+ - "Boiler(Gas)|flow_rate"
+ - "Boiler(Gas)|total_flow_hours"
+ - "Boiler(Heat)|flow_rate"
+ - "Boiler(Heat)|total_flow_hours"
+ - "Sink(Demand)|flow_rate"
+ - "Sink(Demand)|total_flow_hours"
+ - "Source(Gas)|flow_rate"
+ - "Source(Gas)|total_flow_hours"
+ - "Source(Gas)->Costs(temporal)"
+ - "Heat|excess_input"
+ - "Heat|excess_output"
+ - "Heat->Penalty"
+ - "Gas|excess_input"
+ - "Gas|excess_output"
+ - "Gas->Penalty"
+infeasible_constraints: ''
diff --git a/tests/ressources/v4-api/00_minimal--solution.nc4 b/tests/ressources/v4-api/00_minimal--solution.nc4
new file mode 100644
index 000000000..86f94e3b5
Binary files /dev/null and b/tests/ressources/v4-api/00_minimal--solution.nc4 differ
diff --git a/tests/ressources/v4-api/00_minimal--summary.yaml b/tests/ressources/v4-api/00_minimal--summary.yaml
new file mode 100644
index 000000000..598c501ed
--- /dev/null
+++ b/tests/ressources/v4-api/00_minimal--summary.yaml
@@ -0,0 +1,46 @@
+Name: 00_minimal
+Number of timesteps: 3
+Calculation Type: FullCalculation
+Constraints: 25
+Variables: 40
+Main Results:
+ Objective: 4.0
+ Penalty: -0.0
+ Effects:
+ Costs [€]:
+ temporal: 4.0
+ periodic: -0.0
+ total: 4.0
+ Invest-Decisions:
+ Invested: {}
+ Not invested: {}
+ Buses with excess: []
+Durations:
+ modeling: 0.39
+ solving: 0.17
+ saving: 0.0
+Config:
+ config_name: flixopt
+ logging:
+ level: INFO
+ file: null
+ console: false
+ max_file_size: 10485760
+ backup_count: 5
+ verbose_tracebacks: false
+ modeling:
+ big: 10000000
+ epsilon: 1.0e-05
+ big_binary_bound: 100000
+ solving:
+ mip_gap: 0.01
+ time_limit_seconds: 300
+ log_to_console: false
+ log_main_results: false
+ plotting:
+ default_show: false
+ default_engine: plotly
+ default_dpi: 300
+ default_facet_cols: 3
+ default_sequential_colorscale: turbo
+ default_qualitative_colorscale: plotly
diff --git a/tests/ressources/v4-api/01_simple--flow_system.nc4 b/tests/ressources/v4-api/01_simple--flow_system.nc4
new file mode 100644
index 000000000..ccc271a0e
Binary files /dev/null and b/tests/ressources/v4-api/01_simple--flow_system.nc4 differ
diff --git a/tests/ressources/v4-api/01_simple--model_documentation.yaml b/tests/ressources/v4-api/01_simple--model_documentation.yaml
new file mode 100644
index 000000000..947ddea6f
--- /dev/null
+++ b/tests/ressources/v4-api/01_simple--model_documentation.yaml
@@ -0,0 +1,848 @@
+objective: |-
+ Objective:
+ ----------
+ LinearExpression: +1 costs + 1 Penalty
+ Sense: min
+ Value: 83.88394666666667
+termination_condition: optimal
+status: ok
+nvars: 259
+nvarsbin: 18
+nvarscont: 241
+ncons: 215
+variables:
+ costs(periodic): |-
+ Variable
+ --------
+ costs(periodic) ∈ [-inf, inf]
+ costs(temporal): |-
+ Variable
+ --------
+ costs(temporal) ∈ [-inf, inf]
+ "costs(temporal)|per_timestep": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: costs(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: costs(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: costs(temporal)|per_timestep[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: costs(temporal)|per_timestep[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: costs(temporal)|per_timestep[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: costs(temporal)|per_timestep[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: costs(temporal)|per_timestep[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: costs(temporal)|per_timestep[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: costs(temporal)|per_timestep[2020-01-01 08:00:00] ∈ [-inf, inf]
+ costs: |-
+ Variable
+ --------
+ costs ∈ [-inf, inf]
+ CO2(periodic): |-
+ Variable
+ --------
+ CO2(periodic) ∈ [-inf, inf]
+ CO2(temporal): |-
+ Variable
+ --------
+ CO2(temporal) ∈ [-inf, inf]
+ "CO2(temporal)|per_timestep": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CO2(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, 1000]
+ [2020-01-01 01:00:00]: CO2(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, 1000]
+ [2020-01-01 02:00:00]: CO2(temporal)|per_timestep[2020-01-01 02:00:00] ∈ [-inf, 1000]
+ [2020-01-01 03:00:00]: CO2(temporal)|per_timestep[2020-01-01 03:00:00] ∈ [-inf, 1000]
+ [2020-01-01 04:00:00]: CO2(temporal)|per_timestep[2020-01-01 04:00:00] ∈ [-inf, 1000]
+ [2020-01-01 05:00:00]: CO2(temporal)|per_timestep[2020-01-01 05:00:00] ∈ [-inf, 1000]
+ [2020-01-01 06:00:00]: CO2(temporal)|per_timestep[2020-01-01 06:00:00] ∈ [-inf, 1000]
+ [2020-01-01 07:00:00]: CO2(temporal)|per_timestep[2020-01-01 07:00:00] ∈ [-inf, 1000]
+ [2020-01-01 08:00:00]: CO2(temporal)|per_timestep[2020-01-01 08:00:00] ∈ [-inf, 1000]
+ CO2: |-
+ Variable
+ --------
+ CO2 ∈ [-inf, inf]
+ Penalty: |-
+ Variable
+ --------
+ Penalty ∈ [-inf, inf]
+ "CO2(temporal)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Boiler(Q_fu)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+07]
+ [2020-01-01 05:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+07]
+ [2020-01-01 06:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+07]
+ "Boiler(Q_fu)|total_flow_hours": |-
+ Variable
+ --------
+ Boiler(Q_fu)|total_flow_hours ∈ [0, inf]
+ "Boiler(Q_th)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 00:00:00] ∈ [5, 50]
+ [2020-01-01 01:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 01:00:00] ∈ [5, 50]
+ [2020-01-01 02:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 02:00:00] ∈ [5, 50]
+ [2020-01-01 03:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 03:00:00] ∈ [5, 50]
+ [2020-01-01 04:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 04:00:00] ∈ [5, 50]
+ [2020-01-01 05:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 05:00:00] ∈ [5, 50]
+ [2020-01-01 06:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 06:00:00] ∈ [5, 50]
+ [2020-01-01 07:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 07:00:00] ∈ [5, 50]
+ [2020-01-01 08:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 08:00:00] ∈ [5, 50]
+ "Boiler(Q_th)|total_flow_hours": |-
+ Variable
+ --------
+ Boiler(Q_th)|total_flow_hours ∈ [0, inf]
+ "Storage(Q_th_load)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Storage(Q_th_load)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: Storage(Q_th_load)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 02:00:00]: Storage(Q_th_load)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1000]
+ [2020-01-01 03:00:00]: Storage(Q_th_load)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1000]
+ [2020-01-01 04:00:00]: Storage(Q_th_load)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1000]
+ [2020-01-01 05:00:00]: Storage(Q_th_load)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1000]
+ [2020-01-01 06:00:00]: Storage(Q_th_load)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1000]
+ [2020-01-01 07:00:00]: Storage(Q_th_load)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1000]
+ [2020-01-01 08:00:00]: Storage(Q_th_load)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1000]
+ "Storage(Q_th_load)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Storage(Q_th_load)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Storage(Q_th_load)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Storage(Q_th_load)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Storage(Q_th_load)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Storage(Q_th_load)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Storage(Q_th_load)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Storage(Q_th_load)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Storage(Q_th_load)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Storage(Q_th_load)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Storage(Q_th_load)|on_hours_total": |-
+ Variable
+ --------
+ Storage(Q_th_load)|on_hours_total ∈ [0, inf]
+ "Storage(Q_th_load)|total_flow_hours": |-
+ Variable
+ --------
+ Storage(Q_th_load)|total_flow_hours ∈ [0, inf]
+ "Storage(Q_th_unload)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Storage(Q_th_unload)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: Storage(Q_th_unload)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 02:00:00]: Storage(Q_th_unload)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1000]
+ [2020-01-01 03:00:00]: Storage(Q_th_unload)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1000]
+ [2020-01-01 04:00:00]: Storage(Q_th_unload)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1000]
+ [2020-01-01 05:00:00]: Storage(Q_th_unload)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1000]
+ [2020-01-01 06:00:00]: Storage(Q_th_unload)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1000]
+ [2020-01-01 07:00:00]: Storage(Q_th_unload)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1000]
+ [2020-01-01 08:00:00]: Storage(Q_th_unload)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1000]
+ "Storage(Q_th_unload)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Storage(Q_th_unload)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Storage(Q_th_unload)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Storage(Q_th_unload)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Storage(Q_th_unload)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Storage(Q_th_unload)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Storage(Q_th_unload)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Storage(Q_th_unload)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Storage(Q_th_unload)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Storage(Q_th_unload)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Storage(Q_th_unload)|on_hours_total": |-
+ Variable
+ --------
+ Storage(Q_th_unload)|on_hours_total ∈ [0, inf]
+ "Storage(Q_th_unload)|total_flow_hours": |-
+ Variable
+ --------
+ Storage(Q_th_unload)|total_flow_hours ∈ [0, inf]
+ "Storage|charge_state": |-
+ Variable (time: 10)
+ -------------------
+ [2020-01-01 00:00:00]: Storage|charge_state[2020-01-01 00:00:00] ∈ [0, 8e+06]
+ [2020-01-01 01:00:00]: Storage|charge_state[2020-01-01 01:00:00] ∈ [0, 7e+06]
+ [2020-01-01 02:00:00]: Storage|charge_state[2020-01-01 02:00:00] ∈ [0, 8e+06]
+ [2020-01-01 03:00:00]: Storage|charge_state[2020-01-01 03:00:00] ∈ [0, 8e+06]
+ [2020-01-01 04:00:00]: Storage|charge_state[2020-01-01 04:00:00] ∈ [0, 8e+06]
+ [2020-01-01 05:00:00]: Storage|charge_state[2020-01-01 05:00:00] ∈ [0, 8e+06]
+ [2020-01-01 06:00:00]: Storage|charge_state[2020-01-01 06:00:00] ∈ [0, 8e+06]
+ [2020-01-01 07:00:00]: Storage|charge_state[2020-01-01 07:00:00] ∈ [0, 8e+06]
+ [2020-01-01 08:00:00]: Storage|charge_state[2020-01-01 08:00:00] ∈ [0, 8e+06]
+ [2020-01-01 09:00:00]: Storage|charge_state[2020-01-01 09:00:00] ∈ [0, 8e+06]
+ "Storage|netto_discharge": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Storage|netto_discharge[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Storage|netto_discharge[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Storage|netto_discharge[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Storage|netto_discharge[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Storage|netto_discharge[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Storage|netto_discharge[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Storage|netto_discharge[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Storage|netto_discharge[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Storage|netto_discharge[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Storage|size": |-
+ Variable
+ --------
+ Storage|size ∈ [30, 30]
+ "Storage->costs(periodic)": |-
+ Variable
+ --------
+ Storage->costs(periodic) ∈ [-inf, inf]
+ "CHP(Q_fu)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CHP(Q_fu)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: CHP(Q_fu)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: CHP(Q_fu)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00]: CHP(Q_fu)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00]: CHP(Q_fu)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+07]
+ [2020-01-01 05:00:00]: CHP(Q_fu)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+07]
+ [2020-01-01 06:00:00]: CHP(Q_fu)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00]: CHP(Q_fu)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00]: CHP(Q_fu)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+07]
+ "CHP(Q_fu)|total_flow_hours": |-
+ Variable
+ --------
+ CHP(Q_fu)|total_flow_hours ∈ [0, inf]
+ "CHP(Q_th)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CHP(Q_th)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: CHP(Q_th)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: CHP(Q_th)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00]: CHP(Q_th)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00]: CHP(Q_th)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+07]
+ [2020-01-01 05:00:00]: CHP(Q_th)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+07]
+ [2020-01-01 06:00:00]: CHP(Q_th)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00]: CHP(Q_th)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00]: CHP(Q_th)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+07]
+ "CHP(Q_th)|total_flow_hours": |-
+ Variable
+ --------
+ CHP(Q_th)|total_flow_hours ∈ [0, inf]
+ "CHP(P_el)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CHP(P_el)|flow_rate[2020-01-01 00:00:00] ∈ [5, 60]
+ [2020-01-01 01:00:00]: CHP(P_el)|flow_rate[2020-01-01 01:00:00] ∈ [5, 60]
+ [2020-01-01 02:00:00]: CHP(P_el)|flow_rate[2020-01-01 02:00:00] ∈ [5, 60]
+ [2020-01-01 03:00:00]: CHP(P_el)|flow_rate[2020-01-01 03:00:00] ∈ [5, 60]
+ [2020-01-01 04:00:00]: CHP(P_el)|flow_rate[2020-01-01 04:00:00] ∈ [5, 60]
+ [2020-01-01 05:00:00]: CHP(P_el)|flow_rate[2020-01-01 05:00:00] ∈ [5, 60]
+ [2020-01-01 06:00:00]: CHP(P_el)|flow_rate[2020-01-01 06:00:00] ∈ [5, 60]
+ [2020-01-01 07:00:00]: CHP(P_el)|flow_rate[2020-01-01 07:00:00] ∈ [5, 60]
+ [2020-01-01 08:00:00]: CHP(P_el)|flow_rate[2020-01-01 08:00:00] ∈ [5, 60]
+ "CHP(P_el)|total_flow_hours": |-
+ Variable
+ --------
+ CHP(P_el)|total_flow_hours ∈ [0, inf]
+ "Heat Demand(Q_th_Last)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Heat Demand(Q_th_Last)|flow_rate[2020-01-01 00:00:00] ∈ [30, 30]
+ [2020-01-01 01:00:00]: Heat Demand(Q_th_Last)|flow_rate[2020-01-01 01:00:00] ∈ [0, 0]
+ [2020-01-01 02:00:00]: Heat Demand(Q_th_Last)|flow_rate[2020-01-01 02:00:00] ∈ [90, 90]
+ [2020-01-01 03:00:00]: Heat Demand(Q_th_Last)|flow_rate[2020-01-01 03:00:00] ∈ [110, 110]
+ [2020-01-01 04:00:00]: Heat Demand(Q_th_Last)|flow_rate[2020-01-01 04:00:00] ∈ [110, 110]
+ [2020-01-01 05:00:00]: Heat Demand(Q_th_Last)|flow_rate[2020-01-01 05:00:00] ∈ [20, 20]
+ [2020-01-01 06:00:00]: Heat Demand(Q_th_Last)|flow_rate[2020-01-01 06:00:00] ∈ [20, 20]
+ [2020-01-01 07:00:00]: Heat Demand(Q_th_Last)|flow_rate[2020-01-01 07:00:00] ∈ [20, 20]
+ [2020-01-01 08:00:00]: Heat Demand(Q_th_Last)|flow_rate[2020-01-01 08:00:00] ∈ [20, 20]
+ "Heat Demand(Q_th_Last)|total_flow_hours": |-
+ Variable
+ --------
+ Heat Demand(Q_th_Last)|total_flow_hours ∈ [0, inf]
+ "Gastarif(Q_Gas)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 02:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1000]
+ [2020-01-01 03:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1000]
+ [2020-01-01 04:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1000]
+ [2020-01-01 05:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1000]
+ [2020-01-01 06:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1000]
+ [2020-01-01 07:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1000]
+ [2020-01-01 08:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1000]
+ "Gastarif(Q_Gas)|total_flow_hours": |-
+ Variable
+ --------
+ Gastarif(Q_Gas)|total_flow_hours ∈ [0, inf]
+ "Gastarif(Q_Gas)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Gastarif(Q_Gas)->CO2(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Einspeisung(P_el)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+07]
+ [2020-01-01 05:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+07]
+ [2020-01-01 06:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+07]
+ "Einspeisung(P_el)|total_flow_hours": |-
+ Variable
+ --------
+ Einspeisung(P_el)|total_flow_hours ∈ [0, inf]
+ "Einspeisung(P_el)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Strom|excess_input": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Strom|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Strom|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Strom|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Strom|excess_input[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Strom|excess_input[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Strom|excess_input[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Strom|excess_input[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Strom|excess_input[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Strom|excess_input[2020-01-01 08:00:00] ∈ [0, inf]
+ "Strom|excess_output": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Strom|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Strom|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Strom|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Strom|excess_output[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Strom|excess_output[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Strom|excess_output[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Strom|excess_output[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Strom|excess_output[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Strom|excess_output[2020-01-01 08:00:00] ∈ [0, inf]
+ "Strom->Penalty": |-
+ Variable
+ --------
+ Strom->Penalty ∈ [-inf, inf]
+ "Fernwärme|excess_input": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Fernwärme|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Fernwärme|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Fernwärme|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Fernwärme|excess_input[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Fernwärme|excess_input[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Fernwärme|excess_input[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Fernwärme|excess_input[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Fernwärme|excess_input[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Fernwärme|excess_input[2020-01-01 08:00:00] ∈ [0, inf]
+ "Fernwärme|excess_output": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Fernwärme|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Fernwärme|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Fernwärme|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Fernwärme|excess_output[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Fernwärme|excess_output[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Fernwärme|excess_output[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Fernwärme|excess_output[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Fernwärme|excess_output[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Fernwärme|excess_output[2020-01-01 08:00:00] ∈ [0, inf]
+ "Fernwärme->Penalty": |-
+ Variable
+ --------
+ Fernwärme->Penalty ∈ [-inf, inf]
+ "Gas|excess_input": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gas|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Gas|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Gas|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Gas|excess_input[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Gas|excess_input[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Gas|excess_input[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Gas|excess_input[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Gas|excess_input[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Gas|excess_input[2020-01-01 08:00:00] ∈ [0, inf]
+ "Gas|excess_output": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gas|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Gas|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Gas|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Gas|excess_output[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Gas|excess_output[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Gas|excess_output[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Gas|excess_output[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Gas|excess_output[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Gas|excess_output[2020-01-01 08:00:00] ∈ [0, inf]
+ "Gas->Penalty": |-
+ Variable
+ --------
+ Gas->Penalty ∈ [-inf, inf]
+constraints:
+ costs(periodic): |-
+ Constraint `costs(periodic)`
+ ----------------------------
+ +1 costs(periodic) - 1 Storage->costs(periodic) = -0.0
+ costs(temporal): |-
+ Constraint `costs(temporal)`
+ ----------------------------
+ +1 costs(temporal) - 1 costs(temporal)|per_timestep[2020-01-01 00:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 01:00:00]... -1 costs(temporal)|per_timestep[2020-01-01 06:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 07:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "costs(temporal)|per_timestep": |-
+ Constraint `costs(temporal)|per_timestep`
+ [time: 9]:
+ ----------------------------------------------------
+ [2020-01-01 00:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 00:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 01:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 02:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 02:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 03:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 03:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 03:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 04:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 04:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 04:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 05:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 05:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 05:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 06:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 06:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 07:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 08:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00] = -0.0
+ costs: |-
+ Constraint `costs`
+ ------------------
+ +1 costs - 1 costs(temporal) - 1 costs(periodic) = -0.0
+ CO2(periodic): |-
+ Constraint `CO2(periodic)`
+ --------------------------
+ +1 CO2(periodic) = -0.0
+ CO2(temporal): |-
+ Constraint `CO2(temporal)`
+ --------------------------
+ +1 CO2(temporal) - 1 CO2(temporal)|per_timestep[2020-01-01 00:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 01:00:00]... -1 CO2(temporal)|per_timestep[2020-01-01 06:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 07:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "CO2(temporal)|per_timestep": |-
+ Constraint `CO2(temporal)|per_timestep`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 01:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 02:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 03:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 04:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 05:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 06:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 07:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 08:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00] = -0.0
+ CO2: |-
+ Constraint `CO2`
+ ----------------
+ +1 CO2 - 1 CO2(temporal) - 1 CO2(periodic) = -0.0
+ Penalty: |-
+ Constraint `Penalty`
+ --------------------
+ +1 Penalty - 1 Strom->Penalty - 1 Fernwärme->Penalty - 1 Gas->Penalty = -0.0
+ "CO2(temporal)->costs(temporal)": |-
+ Constraint `CO2(temporal)->costs(temporal)`
+ [time: 9]:
+ ------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 02:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 03:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 04:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 05:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 06:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "Boiler(Q_fu)|total_flow_hours": |-
+ Constraint `Boiler(Q_fu)|total_flow_hours`
+ ------------------------------------------
+ +1 Boiler(Q_fu)|total_flow_hours - 1 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00]... -1 Boiler(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Boiler(Q_th)|total_flow_hours": |-
+ Constraint `Boiler(Q_th)|total_flow_hours`
+ ------------------------------------------
+ +1 Boiler(Q_th)|total_flow_hours - 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00]... -1 Boiler(Q_th)|flow_rate[2020-01-01 06:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Boiler|conversion_0": |-
+ Constraint `Boiler|conversion_0`
+ [time: 9]:
+ -------------------------------------------
+ [2020-01-01 00:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Storage(Q_th_load)|on_hours_total": |-
+ Constraint `Storage(Q_th_load)|on_hours_total`
+ ----------------------------------------------
+ +1 Storage(Q_th_load)|on_hours_total - 1 Storage(Q_th_load)|on[2020-01-01 00:00:00] - 1 Storage(Q_th_load)|on[2020-01-01 01:00:00]... -1 Storage(Q_th_load)|on[2020-01-01 06:00:00] - 1 Storage(Q_th_load)|on[2020-01-01 07:00:00] - 1 Storage(Q_th_load)|on[2020-01-01 08:00:00] = -0.0
+ "Storage(Q_th_load)|flow_rate|ub": |-
+ Constraint `Storage(Q_th_load)|flow_rate|ub`
+ [time: 9]:
+ -------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1000 Storage(Q_th_load)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 01:00:00] - 1000 Storage(Q_th_load)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 02:00:00] - 1000 Storage(Q_th_load)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 03:00:00] - 1000 Storage(Q_th_load)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 04:00:00] - 1000 Storage(Q_th_load)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 05:00:00] - 1000 Storage(Q_th_load)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 06:00:00] - 1000 Storage(Q_th_load)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 07:00:00] - 1000 Storage(Q_th_load)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 08:00:00] - 1000 Storage(Q_th_load)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Storage(Q_th_load)|flow_rate|lb": |-
+ Constraint `Storage(Q_th_load)|flow_rate|lb`
+ [time: 9]:
+ -------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1e-05 Storage(Q_th_load)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 01:00:00] - 1e-05 Storage(Q_th_load)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 02:00:00] - 1e-05 Storage(Q_th_load)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 03:00:00] - 1e-05 Storage(Q_th_load)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 04:00:00] - 1e-05 Storage(Q_th_load)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 05:00:00] - 1e-05 Storage(Q_th_load)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 06:00:00] - 1e-05 Storage(Q_th_load)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 07:00:00] - 1e-05 Storage(Q_th_load)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Storage(Q_th_load)|flow_rate[2020-01-01 08:00:00] - 1e-05 Storage(Q_th_load)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Storage(Q_th_load)|total_flow_hours": |-
+ Constraint `Storage(Q_th_load)|total_flow_hours`
+ ------------------------------------------------
+ +1 Storage(Q_th_load)|total_flow_hours - 1 Storage(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1 Storage(Q_th_load)|flow_rate[2020-01-01 01:00:00]... -1 Storage(Q_th_load)|flow_rate[2020-01-01 06:00:00] - 1 Storage(Q_th_load)|flow_rate[2020-01-01 07:00:00] - 1 Storage(Q_th_load)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Storage(Q_th_unload)|on_hours_total": |-
+ Constraint `Storage(Q_th_unload)|on_hours_total`
+ ------------------------------------------------
+ +1 Storage(Q_th_unload)|on_hours_total - 1 Storage(Q_th_unload)|on[2020-01-01 00:00:00] - 1 Storage(Q_th_unload)|on[2020-01-01 01:00:00]... -1 Storage(Q_th_unload)|on[2020-01-01 06:00:00] - 1 Storage(Q_th_unload)|on[2020-01-01 07:00:00] - 1 Storage(Q_th_unload)|on[2020-01-01 08:00:00] = -0.0
+ "Storage(Q_th_unload)|flow_rate|ub": |-
+ Constraint `Storage(Q_th_unload)|flow_rate|ub`
+ [time: 9]:
+ ---------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1000 Storage(Q_th_unload)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 01:00:00] - 1000 Storage(Q_th_unload)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 02:00:00] - 1000 Storage(Q_th_unload)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 03:00:00] - 1000 Storage(Q_th_unload)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 04:00:00] - 1000 Storage(Q_th_unload)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 05:00:00] - 1000 Storage(Q_th_unload)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 06:00:00] - 1000 Storage(Q_th_unload)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 07:00:00] - 1000 Storage(Q_th_unload)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 08:00:00] - 1000 Storage(Q_th_unload)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Storage(Q_th_unload)|flow_rate|lb": |-
+ Constraint `Storage(Q_th_unload)|flow_rate|lb`
+ [time: 9]:
+ ---------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1e-05 Storage(Q_th_unload)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 01:00:00] - 1e-05 Storage(Q_th_unload)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 02:00:00] - 1e-05 Storage(Q_th_unload)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 03:00:00] - 1e-05 Storage(Q_th_unload)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 04:00:00] - 1e-05 Storage(Q_th_unload)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 05:00:00] - 1e-05 Storage(Q_th_unload)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 06:00:00] - 1e-05 Storage(Q_th_unload)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 07:00:00] - 1e-05 Storage(Q_th_unload)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Storage(Q_th_unload)|flow_rate[2020-01-01 08:00:00] - 1e-05 Storage(Q_th_unload)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Storage(Q_th_unload)|total_flow_hours": |-
+ Constraint `Storage(Q_th_unload)|total_flow_hours`
+ --------------------------------------------------
+ +1 Storage(Q_th_unload)|total_flow_hours - 1 Storage(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1 Storage(Q_th_unload)|flow_rate[2020-01-01 01:00:00]... -1 Storage(Q_th_unload)|flow_rate[2020-01-01 06:00:00] - 1 Storage(Q_th_unload)|flow_rate[2020-01-01 07:00:00] - 1 Storage(Q_th_unload)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Storage|prevent_simultaneous_use": |-
+ Constraint `Storage|prevent_simultaneous_use`
+ [time: 9]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Storage(Q_th_load)|on[2020-01-01 00:00:00] + 1 Storage(Q_th_unload)|on[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 01:00:00]: +1 Storage(Q_th_load)|on[2020-01-01 01:00:00] + 1 Storage(Q_th_unload)|on[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 Storage(Q_th_load)|on[2020-01-01 02:00:00] + 1 Storage(Q_th_unload)|on[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 Storage(Q_th_load)|on[2020-01-01 03:00:00] + 1 Storage(Q_th_unload)|on[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 Storage(Q_th_load)|on[2020-01-01 04:00:00] + 1 Storage(Q_th_unload)|on[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 Storage(Q_th_load)|on[2020-01-01 05:00:00] + 1 Storage(Q_th_unload)|on[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 Storage(Q_th_load)|on[2020-01-01 06:00:00] + 1 Storage(Q_th_unload)|on[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 Storage(Q_th_load)|on[2020-01-01 07:00:00] + 1 Storage(Q_th_unload)|on[2020-01-01 07:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 Storage(Q_th_load)|on[2020-01-01 08:00:00] + 1 Storage(Q_th_unload)|on[2020-01-01 08:00:00] ≤ 1.0
+ "Storage|netto_discharge": |-
+ Constraint `Storage|netto_discharge`
+ [time: 9]:
+ -----------------------------------------------
+ [2020-01-01 00:00:00]: +1 Storage|netto_discharge[2020-01-01 00:00:00] - 1 Storage(Q_th_unload)|flow_rate[2020-01-01 00:00:00] + 1 Storage(Q_th_load)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Storage|netto_discharge[2020-01-01 01:00:00] - 1 Storage(Q_th_unload)|flow_rate[2020-01-01 01:00:00] + 1 Storage(Q_th_load)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Storage|netto_discharge[2020-01-01 02:00:00] - 1 Storage(Q_th_unload)|flow_rate[2020-01-01 02:00:00] + 1 Storage(Q_th_load)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Storage|netto_discharge[2020-01-01 03:00:00] - 1 Storage(Q_th_unload)|flow_rate[2020-01-01 03:00:00] + 1 Storage(Q_th_load)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Storage|netto_discharge[2020-01-01 04:00:00] - 1 Storage(Q_th_unload)|flow_rate[2020-01-01 04:00:00] + 1 Storage(Q_th_load)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Storage|netto_discharge[2020-01-01 05:00:00] - 1 Storage(Q_th_unload)|flow_rate[2020-01-01 05:00:00] + 1 Storage(Q_th_load)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Storage|netto_discharge[2020-01-01 06:00:00] - 1 Storage(Q_th_unload)|flow_rate[2020-01-01 06:00:00] + 1 Storage(Q_th_load)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Storage|netto_discharge[2020-01-01 07:00:00] - 1 Storage(Q_th_unload)|flow_rate[2020-01-01 07:00:00] + 1 Storage(Q_th_load)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Storage|netto_discharge[2020-01-01 08:00:00] - 1 Storage(Q_th_unload)|flow_rate[2020-01-01 08:00:00] + 1 Storage(Q_th_load)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Storage|charge_state": |-
+ Constraint `Storage|charge_state`
+ [time: 9]:
+ --------------------------------------------
+ [2020-01-01 01:00:00]: +1 Storage|charge_state[2020-01-01 01:00:00] - 0.92 Storage|charge_state[2020-01-01 00:00:00] - 0.9 Storage(Q_th_load)|flow_rate[2020-01-01 00:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Storage|charge_state[2020-01-01 02:00:00] - 0.92 Storage|charge_state[2020-01-01 01:00:00] - 0.9 Storage(Q_th_load)|flow_rate[2020-01-01 01:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Storage|charge_state[2020-01-01 03:00:00] - 0.92 Storage|charge_state[2020-01-01 02:00:00] - 0.9 Storage(Q_th_load)|flow_rate[2020-01-01 02:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Storage|charge_state[2020-01-01 04:00:00] - 0.92 Storage|charge_state[2020-01-01 03:00:00] - 0.9 Storage(Q_th_load)|flow_rate[2020-01-01 03:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Storage|charge_state[2020-01-01 05:00:00] - 0.92 Storage|charge_state[2020-01-01 04:00:00] - 0.9 Storage(Q_th_load)|flow_rate[2020-01-01 04:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Storage|charge_state[2020-01-01 06:00:00] - 0.92 Storage|charge_state[2020-01-01 05:00:00] - 0.9 Storage(Q_th_load)|flow_rate[2020-01-01 05:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Storage|charge_state[2020-01-01 07:00:00] - 0.92 Storage|charge_state[2020-01-01 06:00:00] - 0.9 Storage(Q_th_load)|flow_rate[2020-01-01 06:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Storage|charge_state[2020-01-01 08:00:00] - 0.92 Storage|charge_state[2020-01-01 07:00:00] - 0.9 Storage(Q_th_load)|flow_rate[2020-01-01 07:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 09:00:00]: +1 Storage|charge_state[2020-01-01 09:00:00] - 0.92 Storage|charge_state[2020-01-01 08:00:00] - 0.9 Storage(Q_th_load)|flow_rate[2020-01-01 08:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Storage->costs(periodic)": |-
+ Constraint `Storage->costs(periodic)`
+ -------------------------------------
+ +1 Storage->costs(periodic) = 20.0
+ "Storage|charge_state|ub": |-
+ Constraint `Storage|charge_state|ub`
+ [time: 10]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Storage|charge_state[2020-01-01 00:00:00] - 0.8 Storage|size ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Storage|charge_state[2020-01-01 01:00:00] - 0.7 Storage|size ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Storage|charge_state[2020-01-01 02:00:00] - 0.8 Storage|size ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Storage|charge_state[2020-01-01 03:00:00] - 0.8 Storage|size ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Storage|charge_state[2020-01-01 04:00:00] - 0.8 Storage|size ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Storage|charge_state[2020-01-01 05:00:00] - 0.8 Storage|size ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Storage|charge_state[2020-01-01 06:00:00] - 0.8 Storage|size ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Storage|charge_state[2020-01-01 07:00:00] - 0.8 Storage|size ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Storage|charge_state[2020-01-01 08:00:00] - 0.8 Storage|size ≤ -0.0
+ [2020-01-01 09:00:00]: +1 Storage|charge_state[2020-01-01 09:00:00] - 0.8 Storage|size ≤ -0.0
+ "Storage|charge_state|lb": |-
+ Constraint `Storage|charge_state|lb`
+ [time: 10]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Storage|charge_state[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Storage|charge_state[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Storage|charge_state[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Storage|charge_state[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Storage|charge_state[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Storage|charge_state[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Storage|charge_state[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Storage|charge_state[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Storage|charge_state[2020-01-01 08:00:00] ≥ -0.0
+ [2020-01-01 09:00:00]: +1 Storage|charge_state[2020-01-01 09:00:00] ≥ -0.0
+ "Storage|initial_charge_state": |-
+ Constraint `Storage|initial_charge_state`
+ -----------------------------------------
+ +1 Storage|charge_state[2020-01-01 00:00:00] = -0.0
+ "CHP(Q_fu)|total_flow_hours": |-
+ Constraint `CHP(Q_fu)|total_flow_hours`
+ ---------------------------------------
+ +1 CHP(Q_fu)|total_flow_hours - 1 CHP(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 CHP(Q_fu)|flow_rate[2020-01-01 01:00:00]... -1 CHP(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 CHP(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 CHP(Q_fu)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "CHP(Q_th)|total_flow_hours": |-
+ Constraint `CHP(Q_th)|total_flow_hours`
+ ---------------------------------------
+ +1 CHP(Q_th)|total_flow_hours - 1 CHP(Q_th)|flow_rate[2020-01-01 00:00:00] - 1 CHP(Q_th)|flow_rate[2020-01-01 01:00:00]... -1 CHP(Q_th)|flow_rate[2020-01-01 06:00:00] - 1 CHP(Q_th)|flow_rate[2020-01-01 07:00:00] - 1 CHP(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "CHP(P_el)|total_flow_hours": |-
+ Constraint `CHP(P_el)|total_flow_hours`
+ ---------------------------------------
+ +1 CHP(P_el)|total_flow_hours - 1 CHP(P_el)|flow_rate[2020-01-01 00:00:00] - 1 CHP(P_el)|flow_rate[2020-01-01 01:00:00]... -1 CHP(P_el)|flow_rate[2020-01-01 06:00:00] - 1 CHP(P_el)|flow_rate[2020-01-01 07:00:00] - 1 CHP(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "CHP|conversion_0": |-
+ Constraint `CHP|conversion_0`
+ [time: 9]:
+ ----------------------------------------
+ [2020-01-01 00:00:00]: +0.5 CHP(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 CHP(Q_th)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +0.5 CHP(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 CHP(Q_th)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +0.5 CHP(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 CHP(Q_th)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +0.5 CHP(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 CHP(Q_th)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +0.5 CHP(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 CHP(Q_th)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +0.5 CHP(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 CHP(Q_th)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +0.5 CHP(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 CHP(Q_th)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +0.5 CHP(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 CHP(Q_th)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +0.5 CHP(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 CHP(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "CHP|conversion_1": |-
+ Constraint `CHP|conversion_1`
+ [time: 9]:
+ ----------------------------------------
+ [2020-01-01 00:00:00]: +0.4 CHP(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 CHP(P_el)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +0.4 CHP(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 CHP(P_el)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +0.4 CHP(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 CHP(P_el)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +0.4 CHP(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 CHP(P_el)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +0.4 CHP(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 CHP(P_el)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +0.4 CHP(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 CHP(P_el)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +0.4 CHP(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 CHP(P_el)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +0.4 CHP(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 CHP(P_el)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +0.4 CHP(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 CHP(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Heat Demand(Q_th_Last)|total_flow_hours": |-
+ Constraint `Heat Demand(Q_th_Last)|total_flow_hours`
+ ----------------------------------------------------
+ +1 Heat Demand(Q_th_Last)|total_flow_hours - 1 Heat Demand(Q_th_Last)|flow_rate[2020-01-01 00:00:00] - 1 Heat Demand(Q_th_Last)|flow_rate[2020-01-01 01:00:00]... -1 Heat Demand(Q_th_Last)|flow_rate[2020-01-01 06:00:00] - 1 Heat Demand(Q_th_Last)|flow_rate[2020-01-01 07:00:00] - 1 Heat Demand(Q_th_Last)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Gastarif(Q_Gas)|total_flow_hours": |-
+ Constraint `Gastarif(Q_Gas)|total_flow_hours`
+ ---------------------------------------------
+ +1 Gastarif(Q_Gas)|total_flow_hours - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00]... -1 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Gastarif(Q_Gas)->costs(temporal)": |-
+ Constraint `Gastarif(Q_Gas)->costs(temporal)`
+ [time: 9]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 03:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 04:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 05:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Gastarif(Q_Gas)->CO2(temporal)": |-
+ Constraint `Gastarif(Q_Gas)->CO2(temporal)`
+ [time: 9]:
+ ------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 03:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 04:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 05:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Einspeisung(P_el)|total_flow_hours": |-
+ Constraint `Einspeisung(P_el)|total_flow_hours`
+ -----------------------------------------------
+ +1 Einspeisung(P_el)|total_flow_hours - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00]... -1 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Einspeisung(P_el)->costs(temporal)": |-
+ Constraint `Einspeisung(P_el)->costs(temporal)`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 03:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 04:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 05:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Strom|balance": |-
+ Constraint `Strom|balance`
+ [time: 9]:
+ -------------------------------------
+ [2020-01-01 00:00:00]: +1 CHP(P_el)|flow_rate[2020-01-01 00:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] + 1 Strom|excess_input[2020-01-01 00:00:00] - 1 Strom|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 CHP(P_el)|flow_rate[2020-01-01 01:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] + 1 Strom|excess_input[2020-01-01 01:00:00] - 1 Strom|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 CHP(P_el)|flow_rate[2020-01-01 02:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00] + 1 Strom|excess_input[2020-01-01 02:00:00] - 1 Strom|excess_output[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 CHP(P_el)|flow_rate[2020-01-01 03:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 03:00:00] + 1 Strom|excess_input[2020-01-01 03:00:00] - 1 Strom|excess_output[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 CHP(P_el)|flow_rate[2020-01-01 04:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 04:00:00] + 1 Strom|excess_input[2020-01-01 04:00:00] - 1 Strom|excess_output[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 CHP(P_el)|flow_rate[2020-01-01 05:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 05:00:00] + 1 Strom|excess_input[2020-01-01 05:00:00] - 1 Strom|excess_output[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 CHP(P_el)|flow_rate[2020-01-01 06:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] + 1 Strom|excess_input[2020-01-01 06:00:00] - 1 Strom|excess_output[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 CHP(P_el)|flow_rate[2020-01-01 07:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] + 1 Strom|excess_input[2020-01-01 07:00:00] - 1 Strom|excess_output[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 CHP(P_el)|flow_rate[2020-01-01 08:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] + 1 Strom|excess_input[2020-01-01 08:00:00] - 1 Strom|excess_output[2020-01-01 08:00:00] = -0.0
+ "Strom->Penalty": |-
+ Constraint `Strom->Penalty`
+ ---------------------------
+ +1 Strom->Penalty - 1e+05 Strom|excess_input[2020-01-01 00:00:00] - 1e+05 Strom|excess_input[2020-01-01 01:00:00]... -1e+05 Strom|excess_output[2020-01-01 06:00:00] - 1e+05 Strom|excess_output[2020-01-01 07:00:00] - 1e+05 Strom|excess_output[2020-01-01 08:00:00] = -0.0
+ "Fernwärme|balance": |-
+ Constraint `Fernwärme|balance`
+ [time: 9]:
+ -----------------------------------------
+ [2020-01-01 00:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 00:00:00] + 1 CHP(Q_th)|flow_rate[2020-01-01 00:00:00]... -1 Heat Demand(Q_th_Last)|flow_rate[2020-01-01 00:00:00] + 1 Fernwärme|excess_input[2020-01-01 00:00:00] - 1 Fernwärme|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 01:00:00] + 1 CHP(Q_th)|flow_rate[2020-01-01 01:00:00]... -1 Heat Demand(Q_th_Last)|flow_rate[2020-01-01 01:00:00] + 1 Fernwärme|excess_input[2020-01-01 01:00:00] - 1 Fernwärme|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 02:00:00] + 1 CHP(Q_th)|flow_rate[2020-01-01 02:00:00]... -1 Heat Demand(Q_th_Last)|flow_rate[2020-01-01 02:00:00] + 1 Fernwärme|excess_input[2020-01-01 02:00:00] - 1 Fernwärme|excess_output[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 03:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 03:00:00] + 1 CHP(Q_th)|flow_rate[2020-01-01 03:00:00]... -1 Heat Demand(Q_th_Last)|flow_rate[2020-01-01 03:00:00] + 1 Fernwärme|excess_input[2020-01-01 03:00:00] - 1 Fernwärme|excess_output[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 04:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 04:00:00] + 1 CHP(Q_th)|flow_rate[2020-01-01 04:00:00]... -1 Heat Demand(Q_th_Last)|flow_rate[2020-01-01 04:00:00] + 1 Fernwärme|excess_input[2020-01-01 04:00:00] - 1 Fernwärme|excess_output[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 05:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 05:00:00] + 1 CHP(Q_th)|flow_rate[2020-01-01 05:00:00]... -1 Heat Demand(Q_th_Last)|flow_rate[2020-01-01 05:00:00] + 1 Fernwärme|excess_input[2020-01-01 05:00:00] - 1 Fernwärme|excess_output[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 06:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 06:00:00] + 1 CHP(Q_th)|flow_rate[2020-01-01 06:00:00]... -1 Heat Demand(Q_th_Last)|flow_rate[2020-01-01 06:00:00] + 1 Fernwärme|excess_input[2020-01-01 06:00:00] - 1 Fernwärme|excess_output[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 07:00:00] + 1 CHP(Q_th)|flow_rate[2020-01-01 07:00:00]... -1 Heat Demand(Q_th_Last)|flow_rate[2020-01-01 07:00:00] + 1 Fernwärme|excess_input[2020-01-01 07:00:00] - 1 Fernwärme|excess_output[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00] + 1 Storage(Q_th_unload)|flow_rate[2020-01-01 08:00:00] + 1 CHP(Q_th)|flow_rate[2020-01-01 08:00:00]... -1 Heat Demand(Q_th_Last)|flow_rate[2020-01-01 08:00:00] + 1 Fernwärme|excess_input[2020-01-01 08:00:00] - 1 Fernwärme|excess_output[2020-01-01 08:00:00] = -0.0
+ "Fernwärme->Penalty": |-
+ Constraint `Fernwärme->Penalty`
+ -------------------------------
+ +1 Fernwärme->Penalty - 1e+05 Fernwärme|excess_input[2020-01-01 00:00:00] - 1e+05 Fernwärme|excess_input[2020-01-01 01:00:00]... -1e+05 Fernwärme|excess_output[2020-01-01 06:00:00] - 1e+05 Fernwärme|excess_output[2020-01-01 07:00:00] - 1e+05 Fernwärme|excess_output[2020-01-01 08:00:00] = -0.0
+ "Gas|balance": |-
+ Constraint `Gas|balance`
+ [time: 9]:
+ -----------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 CHP(Q_fu)|flow_rate[2020-01-01 00:00:00] + 1 Gas|excess_input[2020-01-01 00:00:00] - 1 Gas|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 CHP(Q_fu)|flow_rate[2020-01-01 01:00:00] + 1 Gas|excess_input[2020-01-01 01:00:00] - 1 Gas|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 CHP(Q_fu)|flow_rate[2020-01-01 02:00:00] + 1 Gas|excess_input[2020-01-01 02:00:00] - 1 Gas|excess_output[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 CHP(Q_fu)|flow_rate[2020-01-01 03:00:00] + 1 Gas|excess_input[2020-01-01 03:00:00] - 1 Gas|excess_output[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 CHP(Q_fu)|flow_rate[2020-01-01 04:00:00] + 1 Gas|excess_input[2020-01-01 04:00:00] - 1 Gas|excess_output[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 CHP(Q_fu)|flow_rate[2020-01-01 05:00:00] + 1 Gas|excess_input[2020-01-01 05:00:00] - 1 Gas|excess_output[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 CHP(Q_fu)|flow_rate[2020-01-01 06:00:00] + 1 Gas|excess_input[2020-01-01 06:00:00] - 1 Gas|excess_output[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 CHP(Q_fu)|flow_rate[2020-01-01 07:00:00] + 1 Gas|excess_input[2020-01-01 07:00:00] - 1 Gas|excess_output[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 CHP(Q_fu)|flow_rate[2020-01-01 08:00:00] + 1 Gas|excess_input[2020-01-01 08:00:00] - 1 Gas|excess_output[2020-01-01 08:00:00] = -0.0
+ "Gas->Penalty": |-
+ Constraint `Gas->Penalty`
+ -------------------------
+ +1 Gas->Penalty - 1e+05 Gas|excess_input[2020-01-01 00:00:00] - 1e+05 Gas|excess_input[2020-01-01 01:00:00]... -1e+05 Gas|excess_output[2020-01-01 06:00:00] - 1e+05 Gas|excess_output[2020-01-01 07:00:00] - 1e+05 Gas|excess_output[2020-01-01 08:00:00] = -0.0
+binaries:
+ - "Storage(Q_th_load)|on"
+ - "Storage(Q_th_unload)|on"
+integers: []
+continuous:
+ - costs(periodic)
+ - costs(temporal)
+ - "costs(temporal)|per_timestep"
+ - costs
+ - CO2(periodic)
+ - CO2(temporal)
+ - "CO2(temporal)|per_timestep"
+ - CO2
+ - Penalty
+ - "CO2(temporal)->costs(temporal)"
+ - "Boiler(Q_fu)|flow_rate"
+ - "Boiler(Q_fu)|total_flow_hours"
+ - "Boiler(Q_th)|flow_rate"
+ - "Boiler(Q_th)|total_flow_hours"
+ - "Storage(Q_th_load)|flow_rate"
+ - "Storage(Q_th_load)|on_hours_total"
+ - "Storage(Q_th_load)|total_flow_hours"
+ - "Storage(Q_th_unload)|flow_rate"
+ - "Storage(Q_th_unload)|on_hours_total"
+ - "Storage(Q_th_unload)|total_flow_hours"
+ - "Storage|charge_state"
+ - "Storage|netto_discharge"
+ - "Storage|size"
+ - "Storage->costs(periodic)"
+ - "CHP(Q_fu)|flow_rate"
+ - "CHP(Q_fu)|total_flow_hours"
+ - "CHP(Q_th)|flow_rate"
+ - "CHP(Q_th)|total_flow_hours"
+ - "CHP(P_el)|flow_rate"
+ - "CHP(P_el)|total_flow_hours"
+ - "Heat Demand(Q_th_Last)|flow_rate"
+ - "Heat Demand(Q_th_Last)|total_flow_hours"
+ - "Gastarif(Q_Gas)|flow_rate"
+ - "Gastarif(Q_Gas)|total_flow_hours"
+ - "Gastarif(Q_Gas)->costs(temporal)"
+ - "Gastarif(Q_Gas)->CO2(temporal)"
+ - "Einspeisung(P_el)|flow_rate"
+ - "Einspeisung(P_el)|total_flow_hours"
+ - "Einspeisung(P_el)->costs(temporal)"
+ - "Strom|excess_input"
+ - "Strom|excess_output"
+ - "Strom->Penalty"
+ - "Fernwärme|excess_input"
+ - "Fernwärme|excess_output"
+ - "Fernwärme->Penalty"
+ - "Gas|excess_input"
+ - "Gas|excess_output"
+ - "Gas->Penalty"
+infeasible_constraints: ''
diff --git a/tests/ressources/v4-api/01_simple--solution.nc4 b/tests/ressources/v4-api/01_simple--solution.nc4
new file mode 100644
index 000000000..4af34e23d
Binary files /dev/null and b/tests/ressources/v4-api/01_simple--solution.nc4 differ
diff --git a/tests/ressources/v4-api/01_simple--summary.yaml b/tests/ressources/v4-api/01_simple--summary.yaml
new file mode 100644
index 000000000..87984de57
--- /dev/null
+++ b/tests/ressources/v4-api/01_simple--summary.yaml
@@ -0,0 +1,51 @@
+Name: 01_simple
+Number of timesteps: 9
+Calculation Type: FullCalculation
+Constraints: 215
+Variables: 259
+Main Results:
+ Objective: 83.88
+ Penalty: 0.0
+ Effects:
+ CO2 [kg]:
+ temporal: 255.33
+ periodic: -0.0
+ total: 255.33
+ costs [€]:
+ temporal: 63.88
+ periodic: 20.0
+ total: 83.88
+ Invest-Decisions:
+ Invested:
+ Storage: 30.0
+ Not invested: {}
+ Buses with excess: []
+Durations:
+ modeling: 0.65
+ solving: 0.38
+ saving: 0.0
+Config:
+ config_name: flixopt
+ logging:
+ level: INFO
+ file: null
+ console: false
+ max_file_size: 10485760
+ backup_count: 5
+ verbose_tracebacks: false
+ modeling:
+ big: 10000000
+ epsilon: 1.0e-05
+ big_binary_bound: 100000
+ solving:
+ mip_gap: 0.01
+ time_limit_seconds: 300
+ log_to_console: false
+ log_main_results: false
+ plotting:
+ default_show: false
+ default_engine: plotly
+ default_dpi: 300
+ default_facet_cols: 3
+ default_sequential_colorscale: turbo
+ default_qualitative_colorscale: plotly
diff --git a/tests/ressources/v4-api/02_complex--flow_system.nc4 b/tests/ressources/v4-api/02_complex--flow_system.nc4
new file mode 100644
index 000000000..107f10a79
Binary files /dev/null and b/tests/ressources/v4-api/02_complex--flow_system.nc4 differ
diff --git a/tests/ressources/v4-api/02_complex--model_documentation.yaml b/tests/ressources/v4-api/02_complex--model_documentation.yaml
new file mode 100644
index 000000000..d77ed31f6
--- /dev/null
+++ b/tests/ressources/v4-api/02_complex--model_documentation.yaml
@@ -0,0 +1,1905 @@
+objective: |-
+ Objective:
+ ----------
+ LinearExpression: +1 costs + 1 Penalty
+ Sense: min
+ Value: -10711.526565761338
+termination_condition: optimal
+status: ok
+nvars: 507
+nvarsbin: 146
+nvarscont: 361
+ncons: 589
+variables:
+ costs(periodic): |-
+ Variable
+ --------
+ costs(periodic) ∈ [-inf, inf]
+ costs(temporal): |-
+ Variable
+ --------
+ costs(temporal) ∈ [-inf, inf]
+ "costs(temporal)|per_timestep": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: costs(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: costs(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: costs(temporal)|per_timestep[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: costs(temporal)|per_timestep[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: costs(temporal)|per_timestep[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: costs(temporal)|per_timestep[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: costs(temporal)|per_timestep[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: costs(temporal)|per_timestep[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: costs(temporal)|per_timestep[2020-01-01 08:00:00] ∈ [-inf, inf]
+ costs: |-
+ Variable
+ --------
+ costs ∈ [-inf, inf]
+ CO2(periodic): |-
+ Variable
+ --------
+ CO2(periodic) ∈ [-inf, inf]
+ CO2(temporal): |-
+ Variable
+ --------
+ CO2(temporal) ∈ [-inf, inf]
+ "CO2(temporal)|per_timestep": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CO2(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: CO2(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: CO2(temporal)|per_timestep[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: CO2(temporal)|per_timestep[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: CO2(temporal)|per_timestep[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: CO2(temporal)|per_timestep[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: CO2(temporal)|per_timestep[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: CO2(temporal)|per_timestep[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: CO2(temporal)|per_timestep[2020-01-01 08:00:00] ∈ [-inf, inf]
+ CO2: |-
+ Variable
+ --------
+ CO2 ∈ [-inf, inf]
+ PE(periodic): |-
+ Variable
+ --------
+ PE(periodic) ∈ [-inf, inf]
+ PE(temporal): |-
+ Variable
+ --------
+ PE(temporal) ∈ [-inf, inf]
+ "PE(temporal)|per_timestep": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: PE(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: PE(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: PE(temporal)|per_timestep[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: PE(temporal)|per_timestep[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: PE(temporal)|per_timestep[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: PE(temporal)|per_timestep[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: PE(temporal)|per_timestep[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: PE(temporal)|per_timestep[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: PE(temporal)|per_timestep[2020-01-01 08:00:00] ∈ [-inf, inf]
+ PE: |-
+ Variable
+ --------
+ PE ∈ [-inf, 3500]
+ Penalty: |-
+ Variable
+ --------
+ Penalty ∈ [-inf, inf]
+ "CO2(temporal)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Kessel(Q_fu)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] ∈ [0, 200]
+ [2020-01-01 01:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] ∈ [0, 200]
+ [2020-01-01 02:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 02:00:00] ∈ [0, 200]
+ [2020-01-01 03:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 03:00:00] ∈ [0, 200]
+ [2020-01-01 04:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 04:00:00] ∈ [0, 200]
+ [2020-01-01 05:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 05:00:00] ∈ [0, 200]
+ [2020-01-01 06:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] ∈ [0, 200]
+ [2020-01-01 07:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] ∈ [0, 200]
+ [2020-01-01 08:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] ∈ [0, 200]
+ "Kessel(Q_fu)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_fu)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_fu)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel(Q_fu)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel(Q_fu)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel(Q_fu)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel(Q_fu)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel(Q_fu)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel(Q_fu)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel(Q_fu)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel(Q_fu)|on_hours_total": |-
+ Variable
+ --------
+ Kessel(Q_fu)|on_hours_total ∈ [0, inf]
+ "Kessel(Q_fu)|total_flow_hours": |-
+ Variable
+ --------
+ Kessel(Q_fu)|total_flow_hours ∈ [0, inf]
+ "Kessel(Q_th)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] ∈ [0, 50]
+ [2020-01-01 01:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] ∈ [0, 50]
+ [2020-01-01 02:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] ∈ [0, 50]
+ [2020-01-01 03:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] ∈ [0, 50]
+ [2020-01-01 04:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] ∈ [0, 50]
+ [2020-01-01 05:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] ∈ [0, 50]
+ [2020-01-01 06:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] ∈ [0, 50]
+ [2020-01-01 07:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] ∈ [0, 50]
+ [2020-01-01 08:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] ∈ [0, 50]
+ "Kessel(Q_th)|size": |-
+ Variable
+ --------
+ Kessel(Q_th)|size ∈ [50, 50]
+ "Kessel(Q_th)->costs(periodic)": |-
+ Variable
+ --------
+ Kessel(Q_th)->costs(periodic) ∈ [-inf, inf]
+ "Kessel(Q_th)->PE(periodic)": |-
+ Variable
+ --------
+ Kessel(Q_th)->PE(periodic) ∈ [-inf, inf]
+ "Kessel(Q_th)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_th)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel(Q_th)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel(Q_th)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel(Q_th)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel(Q_th)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel(Q_th)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel(Q_th)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel(Q_th)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel(Q_th)|off": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|off[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_th)|off[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel(Q_th)|off[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel(Q_th)|off[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel(Q_th)|off[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel(Q_th)|off[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel(Q_th)|off[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel(Q_th)|off[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel(Q_th)|off[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel(Q_th)|on_hours_total": |-
+ Variable
+ --------
+ Kessel(Q_th)|on_hours_total ∈ [0, 1000]
+ "Kessel(Q_th)|switch|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|switch|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_th)|switch|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel(Q_th)|switch|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel(Q_th)|switch|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel(Q_th)|switch|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel(Q_th)|switch|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel(Q_th)|switch|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel(Q_th)|switch|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel(Q_th)|switch|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel(Q_th)|switch|off": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|switch|off[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_th)|switch|off[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel(Q_th)|switch|off[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel(Q_th)|switch|off[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel(Q_th)|switch|off[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel(Q_th)|switch|off[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel(Q_th)|switch|off[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel(Q_th)|switch|off[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel(Q_th)|switch|off[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel(Q_th)|switch|count": |-
+ Variable
+ --------
+ Kessel(Q_th)|switch|count ∈ [0, 1000]
+ "Kessel(Q_th)|consecutive_on_hours": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] ∈ [0, 10]
+ [2020-01-01 01:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] ∈ [0, 10]
+ [2020-01-01 02:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] ∈ [0, 10]
+ [2020-01-01 03:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] ∈ [0, 10]
+ [2020-01-01 04:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] ∈ [0, 10]
+ [2020-01-01 05:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] ∈ [0, 10]
+ [2020-01-01 06:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] ∈ [0, 10]
+ [2020-01-01 07:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] ∈ [0, 10]
+ [2020-01-01 08:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 08:00:00] ∈ [0, 10]
+ "Kessel(Q_th)|consecutive_off_hours": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 00:00:00] ∈ [0, 10]
+ [2020-01-01 01:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] ∈ [0, 10]
+ [2020-01-01 02:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] ∈ [0, 10]
+ [2020-01-01 03:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] ∈ [0, 10]
+ [2020-01-01 04:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] ∈ [0, 10]
+ [2020-01-01 05:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] ∈ [0, 10]
+ [2020-01-01 06:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] ∈ [0, 10]
+ [2020-01-01 07:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] ∈ [0, 10]
+ [2020-01-01 08:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 08:00:00] ∈ [0, 10]
+ "Kessel(Q_th)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Kessel(Q_th)|total_flow_hours": |-
+ Variable
+ --------
+ Kessel(Q_th)|total_flow_hours ∈ [0, 1e+06]
+ "Kessel|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel|on_hours_total": |-
+ Variable
+ --------
+ Kessel|on_hours_total ∈ [0, inf]
+ "Kessel->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Kessel->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Kessel->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Kessel->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Kessel->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Kessel->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Kessel->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Kessel->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Kessel->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Kessel->CO2(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel->CO2(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Kessel->CO2(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Kessel->CO2(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Kessel->CO2(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Kessel->CO2(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Kessel->CO2(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Kessel->CO2(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Kessel->CO2(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Kessel->CO2(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Wärmelast(Q_th_Last)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00] ∈ [30, 30]
+ [2020-01-01 01:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00] ∈ [0, 0]
+ [2020-01-01 02:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 02:00:00] ∈ [90, 90]
+ [2020-01-01 03:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 03:00:00] ∈ [110, 110]
+ [2020-01-01 04:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 04:00:00] ∈ [110, 110]
+ [2020-01-01 05:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 05:00:00] ∈ [20, 20]
+ [2020-01-01 06:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 06:00:00] ∈ [20, 20]
+ [2020-01-01 07:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00] ∈ [20, 20]
+ [2020-01-01 08:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00] ∈ [20, 20]
+ "Wärmelast(Q_th_Last)|total_flow_hours": |-
+ Variable
+ --------
+ Wärmelast(Q_th_Last)|total_flow_hours ∈ [0, inf]
+ "Gastarif(Q_Gas)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 02:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1000]
+ [2020-01-01 03:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1000]
+ [2020-01-01 04:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1000]
+ [2020-01-01 05:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1000]
+ [2020-01-01 06:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1000]
+ [2020-01-01 07:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1000]
+ [2020-01-01 08:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1000]
+ "Gastarif(Q_Gas)|total_flow_hours": |-
+ Variable
+ --------
+ Gastarif(Q_Gas)|total_flow_hours ∈ [0, inf]
+ "Gastarif(Q_Gas)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Gastarif(Q_Gas)->CO2(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Einspeisung(P_el)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+07]
+ [2020-01-01 05:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+07]
+ [2020-01-01 06:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+07]
+ "Einspeisung(P_el)|total_flow_hours": |-
+ Variable
+ --------
+ Einspeisung(P_el)|total_flow_hours ∈ [0, inf]
+ "Einspeisung(P_el)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Speicher(Q_th_load)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+04]
+ [2020-01-01 01:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+04]
+ [2020-01-01 02:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+04]
+ [2020-01-01 03:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+04]
+ [2020-01-01 04:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+04]
+ [2020-01-01 05:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+04]
+ [2020-01-01 06:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+04]
+ [2020-01-01 07:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+04]
+ [2020-01-01 08:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+04]
+ "Speicher(Q_th_load)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_load)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Speicher(Q_th_load)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Speicher(Q_th_load)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Speicher(Q_th_load)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Speicher(Q_th_load)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Speicher(Q_th_load)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Speicher(Q_th_load)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Speicher(Q_th_load)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Speicher(Q_th_load)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Speicher(Q_th_load)|on_hours_total": |-
+ Variable
+ --------
+ Speicher(Q_th_load)|on_hours_total ∈ [0, inf]
+ "Speicher(Q_th_load)|total_flow_hours": |-
+ Variable
+ --------
+ Speicher(Q_th_load)|total_flow_hours ∈ [0, inf]
+ "Speicher(Q_th_unload)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+04]
+ [2020-01-01 01:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+04]
+ [2020-01-01 02:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+04]
+ [2020-01-01 03:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+04]
+ [2020-01-01 04:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+04]
+ [2020-01-01 05:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+04]
+ [2020-01-01 06:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+04]
+ [2020-01-01 07:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+04]
+ [2020-01-01 08:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+04]
+ "Speicher(Q_th_unload)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Speicher(Q_th_unload)|on_hours_total": |-
+ Variable
+ --------
+ Speicher(Q_th_unload)|on_hours_total ∈ [0, inf]
+ "Speicher(Q_th_unload)|total_flow_hours": |-
+ Variable
+ --------
+ Speicher(Q_th_unload)|total_flow_hours ∈ [0, inf]
+ "Speicher|charge_state": |-
+ Variable (time: 10)
+ -------------------
+ [2020-01-01 00:00:00]: Speicher|charge_state[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: Speicher|charge_state[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 02:00:00]: Speicher|charge_state[2020-01-01 02:00:00] ∈ [0, 1000]
+ [2020-01-01 03:00:00]: Speicher|charge_state[2020-01-01 03:00:00] ∈ [0, 1000]
+ [2020-01-01 04:00:00]: Speicher|charge_state[2020-01-01 04:00:00] ∈ [0, 1000]
+ [2020-01-01 05:00:00]: Speicher|charge_state[2020-01-01 05:00:00] ∈ [0, 1000]
+ [2020-01-01 06:00:00]: Speicher|charge_state[2020-01-01 06:00:00] ∈ [0, 1000]
+ [2020-01-01 07:00:00]: Speicher|charge_state[2020-01-01 07:00:00] ∈ [0, 1000]
+ [2020-01-01 08:00:00]: Speicher|charge_state[2020-01-01 08:00:00] ∈ [0, 1000]
+ [2020-01-01 09:00:00]: Speicher|charge_state[2020-01-01 09:00:00] ∈ [0, 1000]
+ "Speicher|netto_discharge": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher|netto_discharge[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Speicher|netto_discharge[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Speicher|netto_discharge[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Speicher|netto_discharge[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Speicher|netto_discharge[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Speicher|netto_discharge[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Speicher|netto_discharge[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Speicher|netto_discharge[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Speicher|netto_discharge[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Speicher|size": |-
+ Variable
+ --------
+ Speicher|size ∈ [0, 1000]
+ "Speicher|PiecewiseEffects|costs": |-
+ Variable
+ --------
+ Speicher|PiecewiseEffects|costs ∈ [-inf, inf]
+ "Speicher|PiecewiseEffects|PE": |-
+ Variable
+ --------
+ Speicher|PiecewiseEffects|PE ∈ [-inf, inf]
+ "Speicher|Piece_0|inside_piece": |-
+ Variable
+ --------
+ Speicher|Piece_0|inside_piece ∈ {0, 1}
+ "Speicher|Piece_0|lambda0": |-
+ Variable
+ --------
+ Speicher|Piece_0|lambda0 ∈ [0, 1]
+ "Speicher|Piece_0|lambda1": |-
+ Variable
+ --------
+ Speicher|Piece_0|lambda1 ∈ [0, 1]
+ "Speicher|Piece_1|inside_piece": |-
+ Variable
+ --------
+ Speicher|Piece_1|inside_piece ∈ {0, 1}
+ "Speicher|Piece_1|lambda0": |-
+ Variable
+ --------
+ Speicher|Piece_1|lambda0 ∈ [0, 1]
+ "Speicher|Piece_1|lambda1": |-
+ Variable
+ --------
+ Speicher|Piece_1|lambda1 ∈ [0, 1]
+ "Speicher->costs(periodic)": |-
+ Variable
+ --------
+ Speicher->costs(periodic) ∈ [-inf, inf]
+ "Speicher->PE(periodic)": |-
+ Variable
+ --------
+ Speicher->PE(periodic) ∈ [-inf, inf]
+ "BHKW2(Q_fu)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+07]
+ [2020-01-01 05:00:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+07]
+ [2020-01-01 06:00:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+07]
+ "BHKW2(Q_fu)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2(Q_fu)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: BHKW2(Q_fu)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: BHKW2(Q_fu)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: BHKW2(Q_fu)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: BHKW2(Q_fu)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: BHKW2(Q_fu)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: BHKW2(Q_fu)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: BHKW2(Q_fu)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: BHKW2(Q_fu)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "BHKW2(Q_fu)|on_hours_total": |-
+ Variable
+ --------
+ BHKW2(Q_fu)|on_hours_total ∈ [0, inf]
+ "BHKW2(Q_fu)|total_flow_hours": |-
+ Variable
+ --------
+ BHKW2(Q_fu)|total_flow_hours ∈ [0, inf]
+ "BHKW2(P_el)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2(P_el)|flow_rate[2020-01-01 00:00:00] ∈ [0, 60]
+ [2020-01-01 01:00:00]: BHKW2(P_el)|flow_rate[2020-01-01 01:00:00] ∈ [0, 60]
+ [2020-01-01 02:00:00]: BHKW2(P_el)|flow_rate[2020-01-01 02:00:00] ∈ [0, 60]
+ [2020-01-01 03:00:00]: BHKW2(P_el)|flow_rate[2020-01-01 03:00:00] ∈ [0, 60]
+ [2020-01-01 04:00:00]: BHKW2(P_el)|flow_rate[2020-01-01 04:00:00] ∈ [0, 60]
+ [2020-01-01 05:00:00]: BHKW2(P_el)|flow_rate[2020-01-01 05:00:00] ∈ [0, 60]
+ [2020-01-01 06:00:00]: BHKW2(P_el)|flow_rate[2020-01-01 06:00:00] ∈ [0, 60]
+ [2020-01-01 07:00:00]: BHKW2(P_el)|flow_rate[2020-01-01 07:00:00] ∈ [0, 60]
+ [2020-01-01 08:00:00]: BHKW2(P_el)|flow_rate[2020-01-01 08:00:00] ∈ [0, 60]
+ "BHKW2(P_el)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2(P_el)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: BHKW2(P_el)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: BHKW2(P_el)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: BHKW2(P_el)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: BHKW2(P_el)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: BHKW2(P_el)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: BHKW2(P_el)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: BHKW2(P_el)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: BHKW2(P_el)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "BHKW2(P_el)|on_hours_total": |-
+ Variable
+ --------
+ BHKW2(P_el)|on_hours_total ∈ [0, inf]
+ "BHKW2(P_el)|total_flow_hours": |-
+ Variable
+ --------
+ BHKW2(P_el)|total_flow_hours ∈ [0, inf]
+ "BHKW2(Q_th)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2(Q_th)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: BHKW2(Q_th)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: BHKW2(Q_th)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00]: BHKW2(Q_th)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00]: BHKW2(Q_th)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+07]
+ [2020-01-01 05:00:00]: BHKW2(Q_th)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+07]
+ [2020-01-01 06:00:00]: BHKW2(Q_th)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00]: BHKW2(Q_th)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00]: BHKW2(Q_th)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+07]
+ "BHKW2(Q_th)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2(Q_th)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: BHKW2(Q_th)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: BHKW2(Q_th)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: BHKW2(Q_th)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: BHKW2(Q_th)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: BHKW2(Q_th)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: BHKW2(Q_th)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: BHKW2(Q_th)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: BHKW2(Q_th)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "BHKW2(Q_th)|on_hours_total": |-
+ Variable
+ --------
+ BHKW2(Q_th)|on_hours_total ∈ [0, inf]
+ "BHKW2(Q_th)|total_flow_hours": |-
+ Variable
+ --------
+ BHKW2(Q_th)|total_flow_hours ∈ [0, inf]
+ "BHKW2|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: BHKW2|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: BHKW2|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: BHKW2|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: BHKW2|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: BHKW2|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: BHKW2|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: BHKW2|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: BHKW2|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "BHKW2|on_hours_total": |-
+ Variable
+ --------
+ BHKW2|on_hours_total ∈ [0, inf]
+ "BHKW2|switch|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2|switch|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: BHKW2|switch|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: BHKW2|switch|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: BHKW2|switch|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: BHKW2|switch|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: BHKW2|switch|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: BHKW2|switch|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: BHKW2|switch|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: BHKW2|switch|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "BHKW2|switch|off": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2|switch|off[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: BHKW2|switch|off[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: BHKW2|switch|off[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: BHKW2|switch|off[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: BHKW2|switch|off[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: BHKW2|switch|off[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: BHKW2|switch|off[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: BHKW2|switch|off[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: BHKW2|switch|off[2020-01-01 08:00:00] ∈ {0, 1}
+ "BHKW2->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: BHKW2->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: BHKW2->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: BHKW2->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: BHKW2->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: BHKW2->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: BHKW2->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: BHKW2->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: BHKW2->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "BHKW2|Piece_0|inside_piece": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2|Piece_0|inside_piece[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: BHKW2|Piece_0|inside_piece[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: BHKW2|Piece_0|inside_piece[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: BHKW2|Piece_0|inside_piece[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: BHKW2|Piece_0|inside_piece[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: BHKW2|Piece_0|inside_piece[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: BHKW2|Piece_0|inside_piece[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: BHKW2|Piece_0|inside_piece[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: BHKW2|Piece_0|inside_piece[2020-01-01 08:00:00] ∈ {0, 1}
+ "BHKW2|Piece_0|lambda0": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2|Piece_0|lambda0[2020-01-01 00:00:00] ∈ [0, 1]
+ [2020-01-01 01:00:00]: BHKW2|Piece_0|lambda0[2020-01-01 01:00:00] ∈ [0, 1]
+ [2020-01-01 02:00:00]: BHKW2|Piece_0|lambda0[2020-01-01 02:00:00] ∈ [0, 1]
+ [2020-01-01 03:00:00]: BHKW2|Piece_0|lambda0[2020-01-01 03:00:00] ∈ [0, 1]
+ [2020-01-01 04:00:00]: BHKW2|Piece_0|lambda0[2020-01-01 04:00:00] ∈ [0, 1]
+ [2020-01-01 05:00:00]: BHKW2|Piece_0|lambda0[2020-01-01 05:00:00] ∈ [0, 1]
+ [2020-01-01 06:00:00]: BHKW2|Piece_0|lambda0[2020-01-01 06:00:00] ∈ [0, 1]
+ [2020-01-01 07:00:00]: BHKW2|Piece_0|lambda0[2020-01-01 07:00:00] ∈ [0, 1]
+ [2020-01-01 08:00:00]: BHKW2|Piece_0|lambda0[2020-01-01 08:00:00] ∈ [0, 1]
+ "BHKW2|Piece_0|lambda1": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2|Piece_0|lambda1[2020-01-01 00:00:00] ∈ [0, 1]
+ [2020-01-01 01:00:00]: BHKW2|Piece_0|lambda1[2020-01-01 01:00:00] ∈ [0, 1]
+ [2020-01-01 02:00:00]: BHKW2|Piece_0|lambda1[2020-01-01 02:00:00] ∈ [0, 1]
+ [2020-01-01 03:00:00]: BHKW2|Piece_0|lambda1[2020-01-01 03:00:00] ∈ [0, 1]
+ [2020-01-01 04:00:00]: BHKW2|Piece_0|lambda1[2020-01-01 04:00:00] ∈ [0, 1]
+ [2020-01-01 05:00:00]: BHKW2|Piece_0|lambda1[2020-01-01 05:00:00] ∈ [0, 1]
+ [2020-01-01 06:00:00]: BHKW2|Piece_0|lambda1[2020-01-01 06:00:00] ∈ [0, 1]
+ [2020-01-01 07:00:00]: BHKW2|Piece_0|lambda1[2020-01-01 07:00:00] ∈ [0, 1]
+ [2020-01-01 08:00:00]: BHKW2|Piece_0|lambda1[2020-01-01 08:00:00] ∈ [0, 1]
+ "BHKW2|Piece_1|inside_piece": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2|Piece_1|inside_piece[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: BHKW2|Piece_1|inside_piece[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: BHKW2|Piece_1|inside_piece[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: BHKW2|Piece_1|inside_piece[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: BHKW2|Piece_1|inside_piece[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: BHKW2|Piece_1|inside_piece[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: BHKW2|Piece_1|inside_piece[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: BHKW2|Piece_1|inside_piece[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: BHKW2|Piece_1|inside_piece[2020-01-01 08:00:00] ∈ {0, 1}
+ "BHKW2|Piece_1|lambda0": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2|Piece_1|lambda0[2020-01-01 00:00:00] ∈ [0, 1]
+ [2020-01-01 01:00:00]: BHKW2|Piece_1|lambda0[2020-01-01 01:00:00] ∈ [0, 1]
+ [2020-01-01 02:00:00]: BHKW2|Piece_1|lambda0[2020-01-01 02:00:00] ∈ [0, 1]
+ [2020-01-01 03:00:00]: BHKW2|Piece_1|lambda0[2020-01-01 03:00:00] ∈ [0, 1]
+ [2020-01-01 04:00:00]: BHKW2|Piece_1|lambda0[2020-01-01 04:00:00] ∈ [0, 1]
+ [2020-01-01 05:00:00]: BHKW2|Piece_1|lambda0[2020-01-01 05:00:00] ∈ [0, 1]
+ [2020-01-01 06:00:00]: BHKW2|Piece_1|lambda0[2020-01-01 06:00:00] ∈ [0, 1]
+ [2020-01-01 07:00:00]: BHKW2|Piece_1|lambda0[2020-01-01 07:00:00] ∈ [0, 1]
+ [2020-01-01 08:00:00]: BHKW2|Piece_1|lambda0[2020-01-01 08:00:00] ∈ [0, 1]
+ "BHKW2|Piece_1|lambda1": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: BHKW2|Piece_1|lambda1[2020-01-01 00:00:00] ∈ [0, 1]
+ [2020-01-01 01:00:00]: BHKW2|Piece_1|lambda1[2020-01-01 01:00:00] ∈ [0, 1]
+ [2020-01-01 02:00:00]: BHKW2|Piece_1|lambda1[2020-01-01 02:00:00] ∈ [0, 1]
+ [2020-01-01 03:00:00]: BHKW2|Piece_1|lambda1[2020-01-01 03:00:00] ∈ [0, 1]
+ [2020-01-01 04:00:00]: BHKW2|Piece_1|lambda1[2020-01-01 04:00:00] ∈ [0, 1]
+ [2020-01-01 05:00:00]: BHKW2|Piece_1|lambda1[2020-01-01 05:00:00] ∈ [0, 1]
+ [2020-01-01 06:00:00]: BHKW2|Piece_1|lambda1[2020-01-01 06:00:00] ∈ [0, 1]
+ [2020-01-01 07:00:00]: BHKW2|Piece_1|lambda1[2020-01-01 07:00:00] ∈ [0, 1]
+ [2020-01-01 08:00:00]: BHKW2|Piece_1|lambda1[2020-01-01 08:00:00] ∈ [0, 1]
+ "Strom|excess_input": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Strom|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Strom|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Strom|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Strom|excess_input[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Strom|excess_input[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Strom|excess_input[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Strom|excess_input[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Strom|excess_input[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Strom|excess_input[2020-01-01 08:00:00] ∈ [0, inf]
+ "Strom|excess_output": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Strom|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Strom|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Strom|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Strom|excess_output[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Strom|excess_output[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Strom|excess_output[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Strom|excess_output[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Strom|excess_output[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Strom|excess_output[2020-01-01 08:00:00] ∈ [0, inf]
+ "Strom->Penalty": |-
+ Variable
+ --------
+ Strom->Penalty ∈ [-inf, inf]
+ "Fernwärme|excess_input": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Fernwärme|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Fernwärme|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Fernwärme|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Fernwärme|excess_input[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Fernwärme|excess_input[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Fernwärme|excess_input[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Fernwärme|excess_input[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Fernwärme|excess_input[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Fernwärme|excess_input[2020-01-01 08:00:00] ∈ [0, inf]
+ "Fernwärme|excess_output": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Fernwärme|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Fernwärme|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Fernwärme|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Fernwärme|excess_output[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Fernwärme|excess_output[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Fernwärme|excess_output[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Fernwärme|excess_output[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Fernwärme|excess_output[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Fernwärme|excess_output[2020-01-01 08:00:00] ∈ [0, inf]
+ "Fernwärme->Penalty": |-
+ Variable
+ --------
+ Fernwärme->Penalty ∈ [-inf, inf]
+ "Gas|excess_input": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gas|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Gas|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Gas|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Gas|excess_input[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Gas|excess_input[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Gas|excess_input[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Gas|excess_input[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Gas|excess_input[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Gas|excess_input[2020-01-01 08:00:00] ∈ [0, inf]
+ "Gas|excess_output": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gas|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Gas|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Gas|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Gas|excess_output[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Gas|excess_output[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Gas|excess_output[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Gas|excess_output[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Gas|excess_output[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Gas|excess_output[2020-01-01 08:00:00] ∈ [0, inf]
+ "Gas->Penalty": |-
+ Variable
+ --------
+ Gas->Penalty ∈ [-inf, inf]
+constraints:
+ costs(periodic): |-
+ Constraint `costs(periodic)`
+ ----------------------------
+ +1 costs(periodic) - 1 Kessel(Q_th)->costs(periodic) - 1 Speicher->costs(periodic) = -0.0
+ costs(temporal): |-
+ Constraint `costs(temporal)`
+ ----------------------------
+ +1 costs(temporal) - 1 costs(temporal)|per_timestep[2020-01-01 00:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 01:00:00]... -1 costs(temporal)|per_timestep[2020-01-01 06:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 07:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "costs(temporal)|per_timestep": |-
+ Constraint `costs(temporal)|per_timestep`
+ [time: 9]:
+ ----------------------------------------------------
+ [2020-01-01 00:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 00:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00] - 1 Kessel(Q_th)->costs(temporal)[2020-01-01 00:00:00]... -1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00] - 1 BHKW2->costs(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 01:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00] - 1 Kessel(Q_th)->costs(temporal)[2020-01-01 01:00:00]... -1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00] - 1 BHKW2->costs(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 02:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 02:00:00] - 1 Kessel(Q_th)->costs(temporal)[2020-01-01 02:00:00]... -1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00] - 1 BHKW2->costs(temporal)[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 03:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 03:00:00] - 1 Kessel(Q_th)->costs(temporal)[2020-01-01 03:00:00]... -1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 03:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 03:00:00] - 1 BHKW2->costs(temporal)[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 04:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 04:00:00] - 1 Kessel(Q_th)->costs(temporal)[2020-01-01 04:00:00]... -1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 04:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 04:00:00] - 1 BHKW2->costs(temporal)[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 05:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 05:00:00] - 1 Kessel(Q_th)->costs(temporal)[2020-01-01 05:00:00]... -1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 05:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 05:00:00] - 1 BHKW2->costs(temporal)[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 06:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 06:00:00] - 1 Kessel(Q_th)->costs(temporal)[2020-01-01 06:00:00]... -1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00] - 1 BHKW2->costs(temporal)[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 07:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00] - 1 Kessel(Q_th)->costs(temporal)[2020-01-01 07:00:00]... -1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00] - 1 BHKW2->costs(temporal)[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 08:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00] - 1 Kessel(Q_th)->costs(temporal)[2020-01-01 08:00:00]... -1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00] - 1 BHKW2->costs(temporal)[2020-01-01 08:00:00] = -0.0
+ costs: |-
+ Constraint `costs`
+ ------------------
+ +1 costs - 1 costs(temporal) - 1 costs(periodic) = -0.0
+ CO2(periodic): |-
+ Constraint `CO2(periodic)`
+ --------------------------
+ +1 CO2(periodic) = -0.0
+ CO2(temporal): |-
+ Constraint `CO2(temporal)`
+ --------------------------
+ +1 CO2(temporal) - 1 CO2(temporal)|per_timestep[2020-01-01 00:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 01:00:00]... -1 CO2(temporal)|per_timestep[2020-01-01 06:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 07:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "CO2(temporal)|per_timestep": |-
+ Constraint `CO2(temporal)|per_timestep`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 00:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 01:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 01:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 02:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 02:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 03:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 03:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 04:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 04:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 05:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 05:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 06:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 06:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 07:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 07:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 08:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 08:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00] = -0.0
+ CO2: |-
+ Constraint `CO2`
+ ----------------
+ +1 CO2 - 1 CO2(temporal) - 1 CO2(periodic) = -0.0
+ PE(periodic): |-
+ Constraint `PE(periodic)`
+ -------------------------
+ +1 PE(periodic) - 1 Kessel(Q_th)->PE(periodic) - 1 Speicher->PE(periodic) = -0.0
+ PE(temporal): |-
+ Constraint `PE(temporal)`
+ -------------------------
+ +1 PE(temporal) - 1 PE(temporal)|per_timestep[2020-01-01 00:00:00] - 1 PE(temporal)|per_timestep[2020-01-01 01:00:00]... -1 PE(temporal)|per_timestep[2020-01-01 06:00:00] - 1 PE(temporal)|per_timestep[2020-01-01 07:00:00] - 1 PE(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "PE(temporal)|per_timestep": |-
+ Constraint `PE(temporal)|per_timestep`
+ [time: 9]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ PE: |-
+ Constraint `PE`
+ ---------------
+ +1 PE - 1 PE(temporal) - 1 PE(periodic) = -0.0
+ Penalty: |-
+ Constraint `Penalty`
+ --------------------
+ +1 Penalty - 1 Strom->Penalty - 1 Fernwärme->Penalty - 1 Gas->Penalty = -0.0
+ "CO2(temporal)->costs(temporal)": |-
+ Constraint `CO2(temporal)->costs(temporal)`
+ [time: 9]:
+ ------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 02:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 03:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 04:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 05:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 06:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_fu)|on_hours_total": |-
+ Constraint `Kessel(Q_fu)|on_hours_total`
+ ----------------------------------------
+ +1 Kessel(Q_fu)|on_hours_total - 1 Kessel(Q_fu)|on[2020-01-01 00:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 01:00:00]... -1 Kessel(Q_fu)|on[2020-01-01 06:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 07:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_fu)|flow_rate|ub": |-
+ Constraint `Kessel(Q_fu)|flow_rate|ub`
+ [time: 9]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 02:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 03:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 04:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 05:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Kessel(Q_fu)|flow_rate|lb": |-
+ Constraint `Kessel(Q_fu)|flow_rate|lb`
+ [time: 9]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Kessel(Q_fu)|total_flow_hours": |-
+ Constraint `Kessel(Q_fu)|total_flow_hours`
+ ------------------------------------------
+ +1 Kessel(Q_fu)|total_flow_hours - 1 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00]... -1 Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_th)->costs(periodic)": |-
+ Constraint `Kessel(Q_th)->costs(periodic)`
+ ------------------------------------------
+ +1 Kessel(Q_th)->costs(periodic) - 10 Kessel(Q_th)|size = 1000.0
+ "Kessel(Q_th)->PE(periodic)": |-
+ Constraint `Kessel(Q_th)->PE(periodic)`
+ ---------------------------------------
+ +1 Kessel(Q_th)->PE(periodic) - 2 Kessel(Q_th)|size = -0.0
+ "Kessel(Q_th)|complementary": |-
+ Constraint `Kessel(Q_th)|complementary`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|on[2020-01-01 00:00:00] + 1 Kessel(Q_th)|off[2020-01-01 00:00:00] = 1.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|on[2020-01-01 01:00:00] + 1 Kessel(Q_th)|off[2020-01-01 01:00:00] = 1.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|on[2020-01-01 02:00:00] + 1 Kessel(Q_th)|off[2020-01-01 02:00:00] = 1.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|on[2020-01-01 03:00:00] + 1 Kessel(Q_th)|off[2020-01-01 03:00:00] = 1.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|on[2020-01-01 04:00:00] + 1 Kessel(Q_th)|off[2020-01-01 04:00:00] = 1.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|on[2020-01-01 05:00:00] + 1 Kessel(Q_th)|off[2020-01-01 05:00:00] = 1.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|on[2020-01-01 06:00:00] + 1 Kessel(Q_th)|off[2020-01-01 06:00:00] = 1.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|on[2020-01-01 07:00:00] + 1 Kessel(Q_th)|off[2020-01-01 07:00:00] = 1.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|on[2020-01-01 08:00:00] + 1 Kessel(Q_th)|off[2020-01-01 08:00:00] = 1.0
+ "Kessel(Q_th)|on_hours_total": |-
+ Constraint `Kessel(Q_th)|on_hours_total`
+ ----------------------------------------
+ +1 Kessel(Q_th)|on_hours_total - 1 Kessel(Q_th)|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|on[2020-01-01 01:00:00]... -1 Kessel(Q_th)|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|on[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_th)|switch|transition": |-
+ Constraint `Kessel(Q_th)|switch|transition`
+ [time: 8]:
+ ------------------------------------------------------
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 01:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 01:00:00] - 1 Kessel(Q_th)|on[2020-01-01 01:00:00] + 1 Kessel(Q_th)|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 02:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 02:00:00] - 1 Kessel(Q_th)|on[2020-01-01 02:00:00] + 1 Kessel(Q_th)|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 03:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 03:00:00] - 1 Kessel(Q_th)|on[2020-01-01 03:00:00] + 1 Kessel(Q_th)|on[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 04:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 04:00:00] - 1 Kessel(Q_th)|on[2020-01-01 04:00:00] + 1 Kessel(Q_th)|on[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 05:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 05:00:00] - 1 Kessel(Q_th)|on[2020-01-01 05:00:00] + 1 Kessel(Q_th)|on[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 06:00:00] - 1 Kessel(Q_th)|on[2020-01-01 06:00:00] + 1 Kessel(Q_th)|on[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 07:00:00] - 1 Kessel(Q_th)|on[2020-01-01 07:00:00] + 1 Kessel(Q_th)|on[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 08:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 08:00:00] - 1 Kessel(Q_th)|on[2020-01-01 08:00:00] + 1 Kessel(Q_th)|on[2020-01-01 07:00:00] = -0.0
+ "Kessel(Q_th)|switch|initial": |-
+ Constraint `Kessel(Q_th)|switch|initial`
+ ----------------------------------------
+ +1 Kessel(Q_th)|switch|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 00:00:00] - 1 Kessel(Q_th)|on[2020-01-01 00:00:00] = -1.0
+ "Kessel(Q_th)|switch|mutex": |-
+ Constraint `Kessel(Q_th)|switch|mutex`
+ [time: 9]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 00:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 01:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 02:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 03:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 04:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 05:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 06:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 07:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 07:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 08:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 08:00:00] ≤ 1.0
+ "Kessel(Q_th)|switch|count": |-
+ Constraint `Kessel(Q_th)|switch|count`
+ --------------------------------------
+ +1 Kessel(Q_th)|switch|count - 1 Kessel(Q_th)|switch|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|switch|on[2020-01-01 01:00:00]... -1 Kessel(Q_th)|switch|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|switch|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|switch|on[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_th)|consecutive_on_hours|ub": |-
+ Constraint `Kessel(Q_th)|consecutive_on_hours|ub`
+ [time: 9]:
+ ------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] - 10 Kessel(Q_th)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] - 10 Kessel(Q_th)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] - 10 Kessel(Q_th)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] - 10 Kessel(Q_th)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] - 10 Kessel(Q_th)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] - 10 Kessel(Q_th)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] - 10 Kessel(Q_th)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] - 10 Kessel(Q_th)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 08:00:00] - 10 Kessel(Q_th)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Kessel(Q_th)|consecutive_on_hours|forward": |-
+ Constraint `Kessel(Q_th)|consecutive_on_hours|forward`
+ [time: 8]:
+ -----------------------------------------------------------------
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 08:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] ≤ 1.0
+ "Kessel(Q_th)|consecutive_on_hours|backward": |-
+ Constraint `Kessel(Q_th)|consecutive_on_hours|backward`
+ [time: 8]:
+ ------------------------------------------------------------------
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] - 10 Kessel(Q_th)|on[2020-01-01 01:00:00] ≥ -9.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] - 10 Kessel(Q_th)|on[2020-01-01 02:00:00] ≥ -9.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] - 10 Kessel(Q_th)|on[2020-01-01 03:00:00] ≥ -9.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] - 10 Kessel(Q_th)|on[2020-01-01 04:00:00] ≥ -9.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] - 10 Kessel(Q_th)|on[2020-01-01 05:00:00] ≥ -9.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] - 10 Kessel(Q_th)|on[2020-01-01 06:00:00] ≥ -9.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] - 10 Kessel(Q_th)|on[2020-01-01 07:00:00] ≥ -9.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 08:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] - 10 Kessel(Q_th)|on[2020-01-01 08:00:00] ≥ -9.0
+ "Kessel(Q_th)|consecutive_on_hours|initial": |-
+ Constraint `Kessel(Q_th)|consecutive_on_hours|initial`
+ ------------------------------------------------------
+ +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] - 2 Kessel(Q_th)|on[2020-01-01 00:00:00] = -0.0
+ "Kessel(Q_th)|consecutive_on_hours|lb": |-
+ Constraint `Kessel(Q_th)|consecutive_on_hours|lb`
+ [time: 9]:
+ ------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] - 1 Kessel(Q_th)|on[2020-01-01 00:00:00] + 1 Kessel(Q_th)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] - 1 Kessel(Q_th)|on[2020-01-01 01:00:00] + 1 Kessel(Q_th)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] - 1 Kessel(Q_th)|on[2020-01-01 02:00:00] + 1 Kessel(Q_th)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] - 1 Kessel(Q_th)|on[2020-01-01 03:00:00] + 1 Kessel(Q_th)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] - 1 Kessel(Q_th)|on[2020-01-01 04:00:00] + 1 Kessel(Q_th)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] - 2 Kessel(Q_th)|on[2020-01-01 05:00:00] + 2 Kessel(Q_th)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] - 2 Kessel(Q_th)|on[2020-01-01 06:00:00] + 2 Kessel(Q_th)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] - 2 Kessel(Q_th)|on[2020-01-01 07:00:00] + 2 Kessel(Q_th)|on[2020-01-01 08:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 08:00:00] ≥ -0.0
+ "Kessel(Q_th)|consecutive_off_hours|ub": |-
+ Constraint `Kessel(Q_th)|consecutive_off_hours|ub`
+ [time: 9]:
+ -------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 00:00:00] - 9 Kessel(Q_th)|off[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] - 9 Kessel(Q_th)|off[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] - 9 Kessel(Q_th)|off[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] - 9 Kessel(Q_th)|off[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] - 9 Kessel(Q_th)|off[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] - 9 Kessel(Q_th)|off[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] - 9 Kessel(Q_th)|off[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] - 9 Kessel(Q_th)|off[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 08:00:00] - 9 Kessel(Q_th)|off[2020-01-01 08:00:00] ≤ -0.0
+ "Kessel(Q_th)|consecutive_off_hours|forward": |-
+ Constraint `Kessel(Q_th)|consecutive_off_hours|forward`
+ [time: 8]:
+ ------------------------------------------------------------------
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 08:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] ≤ 1.0
+ "Kessel(Q_th)|consecutive_off_hours|backward": |-
+ Constraint `Kessel(Q_th)|consecutive_off_hours|backward`
+ [time: 8]:
+ -------------------------------------------------------------------
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 00:00:00] - 9 Kessel(Q_th)|off[2020-01-01 01:00:00] ≥ -8.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] - 9 Kessel(Q_th)|off[2020-01-01 02:00:00] ≥ -8.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] - 9 Kessel(Q_th)|off[2020-01-01 03:00:00] ≥ -8.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] - 9 Kessel(Q_th)|off[2020-01-01 04:00:00] ≥ -8.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] - 9 Kessel(Q_th)|off[2020-01-01 05:00:00] ≥ -8.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] - 9 Kessel(Q_th)|off[2020-01-01 06:00:00] ≥ -8.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] - 9 Kessel(Q_th)|off[2020-01-01 07:00:00] ≥ -8.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 08:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] - 9 Kessel(Q_th)|off[2020-01-01 08:00:00] ≥ -8.0
+ "Kessel(Q_th)|consecutive_off_hours|initial": |-
+ Constraint `Kessel(Q_th)|consecutive_off_hours|initial`
+ -------------------------------------------------------
+ +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 00:00:00] - 1 Kessel(Q_th)|off[2020-01-01 00:00:00] = -0.0
+ "Kessel(Q_th)->costs(temporal)": |-
+ Constraint `Kessel(Q_th)->costs(temporal)`
+ [time: 9]:
+ -----------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 00:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 01:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 02:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 03:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 04:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 05:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 06:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 07:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 08:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_th)|flow_rate|lb2": |-
+ Constraint `Kessel(Q_th)|flow_rate|lb2`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] - 5 Kessel(Q_th)|on[2020-01-01 00:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] - 5 Kessel(Q_th)|on[2020-01-01 01:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] - 5 Kessel(Q_th)|on[2020-01-01 02:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] - 5 Kessel(Q_th)|on[2020-01-01 03:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] - 5 Kessel(Q_th)|on[2020-01-01 04:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] - 5 Kessel(Q_th)|on[2020-01-01 05:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] - 5 Kessel(Q_th)|on[2020-01-01 06:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] - 5 Kessel(Q_th)|on[2020-01-01 07:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] - 5 Kessel(Q_th)|on[2020-01-01 08:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ "Kessel(Q_th)|flow_rate|ub2": |-
+ Constraint `Kessel(Q_th)|flow_rate|ub2`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ "Kessel(Q_th)|flow_rate|ub1": |-
+ Constraint `Kessel(Q_th)|flow_rate|ub1`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +50 Kessel(Q_th)|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +50 Kessel(Q_th)|on[2020-01-01 01:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +50 Kessel(Q_th)|on[2020-01-01 02:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +50 Kessel(Q_th)|on[2020-01-01 03:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +50 Kessel(Q_th)|on[2020-01-01 04:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +50 Kessel(Q_th)|on[2020-01-01 05:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +50 Kessel(Q_th)|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +50 Kessel(Q_th)|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +50 Kessel(Q_th)|on[2020-01-01 08:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] ≥ -0.0
+ "Kessel(Q_th)|flow_rate|lb1": |-
+ Constraint `Kessel(Q_th)|flow_rate|lb1`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +5 Kessel(Q_th)|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +5 Kessel(Q_th)|on[2020-01-01 01:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +5 Kessel(Q_th)|on[2020-01-01 02:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +5 Kessel(Q_th)|on[2020-01-01 03:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +5 Kessel(Q_th)|on[2020-01-01 04:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +5 Kessel(Q_th)|on[2020-01-01 05:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +5 Kessel(Q_th)|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +5 Kessel(Q_th)|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +5 Kessel(Q_th)|on[2020-01-01 08:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] ≤ -0.0
+ "Kessel(Q_th)|total_flow_hours": |-
+ Constraint `Kessel(Q_th)|total_flow_hours`
+ ------------------------------------------
+ +1 Kessel(Q_th)|total_flow_hours - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00]... -1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_th)|load_factor_max": |-
+ Constraint `Kessel(Q_th)|load_factor_max`
+ -----------------------------------------
+ +1 Kessel(Q_th)|total_flow_hours - 9 Kessel(Q_th)|size ≤ -0.0
+ "Kessel(Q_th)|load_factor_min": |-
+ Constraint `Kessel(Q_th)|load_factor_min`
+ -----------------------------------------
+ +1 Kessel(Q_th)|total_flow_hours - 0.9 Kessel(Q_th)|size ≥ -0.0
+ "Kessel|on|ub": |-
+ Constraint `Kessel|on|ub`
+ [time: 9]:
+ ------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel|on[2020-01-01 00:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|on[2020-01-01 00:00:00] ≤ 1e-05
+ [2020-01-01 01:00:00]: +1 Kessel|on[2020-01-01 01:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 01:00:00] - 1 Kessel(Q_th)|on[2020-01-01 01:00:00] ≤ 1e-05
+ [2020-01-01 02:00:00]: +1 Kessel|on[2020-01-01 02:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 02:00:00] - 1 Kessel(Q_th)|on[2020-01-01 02:00:00] ≤ 1e-05
+ [2020-01-01 03:00:00]: +1 Kessel|on[2020-01-01 03:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 03:00:00] - 1 Kessel(Q_th)|on[2020-01-01 03:00:00] ≤ 1e-05
+ [2020-01-01 04:00:00]: +1 Kessel|on[2020-01-01 04:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 04:00:00] - 1 Kessel(Q_th)|on[2020-01-01 04:00:00] ≤ 1e-05
+ [2020-01-01 05:00:00]: +1 Kessel|on[2020-01-01 05:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 05:00:00] - 1 Kessel(Q_th)|on[2020-01-01 05:00:00] ≤ 1e-05
+ [2020-01-01 06:00:00]: +1 Kessel|on[2020-01-01 06:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|on[2020-01-01 06:00:00] ≤ 1e-05
+ [2020-01-01 07:00:00]: +1 Kessel|on[2020-01-01 07:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|on[2020-01-01 07:00:00] ≤ 1e-05
+ [2020-01-01 08:00:00]: +1 Kessel|on[2020-01-01 08:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 08:00:00] - 1 Kessel(Q_th)|on[2020-01-01 08:00:00] ≤ 1e-05
+ "Kessel|on|lb": |-
+ Constraint `Kessel|on|lb`
+ [time: 9]:
+ ------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel|on[2020-01-01 00:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 00:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel|on[2020-01-01 01:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 01:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel|on[2020-01-01 02:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 02:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel|on[2020-01-01 03:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 03:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel|on[2020-01-01 04:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 04:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel|on[2020-01-01 05:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 05:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel|on[2020-01-01 06:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 06:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel|on[2020-01-01 07:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 07:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel|on[2020-01-01 08:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 08:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Kessel|on_hours_total": |-
+ Constraint `Kessel|on_hours_total`
+ ----------------------------------
+ +1 Kessel|on_hours_total - 1 Kessel|on[2020-01-01 00:00:00] - 1 Kessel|on[2020-01-01 01:00:00]... -1 Kessel|on[2020-01-01 06:00:00] - 1 Kessel|on[2020-01-01 07:00:00] - 1 Kessel|on[2020-01-01 08:00:00] = -0.0
+ "Kessel->costs(temporal)": |-
+ Constraint `Kessel->costs(temporal)`
+ [time: 9]:
+ -----------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel->costs(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kessel->costs(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Kessel->costs(temporal)[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Kessel->costs(temporal)[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Kessel->costs(temporal)[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Kessel->costs(temporal)[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Kessel->costs(temporal)[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Kessel->costs(temporal)[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Kessel->costs(temporal)[2020-01-01 08:00:00] = -0.0
+ "Kessel->CO2(temporal)": |-
+ Constraint `Kessel->CO2(temporal)`
+ [time: 9]:
+ ---------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 00:00:00] - 1000 Kessel|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 01:00:00] - 1000 Kessel|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 02:00:00] - 1000 Kessel|on[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 03:00:00] - 1000 Kessel|on[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 04:00:00] - 1000 Kessel|on[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 05:00:00] - 1000 Kessel|on[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 06:00:00] - 1000 Kessel|on[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 07:00:00] - 1000 Kessel|on[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 08:00:00] - 1000 Kessel|on[2020-01-01 08:00:00] = -0.0
+ "Kessel|conversion_0": |-
+ Constraint `Kessel|conversion_0`
+ [time: 9]:
+ -------------------------------------------
+ [2020-01-01 00:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Wärmelast(Q_th_Last)|total_flow_hours": |-
+ Constraint `Wärmelast(Q_th_Last)|total_flow_hours`
+ --------------------------------------------------
+ +1 Wärmelast(Q_th_Last)|total_flow_hours - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 06:00:00] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Gastarif(Q_Gas)|total_flow_hours": |-
+ Constraint `Gastarif(Q_Gas)|total_flow_hours`
+ ---------------------------------------------
+ +1 Gastarif(Q_Gas)|total_flow_hours - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00]... -1 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Gastarif(Q_Gas)->costs(temporal)": |-
+ Constraint `Gastarif(Q_Gas)->costs(temporal)`
+ [time: 9]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 03:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 04:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 05:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Gastarif(Q_Gas)->CO2(temporal)": |-
+ Constraint `Gastarif(Q_Gas)->CO2(temporal)`
+ [time: 9]:
+ ------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 03:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 04:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 05:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Einspeisung(P_el)|total_flow_hours": |-
+ Constraint `Einspeisung(P_el)|total_flow_hours`
+ -----------------------------------------------
+ +1 Einspeisung(P_el)|total_flow_hours - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00]... -1 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Einspeisung(P_el)->costs(temporal)": |-
+ Constraint `Einspeisung(P_el)->costs(temporal)`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 03:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 04:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 05:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_load)|on_hours_total": |-
+ Constraint `Speicher(Q_th_load)|on_hours_total`
+ -----------------------------------------------
+ +1 Speicher(Q_th_load)|on_hours_total - 1 Speicher(Q_th_load)|on[2020-01-01 00:00:00] - 1 Speicher(Q_th_load)|on[2020-01-01 01:00:00]... -1 Speicher(Q_th_load)|on[2020-01-01 06:00:00] - 1 Speicher(Q_th_load)|on[2020-01-01 07:00:00] - 1 Speicher(Q_th_load)|on[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_load)|flow_rate|ub": |-
+ Constraint `Speicher(Q_th_load)|flow_rate|ub`
+ [time: 9]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Speicher(Q_th_load)|flow_rate|lb": |-
+ Constraint `Speicher(Q_th_load)|flow_rate|lb`
+ [time: 9]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Speicher(Q_th_load)|total_flow_hours": |-
+ Constraint `Speicher(Q_th_load)|total_flow_hours`
+ -------------------------------------------------
+ +1 Speicher(Q_th_load)|total_flow_hours - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_unload)|on_hours_total": |-
+ Constraint `Speicher(Q_th_unload)|on_hours_total`
+ -------------------------------------------------
+ +1 Speicher(Q_th_unload)|on_hours_total - 1 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] - 1 Speicher(Q_th_unload)|on[2020-01-01 01:00:00]... -1 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] - 1 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] - 1 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_unload)|flow_rate|ub": |-
+ Constraint `Speicher(Q_th_unload)|flow_rate|ub`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Speicher(Q_th_unload)|flow_rate|lb": |-
+ Constraint `Speicher(Q_th_unload)|flow_rate|lb`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Speicher(Q_th_unload)|total_flow_hours": |-
+ Constraint `Speicher(Q_th_unload)|total_flow_hours`
+ ---------------------------------------------------
+ +1 Speicher(Q_th_unload)|total_flow_hours - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00]... -1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher|prevent_simultaneous_use": |-
+ Constraint `Speicher|prevent_simultaneous_use`
+ [time: 9]:
+ ---------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 00:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 01:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 02:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 03:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 04:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 05:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 06:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 07:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 08:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ≤ 1.0
+ "Speicher|netto_discharge": |-
+ Constraint `Speicher|netto_discharge`
+ [time: 9]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher|netto_discharge[2020-01-01 00:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Speicher|netto_discharge[2020-01-01 01:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|netto_discharge[2020-01-01 02:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|netto_discharge[2020-01-01 03:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|netto_discharge[2020-01-01 04:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|netto_discharge[2020-01-01 05:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|netto_discharge[2020-01-01 06:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|netto_discharge[2020-01-01 07:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|netto_discharge[2020-01-01 08:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher|charge_state": |-
+ Constraint `Speicher|charge_state`
+ [time: 9]:
+ ---------------------------------------------
+ [2020-01-01 01:00:00]: +1 Speicher|charge_state[2020-01-01 01:00:00] - 0.92 Speicher|charge_state[2020-01-01 00:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|charge_state[2020-01-01 02:00:00] - 0.92 Speicher|charge_state[2020-01-01 01:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|charge_state[2020-01-01 03:00:00] - 0.92 Speicher|charge_state[2020-01-01 02:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|charge_state[2020-01-01 04:00:00] - 0.92 Speicher|charge_state[2020-01-01 03:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|charge_state[2020-01-01 05:00:00] - 0.92 Speicher|charge_state[2020-01-01 04:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|charge_state[2020-01-01 06:00:00] - 0.92 Speicher|charge_state[2020-01-01 05:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|charge_state[2020-01-01 07:00:00] - 0.92 Speicher|charge_state[2020-01-01 06:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|charge_state[2020-01-01 08:00:00] - 0.92 Speicher|charge_state[2020-01-01 07:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 09:00:00]: +1 Speicher|charge_state[2020-01-01 09:00:00] - 0.92 Speicher|charge_state[2020-01-01 08:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher|Piece_0|inside_piece": |-
+ Constraint `Speicher|Piece_0|inside_piece`
+ ------------------------------------------
+ +1 Speicher|Piece_0|inside_piece - 1 Speicher|Piece_0|lambda0 - 1 Speicher|Piece_0|lambda1 = -0.0
+ "Speicher|Piece_1|inside_piece": |-
+ Constraint `Speicher|Piece_1|inside_piece`
+ ------------------------------------------
+ +1 Speicher|Piece_1|inside_piece - 1 Speicher|Piece_1|lambda0 - 1 Speicher|Piece_1|lambda1 = -0.0
+ "Speicher|PiecewiseEffects|Speicher|size|lambda": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|size|lambda`
+ -----------------------------------------------------------
+ +1 Speicher|size - 5 Speicher|Piece_0|lambda0 - 25 Speicher|Piece_0|lambda1 - 25 Speicher|Piece_1|lambda0 - 100 Speicher|Piece_1|lambda1 = -0.0
+ "Speicher|PiecewiseEffects|Speicher|size|single_segment": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|size|single_segment`
+ -------------------------------------------------------------------
+ +1 Speicher|Piece_0|inside_piece + 1 Speicher|Piece_1|inside_piece ≤ 1.0
+ "Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|costs|lambda": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|costs|lambda`
+ -----------------------------------------------------------------------------
+ +1 Speicher|PiecewiseEffects|costs - 50 Speicher|Piece_0|lambda0 - 250 Speicher|Piece_0|lambda1 - 250 Speicher|Piece_1|lambda0 - 800 Speicher|Piece_1|lambda1 = -0.0
+ "Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|costs|single_segment": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|costs|single_segment`
+ -------------------------------------------------------------------------------------
+ +1 Speicher|Piece_0|inside_piece + 1 Speicher|Piece_1|inside_piece ≤ 1.0
+ "Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|PE|lambda": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|PE|lambda`
+ --------------------------------------------------------------------------
+ +1 Speicher|PiecewiseEffects|PE - 5 Speicher|Piece_0|lambda0 - 25 Speicher|Piece_0|lambda1 - 25 Speicher|Piece_1|lambda0 - 100 Speicher|Piece_1|lambda1 = -0.0
+ "Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|PE|single_segment": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|PE|single_segment`
+ ----------------------------------------------------------------------------------
+ +1 Speicher|Piece_0|inside_piece + 1 Speicher|Piece_1|inside_piece ≤ 1.0
+ "Speicher->costs(periodic)": |-
+ Constraint `Speicher->costs(periodic)`
+ --------------------------------------
+ +1 Speicher->costs(periodic) - 1 Speicher|PiecewiseEffects|costs = -0.0
+ "Speicher->PE(periodic)": |-
+ Constraint `Speicher->PE(periodic)`
+ -----------------------------------
+ +1 Speicher->PE(periodic) - 1 Speicher|PiecewiseEffects|PE = -0.0
+ "Speicher|charge_state|ub": |-
+ Constraint `Speicher|charge_state|ub`
+ [time: 10]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher|charge_state[2020-01-01 00:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher|charge_state[2020-01-01 01:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|charge_state[2020-01-01 02:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|charge_state[2020-01-01 03:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|charge_state[2020-01-01 04:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|charge_state[2020-01-01 05:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|charge_state[2020-01-01 06:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|charge_state[2020-01-01 07:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|charge_state[2020-01-01 08:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 09:00:00]: +1 Speicher|charge_state[2020-01-01 09:00:00] - 1 Speicher|size ≤ -0.0
+ "Speicher|charge_state|lb": |-
+ Constraint `Speicher|charge_state|lb`
+ [time: 10]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher|charge_state[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher|charge_state[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|charge_state[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|charge_state[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|charge_state[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|charge_state[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|charge_state[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|charge_state[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|charge_state[2020-01-01 08:00:00] ≥ -0.0
+ [2020-01-01 09:00:00]: +1 Speicher|charge_state[2020-01-01 09:00:00] ≥ -0.0
+ "Speicher|initial_charge_state": |-
+ Constraint `Speicher|initial_charge_state`
+ ------------------------------------------
+ +1 Speicher|charge_state[2020-01-01 00:00:00] = -0.0
+ "Speicher|final_charge_max": |-
+ Constraint `Speicher|final_charge_max`
+ --------------------------------------
+ +1 Speicher|charge_state[2020-01-01 09:00:00] ≤ 10.0
+ "BHKW2(Q_fu)|on_hours_total": |-
+ Constraint `BHKW2(Q_fu)|on_hours_total`
+ ---------------------------------------
+ +1 BHKW2(Q_fu)|on_hours_total - 1 BHKW2(Q_fu)|on[2020-01-01 00:00:00] - 1 BHKW2(Q_fu)|on[2020-01-01 01:00:00]... -1 BHKW2(Q_fu)|on[2020-01-01 06:00:00] - 1 BHKW2(Q_fu)|on[2020-01-01 07:00:00] - 1 BHKW2(Q_fu)|on[2020-01-01 08:00:00] = -0.0
+ "BHKW2(Q_fu)|flow_rate|ub": |-
+ Constraint `BHKW2(Q_fu)|flow_rate|ub`
+ [time: 9]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1e+07 BHKW2(Q_fu)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1e+07 BHKW2(Q_fu)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1e+07 BHKW2(Q_fu)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1e+07 BHKW2(Q_fu)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1e+07 BHKW2(Q_fu)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1e+07 BHKW2(Q_fu)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1e+07 BHKW2(Q_fu)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1e+07 BHKW2(Q_fu)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1e+07 BHKW2(Q_fu)|on[2020-01-01 08:00:00] ≤ -0.0
+ "BHKW2(Q_fu)|flow_rate|lb": |-
+ Constraint `BHKW2(Q_fu)|flow_rate|lb`
+ [time: 9]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1e-05 BHKW2(Q_fu)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1e-05 BHKW2(Q_fu)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1e-05 BHKW2(Q_fu)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1e-05 BHKW2(Q_fu)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1e-05 BHKW2(Q_fu)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1e-05 BHKW2(Q_fu)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1e-05 BHKW2(Q_fu)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1e-05 BHKW2(Q_fu)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1e-05 BHKW2(Q_fu)|on[2020-01-01 08:00:00] ≥ -0.0
+ "BHKW2(Q_fu)|total_flow_hours": |-
+ Constraint `BHKW2(Q_fu)|total_flow_hours`
+ -----------------------------------------
+ +1 BHKW2(Q_fu)|total_flow_hours - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 01:00:00]... -1 BHKW2(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "BHKW2(P_el)|on_hours_total": |-
+ Constraint `BHKW2(P_el)|on_hours_total`
+ ---------------------------------------
+ +1 BHKW2(P_el)|on_hours_total - 1 BHKW2(P_el)|on[2020-01-01 00:00:00] - 1 BHKW2(P_el)|on[2020-01-01 01:00:00]... -1 BHKW2(P_el)|on[2020-01-01 06:00:00] - 1 BHKW2(P_el)|on[2020-01-01 07:00:00] - 1 BHKW2(P_el)|on[2020-01-01 08:00:00] = -0.0
+ "BHKW2(P_el)|flow_rate|ub": |-
+ Constraint `BHKW2(P_el)|flow_rate|ub`
+ [time: 9]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 00:00:00] - 60 BHKW2(P_el)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 01:00:00] - 60 BHKW2(P_el)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 02:00:00] - 60 BHKW2(P_el)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 03:00:00] - 60 BHKW2(P_el)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 04:00:00] - 60 BHKW2(P_el)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 05:00:00] - 60 BHKW2(P_el)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 06:00:00] - 60 BHKW2(P_el)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 07:00:00] - 60 BHKW2(P_el)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 08:00:00] - 60 BHKW2(P_el)|on[2020-01-01 08:00:00] ≤ -0.0
+ "BHKW2(P_el)|flow_rate|lb": |-
+ Constraint `BHKW2(P_el)|flow_rate|lb`
+ [time: 9]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 00:00:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 01:00:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 02:00:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 03:00:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 04:00:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 05:00:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 06:00:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 07:00:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 08:00:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 08:00:00] ≥ -0.0
+ "BHKW2(P_el)|total_flow_hours": |-
+ Constraint `BHKW2(P_el)|total_flow_hours`
+ -----------------------------------------
+ +1 BHKW2(P_el)|total_flow_hours - 1 BHKW2(P_el)|flow_rate[2020-01-01 00:00:00] - 1 BHKW2(P_el)|flow_rate[2020-01-01 01:00:00]... -1 BHKW2(P_el)|flow_rate[2020-01-01 06:00:00] - 1 BHKW2(P_el)|flow_rate[2020-01-01 07:00:00] - 1 BHKW2(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "BHKW2(Q_th)|on_hours_total": |-
+ Constraint `BHKW2(Q_th)|on_hours_total`
+ ---------------------------------------
+ +1 BHKW2(Q_th)|on_hours_total - 1 BHKW2(Q_th)|on[2020-01-01 00:00:00] - 1 BHKW2(Q_th)|on[2020-01-01 01:00:00]... -1 BHKW2(Q_th)|on[2020-01-01 06:00:00] - 1 BHKW2(Q_th)|on[2020-01-01 07:00:00] - 1 BHKW2(Q_th)|on[2020-01-01 08:00:00] = -0.0
+ "BHKW2(Q_th)|flow_rate|ub": |-
+ Constraint `BHKW2(Q_th)|flow_rate|ub`
+ [time: 9]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 00:00:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 01:00:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 02:00:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 03:00:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 04:00:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 05:00:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 06:00:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 07:00:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 08:00:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 08:00:00] ≤ -0.0
+ "BHKW2(Q_th)|flow_rate|lb": |-
+ Constraint `BHKW2(Q_th)|flow_rate|lb`
+ [time: 9]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 00:00:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 01:00:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 02:00:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 03:00:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 04:00:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 05:00:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 06:00:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 07:00:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 08:00:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 08:00:00] ≥ -0.0
+ "BHKW2(Q_th)|total_flow_hours": |-
+ Constraint `BHKW2(Q_th)|total_flow_hours`
+ -----------------------------------------
+ +1 BHKW2(Q_th)|total_flow_hours - 1 BHKW2(Q_th)|flow_rate[2020-01-01 00:00:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-01 01:00:00]... -1 BHKW2(Q_th)|flow_rate[2020-01-01 06:00:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-01 07:00:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "BHKW2|on|ub": |-
+ Constraint `BHKW2|on|ub`
+ [time: 9]:
+ -----------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2|on[2020-01-01 00:00:00] - 1 BHKW2(Q_fu)|on[2020-01-01 00:00:00] - 1 BHKW2(P_el)|on[2020-01-01 00:00:00] - 1 BHKW2(Q_th)|on[2020-01-01 00:00:00] ≤ 1e-05
+ [2020-01-01 01:00:00]: +1 BHKW2|on[2020-01-01 01:00:00] - 1 BHKW2(Q_fu)|on[2020-01-01 01:00:00] - 1 BHKW2(P_el)|on[2020-01-01 01:00:00] - 1 BHKW2(Q_th)|on[2020-01-01 01:00:00] ≤ 1e-05
+ [2020-01-01 02:00:00]: +1 BHKW2|on[2020-01-01 02:00:00] - 1 BHKW2(Q_fu)|on[2020-01-01 02:00:00] - 1 BHKW2(P_el)|on[2020-01-01 02:00:00] - 1 BHKW2(Q_th)|on[2020-01-01 02:00:00] ≤ 1e-05
+ [2020-01-01 03:00:00]: +1 BHKW2|on[2020-01-01 03:00:00] - 1 BHKW2(Q_fu)|on[2020-01-01 03:00:00] - 1 BHKW2(P_el)|on[2020-01-01 03:00:00] - 1 BHKW2(Q_th)|on[2020-01-01 03:00:00] ≤ 1e-05
+ [2020-01-01 04:00:00]: +1 BHKW2|on[2020-01-01 04:00:00] - 1 BHKW2(Q_fu)|on[2020-01-01 04:00:00] - 1 BHKW2(P_el)|on[2020-01-01 04:00:00] - 1 BHKW2(Q_th)|on[2020-01-01 04:00:00] ≤ 1e-05
+ [2020-01-01 05:00:00]: +1 BHKW2|on[2020-01-01 05:00:00] - 1 BHKW2(Q_fu)|on[2020-01-01 05:00:00] - 1 BHKW2(P_el)|on[2020-01-01 05:00:00] - 1 BHKW2(Q_th)|on[2020-01-01 05:00:00] ≤ 1e-05
+ [2020-01-01 06:00:00]: +1 BHKW2|on[2020-01-01 06:00:00] - 1 BHKW2(Q_fu)|on[2020-01-01 06:00:00] - 1 BHKW2(P_el)|on[2020-01-01 06:00:00] - 1 BHKW2(Q_th)|on[2020-01-01 06:00:00] ≤ 1e-05
+ [2020-01-01 07:00:00]: +1 BHKW2|on[2020-01-01 07:00:00] - 1 BHKW2(Q_fu)|on[2020-01-01 07:00:00] - 1 BHKW2(P_el)|on[2020-01-01 07:00:00] - 1 BHKW2(Q_th)|on[2020-01-01 07:00:00] ≤ 1e-05
+ [2020-01-01 08:00:00]: +1 BHKW2|on[2020-01-01 08:00:00] - 1 BHKW2(Q_fu)|on[2020-01-01 08:00:00] - 1 BHKW2(P_el)|on[2020-01-01 08:00:00] - 1 BHKW2(Q_th)|on[2020-01-01 08:00:00] ≤ 1e-05
+ "BHKW2|on|lb": |-
+ Constraint `BHKW2|on|lb`
+ [time: 9]:
+ -----------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2|on[2020-01-01 00:00:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 00:00:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 00:00:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2|on[2020-01-01 01:00:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 01:00:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 01:00:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2|on[2020-01-01 02:00:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 02:00:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 02:00:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2|on[2020-01-01 03:00:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 03:00:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 03:00:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2|on[2020-01-01 04:00:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 04:00:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 04:00:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2|on[2020-01-01 05:00:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 05:00:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 05:00:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2|on[2020-01-01 06:00:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 06:00:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 06:00:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2|on[2020-01-01 07:00:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 07:00:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 07:00:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2|on[2020-01-01 08:00:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 08:00:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 08:00:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 08:00:00] ≥ -0.0
+ "BHKW2|on_hours_total": |-
+ Constraint `BHKW2|on_hours_total`
+ ---------------------------------
+ +1 BHKW2|on_hours_total - 1 BHKW2|on[2020-01-01 00:00:00] - 1 BHKW2|on[2020-01-01 01:00:00]... -1 BHKW2|on[2020-01-01 06:00:00] - 1 BHKW2|on[2020-01-01 07:00:00] - 1 BHKW2|on[2020-01-01 08:00:00] = -0.0
+ "BHKW2|switch|transition": |-
+ Constraint `BHKW2|switch|transition`
+ [time: 8]:
+ -----------------------------------------------
+ [2020-01-01 01:00:00]: +1 BHKW2|switch|on[2020-01-01 01:00:00] - 1 BHKW2|switch|off[2020-01-01 01:00:00] - 1 BHKW2|on[2020-01-01 01:00:00] + 1 BHKW2|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2|switch|on[2020-01-01 02:00:00] - 1 BHKW2|switch|off[2020-01-01 02:00:00] - 1 BHKW2|on[2020-01-01 02:00:00] + 1 BHKW2|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2|switch|on[2020-01-01 03:00:00] - 1 BHKW2|switch|off[2020-01-01 03:00:00] - 1 BHKW2|on[2020-01-01 03:00:00] + 1 BHKW2|on[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2|switch|on[2020-01-01 04:00:00] - 1 BHKW2|switch|off[2020-01-01 04:00:00] - 1 BHKW2|on[2020-01-01 04:00:00] + 1 BHKW2|on[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2|switch|on[2020-01-01 05:00:00] - 1 BHKW2|switch|off[2020-01-01 05:00:00] - 1 BHKW2|on[2020-01-01 05:00:00] + 1 BHKW2|on[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2|switch|on[2020-01-01 06:00:00] - 1 BHKW2|switch|off[2020-01-01 06:00:00] - 1 BHKW2|on[2020-01-01 06:00:00] + 1 BHKW2|on[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2|switch|on[2020-01-01 07:00:00] - 1 BHKW2|switch|off[2020-01-01 07:00:00] - 1 BHKW2|on[2020-01-01 07:00:00] + 1 BHKW2|on[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2|switch|on[2020-01-01 08:00:00] - 1 BHKW2|switch|off[2020-01-01 08:00:00] - 1 BHKW2|on[2020-01-01 08:00:00] + 1 BHKW2|on[2020-01-01 07:00:00] = -0.0
+ "BHKW2|switch|initial": |-
+ Constraint `BHKW2|switch|initial`
+ ---------------------------------
+ +1 BHKW2|switch|on[2020-01-01 00:00:00] - 1 BHKW2|switch|off[2020-01-01 00:00:00] - 1 BHKW2|on[2020-01-01 00:00:00] = -1.0
+ "BHKW2|switch|mutex": |-
+ Constraint `BHKW2|switch|mutex`
+ [time: 9]:
+ ------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2|switch|on[2020-01-01 00:00:00] + 1 BHKW2|switch|off[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 01:00:00]: +1 BHKW2|switch|on[2020-01-01 01:00:00] + 1 BHKW2|switch|off[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 BHKW2|switch|on[2020-01-01 02:00:00] + 1 BHKW2|switch|off[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 BHKW2|switch|on[2020-01-01 03:00:00] + 1 BHKW2|switch|off[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 BHKW2|switch|on[2020-01-01 04:00:00] + 1 BHKW2|switch|off[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 BHKW2|switch|on[2020-01-01 05:00:00] + 1 BHKW2|switch|off[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 BHKW2|switch|on[2020-01-01 06:00:00] + 1 BHKW2|switch|off[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 BHKW2|switch|on[2020-01-01 07:00:00] + 1 BHKW2|switch|off[2020-01-01 07:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 BHKW2|switch|on[2020-01-01 08:00:00] + 1 BHKW2|switch|off[2020-01-01 08:00:00] ≤ 1.0
+ "BHKW2->costs(temporal)": |-
+ Constraint `BHKW2->costs(temporal)`
+ [time: 9]:
+ ----------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2->costs(temporal)[2020-01-01 00:00:00] - 0.01 BHKW2|switch|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2->costs(temporal)[2020-01-01 01:00:00] - 0.01 BHKW2|switch|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2->costs(temporal)[2020-01-01 02:00:00] - 0.01 BHKW2|switch|on[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2->costs(temporal)[2020-01-01 03:00:00] - 0.01 BHKW2|switch|on[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2->costs(temporal)[2020-01-01 04:00:00] - 0.01 BHKW2|switch|on[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2->costs(temporal)[2020-01-01 05:00:00] - 0.01 BHKW2|switch|on[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2->costs(temporal)[2020-01-01 06:00:00] - 0.01 BHKW2|switch|on[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2->costs(temporal)[2020-01-01 07:00:00] - 0.01 BHKW2|switch|on[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2->costs(temporal)[2020-01-01 08:00:00] - 0.01 BHKW2|switch|on[2020-01-01 08:00:00] = -0.0
+ "BHKW2|Piece_0|inside_piece": |-
+ Constraint `BHKW2|Piece_0|inside_piece`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 00:00:00] - 1 BHKW2|Piece_0|lambda0[2020-01-01 00:00:00] - 1 BHKW2|Piece_0|lambda1[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 01:00:00] - 1 BHKW2|Piece_0|lambda0[2020-01-01 01:00:00] - 1 BHKW2|Piece_0|lambda1[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 02:00:00] - 1 BHKW2|Piece_0|lambda0[2020-01-01 02:00:00] - 1 BHKW2|Piece_0|lambda1[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 03:00:00] - 1 BHKW2|Piece_0|lambda0[2020-01-01 03:00:00] - 1 BHKW2|Piece_0|lambda1[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 04:00:00] - 1 BHKW2|Piece_0|lambda0[2020-01-01 04:00:00] - 1 BHKW2|Piece_0|lambda1[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 05:00:00] - 1 BHKW2|Piece_0|lambda0[2020-01-01 05:00:00] - 1 BHKW2|Piece_0|lambda1[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 06:00:00] - 1 BHKW2|Piece_0|lambda0[2020-01-01 06:00:00] - 1 BHKW2|Piece_0|lambda1[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 07:00:00] - 1 BHKW2|Piece_0|lambda0[2020-01-01 07:00:00] - 1 BHKW2|Piece_0|lambda1[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 08:00:00] - 1 BHKW2|Piece_0|lambda0[2020-01-01 08:00:00] - 1 BHKW2|Piece_0|lambda1[2020-01-01 08:00:00] = -0.0
+ "BHKW2|Piece_1|inside_piece": |-
+ Constraint `BHKW2|Piece_1|inside_piece`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2|Piece_1|inside_piece[2020-01-01 00:00:00] - 1 BHKW2|Piece_1|lambda0[2020-01-01 00:00:00] - 1 BHKW2|Piece_1|lambda1[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2|Piece_1|inside_piece[2020-01-01 01:00:00] - 1 BHKW2|Piece_1|lambda0[2020-01-01 01:00:00] - 1 BHKW2|Piece_1|lambda1[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2|Piece_1|inside_piece[2020-01-01 02:00:00] - 1 BHKW2|Piece_1|lambda0[2020-01-01 02:00:00] - 1 BHKW2|Piece_1|lambda1[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2|Piece_1|inside_piece[2020-01-01 03:00:00] - 1 BHKW2|Piece_1|lambda0[2020-01-01 03:00:00] - 1 BHKW2|Piece_1|lambda1[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2|Piece_1|inside_piece[2020-01-01 04:00:00] - 1 BHKW2|Piece_1|lambda0[2020-01-01 04:00:00] - 1 BHKW2|Piece_1|lambda1[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2|Piece_1|inside_piece[2020-01-01 05:00:00] - 1 BHKW2|Piece_1|lambda0[2020-01-01 05:00:00] - 1 BHKW2|Piece_1|lambda1[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2|Piece_1|inside_piece[2020-01-01 06:00:00] - 1 BHKW2|Piece_1|lambda0[2020-01-01 06:00:00] - 1 BHKW2|Piece_1|lambda1[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2|Piece_1|inside_piece[2020-01-01 07:00:00] - 1 BHKW2|Piece_1|lambda0[2020-01-01 07:00:00] - 1 BHKW2|Piece_1|lambda1[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2|Piece_1|inside_piece[2020-01-01 08:00:00] - 1 BHKW2|Piece_1|lambda0[2020-01-01 08:00:00] - 1 BHKW2|Piece_1|lambda1[2020-01-01 08:00:00] = -0.0
+ "BHKW2|BHKW2(P_el)|flow_rate|lambda": |-
+ Constraint `BHKW2|BHKW2(P_el)|flow_rate|lambda`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 00:00:00] - 5 BHKW2|Piece_0|lambda0[2020-01-01 00:00:00] - 30 BHKW2|Piece_0|lambda1[2020-01-01 00:00:00] - 40 BHKW2|Piece_1|lambda0[2020-01-01 00:00:00] - 60 BHKW2|Piece_1|lambda1[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 01:00:00] - 5 BHKW2|Piece_0|lambda0[2020-01-01 01:00:00] - 30 BHKW2|Piece_0|lambda1[2020-01-01 01:00:00] - 40 BHKW2|Piece_1|lambda0[2020-01-01 01:00:00] - 60 BHKW2|Piece_1|lambda1[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 02:00:00] - 5 BHKW2|Piece_0|lambda0[2020-01-01 02:00:00] - 30 BHKW2|Piece_0|lambda1[2020-01-01 02:00:00] - 40 BHKW2|Piece_1|lambda0[2020-01-01 02:00:00] - 60 BHKW2|Piece_1|lambda1[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 03:00:00] - 5 BHKW2|Piece_0|lambda0[2020-01-01 03:00:00] - 30 BHKW2|Piece_0|lambda1[2020-01-01 03:00:00] - 40 BHKW2|Piece_1|lambda0[2020-01-01 03:00:00] - 60 BHKW2|Piece_1|lambda1[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 04:00:00] - 5 BHKW2|Piece_0|lambda0[2020-01-01 04:00:00] - 30 BHKW2|Piece_0|lambda1[2020-01-01 04:00:00] - 40 BHKW2|Piece_1|lambda0[2020-01-01 04:00:00] - 60 BHKW2|Piece_1|lambda1[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 05:00:00] - 5 BHKW2|Piece_0|lambda0[2020-01-01 05:00:00] - 30 BHKW2|Piece_0|lambda1[2020-01-01 05:00:00] - 40 BHKW2|Piece_1|lambda0[2020-01-01 05:00:00] - 60 BHKW2|Piece_1|lambda1[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 06:00:00] - 5 BHKW2|Piece_0|lambda0[2020-01-01 06:00:00] - 30 BHKW2|Piece_0|lambda1[2020-01-01 06:00:00] - 40 BHKW2|Piece_1|lambda0[2020-01-01 06:00:00] - 60 BHKW2|Piece_1|lambda1[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 07:00:00] - 5 BHKW2|Piece_0|lambda0[2020-01-01 07:00:00] - 30 BHKW2|Piece_0|lambda1[2020-01-01 07:00:00] - 40 BHKW2|Piece_1|lambda0[2020-01-01 07:00:00] - 60 BHKW2|Piece_1|lambda1[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 08:00:00] - 5 BHKW2|Piece_0|lambda0[2020-01-01 08:00:00] - 30 BHKW2|Piece_0|lambda1[2020-01-01 08:00:00] - 40 BHKW2|Piece_1|lambda0[2020-01-01 08:00:00] - 60 BHKW2|Piece_1|lambda1[2020-01-01 08:00:00] = -0.0
+ "BHKW2|BHKW2(P_el)|flow_rate|single_segment": |-
+ Constraint `BHKW2|BHKW2(P_el)|flow_rate|single_segment`
+ [time: 9]:
+ ------------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 00:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 00:00:00] - 1 BHKW2|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 01:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 01:00:00] - 1 BHKW2|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 02:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 02:00:00] - 1 BHKW2|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 03:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 03:00:00] - 1 BHKW2|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 04:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 04:00:00] - 1 BHKW2|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 05:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 05:00:00] - 1 BHKW2|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 06:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 06:00:00] - 1 BHKW2|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 07:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 07:00:00] - 1 BHKW2|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 08:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 08:00:00] - 1 BHKW2|on[2020-01-01 08:00:00] ≤ -0.0
+ "BHKW2|BHKW2(Q_th)|flow_rate|lambda": |-
+ Constraint `BHKW2|BHKW2(Q_th)|flow_rate|lambda`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 00:00:00] - 6 BHKW2|Piece_0|lambda0[2020-01-01 00:00:00] - 35 BHKW2|Piece_0|lambda1[2020-01-01 00:00:00] - 45 BHKW2|Piece_1|lambda0[2020-01-01 00:00:00] - 100 BHKW2|Piece_1|lambda1[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 01:00:00] - 6 BHKW2|Piece_0|lambda0[2020-01-01 01:00:00] - 35 BHKW2|Piece_0|lambda1[2020-01-01 01:00:00] - 45 BHKW2|Piece_1|lambda0[2020-01-01 01:00:00] - 100 BHKW2|Piece_1|lambda1[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 02:00:00] - 6 BHKW2|Piece_0|lambda0[2020-01-01 02:00:00] - 35 BHKW2|Piece_0|lambda1[2020-01-01 02:00:00] - 45 BHKW2|Piece_1|lambda0[2020-01-01 02:00:00] - 100 BHKW2|Piece_1|lambda1[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 03:00:00] - 6 BHKW2|Piece_0|lambda0[2020-01-01 03:00:00] - 35 BHKW2|Piece_0|lambda1[2020-01-01 03:00:00] - 45 BHKW2|Piece_1|lambda0[2020-01-01 03:00:00] - 100 BHKW2|Piece_1|lambda1[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 04:00:00] - 6 BHKW2|Piece_0|lambda0[2020-01-01 04:00:00] - 35 BHKW2|Piece_0|lambda1[2020-01-01 04:00:00] - 45 BHKW2|Piece_1|lambda0[2020-01-01 04:00:00] - 100 BHKW2|Piece_1|lambda1[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 05:00:00] - 6 BHKW2|Piece_0|lambda0[2020-01-01 05:00:00] - 35 BHKW2|Piece_0|lambda1[2020-01-01 05:00:00] - 45 BHKW2|Piece_1|lambda0[2020-01-01 05:00:00] - 100 BHKW2|Piece_1|lambda1[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 06:00:00] - 6 BHKW2|Piece_0|lambda0[2020-01-01 06:00:00] - 35 BHKW2|Piece_0|lambda1[2020-01-01 06:00:00] - 45 BHKW2|Piece_1|lambda0[2020-01-01 06:00:00] - 100 BHKW2|Piece_1|lambda1[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 07:00:00] - 6 BHKW2|Piece_0|lambda0[2020-01-01 07:00:00] - 35 BHKW2|Piece_0|lambda1[2020-01-01 07:00:00] - 45 BHKW2|Piece_1|lambda0[2020-01-01 07:00:00] - 100 BHKW2|Piece_1|lambda1[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 08:00:00] - 6 BHKW2|Piece_0|lambda0[2020-01-01 08:00:00] - 35 BHKW2|Piece_0|lambda1[2020-01-01 08:00:00] - 45 BHKW2|Piece_1|lambda0[2020-01-01 08:00:00] - 100 BHKW2|Piece_1|lambda1[2020-01-01 08:00:00] = -0.0
+ "BHKW2|BHKW2(Q_th)|flow_rate|single_segment": |-
+ Constraint `BHKW2|BHKW2(Q_th)|flow_rate|single_segment`
+ [time: 9]:
+ ------------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 00:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 00:00:00] - 1 BHKW2|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 01:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 01:00:00] - 1 BHKW2|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 02:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 02:00:00] - 1 BHKW2|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 03:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 03:00:00] - 1 BHKW2|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 04:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 04:00:00] - 1 BHKW2|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 05:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 05:00:00] - 1 BHKW2|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 06:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 06:00:00] - 1 BHKW2|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 07:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 07:00:00] - 1 BHKW2|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 08:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 08:00:00] - 1 BHKW2|on[2020-01-01 08:00:00] ≤ -0.0
+ "BHKW2|BHKW2(Q_fu)|flow_rate|lambda": |-
+ Constraint `BHKW2|BHKW2(Q_fu)|flow_rate|lambda`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:00:00] - 12 BHKW2|Piece_0|lambda0[2020-01-01 00:00:00] - 70 BHKW2|Piece_0|lambda1[2020-01-01 00:00:00] - 90 BHKW2|Piece_1|lambda0[2020-01-01 00:00:00] - 200 BHKW2|Piece_1|lambda1[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 01:00:00] - 12 BHKW2|Piece_0|lambda0[2020-01-01 01:00:00] - 70 BHKW2|Piece_0|lambda1[2020-01-01 01:00:00] - 90 BHKW2|Piece_1|lambda0[2020-01-01 01:00:00] - 200 BHKW2|Piece_1|lambda1[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 02:00:00] - 12 BHKW2|Piece_0|lambda0[2020-01-01 02:00:00] - 70 BHKW2|Piece_0|lambda1[2020-01-01 02:00:00] - 90 BHKW2|Piece_1|lambda0[2020-01-01 02:00:00] - 200 BHKW2|Piece_1|lambda1[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 03:00:00] - 12 BHKW2|Piece_0|lambda0[2020-01-01 03:00:00] - 70 BHKW2|Piece_0|lambda1[2020-01-01 03:00:00] - 90 BHKW2|Piece_1|lambda0[2020-01-01 03:00:00] - 200 BHKW2|Piece_1|lambda1[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 04:00:00] - 12 BHKW2|Piece_0|lambda0[2020-01-01 04:00:00] - 70 BHKW2|Piece_0|lambda1[2020-01-01 04:00:00] - 90 BHKW2|Piece_1|lambda0[2020-01-01 04:00:00] - 200 BHKW2|Piece_1|lambda1[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 05:00:00] - 12 BHKW2|Piece_0|lambda0[2020-01-01 05:00:00] - 70 BHKW2|Piece_0|lambda1[2020-01-01 05:00:00] - 90 BHKW2|Piece_1|lambda0[2020-01-01 05:00:00] - 200 BHKW2|Piece_1|lambda1[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 06:00:00] - 12 BHKW2|Piece_0|lambda0[2020-01-01 06:00:00] - 70 BHKW2|Piece_0|lambda1[2020-01-01 06:00:00] - 90 BHKW2|Piece_1|lambda0[2020-01-01 06:00:00] - 200 BHKW2|Piece_1|lambda1[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 07:00:00] - 12 BHKW2|Piece_0|lambda0[2020-01-01 07:00:00] - 70 BHKW2|Piece_0|lambda1[2020-01-01 07:00:00] - 90 BHKW2|Piece_1|lambda0[2020-01-01 07:00:00] - 200 BHKW2|Piece_1|lambda1[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 08:00:00] - 12 BHKW2|Piece_0|lambda0[2020-01-01 08:00:00] - 70 BHKW2|Piece_0|lambda1[2020-01-01 08:00:00] - 90 BHKW2|Piece_1|lambda0[2020-01-01 08:00:00] - 200 BHKW2|Piece_1|lambda1[2020-01-01 08:00:00] = -0.0
+ "BHKW2|BHKW2(Q_fu)|flow_rate|single_segment": |-
+ Constraint `BHKW2|BHKW2(Q_fu)|flow_rate|single_segment`
+ [time: 9]:
+ ------------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 00:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 00:00:00] - 1 BHKW2|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 01:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 01:00:00] - 1 BHKW2|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 02:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 02:00:00] - 1 BHKW2|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 03:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 03:00:00] - 1 BHKW2|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 04:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 04:00:00] - 1 BHKW2|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 05:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 05:00:00] - 1 BHKW2|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 06:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 06:00:00] - 1 BHKW2|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 07:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 07:00:00] - 1 BHKW2|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2|Piece_0|inside_piece[2020-01-01 08:00:00] + 1 BHKW2|Piece_1|inside_piece[2020-01-01 08:00:00] - 1 BHKW2|on[2020-01-01 08:00:00] ≤ -0.0
+ "Strom|balance": |-
+ Constraint `Strom|balance`
+ [time: 9]:
+ -------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 00:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] + 1 Strom|excess_input[2020-01-01 00:00:00] - 1 Strom|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 01:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] + 1 Strom|excess_input[2020-01-01 01:00:00] - 1 Strom|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 02:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00] + 1 Strom|excess_input[2020-01-01 02:00:00] - 1 Strom|excess_output[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 03:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 03:00:00] + 1 Strom|excess_input[2020-01-01 03:00:00] - 1 Strom|excess_output[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 04:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 04:00:00] + 1 Strom|excess_input[2020-01-01 04:00:00] - 1 Strom|excess_output[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 05:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 05:00:00] + 1 Strom|excess_input[2020-01-01 05:00:00] - 1 Strom|excess_output[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 06:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] + 1 Strom|excess_input[2020-01-01 06:00:00] - 1 Strom|excess_output[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 07:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] + 1 Strom|excess_input[2020-01-01 07:00:00] - 1 Strom|excess_output[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 08:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] + 1 Strom|excess_input[2020-01-01 08:00:00] - 1 Strom|excess_output[2020-01-01 08:00:00] = -0.0
+ "Strom->Penalty": |-
+ Constraint `Strom->Penalty`
+ ---------------------------
+ +1 Strom->Penalty - 1e+05 Strom|excess_input[2020-01-01 00:00:00] - 1e+05 Strom|excess_input[2020-01-01 01:00:00]... -1e+05 Strom|excess_output[2020-01-01 06:00:00] - 1e+05 Strom|excess_output[2020-01-01 07:00:00] - 1e+05 Strom|excess_output[2020-01-01 08:00:00] = -0.0
+ "Fernwärme|balance": |-
+ Constraint `Fernwärme|balance`
+ [time: 9]:
+ -----------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 00:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] + 1 Fernwärme|excess_input[2020-01-01 00:00:00] - 1 Fernwärme|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 01:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] + 1 Fernwärme|excess_input[2020-01-01 01:00:00] - 1 Fernwärme|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 02:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] + 1 Fernwärme|excess_input[2020-01-01 02:00:00] - 1 Fernwärme|excess_output[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 03:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] + 1 Fernwärme|excess_input[2020-01-01 03:00:00] - 1 Fernwärme|excess_output[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 04:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] + 1 Fernwärme|excess_input[2020-01-01 04:00:00] - 1 Fernwärme|excess_output[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 05:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] + 1 Fernwärme|excess_input[2020-01-01 05:00:00] - 1 Fernwärme|excess_output[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 06:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] + 1 Fernwärme|excess_input[2020-01-01 06:00:00] - 1 Fernwärme|excess_output[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 07:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] + 1 Fernwärme|excess_input[2020-01-01 07:00:00] - 1 Fernwärme|excess_output[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 08:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] + 1 Fernwärme|excess_input[2020-01-01 08:00:00] - 1 Fernwärme|excess_output[2020-01-01 08:00:00] = -0.0
+ "Fernwärme->Penalty": |-
+ Constraint `Fernwärme->Penalty`
+ -------------------------------
+ +1 Fernwärme->Penalty - 1e+05 Fernwärme|excess_input[2020-01-01 00:00:00] - 1e+05 Fernwärme|excess_input[2020-01-01 01:00:00]... -1e+05 Fernwärme|excess_output[2020-01-01 06:00:00] - 1e+05 Fernwärme|excess_output[2020-01-01 07:00:00] - 1e+05 Fernwärme|excess_output[2020-01-01 08:00:00] = -0.0
+ "Gas|balance": |-
+ Constraint `Gas|balance`
+ [time: 9]:
+ -----------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:00:00] + 1 Gas|excess_input[2020-01-01 00:00:00] - 1 Gas|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 01:00:00] + 1 Gas|excess_input[2020-01-01 01:00:00] - 1 Gas|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 02:00:00] + 1 Gas|excess_input[2020-01-01 02:00:00] - 1 Gas|excess_output[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 03:00:00] + 1 Gas|excess_input[2020-01-01 03:00:00] - 1 Gas|excess_output[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 04:00:00] + 1 Gas|excess_input[2020-01-01 04:00:00] - 1 Gas|excess_output[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 05:00:00] + 1 Gas|excess_input[2020-01-01 05:00:00] - 1 Gas|excess_output[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 06:00:00] + 1 Gas|excess_input[2020-01-01 06:00:00] - 1 Gas|excess_output[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 07:00:00] + 1 Gas|excess_input[2020-01-01 07:00:00] - 1 Gas|excess_output[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 08:00:00] + 1 Gas|excess_input[2020-01-01 08:00:00] - 1 Gas|excess_output[2020-01-01 08:00:00] = -0.0
+ "Gas->Penalty": |-
+ Constraint `Gas->Penalty`
+ -------------------------
+ +1 Gas->Penalty - 1e+05 Gas|excess_input[2020-01-01 00:00:00] - 1e+05 Gas|excess_input[2020-01-01 01:00:00]... -1e+05 Gas|excess_output[2020-01-01 06:00:00] - 1e+05 Gas|excess_output[2020-01-01 07:00:00] - 1e+05 Gas|excess_output[2020-01-01 08:00:00] = -0.0
+binaries:
+ - "Kessel(Q_fu)|on"
+ - "Kessel(Q_th)|on"
+ - "Kessel(Q_th)|off"
+ - "Kessel(Q_th)|switch|on"
+ - "Kessel(Q_th)|switch|off"
+ - "Kessel|on"
+ - "Speicher(Q_th_load)|on"
+ - "Speicher(Q_th_unload)|on"
+ - "Speicher|Piece_0|inside_piece"
+ - "Speicher|Piece_1|inside_piece"
+ - "BHKW2(Q_fu)|on"
+ - "BHKW2(P_el)|on"
+ - "BHKW2(Q_th)|on"
+ - "BHKW2|on"
+ - "BHKW2|switch|on"
+ - "BHKW2|switch|off"
+ - "BHKW2|Piece_0|inside_piece"
+ - "BHKW2|Piece_1|inside_piece"
+integers: []
+continuous:
+ - costs(periodic)
+ - costs(temporal)
+ - "costs(temporal)|per_timestep"
+ - costs
+ - CO2(periodic)
+ - CO2(temporal)
+ - "CO2(temporal)|per_timestep"
+ - CO2
+ - PE(periodic)
+ - PE(temporal)
+ - "PE(temporal)|per_timestep"
+ - PE
+ - Penalty
+ - "CO2(temporal)->costs(temporal)"
+ - "Kessel(Q_fu)|flow_rate"
+ - "Kessel(Q_fu)|on_hours_total"
+ - "Kessel(Q_fu)|total_flow_hours"
+ - "Kessel(Q_th)|flow_rate"
+ - "Kessel(Q_th)|size"
+ - "Kessel(Q_th)->costs(periodic)"
+ - "Kessel(Q_th)->PE(periodic)"
+ - "Kessel(Q_th)|on_hours_total"
+ - "Kessel(Q_th)|switch|count"
+ - "Kessel(Q_th)|consecutive_on_hours"
+ - "Kessel(Q_th)|consecutive_off_hours"
+ - "Kessel(Q_th)->costs(temporal)"
+ - "Kessel(Q_th)|total_flow_hours"
+ - "Kessel|on_hours_total"
+ - "Kessel->costs(temporal)"
+ - "Kessel->CO2(temporal)"
+ - "Wärmelast(Q_th_Last)|flow_rate"
+ - "Wärmelast(Q_th_Last)|total_flow_hours"
+ - "Gastarif(Q_Gas)|flow_rate"
+ - "Gastarif(Q_Gas)|total_flow_hours"
+ - "Gastarif(Q_Gas)->costs(temporal)"
+ - "Gastarif(Q_Gas)->CO2(temporal)"
+ - "Einspeisung(P_el)|flow_rate"
+ - "Einspeisung(P_el)|total_flow_hours"
+ - "Einspeisung(P_el)->costs(temporal)"
+ - "Speicher(Q_th_load)|flow_rate"
+ - "Speicher(Q_th_load)|on_hours_total"
+ - "Speicher(Q_th_load)|total_flow_hours"
+ - "Speicher(Q_th_unload)|flow_rate"
+ - "Speicher(Q_th_unload)|on_hours_total"
+ - "Speicher(Q_th_unload)|total_flow_hours"
+ - "Speicher|charge_state"
+ - "Speicher|netto_discharge"
+ - "Speicher|size"
+ - "Speicher|PiecewiseEffects|costs"
+ - "Speicher|PiecewiseEffects|PE"
+ - "Speicher|Piece_0|lambda0"
+ - "Speicher|Piece_0|lambda1"
+ - "Speicher|Piece_1|lambda0"
+ - "Speicher|Piece_1|lambda1"
+ - "Speicher->costs(periodic)"
+ - "Speicher->PE(periodic)"
+ - "BHKW2(Q_fu)|flow_rate"
+ - "BHKW2(Q_fu)|on_hours_total"
+ - "BHKW2(Q_fu)|total_flow_hours"
+ - "BHKW2(P_el)|flow_rate"
+ - "BHKW2(P_el)|on_hours_total"
+ - "BHKW2(P_el)|total_flow_hours"
+ - "BHKW2(Q_th)|flow_rate"
+ - "BHKW2(Q_th)|on_hours_total"
+ - "BHKW2(Q_th)|total_flow_hours"
+ - "BHKW2|on_hours_total"
+ - "BHKW2->costs(temporal)"
+ - "BHKW2|Piece_0|lambda0"
+ - "BHKW2|Piece_0|lambda1"
+ - "BHKW2|Piece_1|lambda0"
+ - "BHKW2|Piece_1|lambda1"
+ - "Strom|excess_input"
+ - "Strom|excess_output"
+ - "Strom->Penalty"
+ - "Fernwärme|excess_input"
+ - "Fernwärme|excess_output"
+ - "Fernwärme->Penalty"
+ - "Gas|excess_input"
+ - "Gas|excess_output"
+ - "Gas->Penalty"
+infeasible_constraints: ''
diff --git a/tests/ressources/v4-api/02_complex--solution.nc4 b/tests/ressources/v4-api/02_complex--solution.nc4
new file mode 100644
index 000000000..7c9068c8b
Binary files /dev/null and b/tests/ressources/v4-api/02_complex--solution.nc4 differ
diff --git a/tests/ressources/v4-api/02_complex--summary.yaml b/tests/ressources/v4-api/02_complex--summary.yaml
new file mode 100644
index 000000000..bcb444f1a
--- /dev/null
+++ b/tests/ressources/v4-api/02_complex--summary.yaml
@@ -0,0 +1,56 @@
+Name: 02_complex
+Number of timesteps: 9
+Calculation Type: FullCalculation
+Constraints: 589
+Variables: 507
+Main Results:
+ Objective: -10711.53
+ Penalty: -0.0
+ Effects:
+ CO2 [kg]:
+ temporal: 1278.26
+ periodic: -0.0
+ total: 1278.26
+ costs [€]:
+ temporal: -12666.27
+ periodic: 1954.75
+ total: -10711.53
+ PE [kWh_PE]:
+ temporal: -0.0
+ periodic: 152.92
+ total: 152.92
+ Invest-Decisions:
+ Invested:
+ Kessel(Q_th): 50.0
+ Speicher: 52.92
+ Not invested: {}
+ Buses with excess: []
+Durations:
+ modeling: 1.1
+ solving: 0.86
+ saving: 0.0
+Config:
+ config_name: flixopt
+ logging:
+ level: INFO
+ file: null
+ console: false
+ max_file_size: 10485760
+ backup_count: 5
+ verbose_tracebacks: false
+ modeling:
+ big: 10000000
+ epsilon: 1.0e-05
+ big_binary_bound: 100000
+ solving:
+ mip_gap: 0.01
+ time_limit_seconds: 300
+ log_to_console: false
+ log_main_results: false
+ plotting:
+ default_show: false
+ default_engine: plotly
+ default_dpi: 300
+ default_facet_cols: 3
+ default_sequential_colorscale: turbo
+ default_qualitative_colorscale: plotly
diff --git a/tests/ressources/v4-api/04_scenarios--flow_system.nc4 b/tests/ressources/v4-api/04_scenarios--flow_system.nc4
new file mode 100644
index 000000000..3541faa1d
Binary files /dev/null and b/tests/ressources/v4-api/04_scenarios--flow_system.nc4 differ
diff --git a/tests/ressources/v4-api/04_scenarios--model_documentation.yaml b/tests/ressources/v4-api/04_scenarios--model_documentation.yaml
new file mode 100644
index 000000000..d646a5587
--- /dev/null
+++ b/tests/ressources/v4-api/04_scenarios--model_documentation.yaml
@@ -0,0 +1,339 @@
+objective: |-
+ Objective:
+ ----------
+ LinearExpression: +0.5 costs[low] + 0.5 costs[high] + 1 Penalty
+ Sense: min
+ Value: 10.666666666666668
+termination_condition: optimal
+status: ok
+nvars: 117
+nvarsbin: 0
+nvarscont: 117
+ncons: 67
+variables:
+ costs(periodic): |-
+ Variable (scenario: 2)
+ ----------------------
+ [low]: costs(periodic)[low] ∈ [-inf, inf]
+ [high]: costs(periodic)[high] ∈ [-inf, inf]
+ costs(temporal): |-
+ Variable (scenario: 2)
+ ----------------------
+ [low]: costs(temporal)[low] ∈ [-inf, inf]
+ [high]: costs(temporal)[high] ∈ [-inf, inf]
+ "costs(temporal)|per_timestep": |-
+ Variable (time: 5, scenario: 2)
+ -------------------------------
+ [2020-01-01 00:00:00, low]: costs(temporal)|per_timestep[2020-01-01 00:00:00, low] ∈ [-inf, inf]
+ [2020-01-01 00:00:00, high]: costs(temporal)|per_timestep[2020-01-01 00:00:00, high] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, low]: costs(temporal)|per_timestep[2020-01-01 01:00:00, low] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, high]: costs(temporal)|per_timestep[2020-01-01 01:00:00, high] ∈ [-inf, inf]
+ [2020-01-01 02:00:00, low]: costs(temporal)|per_timestep[2020-01-01 02:00:00, low] ∈ [-inf, inf]
+ [2020-01-01 02:00:00, high]: costs(temporal)|per_timestep[2020-01-01 02:00:00, high] ∈ [-inf, inf]
+ [2020-01-01 03:00:00, low]: costs(temporal)|per_timestep[2020-01-01 03:00:00, low] ∈ [-inf, inf]
+ [2020-01-01 03:00:00, high]: costs(temporal)|per_timestep[2020-01-01 03:00:00, high] ∈ [-inf, inf]
+ [2020-01-01 04:00:00, low]: costs(temporal)|per_timestep[2020-01-01 04:00:00, low] ∈ [-inf, inf]
+ [2020-01-01 04:00:00, high]: costs(temporal)|per_timestep[2020-01-01 04:00:00, high] ∈ [-inf, inf]
+ costs: |-
+ Variable (scenario: 2)
+ ----------------------
+ [low]: costs[low] ∈ [-inf, inf]
+ [high]: costs[high] ∈ [-inf, inf]
+ Penalty: |-
+ Variable
+ --------
+ Penalty ∈ [-inf, inf]
+ "Boiler(Q_fu)|flow_rate": |-
+ Variable (time: 5, scenario: 2)
+ -------------------------------
+ [2020-01-01 00:00:00, low]: Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, low] ∈ [0, 1e+07]
+ [2020-01-01 00:00:00, high]: Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, high] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00, low]: Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, low] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00, high]: Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, high] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00, low]: Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00, low] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00, high]: Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00, high] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00, low]: Boiler(Q_fu)|flow_rate[2020-01-01 03:00:00, low] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00, high]: Boiler(Q_fu)|flow_rate[2020-01-01 03:00:00, high] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00, low]: Boiler(Q_fu)|flow_rate[2020-01-01 04:00:00, low] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00, high]: Boiler(Q_fu)|flow_rate[2020-01-01 04:00:00, high] ∈ [0, 1e+07]
+ "Boiler(Q_fu)|total_flow_hours": |-
+ Variable (scenario: 2)
+ ----------------------
+ [low]: Boiler(Q_fu)|total_flow_hours[low] ∈ [0, inf]
+ [high]: Boiler(Q_fu)|total_flow_hours[high] ∈ [0, inf]
+ "Boiler(Q_th)|flow_rate": |-
+ Variable (time: 5, scenario: 2)
+ -------------------------------
+ [2020-01-01 00:00:00, low]: Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, low] ∈ [0, 100]
+ [2020-01-01 00:00:00, high]: Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, high] ∈ [0, 100]
+ [2020-01-01 01:00:00, low]: Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, low] ∈ [0, 100]
+ [2020-01-01 01:00:00, high]: Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, high] ∈ [0, 100]
+ [2020-01-01 02:00:00, low]: Boiler(Q_th)|flow_rate[2020-01-01 02:00:00, low] ∈ [0, 100]
+ [2020-01-01 02:00:00, high]: Boiler(Q_th)|flow_rate[2020-01-01 02:00:00, high] ∈ [0, 100]
+ [2020-01-01 03:00:00, low]: Boiler(Q_th)|flow_rate[2020-01-01 03:00:00, low] ∈ [0, 100]
+ [2020-01-01 03:00:00, high]: Boiler(Q_th)|flow_rate[2020-01-01 03:00:00, high] ∈ [0, 100]
+ [2020-01-01 04:00:00, low]: Boiler(Q_th)|flow_rate[2020-01-01 04:00:00, low] ∈ [0, 100]
+ [2020-01-01 04:00:00, high]: Boiler(Q_th)|flow_rate[2020-01-01 04:00:00, high] ∈ [0, 100]
+ "Boiler(Q_th)|total_flow_hours": |-
+ Variable (scenario: 2)
+ ----------------------
+ [low]: Boiler(Q_th)|total_flow_hours[low] ∈ [0, inf]
+ [high]: Boiler(Q_th)|total_flow_hours[high] ∈ [0, inf]
+ "HeatLoad(Q_th)|flow_rate": |-
+ Variable (time: 5, scenario: 2)
+ -------------------------------
+ [2020-01-01 00:00:00, low]: HeatLoad(Q_th)|flow_rate[2020-01-01 00:00:00, low] ∈ [30, 30]
+ [2020-01-01 00:00:00, high]: HeatLoad(Q_th)|flow_rate[2020-01-01 00:00:00, high] ∈ [50, 50]
+ [2020-01-01 01:00:00, low]: HeatLoad(Q_th)|flow_rate[2020-01-01 01:00:00, low] ∈ [40, 40]
+ [2020-01-01 01:00:00, high]: HeatLoad(Q_th)|flow_rate[2020-01-01 01:00:00, high] ∈ [60, 60]
+ [2020-01-01 02:00:00, low]: HeatLoad(Q_th)|flow_rate[2020-01-01 02:00:00, low] ∈ [50, 50]
+ [2020-01-01 02:00:00, high]: HeatLoad(Q_th)|flow_rate[2020-01-01 02:00:00, high] ∈ [70, 70]
+ [2020-01-01 03:00:00, low]: HeatLoad(Q_th)|flow_rate[2020-01-01 03:00:00, low] ∈ [40, 40]
+ [2020-01-01 03:00:00, high]: HeatLoad(Q_th)|flow_rate[2020-01-01 03:00:00, high] ∈ [60, 60]
+ [2020-01-01 04:00:00, low]: HeatLoad(Q_th)|flow_rate[2020-01-01 04:00:00, low] ∈ [30, 30]
+ [2020-01-01 04:00:00, high]: HeatLoad(Q_th)|flow_rate[2020-01-01 04:00:00, high] ∈ [50, 50]
+ "HeatLoad(Q_th)|total_flow_hours": |-
+ Variable (scenario: 2)
+ ----------------------
+ [low]: HeatLoad(Q_th)|total_flow_hours[low] ∈ [0, inf]
+ [high]: HeatLoad(Q_th)|total_flow_hours[high] ∈ [0, inf]
+ "GasSource(Q_Gas)|flow_rate": |-
+ Variable (time: 5, scenario: 2)
+ -------------------------------
+ [2020-01-01 00:00:00, low]: GasSource(Q_Gas)|flow_rate[2020-01-01 00:00:00, low] ∈ [0, 1000]
+ [2020-01-01 00:00:00, high]: GasSource(Q_Gas)|flow_rate[2020-01-01 00:00:00, high] ∈ [0, 1000]
+ [2020-01-01 01:00:00, low]: GasSource(Q_Gas)|flow_rate[2020-01-01 01:00:00, low] ∈ [0, 1000]
+ [2020-01-01 01:00:00, high]: GasSource(Q_Gas)|flow_rate[2020-01-01 01:00:00, high] ∈ [0, 1000]
+ [2020-01-01 02:00:00, low]: GasSource(Q_Gas)|flow_rate[2020-01-01 02:00:00, low] ∈ [0, 1000]
+ [2020-01-01 02:00:00, high]: GasSource(Q_Gas)|flow_rate[2020-01-01 02:00:00, high] ∈ [0, 1000]
+ [2020-01-01 03:00:00, low]: GasSource(Q_Gas)|flow_rate[2020-01-01 03:00:00, low] ∈ [0, 1000]
+ [2020-01-01 03:00:00, high]: GasSource(Q_Gas)|flow_rate[2020-01-01 03:00:00, high] ∈ [0, 1000]
+ [2020-01-01 04:00:00, low]: GasSource(Q_Gas)|flow_rate[2020-01-01 04:00:00, low] ∈ [0, 1000]
+ [2020-01-01 04:00:00, high]: GasSource(Q_Gas)|flow_rate[2020-01-01 04:00:00, high] ∈ [0, 1000]
+ "GasSource(Q_Gas)|total_flow_hours": |-
+ Variable (scenario: 2)
+ ----------------------
+ [low]: GasSource(Q_Gas)|total_flow_hours[low] ∈ [0, inf]
+ [high]: GasSource(Q_Gas)|total_flow_hours[high] ∈ [0, inf]
+ "GasSource(Q_Gas)->costs(temporal)": |-
+ Variable (time: 5, scenario: 2)
+ -------------------------------
+ [2020-01-01 00:00:00, low]: GasSource(Q_Gas)->costs(temporal)[2020-01-01 00:00:00, low] ∈ [-inf, inf]
+ [2020-01-01 00:00:00, high]: GasSource(Q_Gas)->costs(temporal)[2020-01-01 00:00:00, high] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, low]: GasSource(Q_Gas)->costs(temporal)[2020-01-01 01:00:00, low] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, high]: GasSource(Q_Gas)->costs(temporal)[2020-01-01 01:00:00, high] ∈ [-inf, inf]
+ [2020-01-01 02:00:00, low]: GasSource(Q_Gas)->costs(temporal)[2020-01-01 02:00:00, low] ∈ [-inf, inf]
+ [2020-01-01 02:00:00, high]: GasSource(Q_Gas)->costs(temporal)[2020-01-01 02:00:00, high] ∈ [-inf, inf]
+ [2020-01-01 03:00:00, low]: GasSource(Q_Gas)->costs(temporal)[2020-01-01 03:00:00, low] ∈ [-inf, inf]
+ [2020-01-01 03:00:00, high]: GasSource(Q_Gas)->costs(temporal)[2020-01-01 03:00:00, high] ∈ [-inf, inf]
+ [2020-01-01 04:00:00, low]: GasSource(Q_Gas)->costs(temporal)[2020-01-01 04:00:00, low] ∈ [-inf, inf]
+ [2020-01-01 04:00:00, high]: GasSource(Q_Gas)->costs(temporal)[2020-01-01 04:00:00, high] ∈ [-inf, inf]
+ "Heat|excess_input": |-
+ Variable (time: 5, scenario: 2)
+ -------------------------------
+ [2020-01-01 00:00:00, low]: Heat|excess_input[2020-01-01 00:00:00, low] ∈ [0, inf]
+ [2020-01-01 00:00:00, high]: Heat|excess_input[2020-01-01 00:00:00, high] ∈ [0, inf]
+ [2020-01-01 01:00:00, low]: Heat|excess_input[2020-01-01 01:00:00, low] ∈ [0, inf]
+ [2020-01-01 01:00:00, high]: Heat|excess_input[2020-01-01 01:00:00, high] ∈ [0, inf]
+ [2020-01-01 02:00:00, low]: Heat|excess_input[2020-01-01 02:00:00, low] ∈ [0, inf]
+ [2020-01-01 02:00:00, high]: Heat|excess_input[2020-01-01 02:00:00, high] ∈ [0, inf]
+ [2020-01-01 03:00:00, low]: Heat|excess_input[2020-01-01 03:00:00, low] ∈ [0, inf]
+ [2020-01-01 03:00:00, high]: Heat|excess_input[2020-01-01 03:00:00, high] ∈ [0, inf]
+ [2020-01-01 04:00:00, low]: Heat|excess_input[2020-01-01 04:00:00, low] ∈ [0, inf]
+ [2020-01-01 04:00:00, high]: Heat|excess_input[2020-01-01 04:00:00, high] ∈ [0, inf]
+ "Heat|excess_output": |-
+ Variable (time: 5, scenario: 2)
+ -------------------------------
+ [2020-01-01 00:00:00, low]: Heat|excess_output[2020-01-01 00:00:00, low] ∈ [0, inf]
+ [2020-01-01 00:00:00, high]: Heat|excess_output[2020-01-01 00:00:00, high] ∈ [0, inf]
+ [2020-01-01 01:00:00, low]: Heat|excess_output[2020-01-01 01:00:00, low] ∈ [0, inf]
+ [2020-01-01 01:00:00, high]: Heat|excess_output[2020-01-01 01:00:00, high] ∈ [0, inf]
+ [2020-01-01 02:00:00, low]: Heat|excess_output[2020-01-01 02:00:00, low] ∈ [0, inf]
+ [2020-01-01 02:00:00, high]: Heat|excess_output[2020-01-01 02:00:00, high] ∈ [0, inf]
+ [2020-01-01 03:00:00, low]: Heat|excess_output[2020-01-01 03:00:00, low] ∈ [0, inf]
+ [2020-01-01 03:00:00, high]: Heat|excess_output[2020-01-01 03:00:00, high] ∈ [0, inf]
+ [2020-01-01 04:00:00, low]: Heat|excess_output[2020-01-01 04:00:00, low] ∈ [0, inf]
+ [2020-01-01 04:00:00, high]: Heat|excess_output[2020-01-01 04:00:00, high] ∈ [0, inf]
+ "Heat->Penalty": |-
+ Variable
+ --------
+ Heat->Penalty ∈ [-inf, inf]
+ "Gas|excess_input": |-
+ Variable (time: 5, scenario: 2)
+ -------------------------------
+ [2020-01-01 00:00:00, low]: Gas|excess_input[2020-01-01 00:00:00, low] ∈ [0, inf]
+ [2020-01-01 00:00:00, high]: Gas|excess_input[2020-01-01 00:00:00, high] ∈ [0, inf]
+ [2020-01-01 01:00:00, low]: Gas|excess_input[2020-01-01 01:00:00, low] ∈ [0, inf]
+ [2020-01-01 01:00:00, high]: Gas|excess_input[2020-01-01 01:00:00, high] ∈ [0, inf]
+ [2020-01-01 02:00:00, low]: Gas|excess_input[2020-01-01 02:00:00, low] ∈ [0, inf]
+ [2020-01-01 02:00:00, high]: Gas|excess_input[2020-01-01 02:00:00, high] ∈ [0, inf]
+ [2020-01-01 03:00:00, low]: Gas|excess_input[2020-01-01 03:00:00, low] ∈ [0, inf]
+ [2020-01-01 03:00:00, high]: Gas|excess_input[2020-01-01 03:00:00, high] ∈ [0, inf]
+ [2020-01-01 04:00:00, low]: Gas|excess_input[2020-01-01 04:00:00, low] ∈ [0, inf]
+ [2020-01-01 04:00:00, high]: Gas|excess_input[2020-01-01 04:00:00, high] ∈ [0, inf]
+ "Gas|excess_output": |-
+ Variable (time: 5, scenario: 2)
+ -------------------------------
+ [2020-01-01 00:00:00, low]: Gas|excess_output[2020-01-01 00:00:00, low] ∈ [0, inf]
+ [2020-01-01 00:00:00, high]: Gas|excess_output[2020-01-01 00:00:00, high] ∈ [0, inf]
+ [2020-01-01 01:00:00, low]: Gas|excess_output[2020-01-01 01:00:00, low] ∈ [0, inf]
+ [2020-01-01 01:00:00, high]: Gas|excess_output[2020-01-01 01:00:00, high] ∈ [0, inf]
+ [2020-01-01 02:00:00, low]: Gas|excess_output[2020-01-01 02:00:00, low] ∈ [0, inf]
+ [2020-01-01 02:00:00, high]: Gas|excess_output[2020-01-01 02:00:00, high] ∈ [0, inf]
+ [2020-01-01 03:00:00, low]: Gas|excess_output[2020-01-01 03:00:00, low] ∈ [0, inf]
+ [2020-01-01 03:00:00, high]: Gas|excess_output[2020-01-01 03:00:00, high] ∈ [0, inf]
+ [2020-01-01 04:00:00, low]: Gas|excess_output[2020-01-01 04:00:00, low] ∈ [0, inf]
+ [2020-01-01 04:00:00, high]: Gas|excess_output[2020-01-01 04:00:00, high] ∈ [0, inf]
+ "Gas->Penalty": |-
+ Variable
+ --------
+ Gas->Penalty ∈ [-inf, inf]
+constraints:
+ costs(periodic): |-
+ Constraint `costs(periodic)`
+ [scenario: 2]:
+ -------------------------------------------
+ [low]: +1 costs(periodic)[low] = -0.0
+ [high]: +1 costs(periodic)[high] = -0.0
+ costs(temporal): |-
+ Constraint `costs(temporal)`
+ [scenario: 2]:
+ -------------------------------------------
+ [low]: +1 costs(temporal)[low] - 1 costs(temporal)|per_timestep[2020-01-01 00:00:00, low] - 1 costs(temporal)|per_timestep[2020-01-01 01:00:00, low] - 1 costs(temporal)|per_timestep[2020-01-01 02:00:00, low] - 1 costs(temporal)|per_timestep[2020-01-01 03:00:00, low] - 1 costs(temporal)|per_timestep[2020-01-01 04:00:00, low] = -0.0
+ [high]: +1 costs(temporal)[high] - 1 costs(temporal)|per_timestep[2020-01-01 00:00:00, high] - 1 costs(temporal)|per_timestep[2020-01-01 01:00:00, high] - 1 costs(temporal)|per_timestep[2020-01-01 02:00:00, high] - 1 costs(temporal)|per_timestep[2020-01-01 03:00:00, high] - 1 costs(temporal)|per_timestep[2020-01-01 04:00:00, high] = -0.0
+ "costs(temporal)|per_timestep": |-
+ Constraint `costs(temporal)|per_timestep`
+ [time: 5, scenario: 2]:
+ -----------------------------------------------------------------
+ [2020-01-01 00:00:00, low]: +1 costs(temporal)|per_timestep[2020-01-01 00:00:00, low] - 1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 00:00:00, low] = -0.0
+ [2020-01-01 00:00:00, high]: +1 costs(temporal)|per_timestep[2020-01-01 00:00:00, high] - 1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 00:00:00, high] = -0.0
+ [2020-01-01 01:00:00, low]: +1 costs(temporal)|per_timestep[2020-01-01 01:00:00, low] - 1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 01:00:00, low] = -0.0
+ [2020-01-01 01:00:00, high]: +1 costs(temporal)|per_timestep[2020-01-01 01:00:00, high] - 1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 01:00:00, high] = -0.0
+ [2020-01-01 02:00:00, low]: +1 costs(temporal)|per_timestep[2020-01-01 02:00:00, low] - 1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 02:00:00, low] = -0.0
+ [2020-01-01 02:00:00, high]: +1 costs(temporal)|per_timestep[2020-01-01 02:00:00, high] - 1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 02:00:00, high] = -0.0
+ [2020-01-01 03:00:00, low]: +1 costs(temporal)|per_timestep[2020-01-01 03:00:00, low] - 1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 03:00:00, low] = -0.0
+ [2020-01-01 03:00:00, high]: +1 costs(temporal)|per_timestep[2020-01-01 03:00:00, high] - 1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 03:00:00, high] = -0.0
+ [2020-01-01 04:00:00, low]: +1 costs(temporal)|per_timestep[2020-01-01 04:00:00, low] - 1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 04:00:00, low] = -0.0
+ [2020-01-01 04:00:00, high]: +1 costs(temporal)|per_timestep[2020-01-01 04:00:00, high] - 1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 04:00:00, high] = -0.0
+ costs: |-
+ Constraint `costs`
+ [scenario: 2]:
+ ---------------------------------
+ [low]: +1 costs[low] - 1 costs(temporal)[low] - 1 costs(periodic)[low] = -0.0
+ [high]: +1 costs[high] - 1 costs(temporal)[high] - 1 costs(periodic)[high] = -0.0
+ Penalty: |-
+ Constraint `Penalty`
+ --------------------
+ +1 Penalty - 1 Heat->Penalty - 1 Gas->Penalty = -0.0
+ "Boiler(Q_fu)|total_flow_hours": |-
+ Constraint `Boiler(Q_fu)|total_flow_hours`
+ [scenario: 2]:
+ ---------------------------------------------------------
+ [low]: +1 Boiler(Q_fu)|total_flow_hours[low] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, low] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, low] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00, low] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 03:00:00, low] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 04:00:00, low] = -0.0
+ [high]: +1 Boiler(Q_fu)|total_flow_hours[high] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, high] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, high] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00, high] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 03:00:00, high] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 04:00:00, high] = -0.0
+ "Boiler(Q_th)|total_flow_hours": |-
+ Constraint `Boiler(Q_th)|total_flow_hours`
+ [scenario: 2]:
+ ---------------------------------------------------------
+ [low]: +1 Boiler(Q_th)|total_flow_hours[low] - 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, low] - 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, low] - 1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00, low] - 1 Boiler(Q_th)|flow_rate[2020-01-01 03:00:00, low] - 1 Boiler(Q_th)|flow_rate[2020-01-01 04:00:00, low] = -0.0
+ [high]: +1 Boiler(Q_th)|total_flow_hours[high] - 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, high] - 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, high] - 1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00, high] - 1 Boiler(Q_th)|flow_rate[2020-01-01 03:00:00, high] - 1 Boiler(Q_th)|flow_rate[2020-01-01 04:00:00, high] = -0.0
+ "Boiler|conversion_0": |-
+ Constraint `Boiler|conversion_0`
+ [time: 5, scenario: 2]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00, low]: +0.9 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, low] - 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, low] = -0.0
+ [2020-01-01 00:00:00, high]: +0.9 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, high] - 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, high] = -0.0
+ [2020-01-01 01:00:00, low]: +0.9 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, low] - 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, low] = -0.0
+ [2020-01-01 01:00:00, high]: +0.9 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, high] - 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, high] = -0.0
+ [2020-01-01 02:00:00, low]: +0.9 Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00, low] - 1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00, low] = -0.0
+ [2020-01-01 02:00:00, high]: +0.9 Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00, high] - 1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00, high] = -0.0
+ [2020-01-01 03:00:00, low]: +0.9 Boiler(Q_fu)|flow_rate[2020-01-01 03:00:00, low] - 1 Boiler(Q_th)|flow_rate[2020-01-01 03:00:00, low] = -0.0
+ [2020-01-01 03:00:00, high]: +0.9 Boiler(Q_fu)|flow_rate[2020-01-01 03:00:00, high] - 1 Boiler(Q_th)|flow_rate[2020-01-01 03:00:00, high] = -0.0
+ [2020-01-01 04:00:00, low]: +0.9 Boiler(Q_fu)|flow_rate[2020-01-01 04:00:00, low] - 1 Boiler(Q_th)|flow_rate[2020-01-01 04:00:00, low] = -0.0
+ [2020-01-01 04:00:00, high]: +0.9 Boiler(Q_fu)|flow_rate[2020-01-01 04:00:00, high] - 1 Boiler(Q_th)|flow_rate[2020-01-01 04:00:00, high] = -0.0
+ "HeatLoad(Q_th)|total_flow_hours": |-
+ Constraint `HeatLoad(Q_th)|total_flow_hours`
+ [scenario: 2]:
+ -----------------------------------------------------------
+ [low]: +1 HeatLoad(Q_th)|total_flow_hours[low] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 00:00:00, low] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 01:00:00, low] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 02:00:00, low] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 03:00:00, low] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 04:00:00, low] = -0.0
+ [high]: +1 HeatLoad(Q_th)|total_flow_hours[high] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 00:00:00, high] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 01:00:00, high] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 02:00:00, high] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 03:00:00, high] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 04:00:00, high] = -0.0
+ "GasSource(Q_Gas)|total_flow_hours": |-
+ Constraint `GasSource(Q_Gas)|total_flow_hours`
+ [scenario: 2]:
+ -------------------------------------------------------------
+ [low]: +1 GasSource(Q_Gas)|total_flow_hours[low] - 1 GasSource(Q_Gas)|flow_rate[2020-01-01 00:00:00, low] - 1 GasSource(Q_Gas)|flow_rate[2020-01-01 01:00:00, low] - 1 GasSource(Q_Gas)|flow_rate[2020-01-01 02:00:00, low] - 1 GasSource(Q_Gas)|flow_rate[2020-01-01 03:00:00, low] - 1 GasSource(Q_Gas)|flow_rate[2020-01-01 04:00:00, low] = -0.0
+ [high]: +1 GasSource(Q_Gas)|total_flow_hours[high] - 1 GasSource(Q_Gas)|flow_rate[2020-01-01 00:00:00, high] - 1 GasSource(Q_Gas)|flow_rate[2020-01-01 01:00:00, high] - 1 GasSource(Q_Gas)|flow_rate[2020-01-01 02:00:00, high] - 1 GasSource(Q_Gas)|flow_rate[2020-01-01 03:00:00, high] - 1 GasSource(Q_Gas)|flow_rate[2020-01-01 04:00:00, high] = -0.0
+ "GasSource(Q_Gas)->costs(temporal)": |-
+ Constraint `GasSource(Q_Gas)->costs(temporal)`
+ [time: 5, scenario: 2]:
+ ----------------------------------------------------------------------
+ [2020-01-01 00:00:00, low]: +1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 00:00:00, low] - 0.04 GasSource(Q_Gas)|flow_rate[2020-01-01 00:00:00, low] = -0.0
+ [2020-01-01 00:00:00, high]: +1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 00:00:00, high] - 0.04 GasSource(Q_Gas)|flow_rate[2020-01-01 00:00:00, high] = -0.0
+ [2020-01-01 01:00:00, low]: +1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 01:00:00, low] - 0.04 GasSource(Q_Gas)|flow_rate[2020-01-01 01:00:00, low] = -0.0
+ [2020-01-01 01:00:00, high]: +1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 01:00:00, high] - 0.04 GasSource(Q_Gas)|flow_rate[2020-01-01 01:00:00, high] = -0.0
+ [2020-01-01 02:00:00, low]: +1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 02:00:00, low] - 0.04 GasSource(Q_Gas)|flow_rate[2020-01-01 02:00:00, low] = -0.0
+ [2020-01-01 02:00:00, high]: +1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 02:00:00, high] - 0.04 GasSource(Q_Gas)|flow_rate[2020-01-01 02:00:00, high] = -0.0
+ [2020-01-01 03:00:00, low]: +1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 03:00:00, low] - 0.04 GasSource(Q_Gas)|flow_rate[2020-01-01 03:00:00, low] = -0.0
+ [2020-01-01 03:00:00, high]: +1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 03:00:00, high] - 0.04 GasSource(Q_Gas)|flow_rate[2020-01-01 03:00:00, high] = -0.0
+ [2020-01-01 04:00:00, low]: +1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 04:00:00, low] - 0.04 GasSource(Q_Gas)|flow_rate[2020-01-01 04:00:00, low] = -0.0
+ [2020-01-01 04:00:00, high]: +1 GasSource(Q_Gas)->costs(temporal)[2020-01-01 04:00:00, high] - 0.04 GasSource(Q_Gas)|flow_rate[2020-01-01 04:00:00, high] = -0.0
+ "Heat|balance": |-
+ Constraint `Heat|balance`
+ [time: 5, scenario: 2]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00, low]: +1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, low] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 00:00:00, low] + 1 Heat|excess_input[2020-01-01 00:00:00, low] - 1 Heat|excess_output[2020-01-01 00:00:00, low] = -0.0
+ [2020-01-01 00:00:00, high]: +1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, high] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 00:00:00, high] + 1 Heat|excess_input[2020-01-01 00:00:00, high] - 1 Heat|excess_output[2020-01-01 00:00:00, high] = -0.0
+ [2020-01-01 01:00:00, low]: +1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, low] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 01:00:00, low] + 1 Heat|excess_input[2020-01-01 01:00:00, low] - 1 Heat|excess_output[2020-01-01 01:00:00, low] = -0.0
+ [2020-01-01 01:00:00, high]: +1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, high] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 01:00:00, high] + 1 Heat|excess_input[2020-01-01 01:00:00, high] - 1 Heat|excess_output[2020-01-01 01:00:00, high] = -0.0
+ [2020-01-01 02:00:00, low]: +1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00, low] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 02:00:00, low] + 1 Heat|excess_input[2020-01-01 02:00:00, low] - 1 Heat|excess_output[2020-01-01 02:00:00, low] = -0.0
+ [2020-01-01 02:00:00, high]: +1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00, high] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 02:00:00, high] + 1 Heat|excess_input[2020-01-01 02:00:00, high] - 1 Heat|excess_output[2020-01-01 02:00:00, high] = -0.0
+ [2020-01-01 03:00:00, low]: +1 Boiler(Q_th)|flow_rate[2020-01-01 03:00:00, low] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 03:00:00, low] + 1 Heat|excess_input[2020-01-01 03:00:00, low] - 1 Heat|excess_output[2020-01-01 03:00:00, low] = -0.0
+ [2020-01-01 03:00:00, high]: +1 Boiler(Q_th)|flow_rate[2020-01-01 03:00:00, high] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 03:00:00, high] + 1 Heat|excess_input[2020-01-01 03:00:00, high] - 1 Heat|excess_output[2020-01-01 03:00:00, high] = -0.0
+ [2020-01-01 04:00:00, low]: +1 Boiler(Q_th)|flow_rate[2020-01-01 04:00:00, low] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 04:00:00, low] + 1 Heat|excess_input[2020-01-01 04:00:00, low] - 1 Heat|excess_output[2020-01-01 04:00:00, low] = -0.0
+ [2020-01-01 04:00:00, high]: +1 Boiler(Q_th)|flow_rate[2020-01-01 04:00:00, high] - 1 HeatLoad(Q_th)|flow_rate[2020-01-01 04:00:00, high] + 1 Heat|excess_input[2020-01-01 04:00:00, high] - 1 Heat|excess_output[2020-01-01 04:00:00, high] = -0.0
+ "Heat->Penalty": |-
+ Constraint `Heat->Penalty`
+ --------------------------
+ +1 Heat->Penalty - 1e+05 Heat|excess_input[2020-01-01 00:00:00, low] - 1e+05 Heat|excess_input[2020-01-01 00:00:00, high]... -1e+05 Heat|excess_output[2020-01-01 03:00:00, high] - 1e+05 Heat|excess_output[2020-01-01 04:00:00, low] - 1e+05 Heat|excess_output[2020-01-01 04:00:00, high] = -0.0
+ "Gas|balance": |-
+ Constraint `Gas|balance`
+ [time: 5, scenario: 2]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00, low]: +1 GasSource(Q_Gas)|flow_rate[2020-01-01 00:00:00, low] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, low] + 1 Gas|excess_input[2020-01-01 00:00:00, low] - 1 Gas|excess_output[2020-01-01 00:00:00, low] = -0.0
+ [2020-01-01 00:00:00, high]: +1 GasSource(Q_Gas)|flow_rate[2020-01-01 00:00:00, high] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, high] + 1 Gas|excess_input[2020-01-01 00:00:00, high] - 1 Gas|excess_output[2020-01-01 00:00:00, high] = -0.0
+ [2020-01-01 01:00:00, low]: +1 GasSource(Q_Gas)|flow_rate[2020-01-01 01:00:00, low] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, low] + 1 Gas|excess_input[2020-01-01 01:00:00, low] - 1 Gas|excess_output[2020-01-01 01:00:00, low] = -0.0
+ [2020-01-01 01:00:00, high]: +1 GasSource(Q_Gas)|flow_rate[2020-01-01 01:00:00, high] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, high] + 1 Gas|excess_input[2020-01-01 01:00:00, high] - 1 Gas|excess_output[2020-01-01 01:00:00, high] = -0.0
+ [2020-01-01 02:00:00, low]: +1 GasSource(Q_Gas)|flow_rate[2020-01-01 02:00:00, low] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00, low] + 1 Gas|excess_input[2020-01-01 02:00:00, low] - 1 Gas|excess_output[2020-01-01 02:00:00, low] = -0.0
+ [2020-01-01 02:00:00, high]: +1 GasSource(Q_Gas)|flow_rate[2020-01-01 02:00:00, high] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00, high] + 1 Gas|excess_input[2020-01-01 02:00:00, high] - 1 Gas|excess_output[2020-01-01 02:00:00, high] = -0.0
+ [2020-01-01 03:00:00, low]: +1 GasSource(Q_Gas)|flow_rate[2020-01-01 03:00:00, low] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 03:00:00, low] + 1 Gas|excess_input[2020-01-01 03:00:00, low] - 1 Gas|excess_output[2020-01-01 03:00:00, low] = -0.0
+ [2020-01-01 03:00:00, high]: +1 GasSource(Q_Gas)|flow_rate[2020-01-01 03:00:00, high] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 03:00:00, high] + 1 Gas|excess_input[2020-01-01 03:00:00, high] - 1 Gas|excess_output[2020-01-01 03:00:00, high] = -0.0
+ [2020-01-01 04:00:00, low]: +1 GasSource(Q_Gas)|flow_rate[2020-01-01 04:00:00, low] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 04:00:00, low] + 1 Gas|excess_input[2020-01-01 04:00:00, low] - 1 Gas|excess_output[2020-01-01 04:00:00, low] = -0.0
+ [2020-01-01 04:00:00, high]: +1 GasSource(Q_Gas)|flow_rate[2020-01-01 04:00:00, high] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 04:00:00, high] + 1 Gas|excess_input[2020-01-01 04:00:00, high] - 1 Gas|excess_output[2020-01-01 04:00:00, high] = -0.0
+ "Gas->Penalty": |-
+ Constraint `Gas->Penalty`
+ -------------------------
+ +1 Gas->Penalty - 1e+05 Gas|excess_input[2020-01-01 00:00:00, low] - 1e+05 Gas|excess_input[2020-01-01 00:00:00, high]... -1e+05 Gas|excess_output[2020-01-01 03:00:00, high] - 1e+05 Gas|excess_output[2020-01-01 04:00:00, low] - 1e+05 Gas|excess_output[2020-01-01 04:00:00, high] = -0.0
+binaries: []
+integers: []
+continuous:
+ - costs(periodic)
+ - costs(temporal)
+ - "costs(temporal)|per_timestep"
+ - costs
+ - Penalty
+ - "Boiler(Q_fu)|flow_rate"
+ - "Boiler(Q_fu)|total_flow_hours"
+ - "Boiler(Q_th)|flow_rate"
+ - "Boiler(Q_th)|total_flow_hours"
+ - "HeatLoad(Q_th)|flow_rate"
+ - "HeatLoad(Q_th)|total_flow_hours"
+ - "GasSource(Q_Gas)|flow_rate"
+ - "GasSource(Q_Gas)|total_flow_hours"
+ - "GasSource(Q_Gas)->costs(temporal)"
+ - "Heat|excess_input"
+ - "Heat|excess_output"
+ - "Heat->Penalty"
+ - "Gas|excess_input"
+ - "Gas|excess_output"
+ - "Gas->Penalty"
+infeasible_constraints: ''
diff --git a/tests/ressources/v4-api/04_scenarios--solution.nc4 b/tests/ressources/v4-api/04_scenarios--solution.nc4
new file mode 100644
index 000000000..bc664ef0f
Binary files /dev/null and b/tests/ressources/v4-api/04_scenarios--solution.nc4 differ
diff --git a/tests/ressources/v4-api/04_scenarios--summary.yaml b/tests/ressources/v4-api/04_scenarios--summary.yaml
new file mode 100644
index 000000000..d30f0efcb
--- /dev/null
+++ b/tests/ressources/v4-api/04_scenarios--summary.yaml
@@ -0,0 +1,46 @@
+Name: 04_scenarios
+Number of timesteps: 5
+Calculation Type: FullCalculation
+Constraints: 67
+Variables: 117
+Main Results:
+ Objective: 10.67
+ Penalty: -0.0
+ Effects:
+ costs [€]:
+ temporal: [8.44, 12.89]
+ periodic: [-0.0, -0.0]
+ total: [8.44, 12.89]
+ Invest-Decisions:
+ Invested: {}
+ Not invested: {}
+ Buses with excess: []
+Durations:
+ modeling: 0.21
+ solving: 0.12
+ saving: 0.0
+Config:
+ config_name: flixopt
+ logging:
+ level: INFO
+ file: null
+ console: false
+ max_file_size: 10485760
+ backup_count: 5
+ verbose_tracebacks: false
+ modeling:
+ big: 10000000
+ epsilon: 1.0e-05
+ big_binary_bound: 100000
+ solving:
+ mip_gap: 0.01
+ time_limit_seconds: 300
+ log_to_console: false
+ log_main_results: false
+ plotting:
+ default_show: false
+ default_engine: plotly
+ default_dpi: 300
+ default_facet_cols: 3
+ default_sequential_colorscale: turbo
+ default_qualitative_colorscale: plotly
diff --git a/tests/ressources/v4-api/io_flow_system_base--flow_system.nc4 b/tests/ressources/v4-api/io_flow_system_base--flow_system.nc4
new file mode 100644
index 000000000..32b0705dd
Binary files /dev/null and b/tests/ressources/v4-api/io_flow_system_base--flow_system.nc4 differ
diff --git a/tests/ressources/v4-api/io_flow_system_base--model_documentation.yaml b/tests/ressources/v4-api/io_flow_system_base--model_documentation.yaml
new file mode 100644
index 000000000..5976668ef
--- /dev/null
+++ b/tests/ressources/v4-api/io_flow_system_base--model_documentation.yaml
@@ -0,0 +1,1758 @@
+objective: |-
+ Objective:
+ ----------
+ LinearExpression: +1 costs + 1 Penalty
+ Sense: min
+ Value: -11597.873624489208
+termination_condition: optimal
+status: ok
+nvars: 454
+nvarsbin: 128
+nvarscont: 326
+ncons: 536
+variables:
+ costs(periodic): |-
+ Variable
+ --------
+ costs(periodic) ∈ [-inf, inf]
+ costs(temporal): |-
+ Variable
+ --------
+ costs(temporal) ∈ [-inf, inf]
+ "costs(temporal)|per_timestep": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: costs(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: costs(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: costs(temporal)|per_timestep[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: costs(temporal)|per_timestep[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: costs(temporal)|per_timestep[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: costs(temporal)|per_timestep[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: costs(temporal)|per_timestep[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: costs(temporal)|per_timestep[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: costs(temporal)|per_timestep[2020-01-01 08:00:00] ∈ [-inf, inf]
+ costs: |-
+ Variable
+ --------
+ costs ∈ [-inf, inf]
+ CO2(periodic): |-
+ Variable
+ --------
+ CO2(periodic) ∈ [-inf, inf]
+ CO2(temporal): |-
+ Variable
+ --------
+ CO2(temporal) ∈ [-inf, inf]
+ "CO2(temporal)|per_timestep": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CO2(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: CO2(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: CO2(temporal)|per_timestep[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: CO2(temporal)|per_timestep[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: CO2(temporal)|per_timestep[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: CO2(temporal)|per_timestep[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: CO2(temporal)|per_timestep[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: CO2(temporal)|per_timestep[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: CO2(temporal)|per_timestep[2020-01-01 08:00:00] ∈ [-inf, inf]
+ CO2: |-
+ Variable
+ --------
+ CO2 ∈ [-inf, inf]
+ PE(periodic): |-
+ Variable
+ --------
+ PE(periodic) ∈ [-inf, inf]
+ PE(temporal): |-
+ Variable
+ --------
+ PE(temporal) ∈ [-inf, inf]
+ "PE(temporal)|per_timestep": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: PE(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: PE(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: PE(temporal)|per_timestep[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: PE(temporal)|per_timestep[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: PE(temporal)|per_timestep[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: PE(temporal)|per_timestep[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: PE(temporal)|per_timestep[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: PE(temporal)|per_timestep[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: PE(temporal)|per_timestep[2020-01-01 08:00:00] ∈ [-inf, inf]
+ PE: |-
+ Variable
+ --------
+ PE ∈ [-inf, 3500]
+ Penalty: |-
+ Variable
+ --------
+ Penalty ∈ [-inf, inf]
+ "CO2(temporal)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Wärmelast(Q_th_Last)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00] ∈ [30, 30]
+ [2020-01-01 01:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00] ∈ [0, 0]
+ [2020-01-01 02:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 02:00:00] ∈ [90, 90]
+ [2020-01-01 03:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 03:00:00] ∈ [110, 110]
+ [2020-01-01 04:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 04:00:00] ∈ [110, 110]
+ [2020-01-01 05:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 05:00:00] ∈ [20, 20]
+ [2020-01-01 06:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 06:00:00] ∈ [20, 20]
+ [2020-01-01 07:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00] ∈ [20, 20]
+ [2020-01-01 08:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00] ∈ [20, 20]
+ "Wärmelast(Q_th_Last)|total_flow_hours": |-
+ Variable
+ --------
+ Wärmelast(Q_th_Last)|total_flow_hours ∈ [0, inf]
+ "Gastarif(Q_Gas)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 02:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1000]
+ [2020-01-01 03:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1000]
+ [2020-01-01 04:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1000]
+ [2020-01-01 05:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1000]
+ [2020-01-01 06:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1000]
+ [2020-01-01 07:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1000]
+ [2020-01-01 08:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1000]
+ "Gastarif(Q_Gas)|total_flow_hours": |-
+ Variable
+ --------
+ Gastarif(Q_Gas)|total_flow_hours ∈ [0, inf]
+ "Gastarif(Q_Gas)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Gastarif(Q_Gas)->CO2(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Einspeisung(P_el)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+07]
+ [2020-01-01 05:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+07]
+ [2020-01-01 06:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+07]
+ "Einspeisung(P_el)|total_flow_hours": |-
+ Variable
+ --------
+ Einspeisung(P_el)|total_flow_hours ∈ [0, inf]
+ "Einspeisung(P_el)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Kessel(Q_fu)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] ∈ [0, 200]
+ [2020-01-01 01:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] ∈ [0, 200]
+ [2020-01-01 02:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 02:00:00] ∈ [0, 200]
+ [2020-01-01 03:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 03:00:00] ∈ [0, 200]
+ [2020-01-01 04:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 04:00:00] ∈ [0, 200]
+ [2020-01-01 05:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 05:00:00] ∈ [0, 200]
+ [2020-01-01 06:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] ∈ [0, 200]
+ [2020-01-01 07:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] ∈ [0, 200]
+ [2020-01-01 08:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] ∈ [0, 200]
+ "Kessel(Q_fu)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_fu)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_fu)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel(Q_fu)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel(Q_fu)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel(Q_fu)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel(Q_fu)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel(Q_fu)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel(Q_fu)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel(Q_fu)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel(Q_fu)|on_hours_total": |-
+ Variable
+ --------
+ Kessel(Q_fu)|on_hours_total ∈ [0, inf]
+ "Kessel(Q_fu)|total_flow_hours": |-
+ Variable
+ --------
+ Kessel(Q_fu)|total_flow_hours ∈ [0, inf]
+ "Kessel(Q_th)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] ∈ [0, 50]
+ [2020-01-01 01:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] ∈ [0, 50]
+ [2020-01-01 02:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] ∈ [0, 50]
+ [2020-01-01 03:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] ∈ [0, 50]
+ [2020-01-01 04:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] ∈ [0, 50]
+ [2020-01-01 05:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] ∈ [0, 50]
+ [2020-01-01 06:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] ∈ [0, 50]
+ [2020-01-01 07:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] ∈ [0, 50]
+ [2020-01-01 08:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] ∈ [0, 50]
+ "Kessel(Q_th)|size": |-
+ Variable
+ --------
+ Kessel(Q_th)|size ∈ [50, 50]
+ "Kessel(Q_th)->costs(periodic)": |-
+ Variable
+ --------
+ Kessel(Q_th)->costs(periodic) ∈ [-inf, inf]
+ "Kessel(Q_th)->PE(periodic)": |-
+ Variable
+ --------
+ Kessel(Q_th)->PE(periodic) ∈ [-inf, inf]
+ "Kessel(Q_th)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_th)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel(Q_th)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel(Q_th)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel(Q_th)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel(Q_th)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel(Q_th)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel(Q_th)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel(Q_th)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel(Q_th)|off": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|off[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_th)|off[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel(Q_th)|off[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel(Q_th)|off[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel(Q_th)|off[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel(Q_th)|off[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel(Q_th)|off[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel(Q_th)|off[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel(Q_th)|off[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel(Q_th)|on_hours_total": |-
+ Variable
+ --------
+ Kessel(Q_th)|on_hours_total ∈ [0, 1000]
+ "Kessel(Q_th)|switch|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|switch|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_th)|switch|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel(Q_th)|switch|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel(Q_th)|switch|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel(Q_th)|switch|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel(Q_th)|switch|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel(Q_th)|switch|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel(Q_th)|switch|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel(Q_th)|switch|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel(Q_th)|switch|off": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|switch|off[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_th)|switch|off[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel(Q_th)|switch|off[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel(Q_th)|switch|off[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel(Q_th)|switch|off[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel(Q_th)|switch|off[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel(Q_th)|switch|off[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel(Q_th)|switch|off[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel(Q_th)|switch|off[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel(Q_th)|switch|count": |-
+ Variable
+ --------
+ Kessel(Q_th)|switch|count ∈ [0, 1000]
+ "Kessel(Q_th)|consecutive_on_hours": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] ∈ [0, 10]
+ [2020-01-01 01:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] ∈ [0, 10]
+ [2020-01-01 02:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] ∈ [0, 10]
+ [2020-01-01 03:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] ∈ [0, 10]
+ [2020-01-01 04:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] ∈ [0, 10]
+ [2020-01-01 05:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] ∈ [0, 10]
+ [2020-01-01 06:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] ∈ [0, 10]
+ [2020-01-01 07:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] ∈ [0, 10]
+ [2020-01-01 08:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 08:00:00] ∈ [0, 10]
+ "Kessel(Q_th)|consecutive_off_hours": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 00:00:00] ∈ [0, 10]
+ [2020-01-01 01:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] ∈ [0, 10]
+ [2020-01-01 02:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] ∈ [0, 10]
+ [2020-01-01 03:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] ∈ [0, 10]
+ [2020-01-01 04:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] ∈ [0, 10]
+ [2020-01-01 05:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] ∈ [0, 10]
+ [2020-01-01 06:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] ∈ [0, 10]
+ [2020-01-01 07:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] ∈ [0, 10]
+ [2020-01-01 08:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 08:00:00] ∈ [0, 10]
+ "Kessel(Q_th)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Kessel(Q_th)|total_flow_hours": |-
+ Variable
+ --------
+ Kessel(Q_th)|total_flow_hours ∈ [0, 1e+06]
+ "Kessel|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel|on_hours_total": |-
+ Variable
+ --------
+ Kessel|on_hours_total ∈ [0, inf]
+ "Kessel->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Kessel->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Kessel->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Kessel->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Kessel->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Kessel->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Kessel->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Kessel->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Kessel->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Kessel->CO2(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel->CO2(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Kessel->CO2(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Kessel->CO2(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Kessel->CO2(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Kessel->CO2(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Kessel->CO2(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Kessel->CO2(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Kessel->CO2(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Kessel->CO2(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Speicher(Q_th_load)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+04]
+ [2020-01-01 01:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+04]
+ [2020-01-01 02:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+04]
+ [2020-01-01 03:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+04]
+ [2020-01-01 04:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+04]
+ [2020-01-01 05:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+04]
+ [2020-01-01 06:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+04]
+ [2020-01-01 07:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+04]
+ [2020-01-01 08:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+04]
+ "Speicher(Q_th_load)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_load)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Speicher(Q_th_load)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Speicher(Q_th_load)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Speicher(Q_th_load)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Speicher(Q_th_load)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Speicher(Q_th_load)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Speicher(Q_th_load)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Speicher(Q_th_load)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Speicher(Q_th_load)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Speicher(Q_th_load)|on_hours_total": |-
+ Variable
+ --------
+ Speicher(Q_th_load)|on_hours_total ∈ [0, inf]
+ "Speicher(Q_th_load)|total_flow_hours": |-
+ Variable
+ --------
+ Speicher(Q_th_load)|total_flow_hours ∈ [0, inf]
+ "Speicher(Q_th_unload)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+04]
+ [2020-01-01 01:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+04]
+ [2020-01-01 02:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+04]
+ [2020-01-01 03:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+04]
+ [2020-01-01 04:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+04]
+ [2020-01-01 05:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+04]
+ [2020-01-01 06:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+04]
+ [2020-01-01 07:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+04]
+ [2020-01-01 08:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+04]
+ "Speicher(Q_th_unload)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Speicher(Q_th_unload)|on_hours_total": |-
+ Variable
+ --------
+ Speicher(Q_th_unload)|on_hours_total ∈ [0, inf]
+ "Speicher(Q_th_unload)|total_flow_hours": |-
+ Variable
+ --------
+ Speicher(Q_th_unload)|total_flow_hours ∈ [0, inf]
+ "Speicher|charge_state": |-
+ Variable (time: 10)
+ -------------------
+ [2020-01-01 00:00:00]: Speicher|charge_state[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: Speicher|charge_state[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 02:00:00]: Speicher|charge_state[2020-01-01 02:00:00] ∈ [0, 1000]
+ [2020-01-01 03:00:00]: Speicher|charge_state[2020-01-01 03:00:00] ∈ [0, 1000]
+ [2020-01-01 04:00:00]: Speicher|charge_state[2020-01-01 04:00:00] ∈ [0, 1000]
+ [2020-01-01 05:00:00]: Speicher|charge_state[2020-01-01 05:00:00] ∈ [0, 1000]
+ [2020-01-01 06:00:00]: Speicher|charge_state[2020-01-01 06:00:00] ∈ [0, 1000]
+ [2020-01-01 07:00:00]: Speicher|charge_state[2020-01-01 07:00:00] ∈ [0, 1000]
+ [2020-01-01 08:00:00]: Speicher|charge_state[2020-01-01 08:00:00] ∈ [0, 1000]
+ [2020-01-01 09:00:00]: Speicher|charge_state[2020-01-01 09:00:00] ∈ [0, 1000]
+ "Speicher|netto_discharge": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher|netto_discharge[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Speicher|netto_discharge[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Speicher|netto_discharge[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Speicher|netto_discharge[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Speicher|netto_discharge[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Speicher|netto_discharge[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Speicher|netto_discharge[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Speicher|netto_discharge[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Speicher|netto_discharge[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Speicher|size": |-
+ Variable
+ --------
+ Speicher|size ∈ [0, 1000]
+ "Speicher->costs(periodic)": |-
+ Variable
+ --------
+ Speicher->costs(periodic) ∈ [-inf, inf]
+ "Speicher->CO2(periodic)": |-
+ Variable
+ --------
+ Speicher->CO2(periodic) ∈ [-inf, inf]
+ "Speicher|PiecewiseEffects|costs": |-
+ Variable
+ --------
+ Speicher|PiecewiseEffects|costs ∈ [-inf, inf]
+ "Speicher|PiecewiseEffects|PE": |-
+ Variable
+ --------
+ Speicher|PiecewiseEffects|PE ∈ [-inf, inf]
+ "Speicher|Piece_0|inside_piece": |-
+ Variable
+ --------
+ Speicher|Piece_0|inside_piece ∈ {0, 1}
+ "Speicher|Piece_0|lambda0": |-
+ Variable
+ --------
+ Speicher|Piece_0|lambda0 ∈ [0, 1]
+ "Speicher|Piece_0|lambda1": |-
+ Variable
+ --------
+ Speicher|Piece_0|lambda1 ∈ [0, 1]
+ "Speicher|Piece_1|inside_piece": |-
+ Variable
+ --------
+ Speicher|Piece_1|inside_piece ∈ {0, 1}
+ "Speicher|Piece_1|lambda0": |-
+ Variable
+ --------
+ Speicher|Piece_1|lambda0 ∈ [0, 1]
+ "Speicher|Piece_1|lambda1": |-
+ Variable
+ --------
+ Speicher|Piece_1|lambda1 ∈ [0, 1]
+ "Speicher->PE(periodic)": |-
+ Variable
+ --------
+ Speicher->PE(periodic) ∈ [-inf, inf]
+ "KWK(Q_fu)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 02:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1000]
+ [2020-01-01 03:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1000]
+ [2020-01-01 04:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1000]
+ [2020-01-01 05:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1000]
+ [2020-01-01 06:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1000]
+ [2020-01-01 07:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1000]
+ [2020-01-01 08:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1000]
+ "KWK(Q_fu)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK(Q_fu)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: KWK(Q_fu)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: KWK(Q_fu)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: KWK(Q_fu)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: KWK(Q_fu)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: KWK(Q_fu)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: KWK(Q_fu)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: KWK(Q_fu)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: KWK(Q_fu)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "KWK(Q_fu)|on_hours_total": |-
+ Variable
+ --------
+ KWK(Q_fu)|on_hours_total ∈ [0, inf]
+ "KWK(Q_fu)|total_flow_hours": |-
+ Variable
+ --------
+ KWK(Q_fu)|total_flow_hours ∈ [0, inf]
+ "KWK(Q_th)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK(Q_th)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: KWK(Q_th)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 02:00:00]: KWK(Q_th)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1000]
+ [2020-01-01 03:00:00]: KWK(Q_th)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1000]
+ [2020-01-01 04:00:00]: KWK(Q_th)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1000]
+ [2020-01-01 05:00:00]: KWK(Q_th)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1000]
+ [2020-01-01 06:00:00]: KWK(Q_th)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1000]
+ [2020-01-01 07:00:00]: KWK(Q_th)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1000]
+ [2020-01-01 08:00:00]: KWK(Q_th)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1000]
+ "KWK(Q_th)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK(Q_th)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: KWK(Q_th)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: KWK(Q_th)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: KWK(Q_th)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: KWK(Q_th)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: KWK(Q_th)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: KWK(Q_th)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: KWK(Q_th)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: KWK(Q_th)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "KWK(Q_th)|on_hours_total": |-
+ Variable
+ --------
+ KWK(Q_th)|on_hours_total ∈ [0, inf]
+ "KWK(Q_th)|total_flow_hours": |-
+ Variable
+ --------
+ KWK(Q_th)|total_flow_hours ∈ [0, inf]
+ "KWK(P_el)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK(P_el)|flow_rate[2020-01-01 00:00:00] ∈ [0, 60]
+ [2020-01-01 01:00:00]: KWK(P_el)|flow_rate[2020-01-01 01:00:00] ∈ [0, 60]
+ [2020-01-01 02:00:00]: KWK(P_el)|flow_rate[2020-01-01 02:00:00] ∈ [0, 60]
+ [2020-01-01 03:00:00]: KWK(P_el)|flow_rate[2020-01-01 03:00:00] ∈ [0, 60]
+ [2020-01-01 04:00:00]: KWK(P_el)|flow_rate[2020-01-01 04:00:00] ∈ [0, 60]
+ [2020-01-01 05:00:00]: KWK(P_el)|flow_rate[2020-01-01 05:00:00] ∈ [0, 60]
+ [2020-01-01 06:00:00]: KWK(P_el)|flow_rate[2020-01-01 06:00:00] ∈ [0, 60]
+ [2020-01-01 07:00:00]: KWK(P_el)|flow_rate[2020-01-01 07:00:00] ∈ [0, 60]
+ [2020-01-01 08:00:00]: KWK(P_el)|flow_rate[2020-01-01 08:00:00] ∈ [0, 60]
+ "KWK(P_el)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK(P_el)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: KWK(P_el)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: KWK(P_el)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: KWK(P_el)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: KWK(P_el)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: KWK(P_el)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: KWK(P_el)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: KWK(P_el)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: KWK(P_el)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "KWK(P_el)|on_hours_total": |-
+ Variable
+ --------
+ KWK(P_el)|on_hours_total ∈ [0, inf]
+ "KWK(P_el)|total_flow_hours": |-
+ Variable
+ --------
+ KWK(P_el)|total_flow_hours ∈ [0, inf]
+ "KWK|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: KWK|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: KWK|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: KWK|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: KWK|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: KWK|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: KWK|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: KWK|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: KWK|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "KWK|on_hours_total": |-
+ Variable
+ --------
+ KWK|on_hours_total ∈ [0, inf]
+ "KWK|switch|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK|switch|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: KWK|switch|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: KWK|switch|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: KWK|switch|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: KWK|switch|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: KWK|switch|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: KWK|switch|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: KWK|switch|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: KWK|switch|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "KWK|switch|off": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK|switch|off[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: KWK|switch|off[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: KWK|switch|off[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: KWK|switch|off[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: KWK|switch|off[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: KWK|switch|off[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: KWK|switch|off[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: KWK|switch|off[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: KWK|switch|off[2020-01-01 08:00:00] ∈ {0, 1}
+ "KWK->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: KWK->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: KWK->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: KWK->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: KWK->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: KWK->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: KWK->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: KWK->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: KWK->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Strom|excess_input": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Strom|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Strom|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Strom|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Strom|excess_input[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Strom|excess_input[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Strom|excess_input[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Strom|excess_input[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Strom|excess_input[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Strom|excess_input[2020-01-01 08:00:00] ∈ [0, inf]
+ "Strom|excess_output": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Strom|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Strom|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Strom|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Strom|excess_output[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Strom|excess_output[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Strom|excess_output[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Strom|excess_output[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Strom|excess_output[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Strom|excess_output[2020-01-01 08:00:00] ∈ [0, inf]
+ "Strom->Penalty": |-
+ Variable
+ --------
+ Strom->Penalty ∈ [-inf, inf]
+ "Fernwärme|excess_input": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Fernwärme|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Fernwärme|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Fernwärme|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Fernwärme|excess_input[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Fernwärme|excess_input[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Fernwärme|excess_input[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Fernwärme|excess_input[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Fernwärme|excess_input[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Fernwärme|excess_input[2020-01-01 08:00:00] ∈ [0, inf]
+ "Fernwärme|excess_output": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Fernwärme|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Fernwärme|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Fernwärme|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Fernwärme|excess_output[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Fernwärme|excess_output[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Fernwärme|excess_output[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Fernwärme|excess_output[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Fernwärme|excess_output[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Fernwärme|excess_output[2020-01-01 08:00:00] ∈ [0, inf]
+ "Fernwärme->Penalty": |-
+ Variable
+ --------
+ Fernwärme->Penalty ∈ [-inf, inf]
+ "Gas|excess_input": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gas|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Gas|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Gas|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Gas|excess_input[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Gas|excess_input[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Gas|excess_input[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Gas|excess_input[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Gas|excess_input[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Gas|excess_input[2020-01-01 08:00:00] ∈ [0, inf]
+ "Gas|excess_output": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gas|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Gas|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Gas|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Gas|excess_output[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Gas|excess_output[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Gas|excess_output[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Gas|excess_output[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Gas|excess_output[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Gas|excess_output[2020-01-01 08:00:00] ∈ [0, inf]
+ "Gas->Penalty": |-
+ Variable
+ --------
+ Gas->Penalty ∈ [-inf, inf]
+constraints:
+ costs(periodic): |-
+ Constraint `costs(periodic)`
+ ----------------------------
+ +1 costs(periodic) - 1 Kessel(Q_th)->costs(periodic) - 1 Speicher->costs(periodic) = -0.0
+ costs(temporal): |-
+ Constraint `costs(temporal)`
+ ----------------------------
+ +1 costs(temporal) - 1 costs(temporal)|per_timestep[2020-01-01 00:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 01:00:00]... -1 costs(temporal)|per_timestep[2020-01-01 06:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 07:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "costs(temporal)|per_timestep": |-
+ Constraint `costs(temporal)|per_timestep`
+ [time: 9]:
+ ----------------------------------------------------
+ [2020-01-01 00:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 00:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 00:00:00] - 1 Kessel->costs(temporal)[2020-01-01 00:00:00] - 1 KWK->costs(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 01:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 01:00:00] - 1 Kessel->costs(temporal)[2020-01-01 01:00:00] - 1 KWK->costs(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 02:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 02:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 02:00:00] - 1 Kessel->costs(temporal)[2020-01-01 02:00:00] - 1 KWK->costs(temporal)[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 03:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 03:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 03:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 03:00:00] - 1 Kessel->costs(temporal)[2020-01-01 03:00:00] - 1 KWK->costs(temporal)[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 04:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 04:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 04:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 04:00:00] - 1 Kessel->costs(temporal)[2020-01-01 04:00:00] - 1 KWK->costs(temporal)[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 05:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 05:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 05:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 05:00:00] - 1 Kessel->costs(temporal)[2020-01-01 05:00:00] - 1 KWK->costs(temporal)[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 06:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 06:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 06:00:00] - 1 Kessel->costs(temporal)[2020-01-01 06:00:00] - 1 KWK->costs(temporal)[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 07:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 07:00:00] - 1 Kessel->costs(temporal)[2020-01-01 07:00:00] - 1 KWK->costs(temporal)[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 08:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 08:00:00] - 1 Kessel->costs(temporal)[2020-01-01 08:00:00] - 1 KWK->costs(temporal)[2020-01-01 08:00:00] = -0.0
+ costs: |-
+ Constraint `costs`
+ ------------------
+ +1 costs - 1 costs(temporal) - 1 costs(periodic) = -0.0
+ CO2(periodic): |-
+ Constraint `CO2(periodic)`
+ --------------------------
+ +1 CO2(periodic) - 1 Speicher->CO2(periodic) = -0.0
+ CO2(temporal): |-
+ Constraint `CO2(temporal)`
+ --------------------------
+ +1 CO2(temporal) - 1 CO2(temporal)|per_timestep[2020-01-01 00:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 01:00:00]... -1 CO2(temporal)|per_timestep[2020-01-01 06:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 07:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "CO2(temporal)|per_timestep": |-
+ Constraint `CO2(temporal)|per_timestep`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 01:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 02:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 03:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 03:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 04:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 04:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 05:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 05:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 06:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 07:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 08:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 08:00:00] = -0.0
+ CO2: |-
+ Constraint `CO2`
+ ----------------
+ +1 CO2 - 1 CO2(temporal) - 1 CO2(periodic) = -0.0
+ PE(periodic): |-
+ Constraint `PE(periodic)`
+ -------------------------
+ +1 PE(periodic) - 1 Kessel(Q_th)->PE(periodic) - 1 Speicher->PE(periodic) = -0.0
+ PE(temporal): |-
+ Constraint `PE(temporal)`
+ -------------------------
+ +1 PE(temporal) - 1 PE(temporal)|per_timestep[2020-01-01 00:00:00] - 1 PE(temporal)|per_timestep[2020-01-01 01:00:00]... -1 PE(temporal)|per_timestep[2020-01-01 06:00:00] - 1 PE(temporal)|per_timestep[2020-01-01 07:00:00] - 1 PE(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "PE(temporal)|per_timestep": |-
+ Constraint `PE(temporal)|per_timestep`
+ [time: 9]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ PE: |-
+ Constraint `PE`
+ ---------------
+ +1 PE - 1 PE(temporal) - 1 PE(periodic) = -0.0
+ Penalty: |-
+ Constraint `Penalty`
+ --------------------
+ +1 Penalty - 1 Strom->Penalty - 1 Fernwärme->Penalty - 1 Gas->Penalty = -0.0
+ "CO2(temporal)->costs(temporal)": |-
+ Constraint `CO2(temporal)->costs(temporal)`
+ [time: 9]:
+ ------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 02:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 03:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 04:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 05:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 06:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "Wärmelast(Q_th_Last)|total_flow_hours": |-
+ Constraint `Wärmelast(Q_th_Last)|total_flow_hours`
+ --------------------------------------------------
+ +1 Wärmelast(Q_th_Last)|total_flow_hours - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 06:00:00] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Gastarif(Q_Gas)|total_flow_hours": |-
+ Constraint `Gastarif(Q_Gas)|total_flow_hours`
+ ---------------------------------------------
+ +1 Gastarif(Q_Gas)|total_flow_hours - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00]... -1 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Gastarif(Q_Gas)->costs(temporal)": |-
+ Constraint `Gastarif(Q_Gas)->costs(temporal)`
+ [time: 9]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 03:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 04:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 05:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Gastarif(Q_Gas)->CO2(temporal)": |-
+ Constraint `Gastarif(Q_Gas)->CO2(temporal)`
+ [time: 9]:
+ ------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 03:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 04:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 05:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Einspeisung(P_el)|total_flow_hours": |-
+ Constraint `Einspeisung(P_el)|total_flow_hours`
+ -----------------------------------------------
+ +1 Einspeisung(P_el)|total_flow_hours - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00]... -1 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Einspeisung(P_el)->costs(temporal)": |-
+ Constraint `Einspeisung(P_el)->costs(temporal)`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 03:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 04:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 05:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_fu)|on_hours_total": |-
+ Constraint `Kessel(Q_fu)|on_hours_total`
+ ----------------------------------------
+ +1 Kessel(Q_fu)|on_hours_total - 1 Kessel(Q_fu)|on[2020-01-01 00:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 01:00:00]... -1 Kessel(Q_fu)|on[2020-01-01 06:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 07:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_fu)|flow_rate|ub": |-
+ Constraint `Kessel(Q_fu)|flow_rate|ub`
+ [time: 9]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 02:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 03:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 04:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 05:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Kessel(Q_fu)|flow_rate|lb": |-
+ Constraint `Kessel(Q_fu)|flow_rate|lb`
+ [time: 9]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Kessel(Q_fu)|total_flow_hours": |-
+ Constraint `Kessel(Q_fu)|total_flow_hours`
+ ------------------------------------------
+ +1 Kessel(Q_fu)|total_flow_hours - 1 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00]... -1 Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_th)->costs(periodic)": |-
+ Constraint `Kessel(Q_th)->costs(periodic)`
+ ------------------------------------------
+ +1 Kessel(Q_th)->costs(periodic) - 10 Kessel(Q_th)|size = 1000.0
+ "Kessel(Q_th)->PE(periodic)": |-
+ Constraint `Kessel(Q_th)->PE(periodic)`
+ ---------------------------------------
+ +1 Kessel(Q_th)->PE(periodic) - 2 Kessel(Q_th)|size = -0.0
+ "Kessel(Q_th)|complementary": |-
+ Constraint `Kessel(Q_th)|complementary`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|on[2020-01-01 00:00:00] + 1 Kessel(Q_th)|off[2020-01-01 00:00:00] = 1.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|on[2020-01-01 01:00:00] + 1 Kessel(Q_th)|off[2020-01-01 01:00:00] = 1.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|on[2020-01-01 02:00:00] + 1 Kessel(Q_th)|off[2020-01-01 02:00:00] = 1.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|on[2020-01-01 03:00:00] + 1 Kessel(Q_th)|off[2020-01-01 03:00:00] = 1.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|on[2020-01-01 04:00:00] + 1 Kessel(Q_th)|off[2020-01-01 04:00:00] = 1.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|on[2020-01-01 05:00:00] + 1 Kessel(Q_th)|off[2020-01-01 05:00:00] = 1.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|on[2020-01-01 06:00:00] + 1 Kessel(Q_th)|off[2020-01-01 06:00:00] = 1.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|on[2020-01-01 07:00:00] + 1 Kessel(Q_th)|off[2020-01-01 07:00:00] = 1.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|on[2020-01-01 08:00:00] + 1 Kessel(Q_th)|off[2020-01-01 08:00:00] = 1.0
+ "Kessel(Q_th)|on_hours_total": |-
+ Constraint `Kessel(Q_th)|on_hours_total`
+ ----------------------------------------
+ +1 Kessel(Q_th)|on_hours_total - 1 Kessel(Q_th)|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|on[2020-01-01 01:00:00]... -1 Kessel(Q_th)|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|on[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_th)|switch|transition": |-
+ Constraint `Kessel(Q_th)|switch|transition`
+ [time: 8]:
+ ------------------------------------------------------
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 01:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 01:00:00] - 1 Kessel(Q_th)|on[2020-01-01 01:00:00] + 1 Kessel(Q_th)|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 02:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 02:00:00] - 1 Kessel(Q_th)|on[2020-01-01 02:00:00] + 1 Kessel(Q_th)|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 03:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 03:00:00] - 1 Kessel(Q_th)|on[2020-01-01 03:00:00] + 1 Kessel(Q_th)|on[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 04:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 04:00:00] - 1 Kessel(Q_th)|on[2020-01-01 04:00:00] + 1 Kessel(Q_th)|on[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 05:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 05:00:00] - 1 Kessel(Q_th)|on[2020-01-01 05:00:00] + 1 Kessel(Q_th)|on[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 06:00:00] - 1 Kessel(Q_th)|on[2020-01-01 06:00:00] + 1 Kessel(Q_th)|on[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 07:00:00] - 1 Kessel(Q_th)|on[2020-01-01 07:00:00] + 1 Kessel(Q_th)|on[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 08:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 08:00:00] - 1 Kessel(Q_th)|on[2020-01-01 08:00:00] + 1 Kessel(Q_th)|on[2020-01-01 07:00:00] = -0.0
+ "Kessel(Q_th)|switch|initial": |-
+ Constraint `Kessel(Q_th)|switch|initial`
+ ----------------------------------------
+ +1 Kessel(Q_th)|switch|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 00:00:00] - 1 Kessel(Q_th)|on[2020-01-01 00:00:00] = -1.0
+ "Kessel(Q_th)|switch|mutex": |-
+ Constraint `Kessel(Q_th)|switch|mutex`
+ [time: 9]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 00:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 01:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 02:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 03:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 04:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 05:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 06:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 07:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 07:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 08:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 08:00:00] ≤ 1.0
+ "Kessel(Q_th)|switch|count": |-
+ Constraint `Kessel(Q_th)|switch|count`
+ --------------------------------------
+ +1 Kessel(Q_th)|switch|count - 1 Kessel(Q_th)|switch|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|switch|on[2020-01-01 01:00:00]... -1 Kessel(Q_th)|switch|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|switch|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|switch|on[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_th)|consecutive_on_hours|ub": |-
+ Constraint `Kessel(Q_th)|consecutive_on_hours|ub`
+ [time: 9]:
+ ------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] - 10 Kessel(Q_th)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] - 10 Kessel(Q_th)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] - 10 Kessel(Q_th)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] - 10 Kessel(Q_th)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] - 10 Kessel(Q_th)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] - 10 Kessel(Q_th)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] - 10 Kessel(Q_th)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] - 10 Kessel(Q_th)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 08:00:00] - 10 Kessel(Q_th)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Kessel(Q_th)|consecutive_on_hours|forward": |-
+ Constraint `Kessel(Q_th)|consecutive_on_hours|forward`
+ [time: 8]:
+ -----------------------------------------------------------------
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 08:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] ≤ 1.0
+ "Kessel(Q_th)|consecutive_on_hours|backward": |-
+ Constraint `Kessel(Q_th)|consecutive_on_hours|backward`
+ [time: 8]:
+ ------------------------------------------------------------------
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] - 10 Kessel(Q_th)|on[2020-01-01 01:00:00] ≥ -9.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] - 10 Kessel(Q_th)|on[2020-01-01 02:00:00] ≥ -9.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] - 10 Kessel(Q_th)|on[2020-01-01 03:00:00] ≥ -9.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] - 10 Kessel(Q_th)|on[2020-01-01 04:00:00] ≥ -9.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] - 10 Kessel(Q_th)|on[2020-01-01 05:00:00] ≥ -9.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] - 10 Kessel(Q_th)|on[2020-01-01 06:00:00] ≥ -9.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] - 10 Kessel(Q_th)|on[2020-01-01 07:00:00] ≥ -9.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 08:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] - 10 Kessel(Q_th)|on[2020-01-01 08:00:00] ≥ -9.0
+ "Kessel(Q_th)|consecutive_on_hours|initial": |-
+ Constraint `Kessel(Q_th)|consecutive_on_hours|initial`
+ ------------------------------------------------------
+ +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] - 2 Kessel(Q_th)|on[2020-01-01 00:00:00] = -0.0
+ "Kessel(Q_th)|consecutive_on_hours|lb": |-
+ Constraint `Kessel(Q_th)|consecutive_on_hours|lb`
+ [time: 9]:
+ ------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] - 1 Kessel(Q_th)|on[2020-01-01 00:00:00] + 1 Kessel(Q_th)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] - 1 Kessel(Q_th)|on[2020-01-01 01:00:00] + 1 Kessel(Q_th)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] - 1 Kessel(Q_th)|on[2020-01-01 02:00:00] + 1 Kessel(Q_th)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] - 1 Kessel(Q_th)|on[2020-01-01 03:00:00] + 1 Kessel(Q_th)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] - 1 Kessel(Q_th)|on[2020-01-01 04:00:00] + 1 Kessel(Q_th)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] - 1 Kessel(Q_th)|on[2020-01-01 05:00:00] + 1 Kessel(Q_th)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] - 1 Kessel(Q_th)|on[2020-01-01 06:00:00] + 1 Kessel(Q_th)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] - 1 Kessel(Q_th)|on[2020-01-01 07:00:00] + 1 Kessel(Q_th)|on[2020-01-01 08:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 08:00:00] ≥ -0.0
+ "Kessel(Q_th)|consecutive_off_hours|ub": |-
+ Constraint `Kessel(Q_th)|consecutive_off_hours|ub`
+ [time: 9]:
+ -------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 00:00:00] - 9 Kessel(Q_th)|off[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] - 9 Kessel(Q_th)|off[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] - 9 Kessel(Q_th)|off[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] - 9 Kessel(Q_th)|off[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] - 9 Kessel(Q_th)|off[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] - 9 Kessel(Q_th)|off[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] - 9 Kessel(Q_th)|off[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] - 9 Kessel(Q_th)|off[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 08:00:00] - 9 Kessel(Q_th)|off[2020-01-01 08:00:00] ≤ -0.0
+ "Kessel(Q_th)|consecutive_off_hours|forward": |-
+ Constraint `Kessel(Q_th)|consecutive_off_hours|forward`
+ [time: 8]:
+ ------------------------------------------------------------------
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 08:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] ≤ 1.0
+ "Kessel(Q_th)|consecutive_off_hours|backward": |-
+ Constraint `Kessel(Q_th)|consecutive_off_hours|backward`
+ [time: 8]:
+ -------------------------------------------------------------------
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 00:00:00] - 9 Kessel(Q_th)|off[2020-01-01 01:00:00] ≥ -8.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] - 9 Kessel(Q_th)|off[2020-01-01 02:00:00] ≥ -8.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] - 9 Kessel(Q_th)|off[2020-01-01 03:00:00] ≥ -8.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] - 9 Kessel(Q_th)|off[2020-01-01 04:00:00] ≥ -8.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] - 9 Kessel(Q_th)|off[2020-01-01 05:00:00] ≥ -8.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] - 9 Kessel(Q_th)|off[2020-01-01 06:00:00] ≥ -8.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] - 9 Kessel(Q_th)|off[2020-01-01 07:00:00] ≥ -8.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 08:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] - 9 Kessel(Q_th)|off[2020-01-01 08:00:00] ≥ -8.0
+ "Kessel(Q_th)|consecutive_off_hours|initial": |-
+ Constraint `Kessel(Q_th)|consecutive_off_hours|initial`
+ -------------------------------------------------------
+ +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 00:00:00] - 1 Kessel(Q_th)|off[2020-01-01 00:00:00] = -0.0
+ "Kessel(Q_th)->costs(temporal)": |-
+ Constraint `Kessel(Q_th)->costs(temporal)`
+ [time: 9]:
+ -----------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 00:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 01:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 02:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 03:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 04:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 05:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 06:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 07:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 08:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_th)|flow_rate|lb2": |-
+ Constraint `Kessel(Q_th)|flow_rate|lb2`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] - 5 Kessel(Q_th)|on[2020-01-01 00:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] - 5 Kessel(Q_th)|on[2020-01-01 01:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] - 5 Kessel(Q_th)|on[2020-01-01 02:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] - 5 Kessel(Q_th)|on[2020-01-01 03:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] - 5 Kessel(Q_th)|on[2020-01-01 04:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] - 5 Kessel(Q_th)|on[2020-01-01 05:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] - 5 Kessel(Q_th)|on[2020-01-01 06:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] - 5 Kessel(Q_th)|on[2020-01-01 07:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] - 5 Kessel(Q_th)|on[2020-01-01 08:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ "Kessel(Q_th)|flow_rate|ub2": |-
+ Constraint `Kessel(Q_th)|flow_rate|ub2`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ "Kessel(Q_th)|flow_rate|ub1": |-
+ Constraint `Kessel(Q_th)|flow_rate|ub1`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +50 Kessel(Q_th)|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +50 Kessel(Q_th)|on[2020-01-01 01:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +50 Kessel(Q_th)|on[2020-01-01 02:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +50 Kessel(Q_th)|on[2020-01-01 03:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +50 Kessel(Q_th)|on[2020-01-01 04:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +50 Kessel(Q_th)|on[2020-01-01 05:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +50 Kessel(Q_th)|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +50 Kessel(Q_th)|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +50 Kessel(Q_th)|on[2020-01-01 08:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] ≥ -0.0
+ "Kessel(Q_th)|flow_rate|lb1": |-
+ Constraint `Kessel(Q_th)|flow_rate|lb1`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +5 Kessel(Q_th)|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +5 Kessel(Q_th)|on[2020-01-01 01:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +5 Kessel(Q_th)|on[2020-01-01 02:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +5 Kessel(Q_th)|on[2020-01-01 03:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +5 Kessel(Q_th)|on[2020-01-01 04:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +5 Kessel(Q_th)|on[2020-01-01 05:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +5 Kessel(Q_th)|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +5 Kessel(Q_th)|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +5 Kessel(Q_th)|on[2020-01-01 08:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] ≤ -0.0
+ "Kessel(Q_th)|total_flow_hours": |-
+ Constraint `Kessel(Q_th)|total_flow_hours`
+ ------------------------------------------
+ +1 Kessel(Q_th)|total_flow_hours - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00]... -1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_th)|load_factor_max": |-
+ Constraint `Kessel(Q_th)|load_factor_max`
+ -----------------------------------------
+ +1 Kessel(Q_th)|total_flow_hours - 9 Kessel(Q_th)|size ≤ -0.0
+ "Kessel(Q_th)|load_factor_min": |-
+ Constraint `Kessel(Q_th)|load_factor_min`
+ -----------------------------------------
+ +1 Kessel(Q_th)|total_flow_hours - 0.9 Kessel(Q_th)|size ≥ -0.0
+ "Kessel|on|ub": |-
+ Constraint `Kessel|on|ub`
+ [time: 9]:
+ ------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel|on[2020-01-01 00:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|on[2020-01-01 00:00:00] ≤ 1e-05
+ [2020-01-01 01:00:00]: +1 Kessel|on[2020-01-01 01:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 01:00:00] - 1 Kessel(Q_th)|on[2020-01-01 01:00:00] ≤ 1e-05
+ [2020-01-01 02:00:00]: +1 Kessel|on[2020-01-01 02:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 02:00:00] - 1 Kessel(Q_th)|on[2020-01-01 02:00:00] ≤ 1e-05
+ [2020-01-01 03:00:00]: +1 Kessel|on[2020-01-01 03:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 03:00:00] - 1 Kessel(Q_th)|on[2020-01-01 03:00:00] ≤ 1e-05
+ [2020-01-01 04:00:00]: +1 Kessel|on[2020-01-01 04:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 04:00:00] - 1 Kessel(Q_th)|on[2020-01-01 04:00:00] ≤ 1e-05
+ [2020-01-01 05:00:00]: +1 Kessel|on[2020-01-01 05:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 05:00:00] - 1 Kessel(Q_th)|on[2020-01-01 05:00:00] ≤ 1e-05
+ [2020-01-01 06:00:00]: +1 Kessel|on[2020-01-01 06:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|on[2020-01-01 06:00:00] ≤ 1e-05
+ [2020-01-01 07:00:00]: +1 Kessel|on[2020-01-01 07:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|on[2020-01-01 07:00:00] ≤ 1e-05
+ [2020-01-01 08:00:00]: +1 Kessel|on[2020-01-01 08:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 08:00:00] - 1 Kessel(Q_th)|on[2020-01-01 08:00:00] ≤ 1e-05
+ "Kessel|on|lb": |-
+ Constraint `Kessel|on|lb`
+ [time: 9]:
+ ------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel|on[2020-01-01 00:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 00:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel|on[2020-01-01 01:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 01:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel|on[2020-01-01 02:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 02:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel|on[2020-01-01 03:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 03:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel|on[2020-01-01 04:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 04:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel|on[2020-01-01 05:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 05:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel|on[2020-01-01 06:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 06:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel|on[2020-01-01 07:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 07:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel|on[2020-01-01 08:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 08:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Kessel|on_hours_total": |-
+ Constraint `Kessel|on_hours_total`
+ ----------------------------------
+ +1 Kessel|on_hours_total - 1 Kessel|on[2020-01-01 00:00:00] - 1 Kessel|on[2020-01-01 01:00:00]... -1 Kessel|on[2020-01-01 06:00:00] - 1 Kessel|on[2020-01-01 07:00:00] - 1 Kessel|on[2020-01-01 08:00:00] = -0.0
+ "Kessel->costs(temporal)": |-
+ Constraint `Kessel->costs(temporal)`
+ [time: 9]:
+ -----------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel->costs(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kessel->costs(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Kessel->costs(temporal)[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Kessel->costs(temporal)[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Kessel->costs(temporal)[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Kessel->costs(temporal)[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Kessel->costs(temporal)[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Kessel->costs(temporal)[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Kessel->costs(temporal)[2020-01-01 08:00:00] = -0.0
+ "Kessel->CO2(temporal)": |-
+ Constraint `Kessel->CO2(temporal)`
+ [time: 9]:
+ ---------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 00:00:00] - 1000 Kessel|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 01:00:00] - 1000 Kessel|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 02:00:00] - 1000 Kessel|on[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 03:00:00] - 1000 Kessel|on[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 04:00:00] - 1000 Kessel|on[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 05:00:00] - 1000 Kessel|on[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 06:00:00] - 1000 Kessel|on[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 07:00:00] - 1000 Kessel|on[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 08:00:00] - 1000 Kessel|on[2020-01-01 08:00:00] = -0.0
+ "Kessel|conversion_0": |-
+ Constraint `Kessel|conversion_0`
+ [time: 9]:
+ -------------------------------------------
+ [2020-01-01 00:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_load)|on_hours_total": |-
+ Constraint `Speicher(Q_th_load)|on_hours_total`
+ -----------------------------------------------
+ +1 Speicher(Q_th_load)|on_hours_total - 1 Speicher(Q_th_load)|on[2020-01-01 00:00:00] - 1 Speicher(Q_th_load)|on[2020-01-01 01:00:00]... -1 Speicher(Q_th_load)|on[2020-01-01 06:00:00] - 1 Speicher(Q_th_load)|on[2020-01-01 07:00:00] - 1 Speicher(Q_th_load)|on[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_load)|flow_rate|ub": |-
+ Constraint `Speicher(Q_th_load)|flow_rate|ub`
+ [time: 9]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Speicher(Q_th_load)|flow_rate|lb": |-
+ Constraint `Speicher(Q_th_load)|flow_rate|lb`
+ [time: 9]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Speicher(Q_th_load)|total_flow_hours": |-
+ Constraint `Speicher(Q_th_load)|total_flow_hours`
+ -------------------------------------------------
+ +1 Speicher(Q_th_load)|total_flow_hours - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_unload)|on_hours_total": |-
+ Constraint `Speicher(Q_th_unload)|on_hours_total`
+ -------------------------------------------------
+ +1 Speicher(Q_th_unload)|on_hours_total - 1 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] - 1 Speicher(Q_th_unload)|on[2020-01-01 01:00:00]... -1 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] - 1 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] - 1 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_unload)|flow_rate|ub": |-
+ Constraint `Speicher(Q_th_unload)|flow_rate|ub`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Speicher(Q_th_unload)|flow_rate|lb": |-
+ Constraint `Speicher(Q_th_unload)|flow_rate|lb`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Speicher(Q_th_unload)|total_flow_hours": |-
+ Constraint `Speicher(Q_th_unload)|total_flow_hours`
+ ---------------------------------------------------
+ +1 Speicher(Q_th_unload)|total_flow_hours - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00]... -1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher|prevent_simultaneous_use": |-
+ Constraint `Speicher|prevent_simultaneous_use`
+ [time: 9]:
+ ---------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 00:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 01:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 02:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 03:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 04:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 05:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 06:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 07:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 08:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ≤ 1.0
+ "Speicher|netto_discharge": |-
+ Constraint `Speicher|netto_discharge`
+ [time: 9]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher|netto_discharge[2020-01-01 00:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Speicher|netto_discharge[2020-01-01 01:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|netto_discharge[2020-01-01 02:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|netto_discharge[2020-01-01 03:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|netto_discharge[2020-01-01 04:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|netto_discharge[2020-01-01 05:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|netto_discharge[2020-01-01 06:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|netto_discharge[2020-01-01 07:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|netto_discharge[2020-01-01 08:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher|charge_state": |-
+ Constraint `Speicher|charge_state`
+ [time: 9]:
+ ---------------------------------------------
+ [2020-01-01 01:00:00]: +1 Speicher|charge_state[2020-01-01 01:00:00] - 0.92 Speicher|charge_state[2020-01-01 00:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|charge_state[2020-01-01 02:00:00] - 0.92 Speicher|charge_state[2020-01-01 01:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|charge_state[2020-01-01 03:00:00] - 0.92 Speicher|charge_state[2020-01-01 02:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|charge_state[2020-01-01 04:00:00] - 0.92 Speicher|charge_state[2020-01-01 03:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|charge_state[2020-01-01 05:00:00] - 0.92 Speicher|charge_state[2020-01-01 04:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|charge_state[2020-01-01 06:00:00] - 0.92 Speicher|charge_state[2020-01-01 05:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|charge_state[2020-01-01 07:00:00] - 0.92 Speicher|charge_state[2020-01-01 06:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|charge_state[2020-01-01 08:00:00] - 0.92 Speicher|charge_state[2020-01-01 07:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 09:00:00]: +1 Speicher|charge_state[2020-01-01 09:00:00] - 0.92 Speicher|charge_state[2020-01-01 08:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher->costs(periodic)": |-
+ Constraint `Speicher->costs(periodic)`
+ --------------------------------------
+ +1 Speicher->costs(periodic) - 0.01 Speicher|size - 1 Speicher|PiecewiseEffects|costs = -0.0
+ "Speicher->CO2(periodic)": |-
+ Constraint `Speicher->CO2(periodic)`
+ ------------------------------------
+ +1 Speicher->CO2(periodic) - 0.01 Speicher|size = -0.0
+ "Speicher|Piece_0|inside_piece": |-
+ Constraint `Speicher|Piece_0|inside_piece`
+ ------------------------------------------
+ +1 Speicher|Piece_0|inside_piece - 1 Speicher|Piece_0|lambda0 - 1 Speicher|Piece_0|lambda1 = -0.0
+ "Speicher|Piece_1|inside_piece": |-
+ Constraint `Speicher|Piece_1|inside_piece`
+ ------------------------------------------
+ +1 Speicher|Piece_1|inside_piece - 1 Speicher|Piece_1|lambda0 - 1 Speicher|Piece_1|lambda1 = -0.0
+ "Speicher|PiecewiseEffects|Speicher|size|lambda": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|size|lambda`
+ -----------------------------------------------------------
+ +1 Speicher|size - 5 Speicher|Piece_0|lambda0 - 25 Speicher|Piece_0|lambda1 - 25 Speicher|Piece_1|lambda0 - 100 Speicher|Piece_1|lambda1 = -0.0
+ "Speicher|PiecewiseEffects|Speicher|size|single_segment": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|size|single_segment`
+ -------------------------------------------------------------------
+ +1 Speicher|Piece_0|inside_piece + 1 Speicher|Piece_1|inside_piece ≤ 1.0
+ "Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|costs|lambda": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|costs|lambda`
+ -----------------------------------------------------------------------------
+ +1 Speicher|PiecewiseEffects|costs - 50 Speicher|Piece_0|lambda0 - 250 Speicher|Piece_0|lambda1 - 250 Speicher|Piece_1|lambda0 - 800 Speicher|Piece_1|lambda1 = -0.0
+ "Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|costs|single_segment": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|costs|single_segment`
+ -------------------------------------------------------------------------------------
+ +1 Speicher|Piece_0|inside_piece + 1 Speicher|Piece_1|inside_piece ≤ 1.0
+ "Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|PE|lambda": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|PE|lambda`
+ --------------------------------------------------------------------------
+ +1 Speicher|PiecewiseEffects|PE - 5 Speicher|Piece_0|lambda0 - 25 Speicher|Piece_0|lambda1 - 25 Speicher|Piece_1|lambda0 - 100 Speicher|Piece_1|lambda1 = -0.0
+ "Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|PE|single_segment": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|PE|single_segment`
+ ----------------------------------------------------------------------------------
+ +1 Speicher|Piece_0|inside_piece + 1 Speicher|Piece_1|inside_piece ≤ 1.0
+ "Speicher->PE(periodic)": |-
+ Constraint `Speicher->PE(periodic)`
+ -----------------------------------
+ +1 Speicher->PE(periodic) - 1 Speicher|PiecewiseEffects|PE = -0.0
+ "Speicher|charge_state|ub": |-
+ Constraint `Speicher|charge_state|ub`
+ [time: 10]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher|charge_state[2020-01-01 00:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher|charge_state[2020-01-01 01:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|charge_state[2020-01-01 02:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|charge_state[2020-01-01 03:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|charge_state[2020-01-01 04:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|charge_state[2020-01-01 05:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|charge_state[2020-01-01 06:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|charge_state[2020-01-01 07:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|charge_state[2020-01-01 08:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 09:00:00]: +1 Speicher|charge_state[2020-01-01 09:00:00] - 1 Speicher|size ≤ -0.0
+ "Speicher|charge_state|lb": |-
+ Constraint `Speicher|charge_state|lb`
+ [time: 10]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher|charge_state[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher|charge_state[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|charge_state[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|charge_state[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|charge_state[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|charge_state[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|charge_state[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|charge_state[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|charge_state[2020-01-01 08:00:00] ≥ -0.0
+ [2020-01-01 09:00:00]: +1 Speicher|charge_state[2020-01-01 09:00:00] ≥ -0.0
+ "Speicher|initial_charge_state": |-
+ Constraint `Speicher|initial_charge_state`
+ ------------------------------------------
+ +1 Speicher|charge_state[2020-01-01 00:00:00] = -0.0
+ "Speicher|final_charge_max": |-
+ Constraint `Speicher|final_charge_max`
+ --------------------------------------
+ +1 Speicher|charge_state[2020-01-01 09:00:00] ≤ 10.0
+ "KWK(Q_fu)|on_hours_total": |-
+ Constraint `KWK(Q_fu)|on_hours_total`
+ -------------------------------------
+ +1 KWK(Q_fu)|on_hours_total - 1 KWK(Q_fu)|on[2020-01-01 00:00:00] - 1 KWK(Q_fu)|on[2020-01-01 01:00:00]... -1 KWK(Q_fu)|on[2020-01-01 06:00:00] - 1 KWK(Q_fu)|on[2020-01-01 07:00:00] - 1 KWK(Q_fu)|on[2020-01-01 08:00:00] = -0.0
+ "KWK(Q_fu)|flow_rate|ub": |-
+ Constraint `KWK(Q_fu)|flow_rate|ub`
+ [time: 9]:
+ ----------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1000 KWK(Q_fu)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1000 KWK(Q_fu)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1000 KWK(Q_fu)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1000 KWK(Q_fu)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1000 KWK(Q_fu)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1000 KWK(Q_fu)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1000 KWK(Q_fu)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1000 KWK(Q_fu)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1000 KWK(Q_fu)|on[2020-01-01 08:00:00] ≤ -0.0
+ "KWK(Q_fu)|flow_rate|lb": |-
+ Constraint `KWK(Q_fu)|flow_rate|lb`
+ [time: 9]:
+ ----------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 08:00:00] ≥ -0.0
+ "KWK(Q_fu)|total_flow_hours": |-
+ Constraint `KWK(Q_fu)|total_flow_hours`
+ ---------------------------------------
+ +1 KWK(Q_fu)|total_flow_hours - 1 KWK(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 01:00:00]... -1 KWK(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "KWK(Q_th)|on_hours_total": |-
+ Constraint `KWK(Q_th)|on_hours_total`
+ -------------------------------------
+ +1 KWK(Q_th)|on_hours_total - 1 KWK(Q_th)|on[2020-01-01 00:00:00] - 1 KWK(Q_th)|on[2020-01-01 01:00:00]... -1 KWK(Q_th)|on[2020-01-01 06:00:00] - 1 KWK(Q_th)|on[2020-01-01 07:00:00] - 1 KWK(Q_th)|on[2020-01-01 08:00:00] = -0.0
+ "KWK(Q_th)|flow_rate|ub": |-
+ Constraint `KWK(Q_th)|flow_rate|ub`
+ [time: 9]:
+ ----------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 00:00:00] - 1000 KWK(Q_th)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 01:00:00] - 1000 KWK(Q_th)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 02:00:00] - 1000 KWK(Q_th)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 03:00:00] - 1000 KWK(Q_th)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 04:00:00] - 1000 KWK(Q_th)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 05:00:00] - 1000 KWK(Q_th)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 06:00:00] - 1000 KWK(Q_th)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 07:00:00] - 1000 KWK(Q_th)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 08:00:00] - 1000 KWK(Q_th)|on[2020-01-01 08:00:00] ≤ -0.0
+ "KWK(Q_th)|flow_rate|lb": |-
+ Constraint `KWK(Q_th)|flow_rate|lb`
+ [time: 9]:
+ ----------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 00:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 01:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 02:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 03:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 04:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 05:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 06:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 07:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 08:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 08:00:00] ≥ -0.0
+ "KWK(Q_th)|total_flow_hours": |-
+ Constraint `KWK(Q_th)|total_flow_hours`
+ ---------------------------------------
+ +1 KWK(Q_th)|total_flow_hours - 1 KWK(Q_th)|flow_rate[2020-01-01 00:00:00] - 1 KWK(Q_th)|flow_rate[2020-01-01 01:00:00]... -1 KWK(Q_th)|flow_rate[2020-01-01 06:00:00] - 1 KWK(Q_th)|flow_rate[2020-01-01 07:00:00] - 1 KWK(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "KWK(P_el)|on_hours_total": |-
+ Constraint `KWK(P_el)|on_hours_total`
+ -------------------------------------
+ +1 KWK(P_el)|on_hours_total - 1 KWK(P_el)|on[2020-01-01 00:00:00] - 1 KWK(P_el)|on[2020-01-01 01:00:00]... -1 KWK(P_el)|on[2020-01-01 06:00:00] - 1 KWK(P_el)|on[2020-01-01 07:00:00] - 1 KWK(P_el)|on[2020-01-01 08:00:00] = -0.0
+ "KWK(P_el)|flow_rate|ub": |-
+ Constraint `KWK(P_el)|flow_rate|ub`
+ [time: 9]:
+ ----------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 00:00:00] - 60 KWK(P_el)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 01:00:00] - 60 KWK(P_el)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 02:00:00] - 60 KWK(P_el)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 03:00:00] - 60 KWK(P_el)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 04:00:00] - 60 KWK(P_el)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 05:00:00] - 60 KWK(P_el)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 06:00:00] - 60 KWK(P_el)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 07:00:00] - 60 KWK(P_el)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 08:00:00] - 60 KWK(P_el)|on[2020-01-01 08:00:00] ≤ -0.0
+ "KWK(P_el)|flow_rate|lb": |-
+ Constraint `KWK(P_el)|flow_rate|lb`
+ [time: 9]:
+ ----------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 00:00:00] - 5 KWK(P_el)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 01:00:00] - 5 KWK(P_el)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 02:00:00] - 5 KWK(P_el)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 03:00:00] - 5 KWK(P_el)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 04:00:00] - 5 KWK(P_el)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 05:00:00] - 5 KWK(P_el)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 06:00:00] - 5 KWK(P_el)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 07:00:00] - 5 KWK(P_el)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 08:00:00] - 5 KWK(P_el)|on[2020-01-01 08:00:00] ≥ -0.0
+ "KWK(P_el)|total_flow_hours": |-
+ Constraint `KWK(P_el)|total_flow_hours`
+ ---------------------------------------
+ +1 KWK(P_el)|total_flow_hours - 1 KWK(P_el)|flow_rate[2020-01-01 00:00:00] - 1 KWK(P_el)|flow_rate[2020-01-01 01:00:00]... -1 KWK(P_el)|flow_rate[2020-01-01 06:00:00] - 1 KWK(P_el)|flow_rate[2020-01-01 07:00:00] - 1 KWK(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "KWK|on|ub": |-
+ Constraint `KWK|on|ub`
+ [time: 9]:
+ ---------------------------------
+ [2020-01-01 00:00:00]: +1 KWK|on[2020-01-01 00:00:00] - 1 KWK(Q_fu)|on[2020-01-01 00:00:00] - 1 KWK(Q_th)|on[2020-01-01 00:00:00] - 1 KWK(P_el)|on[2020-01-01 00:00:00] ≤ 1e-05
+ [2020-01-01 01:00:00]: +1 KWK|on[2020-01-01 01:00:00] - 1 KWK(Q_fu)|on[2020-01-01 01:00:00] - 1 KWK(Q_th)|on[2020-01-01 01:00:00] - 1 KWK(P_el)|on[2020-01-01 01:00:00] ≤ 1e-05
+ [2020-01-01 02:00:00]: +1 KWK|on[2020-01-01 02:00:00] - 1 KWK(Q_fu)|on[2020-01-01 02:00:00] - 1 KWK(Q_th)|on[2020-01-01 02:00:00] - 1 KWK(P_el)|on[2020-01-01 02:00:00] ≤ 1e-05
+ [2020-01-01 03:00:00]: +1 KWK|on[2020-01-01 03:00:00] - 1 KWK(Q_fu)|on[2020-01-01 03:00:00] - 1 KWK(Q_th)|on[2020-01-01 03:00:00] - 1 KWK(P_el)|on[2020-01-01 03:00:00] ≤ 1e-05
+ [2020-01-01 04:00:00]: +1 KWK|on[2020-01-01 04:00:00] - 1 KWK(Q_fu)|on[2020-01-01 04:00:00] - 1 KWK(Q_th)|on[2020-01-01 04:00:00] - 1 KWK(P_el)|on[2020-01-01 04:00:00] ≤ 1e-05
+ [2020-01-01 05:00:00]: +1 KWK|on[2020-01-01 05:00:00] - 1 KWK(Q_fu)|on[2020-01-01 05:00:00] - 1 KWK(Q_th)|on[2020-01-01 05:00:00] - 1 KWK(P_el)|on[2020-01-01 05:00:00] ≤ 1e-05
+ [2020-01-01 06:00:00]: +1 KWK|on[2020-01-01 06:00:00] - 1 KWK(Q_fu)|on[2020-01-01 06:00:00] - 1 KWK(Q_th)|on[2020-01-01 06:00:00] - 1 KWK(P_el)|on[2020-01-01 06:00:00] ≤ 1e-05
+ [2020-01-01 07:00:00]: +1 KWK|on[2020-01-01 07:00:00] - 1 KWK(Q_fu)|on[2020-01-01 07:00:00] - 1 KWK(Q_th)|on[2020-01-01 07:00:00] - 1 KWK(P_el)|on[2020-01-01 07:00:00] ≤ 1e-05
+ [2020-01-01 08:00:00]: +1 KWK|on[2020-01-01 08:00:00] - 1 KWK(Q_fu)|on[2020-01-01 08:00:00] - 1 KWK(Q_th)|on[2020-01-01 08:00:00] - 1 KWK(P_el)|on[2020-01-01 08:00:00] ≤ 1e-05
+ "KWK|on|lb": |-
+ Constraint `KWK|on|lb`
+ [time: 9]:
+ ---------------------------------
+ [2020-01-01 00:00:00]: +1 KWK|on[2020-01-01 00:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 00:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 00:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 KWK|on[2020-01-01 01:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 01:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 01:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 KWK|on[2020-01-01 02:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 02:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 02:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 KWK|on[2020-01-01 03:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 03:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 03:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 KWK|on[2020-01-01 04:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 04:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 04:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 KWK|on[2020-01-01 05:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 05:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 05:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 KWK|on[2020-01-01 06:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 06:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 06:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 KWK|on[2020-01-01 07:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 07:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 07:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 KWK|on[2020-01-01 08:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 08:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 08:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 08:00:00] ≥ -0.0
+ "KWK|on_hours_total": |-
+ Constraint `KWK|on_hours_total`
+ -------------------------------
+ +1 KWK|on_hours_total - 1 KWK|on[2020-01-01 00:00:00] - 1 KWK|on[2020-01-01 01:00:00]... -1 KWK|on[2020-01-01 06:00:00] - 1 KWK|on[2020-01-01 07:00:00] - 1 KWK|on[2020-01-01 08:00:00] = -0.0
+ "KWK|switch|transition": |-
+ Constraint `KWK|switch|transition`
+ [time: 8]:
+ ---------------------------------------------
+ [2020-01-01 01:00:00]: +1 KWK|switch|on[2020-01-01 01:00:00] - 1 KWK|switch|off[2020-01-01 01:00:00] - 1 KWK|on[2020-01-01 01:00:00] + 1 KWK|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 KWK|switch|on[2020-01-01 02:00:00] - 1 KWK|switch|off[2020-01-01 02:00:00] - 1 KWK|on[2020-01-01 02:00:00] + 1 KWK|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 KWK|switch|on[2020-01-01 03:00:00] - 1 KWK|switch|off[2020-01-01 03:00:00] - 1 KWK|on[2020-01-01 03:00:00] + 1 KWK|on[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 KWK|switch|on[2020-01-01 04:00:00] - 1 KWK|switch|off[2020-01-01 04:00:00] - 1 KWK|on[2020-01-01 04:00:00] + 1 KWK|on[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 KWK|switch|on[2020-01-01 05:00:00] - 1 KWK|switch|off[2020-01-01 05:00:00] - 1 KWK|on[2020-01-01 05:00:00] + 1 KWK|on[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 KWK|switch|on[2020-01-01 06:00:00] - 1 KWK|switch|off[2020-01-01 06:00:00] - 1 KWK|on[2020-01-01 06:00:00] + 1 KWK|on[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 KWK|switch|on[2020-01-01 07:00:00] - 1 KWK|switch|off[2020-01-01 07:00:00] - 1 KWK|on[2020-01-01 07:00:00] + 1 KWK|on[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 KWK|switch|on[2020-01-01 08:00:00] - 1 KWK|switch|off[2020-01-01 08:00:00] - 1 KWK|on[2020-01-01 08:00:00] + 1 KWK|on[2020-01-01 07:00:00] = -0.0
+ "KWK|switch|initial": |-
+ Constraint `KWK|switch|initial`
+ -------------------------------
+ +1 KWK|switch|on[2020-01-01 00:00:00] - 1 KWK|switch|off[2020-01-01 00:00:00] - 1 KWK|on[2020-01-01 00:00:00] = -1.0
+ "KWK|switch|mutex": |-
+ Constraint `KWK|switch|mutex`
+ [time: 9]:
+ ----------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK|switch|on[2020-01-01 00:00:00] + 1 KWK|switch|off[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 01:00:00]: +1 KWK|switch|on[2020-01-01 01:00:00] + 1 KWK|switch|off[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 KWK|switch|on[2020-01-01 02:00:00] + 1 KWK|switch|off[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 KWK|switch|on[2020-01-01 03:00:00] + 1 KWK|switch|off[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 KWK|switch|on[2020-01-01 04:00:00] + 1 KWK|switch|off[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 KWK|switch|on[2020-01-01 05:00:00] + 1 KWK|switch|off[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 KWK|switch|on[2020-01-01 06:00:00] + 1 KWK|switch|off[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 KWK|switch|on[2020-01-01 07:00:00] + 1 KWK|switch|off[2020-01-01 07:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 KWK|switch|on[2020-01-01 08:00:00] + 1 KWK|switch|off[2020-01-01 08:00:00] ≤ 1.0
+ "KWK->costs(temporal)": |-
+ Constraint `KWK->costs(temporal)`
+ [time: 9]:
+ --------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK->costs(temporal)[2020-01-01 00:00:00] - 0.01 KWK|switch|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 KWK->costs(temporal)[2020-01-01 01:00:00] - 0.01 KWK|switch|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 KWK->costs(temporal)[2020-01-01 02:00:00] - 0.01 KWK|switch|on[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 KWK->costs(temporal)[2020-01-01 03:00:00] - 0.01 KWK|switch|on[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 KWK->costs(temporal)[2020-01-01 04:00:00] - 0.01 KWK|switch|on[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 KWK->costs(temporal)[2020-01-01 05:00:00] - 0.01 KWK|switch|on[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 KWK->costs(temporal)[2020-01-01 06:00:00] - 0.01 KWK|switch|on[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 KWK->costs(temporal)[2020-01-01 07:00:00] - 0.01 KWK|switch|on[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 KWK->costs(temporal)[2020-01-01 08:00:00] - 0.01 KWK|switch|on[2020-01-01 08:00:00] = -0.0
+ "KWK|conversion_0": |-
+ Constraint `KWK|conversion_0`
+ [time: 9]:
+ ----------------------------------------
+ [2020-01-01 00:00:00]: +0.5 KWK(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 KWK(Q_th)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +0.5 KWK(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 KWK(Q_th)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +0.5 KWK(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 KWK(Q_th)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +0.5 KWK(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 KWK(Q_th)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +0.5 KWK(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 KWK(Q_th)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +0.5 KWK(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 KWK(Q_th)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +0.5 KWK(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 KWK(Q_th)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +0.5 KWK(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 KWK(Q_th)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +0.5 KWK(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 KWK(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "KWK|conversion_1": |-
+ Constraint `KWK|conversion_1`
+ [time: 9]:
+ ----------------------------------------
+ [2020-01-01 00:00:00]: +0.4 KWK(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 KWK(P_el)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +0.4 KWK(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 KWK(P_el)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +0.4 KWK(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 KWK(P_el)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +0.4 KWK(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 KWK(P_el)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +0.4 KWK(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 KWK(P_el)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +0.4 KWK(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 KWK(P_el)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +0.4 KWK(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 KWK(P_el)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +0.4 KWK(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 KWK(P_el)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +0.4 KWK(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 KWK(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Strom|balance": |-
+ Constraint `Strom|balance`
+ [time: 9]:
+ -------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 00:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] + 1 Strom|excess_input[2020-01-01 00:00:00] - 1 Strom|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 01:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] + 1 Strom|excess_input[2020-01-01 01:00:00] - 1 Strom|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 02:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00] + 1 Strom|excess_input[2020-01-01 02:00:00] - 1 Strom|excess_output[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 03:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 03:00:00] + 1 Strom|excess_input[2020-01-01 03:00:00] - 1 Strom|excess_output[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 04:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 04:00:00] + 1 Strom|excess_input[2020-01-01 04:00:00] - 1 Strom|excess_output[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 05:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 05:00:00] + 1 Strom|excess_input[2020-01-01 05:00:00] - 1 Strom|excess_output[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 06:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] + 1 Strom|excess_input[2020-01-01 06:00:00] - 1 Strom|excess_output[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 07:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] + 1 Strom|excess_input[2020-01-01 07:00:00] - 1 Strom|excess_output[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 08:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] + 1 Strom|excess_input[2020-01-01 08:00:00] - 1 Strom|excess_output[2020-01-01 08:00:00] = -0.0
+ "Strom->Penalty": |-
+ Constraint `Strom->Penalty`
+ ---------------------------
+ +1 Strom->Penalty - 1e+05 Strom|excess_input[2020-01-01 00:00:00] - 1e+05 Strom|excess_input[2020-01-01 01:00:00]... -1e+05 Strom|excess_output[2020-01-01 06:00:00] - 1e+05 Strom|excess_output[2020-01-01 07:00:00] - 1e+05 Strom|excess_output[2020-01-01 08:00:00] = -0.0
+ "Fernwärme|balance": |-
+ Constraint `Fernwärme|balance`
+ [time: 9]:
+ -----------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 00:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] + 1 Fernwärme|excess_input[2020-01-01 00:00:00] - 1 Fernwärme|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 01:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] + 1 Fernwärme|excess_input[2020-01-01 01:00:00] - 1 Fernwärme|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 02:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] + 1 Fernwärme|excess_input[2020-01-01 02:00:00] - 1 Fernwärme|excess_output[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 03:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] + 1 Fernwärme|excess_input[2020-01-01 03:00:00] - 1 Fernwärme|excess_output[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 04:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] + 1 Fernwärme|excess_input[2020-01-01 04:00:00] - 1 Fernwärme|excess_output[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 05:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] + 1 Fernwärme|excess_input[2020-01-01 05:00:00] - 1 Fernwärme|excess_output[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 06:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] + 1 Fernwärme|excess_input[2020-01-01 06:00:00] - 1 Fernwärme|excess_output[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 07:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] + 1 Fernwärme|excess_input[2020-01-01 07:00:00] - 1 Fernwärme|excess_output[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 08:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] + 1 Fernwärme|excess_input[2020-01-01 08:00:00] - 1 Fernwärme|excess_output[2020-01-01 08:00:00] = -0.0
+ "Fernwärme->Penalty": |-
+ Constraint `Fernwärme->Penalty`
+ -------------------------------
+ +1 Fernwärme->Penalty - 1e+05 Fernwärme|excess_input[2020-01-01 00:00:00] - 1e+05 Fernwärme|excess_input[2020-01-01 01:00:00]... -1e+05 Fernwärme|excess_output[2020-01-01 06:00:00] - 1e+05 Fernwärme|excess_output[2020-01-01 07:00:00] - 1e+05 Fernwärme|excess_output[2020-01-01 08:00:00] = -0.0
+ "Gas|balance": |-
+ Constraint `Gas|balance`
+ [time: 9]:
+ -----------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 00:00:00] + 1 Gas|excess_input[2020-01-01 00:00:00] - 1 Gas|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 01:00:00] + 1 Gas|excess_input[2020-01-01 01:00:00] - 1 Gas|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 02:00:00] + 1 Gas|excess_input[2020-01-01 02:00:00] - 1 Gas|excess_output[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 03:00:00] + 1 Gas|excess_input[2020-01-01 03:00:00] - 1 Gas|excess_output[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 04:00:00] + 1 Gas|excess_input[2020-01-01 04:00:00] - 1 Gas|excess_output[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 05:00:00] + 1 Gas|excess_input[2020-01-01 05:00:00] - 1 Gas|excess_output[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 06:00:00] + 1 Gas|excess_input[2020-01-01 06:00:00] - 1 Gas|excess_output[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 07:00:00] + 1 Gas|excess_input[2020-01-01 07:00:00] - 1 Gas|excess_output[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 08:00:00] + 1 Gas|excess_input[2020-01-01 08:00:00] - 1 Gas|excess_output[2020-01-01 08:00:00] = -0.0
+ "Gas->Penalty": |-
+ Constraint `Gas->Penalty`
+ -------------------------
+ +1 Gas->Penalty - 1e+05 Gas|excess_input[2020-01-01 00:00:00] - 1e+05 Gas|excess_input[2020-01-01 01:00:00]... -1e+05 Gas|excess_output[2020-01-01 06:00:00] - 1e+05 Gas|excess_output[2020-01-01 07:00:00] - 1e+05 Gas|excess_output[2020-01-01 08:00:00] = -0.0
+binaries:
+ - "Kessel(Q_fu)|on"
+ - "Kessel(Q_th)|on"
+ - "Kessel(Q_th)|off"
+ - "Kessel(Q_th)|switch|on"
+ - "Kessel(Q_th)|switch|off"
+ - "Kessel|on"
+ - "Speicher(Q_th_load)|on"
+ - "Speicher(Q_th_unload)|on"
+ - "Speicher|Piece_0|inside_piece"
+ - "Speicher|Piece_1|inside_piece"
+ - "KWK(Q_fu)|on"
+ - "KWK(Q_th)|on"
+ - "KWK(P_el)|on"
+ - "KWK|on"
+ - "KWK|switch|on"
+ - "KWK|switch|off"
+integers: []
+continuous:
+ - costs(periodic)
+ - costs(temporal)
+ - "costs(temporal)|per_timestep"
+ - costs
+ - CO2(periodic)
+ - CO2(temporal)
+ - "CO2(temporal)|per_timestep"
+ - CO2
+ - PE(periodic)
+ - PE(temporal)
+ - "PE(temporal)|per_timestep"
+ - PE
+ - Penalty
+ - "CO2(temporal)->costs(temporal)"
+ - "Wärmelast(Q_th_Last)|flow_rate"
+ - "Wärmelast(Q_th_Last)|total_flow_hours"
+ - "Gastarif(Q_Gas)|flow_rate"
+ - "Gastarif(Q_Gas)|total_flow_hours"
+ - "Gastarif(Q_Gas)->costs(temporal)"
+ - "Gastarif(Q_Gas)->CO2(temporal)"
+ - "Einspeisung(P_el)|flow_rate"
+ - "Einspeisung(P_el)|total_flow_hours"
+ - "Einspeisung(P_el)->costs(temporal)"
+ - "Kessel(Q_fu)|flow_rate"
+ - "Kessel(Q_fu)|on_hours_total"
+ - "Kessel(Q_fu)|total_flow_hours"
+ - "Kessel(Q_th)|flow_rate"
+ - "Kessel(Q_th)|size"
+ - "Kessel(Q_th)->costs(periodic)"
+ - "Kessel(Q_th)->PE(periodic)"
+ - "Kessel(Q_th)|on_hours_total"
+ - "Kessel(Q_th)|switch|count"
+ - "Kessel(Q_th)|consecutive_on_hours"
+ - "Kessel(Q_th)|consecutive_off_hours"
+ - "Kessel(Q_th)->costs(temporal)"
+ - "Kessel(Q_th)|total_flow_hours"
+ - "Kessel|on_hours_total"
+ - "Kessel->costs(temporal)"
+ - "Kessel->CO2(temporal)"
+ - "Speicher(Q_th_load)|flow_rate"
+ - "Speicher(Q_th_load)|on_hours_total"
+ - "Speicher(Q_th_load)|total_flow_hours"
+ - "Speicher(Q_th_unload)|flow_rate"
+ - "Speicher(Q_th_unload)|on_hours_total"
+ - "Speicher(Q_th_unload)|total_flow_hours"
+ - "Speicher|charge_state"
+ - "Speicher|netto_discharge"
+ - "Speicher|size"
+ - "Speicher->costs(periodic)"
+ - "Speicher->CO2(periodic)"
+ - "Speicher|PiecewiseEffects|costs"
+ - "Speicher|PiecewiseEffects|PE"
+ - "Speicher|Piece_0|lambda0"
+ - "Speicher|Piece_0|lambda1"
+ - "Speicher|Piece_1|lambda0"
+ - "Speicher|Piece_1|lambda1"
+ - "Speicher->PE(periodic)"
+ - "KWK(Q_fu)|flow_rate"
+ - "KWK(Q_fu)|on_hours_total"
+ - "KWK(Q_fu)|total_flow_hours"
+ - "KWK(Q_th)|flow_rate"
+ - "KWK(Q_th)|on_hours_total"
+ - "KWK(Q_th)|total_flow_hours"
+ - "KWK(P_el)|flow_rate"
+ - "KWK(P_el)|on_hours_total"
+ - "KWK(P_el)|total_flow_hours"
+ - "KWK|on_hours_total"
+ - "KWK->costs(temporal)"
+ - "Strom|excess_input"
+ - "Strom|excess_output"
+ - "Strom->Penalty"
+ - "Fernwärme|excess_input"
+ - "Fernwärme|excess_output"
+ - "Fernwärme->Penalty"
+ - "Gas|excess_input"
+ - "Gas|excess_output"
+ - "Gas->Penalty"
+infeasible_constraints: ''
diff --git a/tests/ressources/v4-api/io_flow_system_base--solution.nc4 b/tests/ressources/v4-api/io_flow_system_base--solution.nc4
new file mode 100644
index 000000000..6137859bc
Binary files /dev/null and b/tests/ressources/v4-api/io_flow_system_base--solution.nc4 differ
diff --git a/tests/ressources/v4-api/io_flow_system_base--summary.yaml b/tests/ressources/v4-api/io_flow_system_base--summary.yaml
new file mode 100644
index 000000000..cb5ecf49c
--- /dev/null
+++ b/tests/ressources/v4-api/io_flow_system_base--summary.yaml
@@ -0,0 +1,56 @@
+Name: io_flow_system_base
+Number of timesteps: 9
+Calculation Type: FullCalculation
+Constraints: 536
+Variables: 454
+Main Results:
+ Objective: -11597.87
+ Penalty: 0.0
+ Effects:
+ CO2 [kg]:
+ temporal: 1293.19
+ periodic: 1.0
+ total: 1294.19
+ costs [€]:
+ temporal: -13898.87
+ periodic: 2301.0
+ total: -11597.87
+ PE [kWh_PE]:
+ temporal: -0.0
+ periodic: 200.0
+ total: 200.0
+ Invest-Decisions:
+ Invested:
+ Kessel(Q_th): 50.0
+ Speicher: 100.0
+ Not invested: {}
+ Buses with excess: []
+Durations:
+ modeling: 0.98
+ solving: 1.63
+ saving: 0.0
+Config:
+ config_name: flixopt
+ logging:
+ level: INFO
+ file: null
+ console: false
+ max_file_size: 10485760
+ backup_count: 5
+ verbose_tracebacks: false
+ modeling:
+ big: 10000000
+ epsilon: 1.0e-05
+ big_binary_bound: 100000
+ solving:
+ mip_gap: 0.01
+ time_limit_seconds: 300
+ log_to_console: false
+ log_main_results: false
+ plotting:
+ default_show: false
+ default_engine: plotly
+ default_dpi: 300
+ default_facet_cols: 3
+ default_sequential_colorscale: turbo
+ default_qualitative_colorscale: plotly
diff --git a/tests/ressources/v4-api/io_flow_system_long--flow_system.nc4 b/tests/ressources/v4-api/io_flow_system_long--flow_system.nc4
new file mode 100644
index 000000000..12d5400da
Binary files /dev/null and b/tests/ressources/v4-api/io_flow_system_long--flow_system.nc4 differ
diff --git a/tests/ressources/v4-api/io_flow_system_long--model_documentation.yaml b/tests/ressources/v4-api/io_flow_system_long--model_documentation.yaml
new file mode 100644
index 000000000..c04ba651a
--- /dev/null
+++ b/tests/ressources/v4-api/io_flow_system_long--model_documentation.yaml
@@ -0,0 +1,1978 @@
+objective: |-
+ Objective:
+ ----------
+ LinearExpression: +1 costs + 1 Penalty
+ Sense: min
+ Value: 343613.2950319929
+termination_condition: optimal
+status: ok
+nvars: 13283
+nvarsbin: 3168
+nvarscont: 10115
+ncons: 11557
+variables:
+ costs(periodic): |-
+ Variable
+ --------
+ costs(periodic) ∈ [-inf, inf]
+ costs(temporal): |-
+ Variable
+ --------
+ costs(temporal) ∈ [-inf, inf]
+ "costs(temporal)|per_timestep": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: costs(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 00:15:00]: costs(temporal)|per_timestep[2020-01-01 00:15:00] ∈ [-inf, inf]
+ [2020-01-01 00:30:00]: costs(temporal)|per_timestep[2020-01-01 00:30:00] ∈ [-inf, inf]
+ [2020-01-01 00:45:00]: costs(temporal)|per_timestep[2020-01-01 00:45:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: costs(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:15:00]: costs(temporal)|per_timestep[2020-01-01 01:15:00] ∈ [-inf, inf]
+ [2020-01-01 01:30:00]: costs(temporal)|per_timestep[2020-01-01 01:30:00] ∈ [-inf, inf]
+ ...
+ [2020-01-03 22:15:00]: costs(temporal)|per_timestep[2020-01-03 22:15:00] ∈ [-inf, inf]
+ [2020-01-03 22:30:00]: costs(temporal)|per_timestep[2020-01-03 22:30:00] ∈ [-inf, inf]
+ [2020-01-03 22:45:00]: costs(temporal)|per_timestep[2020-01-03 22:45:00] ∈ [-inf, inf]
+ [2020-01-03 23:00:00]: costs(temporal)|per_timestep[2020-01-03 23:00:00] ∈ [-inf, inf]
+ [2020-01-03 23:15:00]: costs(temporal)|per_timestep[2020-01-03 23:15:00] ∈ [-inf, inf]
+ [2020-01-03 23:30:00]: costs(temporal)|per_timestep[2020-01-03 23:30:00] ∈ [-inf, inf]
+ [2020-01-03 23:45:00]: costs(temporal)|per_timestep[2020-01-03 23:45:00] ∈ [-inf, inf]
+ costs: |-
+ Variable
+ --------
+ costs ∈ [-inf, inf]
+ CO2(periodic): |-
+ Variable
+ --------
+ CO2(periodic) ∈ [-inf, inf]
+ CO2(temporal): |-
+ Variable
+ --------
+ CO2(temporal) ∈ [-inf, inf]
+ "CO2(temporal)|per_timestep": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: CO2(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 00:15:00]: CO2(temporal)|per_timestep[2020-01-01 00:15:00] ∈ [-inf, inf]
+ [2020-01-01 00:30:00]: CO2(temporal)|per_timestep[2020-01-01 00:30:00] ∈ [-inf, inf]
+ [2020-01-01 00:45:00]: CO2(temporal)|per_timestep[2020-01-01 00:45:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: CO2(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:15:00]: CO2(temporal)|per_timestep[2020-01-01 01:15:00] ∈ [-inf, inf]
+ [2020-01-01 01:30:00]: CO2(temporal)|per_timestep[2020-01-01 01:30:00] ∈ [-inf, inf]
+ ...
+ [2020-01-03 22:15:00]: CO2(temporal)|per_timestep[2020-01-03 22:15:00] ∈ [-inf, inf]
+ [2020-01-03 22:30:00]: CO2(temporal)|per_timestep[2020-01-03 22:30:00] ∈ [-inf, inf]
+ [2020-01-03 22:45:00]: CO2(temporal)|per_timestep[2020-01-03 22:45:00] ∈ [-inf, inf]
+ [2020-01-03 23:00:00]: CO2(temporal)|per_timestep[2020-01-03 23:00:00] ∈ [-inf, inf]
+ [2020-01-03 23:15:00]: CO2(temporal)|per_timestep[2020-01-03 23:15:00] ∈ [-inf, inf]
+ [2020-01-03 23:30:00]: CO2(temporal)|per_timestep[2020-01-03 23:30:00] ∈ [-inf, inf]
+ [2020-01-03 23:45:00]: CO2(temporal)|per_timestep[2020-01-03 23:45:00] ∈ [-inf, inf]
+ CO2: |-
+ Variable
+ --------
+ CO2 ∈ [-inf, inf]
+ PE(periodic): |-
+ Variable
+ --------
+ PE(periodic) ∈ [-inf, inf]
+ PE(temporal): |-
+ Variable
+ --------
+ PE(temporal) ∈ [-inf, inf]
+ "PE(temporal)|per_timestep": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: PE(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 00:15:00]: PE(temporal)|per_timestep[2020-01-01 00:15:00] ∈ [-inf, inf]
+ [2020-01-01 00:30:00]: PE(temporal)|per_timestep[2020-01-01 00:30:00] ∈ [-inf, inf]
+ [2020-01-01 00:45:00]: PE(temporal)|per_timestep[2020-01-01 00:45:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: PE(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:15:00]: PE(temporal)|per_timestep[2020-01-01 01:15:00] ∈ [-inf, inf]
+ [2020-01-01 01:30:00]: PE(temporal)|per_timestep[2020-01-01 01:30:00] ∈ [-inf, inf]
+ ...
+ [2020-01-03 22:15:00]: PE(temporal)|per_timestep[2020-01-03 22:15:00] ∈ [-inf, inf]
+ [2020-01-03 22:30:00]: PE(temporal)|per_timestep[2020-01-03 22:30:00] ∈ [-inf, inf]
+ [2020-01-03 22:45:00]: PE(temporal)|per_timestep[2020-01-03 22:45:00] ∈ [-inf, inf]
+ [2020-01-03 23:00:00]: PE(temporal)|per_timestep[2020-01-03 23:00:00] ∈ [-inf, inf]
+ [2020-01-03 23:15:00]: PE(temporal)|per_timestep[2020-01-03 23:15:00] ∈ [-inf, inf]
+ [2020-01-03 23:30:00]: PE(temporal)|per_timestep[2020-01-03 23:30:00] ∈ [-inf, inf]
+ [2020-01-03 23:45:00]: PE(temporal)|per_timestep[2020-01-03 23:45:00] ∈ [-inf, inf]
+ PE: |-
+ Variable
+ --------
+ PE ∈ [-inf, inf]
+ Penalty: |-
+ Variable
+ --------
+ Penalty ∈ [-inf, inf]
+ "Wärmelast(Q_th_Last)|flow_rate": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00] ∈ [127.1, 127.1]
+ [2020-01-01 00:15:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:15:00] ∈ [122.2, 122.2]
+ [2020-01-01 00:30:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:30:00] ∈ [124.4, 124.4]
+ [2020-01-01 00:45:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:45:00] ∈ [127.7, 127.7]
+ [2020-01-01 01:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00] ∈ [130.7, 130.7]
+ [2020-01-01 01:15:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:15:00] ∈ [132.2, 132.2]
+ [2020-01-01 01:30:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:30:00] ∈ [132.4, 132.4]
+ ...
+ [2020-01-03 22:15:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-03 22:15:00] ∈ [168.9, 168.9]
+ [2020-01-03 22:30:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-03 22:30:00] ∈ [161.6, 161.6]
+ [2020-01-03 22:45:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-03 22:45:00] ∈ [157, 157]
+ [2020-01-03 23:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-03 23:00:00] ∈ [149.8, 149.8]
+ [2020-01-03 23:15:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-03 23:15:00] ∈ [146, 146]
+ [2020-01-03 23:30:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-03 23:30:00] ∈ [144.8, 144.8]
+ [2020-01-03 23:45:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-03 23:45:00] ∈ [143.5, 143.5]
+ "Wärmelast(Q_th_Last)|total_flow_hours": |-
+ Variable
+ --------
+ Wärmelast(Q_th_Last)|total_flow_hours ∈ [0, inf]
+ "Stromlast(P_el_Last)|flow_rate": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Stromlast(P_el_Last)|flow_rate[2020-01-01 00:00:00] ∈ [58.39, 58.39]
+ [2020-01-01 00:15:00]: Stromlast(P_el_Last)|flow_rate[2020-01-01 00:15:00] ∈ [58.36, 58.36]
+ [2020-01-01 00:30:00]: Stromlast(P_el_Last)|flow_rate[2020-01-01 00:30:00] ∈ [58.11, 58.11]
+ [2020-01-01 00:45:00]: Stromlast(P_el_Last)|flow_rate[2020-01-01 00:45:00] ∈ [57.71, 57.71]
+ [2020-01-01 01:00:00]: Stromlast(P_el_Last)|flow_rate[2020-01-01 01:00:00] ∈ [55.53, 55.53]
+ [2020-01-01 01:15:00]: Stromlast(P_el_Last)|flow_rate[2020-01-01 01:15:00] ∈ [56.24, 56.24]
+ [2020-01-01 01:30:00]: Stromlast(P_el_Last)|flow_rate[2020-01-01 01:30:00] ∈ [55.17, 55.17]
+ ...
+ [2020-01-03 22:15:00]: Stromlast(P_el_Last)|flow_rate[2020-01-03 22:15:00] ∈ [102.2, 102.2]
+ [2020-01-03 22:30:00]: Stromlast(P_el_Last)|flow_rate[2020-01-03 22:30:00] ∈ [100, 100]
+ [2020-01-03 22:45:00]: Stromlast(P_el_Last)|flow_rate[2020-01-03 22:45:00] ∈ [96.9, 96.9]
+ [2020-01-03 23:00:00]: Stromlast(P_el_Last)|flow_rate[2020-01-03 23:00:00] ∈ [89.83, 89.83]
+ [2020-01-03 23:15:00]: Stromlast(P_el_Last)|flow_rate[2020-01-03 23:15:00] ∈ [91.91, 91.91]
+ [2020-01-03 23:30:00]: Stromlast(P_el_Last)|flow_rate[2020-01-03 23:30:00] ∈ [88.18, 88.18]
+ [2020-01-03 23:45:00]: Stromlast(P_el_Last)|flow_rate[2020-01-03 23:45:00] ∈ [85.54, 85.54]
+ "Stromlast(P_el_Last)|total_flow_hours": |-
+ Variable
+ --------
+ Stromlast(P_el_Last)|total_flow_hours ∈ [0, inf]
+ "Kohletarif(Q_Kohle)|flow_rate": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 00:15:00]: Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:15:00] ∈ [0, 1000]
+ [2020-01-01 00:30:00]: Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:30:00] ∈ [0, 1000]
+ [2020-01-01 00:45:00]: Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:45:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: Kohletarif(Q_Kohle)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 01:15:00]: Kohletarif(Q_Kohle)|flow_rate[2020-01-01 01:15:00] ∈ [0, 1000]
+ [2020-01-01 01:30:00]: Kohletarif(Q_Kohle)|flow_rate[2020-01-01 01:30:00] ∈ [0, 1000]
+ ...
+ [2020-01-03 22:15:00]: Kohletarif(Q_Kohle)|flow_rate[2020-01-03 22:15:00] ∈ [0, 1000]
+ [2020-01-03 22:30:00]: Kohletarif(Q_Kohle)|flow_rate[2020-01-03 22:30:00] ∈ [0, 1000]
+ [2020-01-03 22:45:00]: Kohletarif(Q_Kohle)|flow_rate[2020-01-03 22:45:00] ∈ [0, 1000]
+ [2020-01-03 23:00:00]: Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:00:00] ∈ [0, 1000]
+ [2020-01-03 23:15:00]: Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:15:00] ∈ [0, 1000]
+ [2020-01-03 23:30:00]: Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:30:00] ∈ [0, 1000]
+ [2020-01-03 23:45:00]: Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:45:00] ∈ [0, 1000]
+ "Kohletarif(Q_Kohle)|total_flow_hours": |-
+ Variable
+ --------
+ Kohletarif(Q_Kohle)|total_flow_hours ∈ [0, inf]
+ "Kohletarif(Q_Kohle)->costs(temporal)": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 00:15:00]: Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 00:15:00] ∈ [-inf, inf]
+ [2020-01-01 00:30:00]: Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 00:30:00] ∈ [-inf, inf]
+ [2020-01-01 00:45:00]: Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 00:45:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:15:00]: Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 01:15:00] ∈ [-inf, inf]
+ [2020-01-01 01:30:00]: Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 01:30:00] ∈ [-inf, inf]
+ ...
+ [2020-01-03 22:15:00]: Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 22:15:00] ∈ [-inf, inf]
+ [2020-01-03 22:30:00]: Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 22:30:00] ∈ [-inf, inf]
+ [2020-01-03 22:45:00]: Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 22:45:00] ∈ [-inf, inf]
+ [2020-01-03 23:00:00]: Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 23:00:00] ∈ [-inf, inf]
+ [2020-01-03 23:15:00]: Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 23:15:00] ∈ [-inf, inf]
+ [2020-01-03 23:30:00]: Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 23:30:00] ∈ [-inf, inf]
+ [2020-01-03 23:45:00]: Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 23:45:00] ∈ [-inf, inf]
+ "Kohletarif(Q_Kohle)->CO2(temporal)": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 00:15:00]: Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 00:15:00] ∈ [-inf, inf]
+ [2020-01-01 00:30:00]: Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 00:30:00] ∈ [-inf, inf]
+ [2020-01-01 00:45:00]: Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 00:45:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:15:00]: Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 01:15:00] ∈ [-inf, inf]
+ [2020-01-01 01:30:00]: Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 01:30:00] ∈ [-inf, inf]
+ ...
+ [2020-01-03 22:15:00]: Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 22:15:00] ∈ [-inf, inf]
+ [2020-01-03 22:30:00]: Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 22:30:00] ∈ [-inf, inf]
+ [2020-01-03 22:45:00]: Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 22:45:00] ∈ [-inf, inf]
+ [2020-01-03 23:00:00]: Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 23:00:00] ∈ [-inf, inf]
+ [2020-01-03 23:15:00]: Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 23:15:00] ∈ [-inf, inf]
+ [2020-01-03 23:30:00]: Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 23:30:00] ∈ [-inf, inf]
+ [2020-01-03 23:45:00]: Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 23:45:00] ∈ [-inf, inf]
+ "Gastarif(Q_Gas)|flow_rate": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 00:15:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 00:15:00] ∈ [0, 1000]
+ [2020-01-01 00:30:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 00:30:00] ∈ [0, 1000]
+ [2020-01-01 00:45:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 00:45:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 01:15:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 01:15:00] ∈ [0, 1000]
+ [2020-01-01 01:30:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 01:30:00] ∈ [0, 1000]
+ ...
+ [2020-01-03 22:15:00]: Gastarif(Q_Gas)|flow_rate[2020-01-03 22:15:00] ∈ [0, 1000]
+ [2020-01-03 22:30:00]: Gastarif(Q_Gas)|flow_rate[2020-01-03 22:30:00] ∈ [0, 1000]
+ [2020-01-03 22:45:00]: Gastarif(Q_Gas)|flow_rate[2020-01-03 22:45:00] ∈ [0, 1000]
+ [2020-01-03 23:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-03 23:00:00] ∈ [0, 1000]
+ [2020-01-03 23:15:00]: Gastarif(Q_Gas)|flow_rate[2020-01-03 23:15:00] ∈ [0, 1000]
+ [2020-01-03 23:30:00]: Gastarif(Q_Gas)|flow_rate[2020-01-03 23:30:00] ∈ [0, 1000]
+ [2020-01-03 23:45:00]: Gastarif(Q_Gas)|flow_rate[2020-01-03 23:45:00] ∈ [0, 1000]
+ "Gastarif(Q_Gas)|total_flow_hours": |-
+ Variable
+ --------
+ Gastarif(Q_Gas)|total_flow_hours ∈ [0, inf]
+ "Gastarif(Q_Gas)->costs(temporal)": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 00:15:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:15:00] ∈ [-inf, inf]
+ [2020-01-01 00:30:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:30:00] ∈ [-inf, inf]
+ [2020-01-01 00:45:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:45:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:15:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:15:00] ∈ [-inf, inf]
+ [2020-01-01 01:30:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:30:00] ∈ [-inf, inf]
+ ...
+ [2020-01-03 22:15:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-03 22:15:00] ∈ [-inf, inf]
+ [2020-01-03 22:30:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-03 22:30:00] ∈ [-inf, inf]
+ [2020-01-03 22:45:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-03 22:45:00] ∈ [-inf, inf]
+ [2020-01-03 23:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-03 23:00:00] ∈ [-inf, inf]
+ [2020-01-03 23:15:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-03 23:15:00] ∈ [-inf, inf]
+ [2020-01-03 23:30:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-03 23:30:00] ∈ [-inf, inf]
+ [2020-01-03 23:45:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-03 23:45:00] ∈ [-inf, inf]
+ "Gastarif(Q_Gas)->CO2(temporal)": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 00:15:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:15:00] ∈ [-inf, inf]
+ [2020-01-01 00:30:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:30:00] ∈ [-inf, inf]
+ [2020-01-01 00:45:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:45:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:15:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:15:00] ∈ [-inf, inf]
+ [2020-01-01 01:30:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:30:00] ∈ [-inf, inf]
+ ...
+ [2020-01-03 22:15:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 22:15:00] ∈ [-inf, inf]
+ [2020-01-03 22:30:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 22:30:00] ∈ [-inf, inf]
+ [2020-01-03 22:45:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 22:45:00] ∈ [-inf, inf]
+ [2020-01-03 23:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 23:00:00] ∈ [-inf, inf]
+ [2020-01-03 23:15:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 23:15:00] ∈ [-inf, inf]
+ [2020-01-03 23:30:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 23:30:00] ∈ [-inf, inf]
+ [2020-01-03 23:45:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 23:45:00] ∈ [-inf, inf]
+ "Einspeisung(P_el)|flow_rate": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 00:15:00]: Einspeisung(P_el)|flow_rate[2020-01-01 00:15:00] ∈ [0, 1000]
+ [2020-01-01 00:30:00]: Einspeisung(P_el)|flow_rate[2020-01-01 00:30:00] ∈ [0, 1000]
+ [2020-01-01 00:45:00]: Einspeisung(P_el)|flow_rate[2020-01-01 00:45:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 01:15:00]: Einspeisung(P_el)|flow_rate[2020-01-01 01:15:00] ∈ [0, 1000]
+ [2020-01-01 01:30:00]: Einspeisung(P_el)|flow_rate[2020-01-01 01:30:00] ∈ [0, 1000]
+ ...
+ [2020-01-03 22:15:00]: Einspeisung(P_el)|flow_rate[2020-01-03 22:15:00] ∈ [0, 1000]
+ [2020-01-03 22:30:00]: Einspeisung(P_el)|flow_rate[2020-01-03 22:30:00] ∈ [0, 1000]
+ [2020-01-03 22:45:00]: Einspeisung(P_el)|flow_rate[2020-01-03 22:45:00] ∈ [0, 1000]
+ [2020-01-03 23:00:00]: Einspeisung(P_el)|flow_rate[2020-01-03 23:00:00] ∈ [0, 1000]
+ [2020-01-03 23:15:00]: Einspeisung(P_el)|flow_rate[2020-01-03 23:15:00] ∈ [0, 1000]
+ [2020-01-03 23:30:00]: Einspeisung(P_el)|flow_rate[2020-01-03 23:30:00] ∈ [0, 1000]
+ [2020-01-03 23:45:00]: Einspeisung(P_el)|flow_rate[2020-01-03 23:45:00] ∈ [0, 1000]
+ "Einspeisung(P_el)|total_flow_hours": |-
+ Variable
+ --------
+ Einspeisung(P_el)|total_flow_hours ∈ [0, inf]
+ "Einspeisung(P_el)->costs(temporal)": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 00:15:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 00:15:00] ∈ [-inf, inf]
+ [2020-01-01 00:30:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 00:30:00] ∈ [-inf, inf]
+ [2020-01-01 00:45:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 00:45:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:15:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 01:15:00] ∈ [-inf, inf]
+ [2020-01-01 01:30:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 01:30:00] ∈ [-inf, inf]
+ ...
+ [2020-01-03 22:15:00]: Einspeisung(P_el)->costs(temporal)[2020-01-03 22:15:00] ∈ [-inf, inf]
+ [2020-01-03 22:30:00]: Einspeisung(P_el)->costs(temporal)[2020-01-03 22:30:00] ∈ [-inf, inf]
+ [2020-01-03 22:45:00]: Einspeisung(P_el)->costs(temporal)[2020-01-03 22:45:00] ∈ [-inf, inf]
+ [2020-01-03 23:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-03 23:00:00] ∈ [-inf, inf]
+ [2020-01-03 23:15:00]: Einspeisung(P_el)->costs(temporal)[2020-01-03 23:15:00] ∈ [-inf, inf]
+ [2020-01-03 23:30:00]: Einspeisung(P_el)->costs(temporal)[2020-01-03 23:30:00] ∈ [-inf, inf]
+ [2020-01-03 23:45:00]: Einspeisung(P_el)->costs(temporal)[2020-01-03 23:45:00] ∈ [-inf, inf]
+ "Stromtarif(P_el)|flow_rate": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Stromtarif(P_el)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 00:15:00]: Stromtarif(P_el)|flow_rate[2020-01-01 00:15:00] ∈ [0, 1000]
+ [2020-01-01 00:30:00]: Stromtarif(P_el)|flow_rate[2020-01-01 00:30:00] ∈ [0, 1000]
+ [2020-01-01 00:45:00]: Stromtarif(P_el)|flow_rate[2020-01-01 00:45:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: Stromtarif(P_el)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 01:15:00]: Stromtarif(P_el)|flow_rate[2020-01-01 01:15:00] ∈ [0, 1000]
+ [2020-01-01 01:30:00]: Stromtarif(P_el)|flow_rate[2020-01-01 01:30:00] ∈ [0, 1000]
+ ...
+ [2020-01-03 22:15:00]: Stromtarif(P_el)|flow_rate[2020-01-03 22:15:00] ∈ [0, 1000]
+ [2020-01-03 22:30:00]: Stromtarif(P_el)|flow_rate[2020-01-03 22:30:00] ∈ [0, 1000]
+ [2020-01-03 22:45:00]: Stromtarif(P_el)|flow_rate[2020-01-03 22:45:00] ∈ [0, 1000]
+ [2020-01-03 23:00:00]: Stromtarif(P_el)|flow_rate[2020-01-03 23:00:00] ∈ [0, 1000]
+ [2020-01-03 23:15:00]: Stromtarif(P_el)|flow_rate[2020-01-03 23:15:00] ∈ [0, 1000]
+ [2020-01-03 23:30:00]: Stromtarif(P_el)|flow_rate[2020-01-03 23:30:00] ∈ [0, 1000]
+ [2020-01-03 23:45:00]: Stromtarif(P_el)|flow_rate[2020-01-03 23:45:00] ∈ [0, 1000]
+ "Stromtarif(P_el)|total_flow_hours": |-
+ Variable
+ --------
+ Stromtarif(P_el)|total_flow_hours ∈ [0, inf]
+ "Stromtarif(P_el)->costs(temporal)": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Stromtarif(P_el)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 00:15:00]: Stromtarif(P_el)->costs(temporal)[2020-01-01 00:15:00] ∈ [-inf, inf]
+ [2020-01-01 00:30:00]: Stromtarif(P_el)->costs(temporal)[2020-01-01 00:30:00] ∈ [-inf, inf]
+ [2020-01-01 00:45:00]: Stromtarif(P_el)->costs(temporal)[2020-01-01 00:45:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Stromtarif(P_el)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:15:00]: Stromtarif(P_el)->costs(temporal)[2020-01-01 01:15:00] ∈ [-inf, inf]
+ [2020-01-01 01:30:00]: Stromtarif(P_el)->costs(temporal)[2020-01-01 01:30:00] ∈ [-inf, inf]
+ ...
+ [2020-01-03 22:15:00]: Stromtarif(P_el)->costs(temporal)[2020-01-03 22:15:00] ∈ [-inf, inf]
+ [2020-01-03 22:30:00]: Stromtarif(P_el)->costs(temporal)[2020-01-03 22:30:00] ∈ [-inf, inf]
+ [2020-01-03 22:45:00]: Stromtarif(P_el)->costs(temporal)[2020-01-03 22:45:00] ∈ [-inf, inf]
+ [2020-01-03 23:00:00]: Stromtarif(P_el)->costs(temporal)[2020-01-03 23:00:00] ∈ [-inf, inf]
+ [2020-01-03 23:15:00]: Stromtarif(P_el)->costs(temporal)[2020-01-03 23:15:00] ∈ [-inf, inf]
+ [2020-01-03 23:30:00]: Stromtarif(P_el)->costs(temporal)[2020-01-03 23:30:00] ∈ [-inf, inf]
+ [2020-01-03 23:45:00]: Stromtarif(P_el)->costs(temporal)[2020-01-03 23:45:00] ∈ [-inf, inf]
+ "Stromtarif(P_el)->CO2(temporal)": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Stromtarif(P_el)->CO2(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 00:15:00]: Stromtarif(P_el)->CO2(temporal)[2020-01-01 00:15:00] ∈ [-inf, inf]
+ [2020-01-01 00:30:00]: Stromtarif(P_el)->CO2(temporal)[2020-01-01 00:30:00] ∈ [-inf, inf]
+ [2020-01-01 00:45:00]: Stromtarif(P_el)->CO2(temporal)[2020-01-01 00:45:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Stromtarif(P_el)->CO2(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:15:00]: Stromtarif(P_el)->CO2(temporal)[2020-01-01 01:15:00] ∈ [-inf, inf]
+ [2020-01-01 01:30:00]: Stromtarif(P_el)->CO2(temporal)[2020-01-01 01:30:00] ∈ [-inf, inf]
+ ...
+ [2020-01-03 22:15:00]: Stromtarif(P_el)->CO2(temporal)[2020-01-03 22:15:00] ∈ [-inf, inf]
+ [2020-01-03 22:30:00]: Stromtarif(P_el)->CO2(temporal)[2020-01-03 22:30:00] ∈ [-inf, inf]
+ [2020-01-03 22:45:00]: Stromtarif(P_el)->CO2(temporal)[2020-01-03 22:45:00] ∈ [-inf, inf]
+ [2020-01-03 23:00:00]: Stromtarif(P_el)->CO2(temporal)[2020-01-03 23:00:00] ∈ [-inf, inf]
+ [2020-01-03 23:15:00]: Stromtarif(P_el)->CO2(temporal)[2020-01-03 23:15:00] ∈ [-inf, inf]
+ [2020-01-03 23:30:00]: Stromtarif(P_el)->CO2(temporal)[2020-01-03 23:30:00] ∈ [-inf, inf]
+ [2020-01-03 23:45:00]: Stromtarif(P_el)->CO2(temporal)[2020-01-03 23:45:00] ∈ [-inf, inf]
+ "Kessel(Q_fu)|flow_rate": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] ∈ [0, 95]
+ [2020-01-01 00:15:00]: Kessel(Q_fu)|flow_rate[2020-01-01 00:15:00] ∈ [0, 95]
+ [2020-01-01 00:30:00]: Kessel(Q_fu)|flow_rate[2020-01-01 00:30:00] ∈ [0, 95]
+ [2020-01-01 00:45:00]: Kessel(Q_fu)|flow_rate[2020-01-01 00:45:00] ∈ [0, 95]
+ [2020-01-01 01:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] ∈ [0, 95]
+ [2020-01-01 01:15:00]: Kessel(Q_fu)|flow_rate[2020-01-01 01:15:00] ∈ [0, 95]
+ [2020-01-01 01:30:00]: Kessel(Q_fu)|flow_rate[2020-01-01 01:30:00] ∈ [0, 95]
+ ...
+ [2020-01-03 22:15:00]: Kessel(Q_fu)|flow_rate[2020-01-03 22:15:00] ∈ [0, 95]
+ [2020-01-03 22:30:00]: Kessel(Q_fu)|flow_rate[2020-01-03 22:30:00] ∈ [0, 95]
+ [2020-01-03 22:45:00]: Kessel(Q_fu)|flow_rate[2020-01-03 22:45:00] ∈ [0, 95]
+ [2020-01-03 23:00:00]: Kessel(Q_fu)|flow_rate[2020-01-03 23:00:00] ∈ [0, 95]
+ [2020-01-03 23:15:00]: Kessel(Q_fu)|flow_rate[2020-01-03 23:15:00] ∈ [0, 95]
+ [2020-01-03 23:30:00]: Kessel(Q_fu)|flow_rate[2020-01-03 23:30:00] ∈ [0, 95]
+ [2020-01-03 23:45:00]: Kessel(Q_fu)|flow_rate[2020-01-03 23:45:00] ∈ [0, 95]
+ "Kessel(Q_fu)|on": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Kessel(Q_fu)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 00:15:00]: Kessel(Q_fu)|on[2020-01-01 00:15:00] ∈ {0, 1}
+ [2020-01-01 00:30:00]: Kessel(Q_fu)|on[2020-01-01 00:30:00] ∈ {0, 1}
+ [2020-01-01 00:45:00]: Kessel(Q_fu)|on[2020-01-01 00:45:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_fu)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 01:15:00]: Kessel(Q_fu)|on[2020-01-01 01:15:00] ∈ {0, 1}
+ [2020-01-01 01:30:00]: Kessel(Q_fu)|on[2020-01-01 01:30:00] ∈ {0, 1}
+ ...
+ [2020-01-03 22:15:00]: Kessel(Q_fu)|on[2020-01-03 22:15:00] ∈ {0, 1}
+ [2020-01-03 22:30:00]: Kessel(Q_fu)|on[2020-01-03 22:30:00] ∈ {0, 1}
+ [2020-01-03 22:45:00]: Kessel(Q_fu)|on[2020-01-03 22:45:00] ∈ {0, 1}
+ [2020-01-03 23:00:00]: Kessel(Q_fu)|on[2020-01-03 23:00:00] ∈ {0, 1}
+ [2020-01-03 23:15:00]: Kessel(Q_fu)|on[2020-01-03 23:15:00] ∈ {0, 1}
+ [2020-01-03 23:30:00]: Kessel(Q_fu)|on[2020-01-03 23:30:00] ∈ {0, 1}
+ [2020-01-03 23:45:00]: Kessel(Q_fu)|on[2020-01-03 23:45:00] ∈ {0, 1}
+ "Kessel(Q_fu)|on_hours_total": |-
+ Variable
+ --------
+ Kessel(Q_fu)|on_hours_total ∈ [0, inf]
+ "Kessel(Q_fu)|switch|on": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Kessel(Q_fu)|switch|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 00:15:00]: Kessel(Q_fu)|switch|on[2020-01-01 00:15:00] ∈ {0, 1}
+ [2020-01-01 00:30:00]: Kessel(Q_fu)|switch|on[2020-01-01 00:30:00] ∈ {0, 1}
+ [2020-01-01 00:45:00]: Kessel(Q_fu)|switch|on[2020-01-01 00:45:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_fu)|switch|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 01:15:00]: Kessel(Q_fu)|switch|on[2020-01-01 01:15:00] ∈ {0, 1}
+ [2020-01-01 01:30:00]: Kessel(Q_fu)|switch|on[2020-01-01 01:30:00] ∈ {0, 1}
+ ...
+ [2020-01-03 22:15:00]: Kessel(Q_fu)|switch|on[2020-01-03 22:15:00] ∈ {0, 1}
+ [2020-01-03 22:30:00]: Kessel(Q_fu)|switch|on[2020-01-03 22:30:00] ∈ {0, 1}
+ [2020-01-03 22:45:00]: Kessel(Q_fu)|switch|on[2020-01-03 22:45:00] ∈ {0, 1}
+ [2020-01-03 23:00:00]: Kessel(Q_fu)|switch|on[2020-01-03 23:00:00] ∈ {0, 1}
+ [2020-01-03 23:15:00]: Kessel(Q_fu)|switch|on[2020-01-03 23:15:00] ∈ {0, 1}
+ [2020-01-03 23:30:00]: Kessel(Q_fu)|switch|on[2020-01-03 23:30:00] ∈ {0, 1}
+ [2020-01-03 23:45:00]: Kessel(Q_fu)|switch|on[2020-01-03 23:45:00] ∈ {0, 1}
+ "Kessel(Q_fu)|switch|off": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Kessel(Q_fu)|switch|off[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 00:15:00]: Kessel(Q_fu)|switch|off[2020-01-01 00:15:00] ∈ {0, 1}
+ [2020-01-01 00:30:00]: Kessel(Q_fu)|switch|off[2020-01-01 00:30:00] ∈ {0, 1}
+ [2020-01-01 00:45:00]: Kessel(Q_fu)|switch|off[2020-01-01 00:45:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_fu)|switch|off[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 01:15:00]: Kessel(Q_fu)|switch|off[2020-01-01 01:15:00] ∈ {0, 1}
+ [2020-01-01 01:30:00]: Kessel(Q_fu)|switch|off[2020-01-01 01:30:00] ∈ {0, 1}
+ ...
+ [2020-01-03 22:15:00]: Kessel(Q_fu)|switch|off[2020-01-03 22:15:00] ∈ {0, 1}
+ [2020-01-03 22:30:00]: Kessel(Q_fu)|switch|off[2020-01-03 22:30:00] ∈ {0, 1}
+ [2020-01-03 22:45:00]: Kessel(Q_fu)|switch|off[2020-01-03 22:45:00] ∈ {0, 1}
+ [2020-01-03 23:00:00]: Kessel(Q_fu)|switch|off[2020-01-03 23:00:00] ∈ {0, 1}
+ [2020-01-03 23:15:00]: Kessel(Q_fu)|switch|off[2020-01-03 23:15:00] ∈ {0, 1}
+ [2020-01-03 23:30:00]: Kessel(Q_fu)|switch|off[2020-01-03 23:30:00] ∈ {0, 1}
+ [2020-01-03 23:45:00]: Kessel(Q_fu)|switch|off[2020-01-03 23:45:00] ∈ {0, 1}
+ "Kessel(Q_fu)->costs(temporal)": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Kessel(Q_fu)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 00:15:00]: Kessel(Q_fu)->costs(temporal)[2020-01-01 00:15:00] ∈ [-inf, inf]
+ [2020-01-01 00:30:00]: Kessel(Q_fu)->costs(temporal)[2020-01-01 00:30:00] ∈ [-inf, inf]
+ [2020-01-01 00:45:00]: Kessel(Q_fu)->costs(temporal)[2020-01-01 00:45:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Kessel(Q_fu)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:15:00]: Kessel(Q_fu)->costs(temporal)[2020-01-01 01:15:00] ∈ [-inf, inf]
+ [2020-01-01 01:30:00]: Kessel(Q_fu)->costs(temporal)[2020-01-01 01:30:00] ∈ [-inf, inf]
+ ...
+ [2020-01-03 22:15:00]: Kessel(Q_fu)->costs(temporal)[2020-01-03 22:15:00] ∈ [-inf, inf]
+ [2020-01-03 22:30:00]: Kessel(Q_fu)->costs(temporal)[2020-01-03 22:30:00] ∈ [-inf, inf]
+ [2020-01-03 22:45:00]: Kessel(Q_fu)->costs(temporal)[2020-01-03 22:45:00] ∈ [-inf, inf]
+ [2020-01-03 23:00:00]: Kessel(Q_fu)->costs(temporal)[2020-01-03 23:00:00] ∈ [-inf, inf]
+ [2020-01-03 23:15:00]: Kessel(Q_fu)->costs(temporal)[2020-01-03 23:15:00] ∈ [-inf, inf]
+ [2020-01-03 23:30:00]: Kessel(Q_fu)->costs(temporal)[2020-01-03 23:30:00] ∈ [-inf, inf]
+ [2020-01-03 23:45:00]: Kessel(Q_fu)->costs(temporal)[2020-01-03 23:45:00] ∈ [-inf, inf]
+ "Kessel(Q_fu)|total_flow_hours": |-
+ Variable
+ --------
+ Kessel(Q_fu)|total_flow_hours ∈ [0, inf]
+ "Kessel(Q_th)|flow_rate": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 00:15:00]: Kessel(Q_th)|flow_rate[2020-01-01 00:15:00] ∈ [0, 1e+07]
+ [2020-01-01 00:30:00]: Kessel(Q_th)|flow_rate[2020-01-01 00:30:00] ∈ [0, 1e+07]
+ [2020-01-01 00:45:00]: Kessel(Q_th)|flow_rate[2020-01-01 00:45:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:15:00]: Kessel(Q_th)|flow_rate[2020-01-01 01:15:00] ∈ [0, 1e+07]
+ [2020-01-01 01:30:00]: Kessel(Q_th)|flow_rate[2020-01-01 01:30:00] ∈ [0, 1e+07]
+ ...
+ [2020-01-03 22:15:00]: Kessel(Q_th)|flow_rate[2020-01-03 22:15:00] ∈ [0, 1e+07]
+ [2020-01-03 22:30:00]: Kessel(Q_th)|flow_rate[2020-01-03 22:30:00] ∈ [0, 1e+07]
+ [2020-01-03 22:45:00]: Kessel(Q_th)|flow_rate[2020-01-03 22:45:00] ∈ [0, 1e+07]
+ [2020-01-03 23:00:00]: Kessel(Q_th)|flow_rate[2020-01-03 23:00:00] ∈ [0, 1e+07]
+ [2020-01-03 23:15:00]: Kessel(Q_th)|flow_rate[2020-01-03 23:15:00] ∈ [0, 1e+07]
+ [2020-01-03 23:30:00]: Kessel(Q_th)|flow_rate[2020-01-03 23:30:00] ∈ [0, 1e+07]
+ [2020-01-03 23:45:00]: Kessel(Q_th)|flow_rate[2020-01-03 23:45:00] ∈ [0, 1e+07]
+ "Kessel(Q_th)|total_flow_hours": |-
+ Variable
+ --------
+ Kessel(Q_th)|total_flow_hours ∈ [0, inf]
+ "BHKW2(Q_fu)|flow_rate": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 00:00:00] ∈ [0, 288]
+ [2020-01-01 00:15:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 00:15:00] ∈ [0, 288]
+ [2020-01-01 00:30:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 00:30:00] ∈ [0, 288]
+ [2020-01-01 00:45:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 00:45:00] ∈ [0, 288]
+ [2020-01-01 01:00:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 01:00:00] ∈ [0, 288]
+ [2020-01-01 01:15:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 01:15:00] ∈ [0, 288]
+ [2020-01-01 01:30:00]: BHKW2(Q_fu)|flow_rate[2020-01-01 01:30:00] ∈ [0, 288]
+ ...
+ [2020-01-03 22:15:00]: BHKW2(Q_fu)|flow_rate[2020-01-03 22:15:00] ∈ [0, 288]
+ [2020-01-03 22:30:00]: BHKW2(Q_fu)|flow_rate[2020-01-03 22:30:00] ∈ [0, 288]
+ [2020-01-03 22:45:00]: BHKW2(Q_fu)|flow_rate[2020-01-03 22:45:00] ∈ [0, 288]
+ [2020-01-03 23:00:00]: BHKW2(Q_fu)|flow_rate[2020-01-03 23:00:00] ∈ [0, 288]
+ [2020-01-03 23:15:00]: BHKW2(Q_fu)|flow_rate[2020-01-03 23:15:00] ∈ [0, 288]
+ [2020-01-03 23:30:00]: BHKW2(Q_fu)|flow_rate[2020-01-03 23:30:00] ∈ [0, 288]
+ [2020-01-03 23:45:00]: BHKW2(Q_fu)|flow_rate[2020-01-03 23:45:00] ∈ [0, 288]
+ "BHKW2(Q_fu)|on": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: BHKW2(Q_fu)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 00:15:00]: BHKW2(Q_fu)|on[2020-01-01 00:15:00] ∈ {0, 1}
+ [2020-01-01 00:30:00]: BHKW2(Q_fu)|on[2020-01-01 00:30:00] ∈ {0, 1}
+ [2020-01-01 00:45:00]: BHKW2(Q_fu)|on[2020-01-01 00:45:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: BHKW2(Q_fu)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 01:15:00]: BHKW2(Q_fu)|on[2020-01-01 01:15:00] ∈ {0, 1}
+ [2020-01-01 01:30:00]: BHKW2(Q_fu)|on[2020-01-01 01:30:00] ∈ {0, 1}
+ ...
+ [2020-01-03 22:15:00]: BHKW2(Q_fu)|on[2020-01-03 22:15:00] ∈ {0, 1}
+ [2020-01-03 22:30:00]: BHKW2(Q_fu)|on[2020-01-03 22:30:00] ∈ {0, 1}
+ [2020-01-03 22:45:00]: BHKW2(Q_fu)|on[2020-01-03 22:45:00] ∈ {0, 1}
+ [2020-01-03 23:00:00]: BHKW2(Q_fu)|on[2020-01-03 23:00:00] ∈ {0, 1}
+ [2020-01-03 23:15:00]: BHKW2(Q_fu)|on[2020-01-03 23:15:00] ∈ {0, 1}
+ [2020-01-03 23:30:00]: BHKW2(Q_fu)|on[2020-01-03 23:30:00] ∈ {0, 1}
+ [2020-01-03 23:45:00]: BHKW2(Q_fu)|on[2020-01-03 23:45:00] ∈ {0, 1}
+ "BHKW2(Q_fu)|on_hours_total": |-
+ Variable
+ --------
+ BHKW2(Q_fu)|on_hours_total ∈ [0, inf]
+ "BHKW2(Q_fu)|total_flow_hours": |-
+ Variable
+ --------
+ BHKW2(Q_fu)|total_flow_hours ∈ [0, inf]
+ "BHKW2(Q_th)|flow_rate": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: BHKW2(Q_th)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 00:15:00]: BHKW2(Q_th)|flow_rate[2020-01-01 00:15:00] ∈ [0, 1e+07]
+ [2020-01-01 00:30:00]: BHKW2(Q_th)|flow_rate[2020-01-01 00:30:00] ∈ [0, 1e+07]
+ [2020-01-01 00:45:00]: BHKW2(Q_th)|flow_rate[2020-01-01 00:45:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: BHKW2(Q_th)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:15:00]: BHKW2(Q_th)|flow_rate[2020-01-01 01:15:00] ∈ [0, 1e+07]
+ [2020-01-01 01:30:00]: BHKW2(Q_th)|flow_rate[2020-01-01 01:30:00] ∈ [0, 1e+07]
+ ...
+ [2020-01-03 22:15:00]: BHKW2(Q_th)|flow_rate[2020-01-03 22:15:00] ∈ [0, 1e+07]
+ [2020-01-03 22:30:00]: BHKW2(Q_th)|flow_rate[2020-01-03 22:30:00] ∈ [0, 1e+07]
+ [2020-01-03 22:45:00]: BHKW2(Q_th)|flow_rate[2020-01-03 22:45:00] ∈ [0, 1e+07]
+ [2020-01-03 23:00:00]: BHKW2(Q_th)|flow_rate[2020-01-03 23:00:00] ∈ [0, 1e+07]
+ [2020-01-03 23:15:00]: BHKW2(Q_th)|flow_rate[2020-01-03 23:15:00] ∈ [0, 1e+07]
+ [2020-01-03 23:30:00]: BHKW2(Q_th)|flow_rate[2020-01-03 23:30:00] ∈ [0, 1e+07]
+ [2020-01-03 23:45:00]: BHKW2(Q_th)|flow_rate[2020-01-03 23:45:00] ∈ [0, 1e+07]
+ "BHKW2(Q_th)|on": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: BHKW2(Q_th)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 00:15:00]: BHKW2(Q_th)|on[2020-01-01 00:15:00] ∈ {0, 1}
+ [2020-01-01 00:30:00]: BHKW2(Q_th)|on[2020-01-01 00:30:00] ∈ {0, 1}
+ [2020-01-01 00:45:00]: BHKW2(Q_th)|on[2020-01-01 00:45:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: BHKW2(Q_th)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 01:15:00]: BHKW2(Q_th)|on[2020-01-01 01:15:00] ∈ {0, 1}
+ [2020-01-01 01:30:00]: BHKW2(Q_th)|on[2020-01-01 01:30:00] ∈ {0, 1}
+ ...
+ [2020-01-03 22:15:00]: BHKW2(Q_th)|on[2020-01-03 22:15:00] ∈ {0, 1}
+ [2020-01-03 22:30:00]: BHKW2(Q_th)|on[2020-01-03 22:30:00] ∈ {0, 1}
+ [2020-01-03 22:45:00]: BHKW2(Q_th)|on[2020-01-03 22:45:00] ∈ {0, 1}
+ [2020-01-03 23:00:00]: BHKW2(Q_th)|on[2020-01-03 23:00:00] ∈ {0, 1}
+ [2020-01-03 23:15:00]: BHKW2(Q_th)|on[2020-01-03 23:15:00] ∈ {0, 1}
+ [2020-01-03 23:30:00]: BHKW2(Q_th)|on[2020-01-03 23:30:00] ∈ {0, 1}
+ [2020-01-03 23:45:00]: BHKW2(Q_th)|on[2020-01-03 23:45:00] ∈ {0, 1}
+ "BHKW2(Q_th)|on_hours_total": |-
+ Variable
+ --------
+ BHKW2(Q_th)|on_hours_total ∈ [0, inf]
+ "BHKW2(Q_th)|total_flow_hours": |-
+ Variable
+ --------
+ BHKW2(Q_th)|total_flow_hours ∈ [0, inf]
+ "BHKW2(P_el)|flow_rate": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: BHKW2(P_el)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 00:15:00]: BHKW2(P_el)|flow_rate[2020-01-01 00:15:00] ∈ [0, 1e+07]
+ [2020-01-01 00:30:00]: BHKW2(P_el)|flow_rate[2020-01-01 00:30:00] ∈ [0, 1e+07]
+ [2020-01-01 00:45:00]: BHKW2(P_el)|flow_rate[2020-01-01 00:45:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: BHKW2(P_el)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:15:00]: BHKW2(P_el)|flow_rate[2020-01-01 01:15:00] ∈ [0, 1e+07]
+ [2020-01-01 01:30:00]: BHKW2(P_el)|flow_rate[2020-01-01 01:30:00] ∈ [0, 1e+07]
+ ...
+ [2020-01-03 22:15:00]: BHKW2(P_el)|flow_rate[2020-01-03 22:15:00] ∈ [0, 1e+07]
+ [2020-01-03 22:30:00]: BHKW2(P_el)|flow_rate[2020-01-03 22:30:00] ∈ [0, 1e+07]
+ [2020-01-03 22:45:00]: BHKW2(P_el)|flow_rate[2020-01-03 22:45:00] ∈ [0, 1e+07]
+ [2020-01-03 23:00:00]: BHKW2(P_el)|flow_rate[2020-01-03 23:00:00] ∈ [0, 1e+07]
+ [2020-01-03 23:15:00]: BHKW2(P_el)|flow_rate[2020-01-03 23:15:00] ∈ [0, 1e+07]
+ [2020-01-03 23:30:00]: BHKW2(P_el)|flow_rate[2020-01-03 23:30:00] ∈ [0, 1e+07]
+ [2020-01-03 23:45:00]: BHKW2(P_el)|flow_rate[2020-01-03 23:45:00] ∈ [0, 1e+07]
+ "BHKW2(P_el)|on": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: BHKW2(P_el)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 00:15:00]: BHKW2(P_el)|on[2020-01-01 00:15:00] ∈ {0, 1}
+ [2020-01-01 00:30:00]: BHKW2(P_el)|on[2020-01-01 00:30:00] ∈ {0, 1}
+ [2020-01-01 00:45:00]: BHKW2(P_el)|on[2020-01-01 00:45:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: BHKW2(P_el)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 01:15:00]: BHKW2(P_el)|on[2020-01-01 01:15:00] ∈ {0, 1}
+ [2020-01-01 01:30:00]: BHKW2(P_el)|on[2020-01-01 01:30:00] ∈ {0, 1}
+ ...
+ [2020-01-03 22:15:00]: BHKW2(P_el)|on[2020-01-03 22:15:00] ∈ {0, 1}
+ [2020-01-03 22:30:00]: BHKW2(P_el)|on[2020-01-03 22:30:00] ∈ {0, 1}
+ [2020-01-03 22:45:00]: BHKW2(P_el)|on[2020-01-03 22:45:00] ∈ {0, 1}
+ [2020-01-03 23:00:00]: BHKW2(P_el)|on[2020-01-03 23:00:00] ∈ {0, 1}
+ [2020-01-03 23:15:00]: BHKW2(P_el)|on[2020-01-03 23:15:00] ∈ {0, 1}
+ [2020-01-03 23:30:00]: BHKW2(P_el)|on[2020-01-03 23:30:00] ∈ {0, 1}
+ [2020-01-03 23:45:00]: BHKW2(P_el)|on[2020-01-03 23:45:00] ∈ {0, 1}
+ "BHKW2(P_el)|on_hours_total": |-
+ Variable
+ --------
+ BHKW2(P_el)|on_hours_total ∈ [0, inf]
+ "BHKW2(P_el)|total_flow_hours": |-
+ Variable
+ --------
+ BHKW2(P_el)|total_flow_hours ∈ [0, inf]
+ "BHKW2|on": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: BHKW2|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 00:15:00]: BHKW2|on[2020-01-01 00:15:00] ∈ {0, 1}
+ [2020-01-01 00:30:00]: BHKW2|on[2020-01-01 00:30:00] ∈ {0, 1}
+ [2020-01-01 00:45:00]: BHKW2|on[2020-01-01 00:45:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: BHKW2|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 01:15:00]: BHKW2|on[2020-01-01 01:15:00] ∈ {0, 1}
+ [2020-01-01 01:30:00]: BHKW2|on[2020-01-01 01:30:00] ∈ {0, 1}
+ ...
+ [2020-01-03 22:15:00]: BHKW2|on[2020-01-03 22:15:00] ∈ {0, 1}
+ [2020-01-03 22:30:00]: BHKW2|on[2020-01-03 22:30:00] ∈ {0, 1}
+ [2020-01-03 22:45:00]: BHKW2|on[2020-01-03 22:45:00] ∈ {0, 1}
+ [2020-01-03 23:00:00]: BHKW2|on[2020-01-03 23:00:00] ∈ {0, 1}
+ [2020-01-03 23:15:00]: BHKW2|on[2020-01-03 23:15:00] ∈ {0, 1}
+ [2020-01-03 23:30:00]: BHKW2|on[2020-01-03 23:30:00] ∈ {0, 1}
+ [2020-01-03 23:45:00]: BHKW2|on[2020-01-03 23:45:00] ∈ {0, 1}
+ "BHKW2|on_hours_total": |-
+ Variable
+ --------
+ BHKW2|on_hours_total ∈ [0, inf]
+ "BHKW2|switch|on": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: BHKW2|switch|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 00:15:00]: BHKW2|switch|on[2020-01-01 00:15:00] ∈ {0, 1}
+ [2020-01-01 00:30:00]: BHKW2|switch|on[2020-01-01 00:30:00] ∈ {0, 1}
+ [2020-01-01 00:45:00]: BHKW2|switch|on[2020-01-01 00:45:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: BHKW2|switch|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 01:15:00]: BHKW2|switch|on[2020-01-01 01:15:00] ∈ {0, 1}
+ [2020-01-01 01:30:00]: BHKW2|switch|on[2020-01-01 01:30:00] ∈ {0, 1}
+ ...
+ [2020-01-03 22:15:00]: BHKW2|switch|on[2020-01-03 22:15:00] ∈ {0, 1}
+ [2020-01-03 22:30:00]: BHKW2|switch|on[2020-01-03 22:30:00] ∈ {0, 1}
+ [2020-01-03 22:45:00]: BHKW2|switch|on[2020-01-03 22:45:00] ∈ {0, 1}
+ [2020-01-03 23:00:00]: BHKW2|switch|on[2020-01-03 23:00:00] ∈ {0, 1}
+ [2020-01-03 23:15:00]: BHKW2|switch|on[2020-01-03 23:15:00] ∈ {0, 1}
+ [2020-01-03 23:30:00]: BHKW2|switch|on[2020-01-03 23:30:00] ∈ {0, 1}
+ [2020-01-03 23:45:00]: BHKW2|switch|on[2020-01-03 23:45:00] ∈ {0, 1}
+ "BHKW2|switch|off": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: BHKW2|switch|off[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 00:15:00]: BHKW2|switch|off[2020-01-01 00:15:00] ∈ {0, 1}
+ [2020-01-01 00:30:00]: BHKW2|switch|off[2020-01-01 00:30:00] ∈ {0, 1}
+ [2020-01-01 00:45:00]: BHKW2|switch|off[2020-01-01 00:45:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: BHKW2|switch|off[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 01:15:00]: BHKW2|switch|off[2020-01-01 01:15:00] ∈ {0, 1}
+ [2020-01-01 01:30:00]: BHKW2|switch|off[2020-01-01 01:30:00] ∈ {0, 1}
+ ...
+ [2020-01-03 22:15:00]: BHKW2|switch|off[2020-01-03 22:15:00] ∈ {0, 1}
+ [2020-01-03 22:30:00]: BHKW2|switch|off[2020-01-03 22:30:00] ∈ {0, 1}
+ [2020-01-03 22:45:00]: BHKW2|switch|off[2020-01-03 22:45:00] ∈ {0, 1}
+ [2020-01-03 23:00:00]: BHKW2|switch|off[2020-01-03 23:00:00] ∈ {0, 1}
+ [2020-01-03 23:15:00]: BHKW2|switch|off[2020-01-03 23:15:00] ∈ {0, 1}
+ [2020-01-03 23:30:00]: BHKW2|switch|off[2020-01-03 23:30:00] ∈ {0, 1}
+ [2020-01-03 23:45:00]: BHKW2|switch|off[2020-01-03 23:45:00] ∈ {0, 1}
+ "BHKW2->costs(temporal)": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: BHKW2->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 00:15:00]: BHKW2->costs(temporal)[2020-01-01 00:15:00] ∈ [-inf, inf]
+ [2020-01-01 00:30:00]: BHKW2->costs(temporal)[2020-01-01 00:30:00] ∈ [-inf, inf]
+ [2020-01-01 00:45:00]: BHKW2->costs(temporal)[2020-01-01 00:45:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: BHKW2->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:15:00]: BHKW2->costs(temporal)[2020-01-01 01:15:00] ∈ [-inf, inf]
+ [2020-01-01 01:30:00]: BHKW2->costs(temporal)[2020-01-01 01:30:00] ∈ [-inf, inf]
+ ...
+ [2020-01-03 22:15:00]: BHKW2->costs(temporal)[2020-01-03 22:15:00] ∈ [-inf, inf]
+ [2020-01-03 22:30:00]: BHKW2->costs(temporal)[2020-01-03 22:30:00] ∈ [-inf, inf]
+ [2020-01-03 22:45:00]: BHKW2->costs(temporal)[2020-01-03 22:45:00] ∈ [-inf, inf]
+ [2020-01-03 23:00:00]: BHKW2->costs(temporal)[2020-01-03 23:00:00] ∈ [-inf, inf]
+ [2020-01-03 23:15:00]: BHKW2->costs(temporal)[2020-01-03 23:15:00] ∈ [-inf, inf]
+ [2020-01-03 23:30:00]: BHKW2->costs(temporal)[2020-01-03 23:30:00] ∈ [-inf, inf]
+ [2020-01-03 23:45:00]: BHKW2->costs(temporal)[2020-01-03 23:45:00] ∈ [-inf, inf]
+ "Speicher(Q_th_load)|flow_rate": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] ∈ [0, 137]
+ [2020-01-01 00:15:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 00:15:00] ∈ [0, 137]
+ [2020-01-01 00:30:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 00:30:00] ∈ [0, 137]
+ [2020-01-01 00:45:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 00:45:00] ∈ [0, 137]
+ [2020-01-01 01:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] ∈ [0, 137]
+ [2020-01-01 01:15:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 01:15:00] ∈ [0, 137]
+ [2020-01-01 01:30:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 01:30:00] ∈ [0, 137]
+ ...
+ [2020-01-03 22:15:00]: Speicher(Q_th_load)|flow_rate[2020-01-03 22:15:00] ∈ [0, 137]
+ [2020-01-03 22:30:00]: Speicher(Q_th_load)|flow_rate[2020-01-03 22:30:00] ∈ [0, 137]
+ [2020-01-03 22:45:00]: Speicher(Q_th_load)|flow_rate[2020-01-03 22:45:00] ∈ [0, 137]
+ [2020-01-03 23:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-03 23:00:00] ∈ [0, 137]
+ [2020-01-03 23:15:00]: Speicher(Q_th_load)|flow_rate[2020-01-03 23:15:00] ∈ [0, 137]
+ [2020-01-03 23:30:00]: Speicher(Q_th_load)|flow_rate[2020-01-03 23:30:00] ∈ [0, 137]
+ [2020-01-03 23:45:00]: Speicher(Q_th_load)|flow_rate[2020-01-03 23:45:00] ∈ [0, 137]
+ "Speicher(Q_th_load)|on": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_load)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 00:15:00]: Speicher(Q_th_load)|on[2020-01-01 00:15:00] ∈ {0, 1}
+ [2020-01-01 00:30:00]: Speicher(Q_th_load)|on[2020-01-01 00:30:00] ∈ {0, 1}
+ [2020-01-01 00:45:00]: Speicher(Q_th_load)|on[2020-01-01 00:45:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Speicher(Q_th_load)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 01:15:00]: Speicher(Q_th_load)|on[2020-01-01 01:15:00] ∈ {0, 1}
+ [2020-01-01 01:30:00]: Speicher(Q_th_load)|on[2020-01-01 01:30:00] ∈ {0, 1}
+ ...
+ [2020-01-03 22:15:00]: Speicher(Q_th_load)|on[2020-01-03 22:15:00] ∈ {0, 1}
+ [2020-01-03 22:30:00]: Speicher(Q_th_load)|on[2020-01-03 22:30:00] ∈ {0, 1}
+ [2020-01-03 22:45:00]: Speicher(Q_th_load)|on[2020-01-03 22:45:00] ∈ {0, 1}
+ [2020-01-03 23:00:00]: Speicher(Q_th_load)|on[2020-01-03 23:00:00] ∈ {0, 1}
+ [2020-01-03 23:15:00]: Speicher(Q_th_load)|on[2020-01-03 23:15:00] ∈ {0, 1}
+ [2020-01-03 23:30:00]: Speicher(Q_th_load)|on[2020-01-03 23:30:00] ∈ {0, 1}
+ [2020-01-03 23:45:00]: Speicher(Q_th_load)|on[2020-01-03 23:45:00] ∈ {0, 1}
+ "Speicher(Q_th_load)|on_hours_total": |-
+ Variable
+ --------
+ Speicher(Q_th_load)|on_hours_total ∈ [0, inf]
+ "Speicher(Q_th_load)|total_flow_hours": |-
+ Variable
+ --------
+ Speicher(Q_th_load)|total_flow_hours ∈ [0, inf]
+ "Speicher(Q_th_unload)|flow_rate": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] ∈ [0, 158]
+ [2020-01-01 00:15:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 00:15:00] ∈ [0, 158]
+ [2020-01-01 00:30:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 00:30:00] ∈ [0, 158]
+ [2020-01-01 00:45:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 00:45:00] ∈ [0, 158]
+ [2020-01-01 01:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] ∈ [0, 158]
+ [2020-01-01 01:15:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 01:15:00] ∈ [0, 158]
+ [2020-01-01 01:30:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 01:30:00] ∈ [0, 158]
+ ...
+ [2020-01-03 22:15:00]: Speicher(Q_th_unload)|flow_rate[2020-01-03 22:15:00] ∈ [0, 158]
+ [2020-01-03 22:30:00]: Speicher(Q_th_unload)|flow_rate[2020-01-03 22:30:00] ∈ [0, 158]
+ [2020-01-03 22:45:00]: Speicher(Q_th_unload)|flow_rate[2020-01-03 22:45:00] ∈ [0, 158]
+ [2020-01-03 23:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-03 23:00:00] ∈ [0, 158]
+ [2020-01-03 23:15:00]: Speicher(Q_th_unload)|flow_rate[2020-01-03 23:15:00] ∈ [0, 158]
+ [2020-01-03 23:30:00]: Speicher(Q_th_unload)|flow_rate[2020-01-03 23:30:00] ∈ [0, 158]
+ [2020-01-03 23:45:00]: Speicher(Q_th_unload)|flow_rate[2020-01-03 23:45:00] ∈ [0, 158]
+ "Speicher(Q_th_unload)|on": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 00:15:00]: Speicher(Q_th_unload)|on[2020-01-01 00:15:00] ∈ {0, 1}
+ [2020-01-01 00:30:00]: Speicher(Q_th_unload)|on[2020-01-01 00:30:00] ∈ {0, 1}
+ [2020-01-01 00:45:00]: Speicher(Q_th_unload)|on[2020-01-01 00:45:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 01:15:00]: Speicher(Q_th_unload)|on[2020-01-01 01:15:00] ∈ {0, 1}
+ [2020-01-01 01:30:00]: Speicher(Q_th_unload)|on[2020-01-01 01:30:00] ∈ {0, 1}
+ ...
+ [2020-01-03 22:15:00]: Speicher(Q_th_unload)|on[2020-01-03 22:15:00] ∈ {0, 1}
+ [2020-01-03 22:30:00]: Speicher(Q_th_unload)|on[2020-01-03 22:30:00] ∈ {0, 1}
+ [2020-01-03 22:45:00]: Speicher(Q_th_unload)|on[2020-01-03 22:45:00] ∈ {0, 1}
+ [2020-01-03 23:00:00]: Speicher(Q_th_unload)|on[2020-01-03 23:00:00] ∈ {0, 1}
+ [2020-01-03 23:15:00]: Speicher(Q_th_unload)|on[2020-01-03 23:15:00] ∈ {0, 1}
+ [2020-01-03 23:30:00]: Speicher(Q_th_unload)|on[2020-01-03 23:30:00] ∈ {0, 1}
+ [2020-01-03 23:45:00]: Speicher(Q_th_unload)|on[2020-01-03 23:45:00] ∈ {0, 1}
+ "Speicher(Q_th_unload)|on_hours_total": |-
+ Variable
+ --------
+ Speicher(Q_th_unload)|on_hours_total ∈ [0, inf]
+ "Speicher(Q_th_unload)|total_flow_hours": |-
+ Variable
+ --------
+ Speicher(Q_th_unload)|total_flow_hours ∈ [0, inf]
+ "Speicher|charge_state": |-
+ Variable (time: 289)
+ --------------------
+ [2020-01-01 00:00:00]: Speicher|charge_state[2020-01-01 00:00:00] ∈ [0, 684]
+ [2020-01-01 00:15:00]: Speicher|charge_state[2020-01-01 00:15:00] ∈ [0, 684]
+ [2020-01-01 00:30:00]: Speicher|charge_state[2020-01-01 00:30:00] ∈ [0, 684]
+ [2020-01-01 00:45:00]: Speicher|charge_state[2020-01-01 00:45:00] ∈ [0, 684]
+ [2020-01-01 01:00:00]: Speicher|charge_state[2020-01-01 01:00:00] ∈ [0, 684]
+ [2020-01-01 01:15:00]: Speicher|charge_state[2020-01-01 01:15:00] ∈ [0, 684]
+ [2020-01-01 01:30:00]: Speicher|charge_state[2020-01-01 01:30:00] ∈ [0, 684]
+ ...
+ [2020-01-03 22:30:00]: Speicher|charge_state[2020-01-03 22:30:00] ∈ [0, 684]
+ [2020-01-03 22:45:00]: Speicher|charge_state[2020-01-03 22:45:00] ∈ [0, 684]
+ [2020-01-03 23:00:00]: Speicher|charge_state[2020-01-03 23:00:00] ∈ [0, 684]
+ [2020-01-03 23:15:00]: Speicher|charge_state[2020-01-03 23:15:00] ∈ [0, 684]
+ [2020-01-03 23:30:00]: Speicher|charge_state[2020-01-03 23:30:00] ∈ [0, 684]
+ [2020-01-03 23:45:00]: Speicher|charge_state[2020-01-03 23:45:00] ∈ [0, 684]
+ [2020-01-04 00:00:00]: Speicher|charge_state[2020-01-04 00:00:00] ∈ [0, 684]
+ "Speicher|netto_discharge": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Speicher|netto_discharge[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 00:15:00]: Speicher|netto_discharge[2020-01-01 00:15:00] ∈ [-inf, inf]
+ [2020-01-01 00:30:00]: Speicher|netto_discharge[2020-01-01 00:30:00] ∈ [-inf, inf]
+ [2020-01-01 00:45:00]: Speicher|netto_discharge[2020-01-01 00:45:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Speicher|netto_discharge[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:15:00]: Speicher|netto_discharge[2020-01-01 01:15:00] ∈ [-inf, inf]
+ [2020-01-01 01:30:00]: Speicher|netto_discharge[2020-01-01 01:30:00] ∈ [-inf, inf]
+ ...
+ [2020-01-03 22:15:00]: Speicher|netto_discharge[2020-01-03 22:15:00] ∈ [-inf, inf]
+ [2020-01-03 22:30:00]: Speicher|netto_discharge[2020-01-03 22:30:00] ∈ [-inf, inf]
+ [2020-01-03 22:45:00]: Speicher|netto_discharge[2020-01-03 22:45:00] ∈ [-inf, inf]
+ [2020-01-03 23:00:00]: Speicher|netto_discharge[2020-01-03 23:00:00] ∈ [-inf, inf]
+ [2020-01-03 23:15:00]: Speicher|netto_discharge[2020-01-03 23:15:00] ∈ [-inf, inf]
+ [2020-01-03 23:30:00]: Speicher|netto_discharge[2020-01-03 23:30:00] ∈ [-inf, inf]
+ [2020-01-03 23:45:00]: Speicher|netto_discharge[2020-01-03 23:45:00] ∈ [-inf, inf]
+ "Strom|excess_input": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Strom|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 00:15:00]: Strom|excess_input[2020-01-01 00:15:00] ∈ [0, inf]
+ [2020-01-01 00:30:00]: Strom|excess_input[2020-01-01 00:30:00] ∈ [0, inf]
+ [2020-01-01 00:45:00]: Strom|excess_input[2020-01-01 00:45:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Strom|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 01:15:00]: Strom|excess_input[2020-01-01 01:15:00] ∈ [0, inf]
+ [2020-01-01 01:30:00]: Strom|excess_input[2020-01-01 01:30:00] ∈ [0, inf]
+ ...
+ [2020-01-03 22:15:00]: Strom|excess_input[2020-01-03 22:15:00] ∈ [0, inf]
+ [2020-01-03 22:30:00]: Strom|excess_input[2020-01-03 22:30:00] ∈ [0, inf]
+ [2020-01-03 22:45:00]: Strom|excess_input[2020-01-03 22:45:00] ∈ [0, inf]
+ [2020-01-03 23:00:00]: Strom|excess_input[2020-01-03 23:00:00] ∈ [0, inf]
+ [2020-01-03 23:15:00]: Strom|excess_input[2020-01-03 23:15:00] ∈ [0, inf]
+ [2020-01-03 23:30:00]: Strom|excess_input[2020-01-03 23:30:00] ∈ [0, inf]
+ [2020-01-03 23:45:00]: Strom|excess_input[2020-01-03 23:45:00] ∈ [0, inf]
+ "Strom|excess_output": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Strom|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 00:15:00]: Strom|excess_output[2020-01-01 00:15:00] ∈ [0, inf]
+ [2020-01-01 00:30:00]: Strom|excess_output[2020-01-01 00:30:00] ∈ [0, inf]
+ [2020-01-01 00:45:00]: Strom|excess_output[2020-01-01 00:45:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Strom|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 01:15:00]: Strom|excess_output[2020-01-01 01:15:00] ∈ [0, inf]
+ [2020-01-01 01:30:00]: Strom|excess_output[2020-01-01 01:30:00] ∈ [0, inf]
+ ...
+ [2020-01-03 22:15:00]: Strom|excess_output[2020-01-03 22:15:00] ∈ [0, inf]
+ [2020-01-03 22:30:00]: Strom|excess_output[2020-01-03 22:30:00] ∈ [0, inf]
+ [2020-01-03 22:45:00]: Strom|excess_output[2020-01-03 22:45:00] ∈ [0, inf]
+ [2020-01-03 23:00:00]: Strom|excess_output[2020-01-03 23:00:00] ∈ [0, inf]
+ [2020-01-03 23:15:00]: Strom|excess_output[2020-01-03 23:15:00] ∈ [0, inf]
+ [2020-01-03 23:30:00]: Strom|excess_output[2020-01-03 23:30:00] ∈ [0, inf]
+ [2020-01-03 23:45:00]: Strom|excess_output[2020-01-03 23:45:00] ∈ [0, inf]
+ "Strom->Penalty": |-
+ Variable
+ --------
+ Strom->Penalty ∈ [-inf, inf]
+ "Fernwärme|excess_input": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Fernwärme|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 00:15:00]: Fernwärme|excess_input[2020-01-01 00:15:00] ∈ [0, inf]
+ [2020-01-01 00:30:00]: Fernwärme|excess_input[2020-01-01 00:30:00] ∈ [0, inf]
+ [2020-01-01 00:45:00]: Fernwärme|excess_input[2020-01-01 00:45:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Fernwärme|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 01:15:00]: Fernwärme|excess_input[2020-01-01 01:15:00] ∈ [0, inf]
+ [2020-01-01 01:30:00]: Fernwärme|excess_input[2020-01-01 01:30:00] ∈ [0, inf]
+ ...
+ [2020-01-03 22:15:00]: Fernwärme|excess_input[2020-01-03 22:15:00] ∈ [0, inf]
+ [2020-01-03 22:30:00]: Fernwärme|excess_input[2020-01-03 22:30:00] ∈ [0, inf]
+ [2020-01-03 22:45:00]: Fernwärme|excess_input[2020-01-03 22:45:00] ∈ [0, inf]
+ [2020-01-03 23:00:00]: Fernwärme|excess_input[2020-01-03 23:00:00] ∈ [0, inf]
+ [2020-01-03 23:15:00]: Fernwärme|excess_input[2020-01-03 23:15:00] ∈ [0, inf]
+ [2020-01-03 23:30:00]: Fernwärme|excess_input[2020-01-03 23:30:00] ∈ [0, inf]
+ [2020-01-03 23:45:00]: Fernwärme|excess_input[2020-01-03 23:45:00] ∈ [0, inf]
+ "Fernwärme|excess_output": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Fernwärme|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 00:15:00]: Fernwärme|excess_output[2020-01-01 00:15:00] ∈ [0, inf]
+ [2020-01-01 00:30:00]: Fernwärme|excess_output[2020-01-01 00:30:00] ∈ [0, inf]
+ [2020-01-01 00:45:00]: Fernwärme|excess_output[2020-01-01 00:45:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Fernwärme|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 01:15:00]: Fernwärme|excess_output[2020-01-01 01:15:00] ∈ [0, inf]
+ [2020-01-01 01:30:00]: Fernwärme|excess_output[2020-01-01 01:30:00] ∈ [0, inf]
+ ...
+ [2020-01-03 22:15:00]: Fernwärme|excess_output[2020-01-03 22:15:00] ∈ [0, inf]
+ [2020-01-03 22:30:00]: Fernwärme|excess_output[2020-01-03 22:30:00] ∈ [0, inf]
+ [2020-01-03 22:45:00]: Fernwärme|excess_output[2020-01-03 22:45:00] ∈ [0, inf]
+ [2020-01-03 23:00:00]: Fernwärme|excess_output[2020-01-03 23:00:00] ∈ [0, inf]
+ [2020-01-03 23:15:00]: Fernwärme|excess_output[2020-01-03 23:15:00] ∈ [0, inf]
+ [2020-01-03 23:30:00]: Fernwärme|excess_output[2020-01-03 23:30:00] ∈ [0, inf]
+ [2020-01-03 23:45:00]: Fernwärme|excess_output[2020-01-03 23:45:00] ∈ [0, inf]
+ "Fernwärme->Penalty": |-
+ Variable
+ --------
+ Fernwärme->Penalty ∈ [-inf, inf]
+ "Gas|excess_input": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Gas|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 00:15:00]: Gas|excess_input[2020-01-01 00:15:00] ∈ [0, inf]
+ [2020-01-01 00:30:00]: Gas|excess_input[2020-01-01 00:30:00] ∈ [0, inf]
+ [2020-01-01 00:45:00]: Gas|excess_input[2020-01-01 00:45:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Gas|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 01:15:00]: Gas|excess_input[2020-01-01 01:15:00] ∈ [0, inf]
+ [2020-01-01 01:30:00]: Gas|excess_input[2020-01-01 01:30:00] ∈ [0, inf]
+ ...
+ [2020-01-03 22:15:00]: Gas|excess_input[2020-01-03 22:15:00] ∈ [0, inf]
+ [2020-01-03 22:30:00]: Gas|excess_input[2020-01-03 22:30:00] ∈ [0, inf]
+ [2020-01-03 22:45:00]: Gas|excess_input[2020-01-03 22:45:00] ∈ [0, inf]
+ [2020-01-03 23:00:00]: Gas|excess_input[2020-01-03 23:00:00] ∈ [0, inf]
+ [2020-01-03 23:15:00]: Gas|excess_input[2020-01-03 23:15:00] ∈ [0, inf]
+ [2020-01-03 23:30:00]: Gas|excess_input[2020-01-03 23:30:00] ∈ [0, inf]
+ [2020-01-03 23:45:00]: Gas|excess_input[2020-01-03 23:45:00] ∈ [0, inf]
+ "Gas|excess_output": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Gas|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 00:15:00]: Gas|excess_output[2020-01-01 00:15:00] ∈ [0, inf]
+ [2020-01-01 00:30:00]: Gas|excess_output[2020-01-01 00:30:00] ∈ [0, inf]
+ [2020-01-01 00:45:00]: Gas|excess_output[2020-01-01 00:45:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Gas|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 01:15:00]: Gas|excess_output[2020-01-01 01:15:00] ∈ [0, inf]
+ [2020-01-01 01:30:00]: Gas|excess_output[2020-01-01 01:30:00] ∈ [0, inf]
+ ...
+ [2020-01-03 22:15:00]: Gas|excess_output[2020-01-03 22:15:00] ∈ [0, inf]
+ [2020-01-03 22:30:00]: Gas|excess_output[2020-01-03 22:30:00] ∈ [0, inf]
+ [2020-01-03 22:45:00]: Gas|excess_output[2020-01-03 22:45:00] ∈ [0, inf]
+ [2020-01-03 23:00:00]: Gas|excess_output[2020-01-03 23:00:00] ∈ [0, inf]
+ [2020-01-03 23:15:00]: Gas|excess_output[2020-01-03 23:15:00] ∈ [0, inf]
+ [2020-01-03 23:30:00]: Gas|excess_output[2020-01-03 23:30:00] ∈ [0, inf]
+ [2020-01-03 23:45:00]: Gas|excess_output[2020-01-03 23:45:00] ∈ [0, inf]
+ "Gas->Penalty": |-
+ Variable
+ --------
+ Gas->Penalty ∈ [-inf, inf]
+ "Kohle|excess_input": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Kohle|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 00:15:00]: Kohle|excess_input[2020-01-01 00:15:00] ∈ [0, inf]
+ [2020-01-01 00:30:00]: Kohle|excess_input[2020-01-01 00:30:00] ∈ [0, inf]
+ [2020-01-01 00:45:00]: Kohle|excess_input[2020-01-01 00:45:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Kohle|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 01:15:00]: Kohle|excess_input[2020-01-01 01:15:00] ∈ [0, inf]
+ [2020-01-01 01:30:00]: Kohle|excess_input[2020-01-01 01:30:00] ∈ [0, inf]
+ ...
+ [2020-01-03 22:15:00]: Kohle|excess_input[2020-01-03 22:15:00] ∈ [0, inf]
+ [2020-01-03 22:30:00]: Kohle|excess_input[2020-01-03 22:30:00] ∈ [0, inf]
+ [2020-01-03 22:45:00]: Kohle|excess_input[2020-01-03 22:45:00] ∈ [0, inf]
+ [2020-01-03 23:00:00]: Kohle|excess_input[2020-01-03 23:00:00] ∈ [0, inf]
+ [2020-01-03 23:15:00]: Kohle|excess_input[2020-01-03 23:15:00] ∈ [0, inf]
+ [2020-01-03 23:30:00]: Kohle|excess_input[2020-01-03 23:30:00] ∈ [0, inf]
+ [2020-01-03 23:45:00]: Kohle|excess_input[2020-01-03 23:45:00] ∈ [0, inf]
+ "Kohle|excess_output": |-
+ Variable (time: 288)
+ --------------------
+ [2020-01-01 00:00:00]: Kohle|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 00:15:00]: Kohle|excess_output[2020-01-01 00:15:00] ∈ [0, inf]
+ [2020-01-01 00:30:00]: Kohle|excess_output[2020-01-01 00:30:00] ∈ [0, inf]
+ [2020-01-01 00:45:00]: Kohle|excess_output[2020-01-01 00:45:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Kohle|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 01:15:00]: Kohle|excess_output[2020-01-01 01:15:00] ∈ [0, inf]
+ [2020-01-01 01:30:00]: Kohle|excess_output[2020-01-01 01:30:00] ∈ [0, inf]
+ ...
+ [2020-01-03 22:15:00]: Kohle|excess_output[2020-01-03 22:15:00] ∈ [0, inf]
+ [2020-01-03 22:30:00]: Kohle|excess_output[2020-01-03 22:30:00] ∈ [0, inf]
+ [2020-01-03 22:45:00]: Kohle|excess_output[2020-01-03 22:45:00] ∈ [0, inf]
+ [2020-01-03 23:00:00]: Kohle|excess_output[2020-01-03 23:00:00] ∈ [0, inf]
+ [2020-01-03 23:15:00]: Kohle|excess_output[2020-01-03 23:15:00] ∈ [0, inf]
+ [2020-01-03 23:30:00]: Kohle|excess_output[2020-01-03 23:30:00] ∈ [0, inf]
+ [2020-01-03 23:45:00]: Kohle|excess_output[2020-01-03 23:45:00] ∈ [0, inf]
+ "Kohle->Penalty": |-
+ Variable
+ --------
+ Kohle->Penalty ∈ [-inf, inf]
+constraints:
+ costs(periodic): |-
+ Constraint `costs(periodic)`
+ ----------------------------
+ +1 costs(periodic) = -0.0
+ costs(temporal): |-
+ Constraint `costs(temporal)`
+ ----------------------------
+ +1 costs(temporal) - 1 costs(temporal)|per_timestep[2020-01-01 00:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 00:15:00]... -1 costs(temporal)|per_timestep[2020-01-03 23:15:00] - 1 costs(temporal)|per_timestep[2020-01-03 23:30:00] - 1 costs(temporal)|per_timestep[2020-01-03 23:45:00] = -0.0
+ "costs(temporal)|per_timestep": |-
+ Constraint `costs(temporal)|per_timestep`
+ [time: 288]:
+ ------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 00:00:00] - 1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00]... -1 Stromtarif(P_el)->costs(temporal)[2020-01-01 00:00:00] - 1 Kessel(Q_fu)->costs(temporal)[2020-01-01 00:00:00] - 1 BHKW2->costs(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 costs(temporal)|per_timestep[2020-01-01 00:15:00] - 1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 00:15:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:15:00]... -1 Stromtarif(P_el)->costs(temporal)[2020-01-01 00:15:00] - 1 Kessel(Q_fu)->costs(temporal)[2020-01-01 00:15:00] - 1 BHKW2->costs(temporal)[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 costs(temporal)|per_timestep[2020-01-01 00:30:00] - 1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 00:30:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:30:00]... -1 Stromtarif(P_el)->costs(temporal)[2020-01-01 00:30:00] - 1 Kessel(Q_fu)->costs(temporal)[2020-01-01 00:30:00] - 1 BHKW2->costs(temporal)[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 costs(temporal)|per_timestep[2020-01-01 00:45:00] - 1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 00:45:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:45:00]... -1 Stromtarif(P_el)->costs(temporal)[2020-01-01 00:45:00] - 1 Kessel(Q_fu)->costs(temporal)[2020-01-01 00:45:00] - 1 BHKW2->costs(temporal)[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 01:00:00] - 1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 01:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00]... -1 Stromtarif(P_el)->costs(temporal)[2020-01-01 01:00:00] - 1 Kessel(Q_fu)->costs(temporal)[2020-01-01 01:00:00] - 1 BHKW2->costs(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 costs(temporal)|per_timestep[2020-01-01 01:15:00] - 1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 01:15:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:15:00]... -1 Stromtarif(P_el)->costs(temporal)[2020-01-01 01:15:00] - 1 Kessel(Q_fu)->costs(temporal)[2020-01-01 01:15:00] - 1 BHKW2->costs(temporal)[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 costs(temporal)|per_timestep[2020-01-01 01:30:00] - 1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 01:30:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:30:00]... -1 Stromtarif(P_el)->costs(temporal)[2020-01-01 01:30:00] - 1 Kessel(Q_fu)->costs(temporal)[2020-01-01 01:30:00] - 1 BHKW2->costs(temporal)[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 costs(temporal)|per_timestep[2020-01-03 22:15:00] - 1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 22:15:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-03 22:15:00]... -1 Stromtarif(P_el)->costs(temporal)[2020-01-03 22:15:00] - 1 Kessel(Q_fu)->costs(temporal)[2020-01-03 22:15:00] - 1 BHKW2->costs(temporal)[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 costs(temporal)|per_timestep[2020-01-03 22:30:00] - 1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 22:30:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-03 22:30:00]... -1 Stromtarif(P_el)->costs(temporal)[2020-01-03 22:30:00] - 1 Kessel(Q_fu)->costs(temporal)[2020-01-03 22:30:00] - 1 BHKW2->costs(temporal)[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 costs(temporal)|per_timestep[2020-01-03 22:45:00] - 1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 22:45:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-03 22:45:00]... -1 Stromtarif(P_el)->costs(temporal)[2020-01-03 22:45:00] - 1 Kessel(Q_fu)->costs(temporal)[2020-01-03 22:45:00] - 1 BHKW2->costs(temporal)[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 costs(temporal)|per_timestep[2020-01-03 23:00:00] - 1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 23:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-03 23:00:00]... -1 Stromtarif(P_el)->costs(temporal)[2020-01-03 23:00:00] - 1 Kessel(Q_fu)->costs(temporal)[2020-01-03 23:00:00] - 1 BHKW2->costs(temporal)[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 costs(temporal)|per_timestep[2020-01-03 23:15:00] - 1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 23:15:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-03 23:15:00]... -1 Stromtarif(P_el)->costs(temporal)[2020-01-03 23:15:00] - 1 Kessel(Q_fu)->costs(temporal)[2020-01-03 23:15:00] - 1 BHKW2->costs(temporal)[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 costs(temporal)|per_timestep[2020-01-03 23:30:00] - 1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 23:30:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-03 23:30:00]... -1 Stromtarif(P_el)->costs(temporal)[2020-01-03 23:30:00] - 1 Kessel(Q_fu)->costs(temporal)[2020-01-03 23:30:00] - 1 BHKW2->costs(temporal)[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 costs(temporal)|per_timestep[2020-01-03 23:45:00] - 1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 23:45:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-03 23:45:00]... -1 Stromtarif(P_el)->costs(temporal)[2020-01-03 23:45:00] - 1 Kessel(Q_fu)->costs(temporal)[2020-01-03 23:45:00] - 1 BHKW2->costs(temporal)[2020-01-03 23:45:00] = -0.0
+ costs: |-
+ Constraint `costs`
+ ------------------
+ +1 costs - 1 costs(temporal) - 1 costs(periodic) = -0.0
+ CO2(periodic): |-
+ Constraint `CO2(periodic)`
+ --------------------------
+ +1 CO2(periodic) = -0.0
+ CO2(temporal): |-
+ Constraint `CO2(temporal)`
+ --------------------------
+ +1 CO2(temporal) - 1 CO2(temporal)|per_timestep[2020-01-01 00:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 00:15:00]... -1 CO2(temporal)|per_timestep[2020-01-03 23:15:00] - 1 CO2(temporal)|per_timestep[2020-01-03 23:30:00] - 1 CO2(temporal)|per_timestep[2020-01-03 23:45:00] = -0.0
+ "CO2(temporal)|per_timestep": |-
+ Constraint `CO2(temporal)|per_timestep`
+ [time: 288]:
+ ----------------------------------------------------
+ [2020-01-01 00:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 00:00:00] - 1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] - 1 Stromtarif(P_el)->CO2(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 CO2(temporal)|per_timestep[2020-01-01 00:15:00] - 1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 00:15:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:15:00] - 1 Stromtarif(P_el)->CO2(temporal)[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 CO2(temporal)|per_timestep[2020-01-01 00:30:00] - 1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 00:30:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:30:00] - 1 Stromtarif(P_el)->CO2(temporal)[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 CO2(temporal)|per_timestep[2020-01-01 00:45:00] - 1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 00:45:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:45:00] - 1 Stromtarif(P_el)->CO2(temporal)[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 01:00:00] - 1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 01:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] - 1 Stromtarif(P_el)->CO2(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 CO2(temporal)|per_timestep[2020-01-01 01:15:00] - 1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 01:15:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:15:00] - 1 Stromtarif(P_el)->CO2(temporal)[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 CO2(temporal)|per_timestep[2020-01-01 01:30:00] - 1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 01:30:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:30:00] - 1 Stromtarif(P_el)->CO2(temporal)[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 CO2(temporal)|per_timestep[2020-01-03 22:15:00] - 1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 22:15:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 22:15:00] - 1 Stromtarif(P_el)->CO2(temporal)[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 CO2(temporal)|per_timestep[2020-01-03 22:30:00] - 1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 22:30:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 22:30:00] - 1 Stromtarif(P_el)->CO2(temporal)[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 CO2(temporal)|per_timestep[2020-01-03 22:45:00] - 1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 22:45:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 22:45:00] - 1 Stromtarif(P_el)->CO2(temporal)[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 CO2(temporal)|per_timestep[2020-01-03 23:00:00] - 1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 23:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 23:00:00] - 1 Stromtarif(P_el)->CO2(temporal)[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 CO2(temporal)|per_timestep[2020-01-03 23:15:00] - 1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 23:15:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 23:15:00] - 1 Stromtarif(P_el)->CO2(temporal)[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 CO2(temporal)|per_timestep[2020-01-03 23:30:00] - 1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 23:30:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 23:30:00] - 1 Stromtarif(P_el)->CO2(temporal)[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 CO2(temporal)|per_timestep[2020-01-03 23:45:00] - 1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 23:45:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 23:45:00] - 1 Stromtarif(P_el)->CO2(temporal)[2020-01-03 23:45:00] = -0.0
+ CO2: |-
+ Constraint `CO2`
+ ----------------
+ +1 CO2 - 1 CO2(temporal) - 1 CO2(periodic) = -0.0
+ PE(periodic): |-
+ Constraint `PE(periodic)`
+ -------------------------
+ +1 PE(periodic) = -0.0
+ PE(temporal): |-
+ Constraint `PE(temporal)`
+ -------------------------
+ +1 PE(temporal) - 1 PE(temporal)|per_timestep[2020-01-01 00:00:00] - 1 PE(temporal)|per_timestep[2020-01-01 00:15:00]... -1 PE(temporal)|per_timestep[2020-01-03 23:15:00] - 1 PE(temporal)|per_timestep[2020-01-03 23:30:00] - 1 PE(temporal)|per_timestep[2020-01-03 23:45:00] = -0.0
+ "PE(temporal)|per_timestep": |-
+ Constraint `PE(temporal)|per_timestep`
+ [time: 288]:
+ ---------------------------------------------------
+ [2020-01-01 00:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 PE(temporal)|per_timestep[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 PE(temporal)|per_timestep[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 PE(temporal)|per_timestep[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 PE(temporal)|per_timestep[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 PE(temporal)|per_timestep[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 PE(temporal)|per_timestep[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 PE(temporal)|per_timestep[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 PE(temporal)|per_timestep[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 PE(temporal)|per_timestep[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 PE(temporal)|per_timestep[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 PE(temporal)|per_timestep[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 PE(temporal)|per_timestep[2020-01-03 23:45:00] = -0.0
+ PE: |-
+ Constraint `PE`
+ ---------------
+ +1 PE - 1 PE(temporal) - 1 PE(periodic) = -0.0
+ Penalty: |-
+ Constraint `Penalty`
+ --------------------
+ +1 Penalty - 1 Strom->Penalty - 1 Fernwärme->Penalty - 1 Gas->Penalty - 1 Kohle->Penalty = -0.0
+ "Wärmelast(Q_th_Last)|total_flow_hours": |-
+ Constraint `Wärmelast(Q_th_Last)|total_flow_hours`
+ --------------------------------------------------
+ +1 Wärmelast(Q_th_Last)|total_flow_hours - 0.25 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00] - 0.25 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:15:00]... -0.25 Wärmelast(Q_th_Last)|flow_rate[2020-01-03 23:15:00] - 0.25 Wärmelast(Q_th_Last)|flow_rate[2020-01-03 23:30:00] - 0.25 Wärmelast(Q_th_Last)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Stromlast(P_el_Last)|total_flow_hours": |-
+ Constraint `Stromlast(P_el_Last)|total_flow_hours`
+ --------------------------------------------------
+ +1 Stromlast(P_el_Last)|total_flow_hours - 0.25 Stromlast(P_el_Last)|flow_rate[2020-01-01 00:00:00] - 0.25 Stromlast(P_el_Last)|flow_rate[2020-01-01 00:15:00]... -0.25 Stromlast(P_el_Last)|flow_rate[2020-01-03 23:15:00] - 0.25 Stromlast(P_el_Last)|flow_rate[2020-01-03 23:30:00] - 0.25 Stromlast(P_el_Last)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Kohletarif(Q_Kohle)|total_flow_hours": |-
+ Constraint `Kohletarif(Q_Kohle)|total_flow_hours`
+ -------------------------------------------------
+ +1 Kohletarif(Q_Kohle)|total_flow_hours - 0.25 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:00:00] - 0.25 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:15:00]... -0.25 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:15:00] - 0.25 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:30:00] - 0.25 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Kohletarif(Q_Kohle)->costs(temporal)": |-
+ Constraint `Kohletarif(Q_Kohle)->costs(temporal)`
+ [time: 288]:
+ --------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 00:00:00] - 1.15 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 00:15:00] - 1.15 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 00:30:00] - 1.15 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 00:45:00] - 1.15 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 01:00:00] - 1.15 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 01:15:00] - 1.15 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-01 01:30:00] - 1.15 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 22:15:00] - 1.15 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 22:30:00] - 1.15 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 22:45:00] - 1.15 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 23:00:00] - 1.15 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 23:15:00] - 1.15 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 23:30:00] - 1.15 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 Kohletarif(Q_Kohle)->costs(temporal)[2020-01-03 23:45:00] - 1.15 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Kohletarif(Q_Kohle)->CO2(temporal)": |-
+ Constraint `Kohletarif(Q_Kohle)->CO2(temporal)`
+ [time: 288]:
+ ------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 00:00:00] - 0.075 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 00:15:00] - 0.075 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 00:30:00] - 0.075 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 00:45:00] - 0.075 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 01:00:00] - 0.075 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 01:15:00] - 0.075 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-01 01:30:00] - 0.075 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 22:15:00] - 0.075 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 22:30:00] - 0.075 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 22:45:00] - 0.075 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 23:00:00] - 0.075 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 23:15:00] - 0.075 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 23:30:00] - 0.075 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 Kohletarif(Q_Kohle)->CO2(temporal)[2020-01-03 23:45:00] - 0.075 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Gastarif(Q_Gas)|total_flow_hours": |-
+ Constraint `Gastarif(Q_Gas)|total_flow_hours`
+ ---------------------------------------------
+ +1 Gastarif(Q_Gas)|total_flow_hours - 0.25 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] - 0.25 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:15:00]... -0.25 Gastarif(Q_Gas)|flow_rate[2020-01-03 23:15:00] - 0.25 Gastarif(Q_Gas)|flow_rate[2020-01-03 23:30:00] - 0.25 Gastarif(Q_Gas)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Gastarif(Q_Gas)->costs(temporal)": |-
+ Constraint `Gastarif(Q_Gas)->costs(temporal)`
+ [time: 288]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00] - 8.115 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:15:00] - 8.115 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:30:00] - 8.115 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:45:00] - 8.115 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00] - 8.115 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:15:00] - 8.115 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:30:00] - 8.115 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-03 22:15:00] - 8.16 Gastarif(Q_Gas)|flow_rate[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-03 22:30:00] - 8.16 Gastarif(Q_Gas)|flow_rate[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-03 22:45:00] - 8.16 Gastarif(Q_Gas)|flow_rate[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-03 23:00:00] - 8.16 Gastarif(Q_Gas)|flow_rate[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-03 23:15:00] - 8.16 Gastarif(Q_Gas)|flow_rate[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-03 23:30:00] - 8.16 Gastarif(Q_Gas)|flow_rate[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-03 23:45:00] - 8.16 Gastarif(Q_Gas)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Gastarif(Q_Gas)->CO2(temporal)": |-
+ Constraint `Gastarif(Q_Gas)->CO2(temporal)`
+ [time: 288]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] - 0.075 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:15:00] - 0.075 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:30:00] - 0.075 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:45:00] - 0.075 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] - 0.075 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:15:00] - 0.075 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:30:00] - 0.075 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 22:15:00] - 0.075 Gastarif(Q_Gas)|flow_rate[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 22:30:00] - 0.075 Gastarif(Q_Gas)|flow_rate[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 22:45:00] - 0.075 Gastarif(Q_Gas)|flow_rate[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 23:00:00] - 0.075 Gastarif(Q_Gas)|flow_rate[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 23:15:00] - 0.075 Gastarif(Q_Gas)|flow_rate[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 23:30:00] - 0.075 Gastarif(Q_Gas)|flow_rate[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-03 23:45:00] - 0.075 Gastarif(Q_Gas)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Einspeisung(P_el)|total_flow_hours": |-
+ Constraint `Einspeisung(P_el)|total_flow_hours`
+ -----------------------------------------------
+ +1 Einspeisung(P_el)|total_flow_hours - 0.25 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] - 0.25 Einspeisung(P_el)|flow_rate[2020-01-01 00:15:00]... -0.25 Einspeisung(P_el)|flow_rate[2020-01-03 23:15:00] - 0.25 Einspeisung(P_el)|flow_rate[2020-01-03 23:30:00] - 0.25 Einspeisung(P_el)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Einspeisung(P_el)->costs(temporal)": |-
+ Constraint `Einspeisung(P_el)->costs(temporal)`
+ [time: 288]:
+ ------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00] + 1.74 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:15:00] + 1.74 Einspeisung(P_el)|flow_rate[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:30:00] + 1.74 Einspeisung(P_el)|flow_rate[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:45:00] + 1.74 Einspeisung(P_el)|flow_rate[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00] + 0.5375 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:15:00] + 0.5375 Einspeisung(P_el)|flow_rate[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:30:00] + 0.5375 Einspeisung(P_el)|flow_rate[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-03 22:15:00] + 13.05 Einspeisung(P_el)|flow_rate[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-03 22:30:00] + 13.05 Einspeisung(P_el)|flow_rate[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-03 22:45:00] + 13.05 Einspeisung(P_el)|flow_rate[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-03 23:00:00] + 11.24 Einspeisung(P_el)|flow_rate[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-03 23:15:00] + 11.24 Einspeisung(P_el)|flow_rate[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-03 23:30:00] + 11.24 Einspeisung(P_el)|flow_rate[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-03 23:45:00] + 11.24 Einspeisung(P_el)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Stromtarif(P_el)|total_flow_hours": |-
+ Constraint `Stromtarif(P_el)|total_flow_hours`
+ ----------------------------------------------
+ +1 Stromtarif(P_el)|total_flow_hours - 0.25 Stromtarif(P_el)|flow_rate[2020-01-01 00:00:00] - 0.25 Stromtarif(P_el)|flow_rate[2020-01-01 00:15:00]... -0.25 Stromtarif(P_el)|flow_rate[2020-01-03 23:15:00] - 0.25 Stromtarif(P_el)|flow_rate[2020-01-03 23:30:00] - 0.25 Stromtarif(P_el)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Stromtarif(P_el)->costs(temporal)": |-
+ Constraint `Stromtarif(P_el)->costs(temporal)`
+ [time: 288]:
+ -----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Stromtarif(P_el)->costs(temporal)[2020-01-01 00:00:00] - 1.99 Stromtarif(P_el)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 Stromtarif(P_el)->costs(temporal)[2020-01-01 00:15:00] - 1.99 Stromtarif(P_el)|flow_rate[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 Stromtarif(P_el)->costs(temporal)[2020-01-01 00:30:00] - 1.99 Stromtarif(P_el)|flow_rate[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 Stromtarif(P_el)->costs(temporal)[2020-01-01 00:45:00] - 1.99 Stromtarif(P_el)|flow_rate[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Stromtarif(P_el)->costs(temporal)[2020-01-01 01:00:00] - 0.7875 Stromtarif(P_el)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 Stromtarif(P_el)->costs(temporal)[2020-01-01 01:15:00] - 0.7875 Stromtarif(P_el)|flow_rate[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 Stromtarif(P_el)->costs(temporal)[2020-01-01 01:30:00] - 0.7875 Stromtarif(P_el)|flow_rate[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Stromtarif(P_el)->costs(temporal)[2020-01-03 22:15:00] - 13.3 Stromtarif(P_el)|flow_rate[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 Stromtarif(P_el)->costs(temporal)[2020-01-03 22:30:00] - 13.3 Stromtarif(P_el)|flow_rate[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 Stromtarif(P_el)->costs(temporal)[2020-01-03 22:45:00] - 13.3 Stromtarif(P_el)|flow_rate[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 Stromtarif(P_el)->costs(temporal)[2020-01-03 23:00:00] - 11.49 Stromtarif(P_el)|flow_rate[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 Stromtarif(P_el)->costs(temporal)[2020-01-03 23:15:00] - 11.49 Stromtarif(P_el)|flow_rate[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 Stromtarif(P_el)->costs(temporal)[2020-01-03 23:30:00] - 11.49 Stromtarif(P_el)|flow_rate[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 Stromtarif(P_el)->costs(temporal)[2020-01-03 23:45:00] - 11.49 Stromtarif(P_el)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Stromtarif(P_el)->CO2(temporal)": |-
+ Constraint `Stromtarif(P_el)->CO2(temporal)`
+ [time: 288]:
+ ---------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Stromtarif(P_el)->CO2(temporal)[2020-01-01 00:00:00] - 0.075 Stromtarif(P_el)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 Stromtarif(P_el)->CO2(temporal)[2020-01-01 00:15:00] - 0.075 Stromtarif(P_el)|flow_rate[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 Stromtarif(P_el)->CO2(temporal)[2020-01-01 00:30:00] - 0.075 Stromtarif(P_el)|flow_rate[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 Stromtarif(P_el)->CO2(temporal)[2020-01-01 00:45:00] - 0.075 Stromtarif(P_el)|flow_rate[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Stromtarif(P_el)->CO2(temporal)[2020-01-01 01:00:00] - 0.075 Stromtarif(P_el)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 Stromtarif(P_el)->CO2(temporal)[2020-01-01 01:15:00] - 0.075 Stromtarif(P_el)|flow_rate[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 Stromtarif(P_el)->CO2(temporal)[2020-01-01 01:30:00] - 0.075 Stromtarif(P_el)|flow_rate[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Stromtarif(P_el)->CO2(temporal)[2020-01-03 22:15:00] - 0.075 Stromtarif(P_el)|flow_rate[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 Stromtarif(P_el)->CO2(temporal)[2020-01-03 22:30:00] - 0.075 Stromtarif(P_el)|flow_rate[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 Stromtarif(P_el)->CO2(temporal)[2020-01-03 22:45:00] - 0.075 Stromtarif(P_el)|flow_rate[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 Stromtarif(P_el)->CO2(temporal)[2020-01-03 23:00:00] - 0.075 Stromtarif(P_el)|flow_rate[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 Stromtarif(P_el)->CO2(temporal)[2020-01-03 23:15:00] - 0.075 Stromtarif(P_el)|flow_rate[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 Stromtarif(P_el)->CO2(temporal)[2020-01-03 23:30:00] - 0.075 Stromtarif(P_el)|flow_rate[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 Stromtarif(P_el)->CO2(temporal)[2020-01-03 23:45:00] - 0.075 Stromtarif(P_el)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Kessel(Q_fu)|on_hours_total": |-
+ Constraint `Kessel(Q_fu)|on_hours_total`
+ ----------------------------------------
+ +1 Kessel(Q_fu)|on_hours_total - 0.25 Kessel(Q_fu)|on[2020-01-01 00:00:00] - 0.25 Kessel(Q_fu)|on[2020-01-01 00:15:00]... -0.25 Kessel(Q_fu)|on[2020-01-03 23:15:00] - 0.25 Kessel(Q_fu)|on[2020-01-03 23:30:00] - 0.25 Kessel(Q_fu)|on[2020-01-03 23:45:00] = -0.0
+ "Kessel(Q_fu)|switch|transition": |-
+ Constraint `Kessel(Q_fu)|switch|transition`
+ [time: 287]:
+ --------------------------------------------------------
+ [2020-01-01 00:15:00]: +1 Kessel(Q_fu)|switch|on[2020-01-01 00:15:00] - 1 Kessel(Q_fu)|switch|off[2020-01-01 00:15:00] - 1 Kessel(Q_fu)|on[2020-01-01 00:15:00] + 1 Kessel(Q_fu)|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:30:00]: +1 Kessel(Q_fu)|switch|on[2020-01-01 00:30:00] - 1 Kessel(Q_fu)|switch|off[2020-01-01 00:30:00] - 1 Kessel(Q_fu)|on[2020-01-01 00:30:00] + 1 Kessel(Q_fu)|on[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:45:00]: +1 Kessel(Q_fu)|switch|on[2020-01-01 00:45:00] - 1 Kessel(Q_fu)|switch|off[2020-01-01 00:45:00] - 1 Kessel(Q_fu)|on[2020-01-01 00:45:00] + 1 Kessel(Q_fu)|on[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_fu)|switch|on[2020-01-01 01:00:00] - 1 Kessel(Q_fu)|switch|off[2020-01-01 01:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 01:00:00] + 1 Kessel(Q_fu)|on[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:15:00]: +1 Kessel(Q_fu)|switch|on[2020-01-01 01:15:00] - 1 Kessel(Q_fu)|switch|off[2020-01-01 01:15:00] - 1 Kessel(Q_fu)|on[2020-01-01 01:15:00] + 1 Kessel(Q_fu)|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:30:00]: +1 Kessel(Q_fu)|switch|on[2020-01-01 01:30:00] - 1 Kessel(Q_fu)|switch|off[2020-01-01 01:30:00] - 1 Kessel(Q_fu)|on[2020-01-01 01:30:00] + 1 Kessel(Q_fu)|on[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:45:00]: +1 Kessel(Q_fu)|switch|on[2020-01-01 01:45:00] - 1 Kessel(Q_fu)|switch|off[2020-01-01 01:45:00] - 1 Kessel(Q_fu)|on[2020-01-01 01:45:00] + 1 Kessel(Q_fu)|on[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Kessel(Q_fu)|switch|on[2020-01-03 22:15:00] - 1 Kessel(Q_fu)|switch|off[2020-01-03 22:15:00] - 1 Kessel(Q_fu)|on[2020-01-03 22:15:00] + 1 Kessel(Q_fu)|on[2020-01-03 22:00:00] = -0.0
+ [2020-01-03 22:30:00]: +1 Kessel(Q_fu)|switch|on[2020-01-03 22:30:00] - 1 Kessel(Q_fu)|switch|off[2020-01-03 22:30:00] - 1 Kessel(Q_fu)|on[2020-01-03 22:30:00] + 1 Kessel(Q_fu)|on[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:45:00]: +1 Kessel(Q_fu)|switch|on[2020-01-03 22:45:00] - 1 Kessel(Q_fu)|switch|off[2020-01-03 22:45:00] - 1 Kessel(Q_fu)|on[2020-01-03 22:45:00] + 1 Kessel(Q_fu)|on[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 23:00:00]: +1 Kessel(Q_fu)|switch|on[2020-01-03 23:00:00] - 1 Kessel(Q_fu)|switch|off[2020-01-03 23:00:00] - 1 Kessel(Q_fu)|on[2020-01-03 23:00:00] + 1 Kessel(Q_fu)|on[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:15:00]: +1 Kessel(Q_fu)|switch|on[2020-01-03 23:15:00] - 1 Kessel(Q_fu)|switch|off[2020-01-03 23:15:00] - 1 Kessel(Q_fu)|on[2020-01-03 23:15:00] + 1 Kessel(Q_fu)|on[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:30:00]: +1 Kessel(Q_fu)|switch|on[2020-01-03 23:30:00] - 1 Kessel(Q_fu)|switch|off[2020-01-03 23:30:00] - 1 Kessel(Q_fu)|on[2020-01-03 23:30:00] + 1 Kessel(Q_fu)|on[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:45:00]: +1 Kessel(Q_fu)|switch|on[2020-01-03 23:45:00] - 1 Kessel(Q_fu)|switch|off[2020-01-03 23:45:00] - 1 Kessel(Q_fu)|on[2020-01-03 23:45:00] + 1 Kessel(Q_fu)|on[2020-01-03 23:30:00] = -0.0
+ "Kessel(Q_fu)|switch|initial": |-
+ Constraint `Kessel(Q_fu)|switch|initial`
+ ----------------------------------------
+ +1 Kessel(Q_fu)|switch|on[2020-01-01 00:00:00] - 1 Kessel(Q_fu)|switch|off[2020-01-01 00:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 00:00:00] = -0.0
+ "Kessel(Q_fu)|switch|mutex": |-
+ Constraint `Kessel(Q_fu)|switch|mutex`
+ [time: 288]:
+ ---------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_fu)|switch|on[2020-01-01 00:00:00] + 1 Kessel(Q_fu)|switch|off[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 00:15:00]: +1 Kessel(Q_fu)|switch|on[2020-01-01 00:15:00] + 1 Kessel(Q_fu)|switch|off[2020-01-01 00:15:00] ≤ 1.0
+ [2020-01-01 00:30:00]: +1 Kessel(Q_fu)|switch|on[2020-01-01 00:30:00] + 1 Kessel(Q_fu)|switch|off[2020-01-01 00:30:00] ≤ 1.0
+ [2020-01-01 00:45:00]: +1 Kessel(Q_fu)|switch|on[2020-01-01 00:45:00] + 1 Kessel(Q_fu)|switch|off[2020-01-01 00:45:00] ≤ 1.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_fu)|switch|on[2020-01-01 01:00:00] + 1 Kessel(Q_fu)|switch|off[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 01:15:00]: +1 Kessel(Q_fu)|switch|on[2020-01-01 01:15:00] + 1 Kessel(Q_fu)|switch|off[2020-01-01 01:15:00] ≤ 1.0
+ [2020-01-01 01:30:00]: +1 Kessel(Q_fu)|switch|on[2020-01-01 01:30:00] + 1 Kessel(Q_fu)|switch|off[2020-01-01 01:30:00] ≤ 1.0
+ ...
+ [2020-01-03 22:15:00]: +1 Kessel(Q_fu)|switch|on[2020-01-03 22:15:00] + 1 Kessel(Q_fu)|switch|off[2020-01-03 22:15:00] ≤ 1.0
+ [2020-01-03 22:30:00]: +1 Kessel(Q_fu)|switch|on[2020-01-03 22:30:00] + 1 Kessel(Q_fu)|switch|off[2020-01-03 22:30:00] ≤ 1.0
+ [2020-01-03 22:45:00]: +1 Kessel(Q_fu)|switch|on[2020-01-03 22:45:00] + 1 Kessel(Q_fu)|switch|off[2020-01-03 22:45:00] ≤ 1.0
+ [2020-01-03 23:00:00]: +1 Kessel(Q_fu)|switch|on[2020-01-03 23:00:00] + 1 Kessel(Q_fu)|switch|off[2020-01-03 23:00:00] ≤ 1.0
+ [2020-01-03 23:15:00]: +1 Kessel(Q_fu)|switch|on[2020-01-03 23:15:00] + 1 Kessel(Q_fu)|switch|off[2020-01-03 23:15:00] ≤ 1.0
+ [2020-01-03 23:30:00]: +1 Kessel(Q_fu)|switch|on[2020-01-03 23:30:00] + 1 Kessel(Q_fu)|switch|off[2020-01-03 23:30:00] ≤ 1.0
+ [2020-01-03 23:45:00]: +1 Kessel(Q_fu)|switch|on[2020-01-03 23:45:00] + 1 Kessel(Q_fu)|switch|off[2020-01-03 23:45:00] ≤ 1.0
+ "Kessel(Q_fu)->costs(temporal)": |-
+ Constraint `Kessel(Q_fu)->costs(temporal)`
+ [time: 288]:
+ -------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_fu)->costs(temporal)[2020-01-01 00:00:00] - 1000 Kessel(Q_fu)|switch|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 Kessel(Q_fu)->costs(temporal)[2020-01-01 00:15:00] - 1000 Kessel(Q_fu)|switch|on[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 Kessel(Q_fu)->costs(temporal)[2020-01-01 00:30:00] - 1000 Kessel(Q_fu)|switch|on[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 Kessel(Q_fu)->costs(temporal)[2020-01-01 00:45:00] - 1000 Kessel(Q_fu)|switch|on[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_fu)->costs(temporal)[2020-01-01 01:00:00] - 1000 Kessel(Q_fu)|switch|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 Kessel(Q_fu)->costs(temporal)[2020-01-01 01:15:00] - 1000 Kessel(Q_fu)|switch|on[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 Kessel(Q_fu)->costs(temporal)[2020-01-01 01:30:00] - 1000 Kessel(Q_fu)|switch|on[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Kessel(Q_fu)->costs(temporal)[2020-01-03 22:15:00] - 1000 Kessel(Q_fu)|switch|on[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 Kessel(Q_fu)->costs(temporal)[2020-01-03 22:30:00] - 1000 Kessel(Q_fu)|switch|on[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 Kessel(Q_fu)->costs(temporal)[2020-01-03 22:45:00] - 1000 Kessel(Q_fu)|switch|on[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 Kessel(Q_fu)->costs(temporal)[2020-01-03 23:00:00] - 1000 Kessel(Q_fu)|switch|on[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 Kessel(Q_fu)->costs(temporal)[2020-01-03 23:15:00] - 1000 Kessel(Q_fu)|switch|on[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 Kessel(Q_fu)->costs(temporal)[2020-01-03 23:30:00] - 1000 Kessel(Q_fu)|switch|on[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 Kessel(Q_fu)->costs(temporal)[2020-01-03 23:45:00] - 1000 Kessel(Q_fu)|switch|on[2020-01-03 23:45:00] = -0.0
+ "Kessel(Q_fu)|flow_rate|ub": |-
+ Constraint `Kessel(Q_fu)|flow_rate|ub`
+ [time: 288]:
+ ---------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 95 Kessel(Q_fu)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 00:15:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 00:15:00] - 95 Kessel(Q_fu)|on[2020-01-01 00:15:00] ≤ -0.0
+ [2020-01-01 00:30:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 00:30:00] - 95 Kessel(Q_fu)|on[2020-01-01 00:30:00] ≤ -0.0
+ [2020-01-01 00:45:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 00:45:00] - 95 Kessel(Q_fu)|on[2020-01-01 00:45:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] - 95 Kessel(Q_fu)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 01:15:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 01:15:00] - 95 Kessel(Q_fu)|on[2020-01-01 01:15:00] ≤ -0.0
+ [2020-01-01 01:30:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 01:30:00] - 95 Kessel(Q_fu)|on[2020-01-01 01:30:00] ≤ -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-03 22:15:00] - 95 Kessel(Q_fu)|on[2020-01-03 22:15:00] ≤ -0.0
+ [2020-01-03 22:30:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-03 22:30:00] - 95 Kessel(Q_fu)|on[2020-01-03 22:30:00] ≤ -0.0
+ [2020-01-03 22:45:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-03 22:45:00] - 95 Kessel(Q_fu)|on[2020-01-03 22:45:00] ≤ -0.0
+ [2020-01-03 23:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-03 23:00:00] - 95 Kessel(Q_fu)|on[2020-01-03 23:00:00] ≤ -0.0
+ [2020-01-03 23:15:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-03 23:15:00] - 95 Kessel(Q_fu)|on[2020-01-03 23:15:00] ≤ -0.0
+ [2020-01-03 23:30:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-03 23:30:00] - 95 Kessel(Q_fu)|on[2020-01-03 23:30:00] ≤ -0.0
+ [2020-01-03 23:45:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-03 23:45:00] - 95 Kessel(Q_fu)|on[2020-01-03 23:45:00] ≤ -0.0
+ "Kessel(Q_fu)|flow_rate|lb": |-
+ Constraint `Kessel(Q_fu)|flow_rate|lb`
+ [time: 288]:
+ ---------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 12 Kessel(Q_fu)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 00:15:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 00:15:00] - 12 Kessel(Q_fu)|on[2020-01-01 00:15:00] ≥ -0.0
+ [2020-01-01 00:30:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 00:30:00] - 12 Kessel(Q_fu)|on[2020-01-01 00:30:00] ≥ -0.0
+ [2020-01-01 00:45:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 00:45:00] - 12 Kessel(Q_fu)|on[2020-01-01 00:45:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] - 12 Kessel(Q_fu)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 01:15:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 01:15:00] - 12 Kessel(Q_fu)|on[2020-01-01 01:15:00] ≥ -0.0
+ [2020-01-01 01:30:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 01:30:00] - 12 Kessel(Q_fu)|on[2020-01-01 01:30:00] ≥ -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-03 22:15:00] - 12 Kessel(Q_fu)|on[2020-01-03 22:15:00] ≥ -0.0
+ [2020-01-03 22:30:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-03 22:30:00] - 12 Kessel(Q_fu)|on[2020-01-03 22:30:00] ≥ -0.0
+ [2020-01-03 22:45:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-03 22:45:00] - 12 Kessel(Q_fu)|on[2020-01-03 22:45:00] ≥ -0.0
+ [2020-01-03 23:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-03 23:00:00] - 12 Kessel(Q_fu)|on[2020-01-03 23:00:00] ≥ -0.0
+ [2020-01-03 23:15:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-03 23:15:00] - 12 Kessel(Q_fu)|on[2020-01-03 23:15:00] ≥ -0.0
+ [2020-01-03 23:30:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-03 23:30:00] - 12 Kessel(Q_fu)|on[2020-01-03 23:30:00] ≥ -0.0
+ [2020-01-03 23:45:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-03 23:45:00] - 12 Kessel(Q_fu)|on[2020-01-03 23:45:00] ≥ -0.0
+ "Kessel(Q_fu)|total_flow_hours": |-
+ Constraint `Kessel(Q_fu)|total_flow_hours`
+ ------------------------------------------
+ +1 Kessel(Q_fu)|total_flow_hours - 0.25 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 0.25 Kessel(Q_fu)|flow_rate[2020-01-01 00:15:00]... -0.25 Kessel(Q_fu)|flow_rate[2020-01-03 23:15:00] - 0.25 Kessel(Q_fu)|flow_rate[2020-01-03 23:30:00] - 0.25 Kessel(Q_fu)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Kessel(Q_th)|total_flow_hours": |-
+ Constraint `Kessel(Q_th)|total_flow_hours`
+ ------------------------------------------
+ +1 Kessel(Q_th)|total_flow_hours - 0.25 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] - 0.25 Kessel(Q_th)|flow_rate[2020-01-01 00:15:00]... -0.25 Kessel(Q_th)|flow_rate[2020-01-03 23:15:00] - 0.25 Kessel(Q_th)|flow_rate[2020-01-03 23:30:00] - 0.25 Kessel(Q_th)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Kessel|conversion_0": |-
+ Constraint `Kessel|conversion_0`
+ [time: 288]:
+ ---------------------------------------------
+ [2020-01-01 00:00:00]: +0.85 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +0.85 Kessel(Q_fu)|flow_rate[2020-01-01 00:15:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +0.85 Kessel(Q_fu)|flow_rate[2020-01-01 00:30:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +0.85 Kessel(Q_fu)|flow_rate[2020-01-01 00:45:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +0.85 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +0.85 Kessel(Q_fu)|flow_rate[2020-01-01 01:15:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +0.85 Kessel(Q_fu)|flow_rate[2020-01-01 01:30:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +0.85 Kessel(Q_fu)|flow_rate[2020-01-03 22:15:00] - 1 Kessel(Q_th)|flow_rate[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +0.85 Kessel(Q_fu)|flow_rate[2020-01-03 22:30:00] - 1 Kessel(Q_th)|flow_rate[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +0.85 Kessel(Q_fu)|flow_rate[2020-01-03 22:45:00] - 1 Kessel(Q_th)|flow_rate[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +0.85 Kessel(Q_fu)|flow_rate[2020-01-03 23:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +0.85 Kessel(Q_fu)|flow_rate[2020-01-03 23:15:00] - 1 Kessel(Q_th)|flow_rate[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +0.85 Kessel(Q_fu)|flow_rate[2020-01-03 23:30:00] - 1 Kessel(Q_th)|flow_rate[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +0.85 Kessel(Q_fu)|flow_rate[2020-01-03 23:45:00] - 1 Kessel(Q_th)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "BHKW2(Q_fu)|on_hours_total": |-
+ Constraint `BHKW2(Q_fu)|on_hours_total`
+ ---------------------------------------
+ +1 BHKW2(Q_fu)|on_hours_total - 0.25 BHKW2(Q_fu)|on[2020-01-01 00:00:00] - 0.25 BHKW2(Q_fu)|on[2020-01-01 00:15:00]... -0.25 BHKW2(Q_fu)|on[2020-01-03 23:15:00] - 0.25 BHKW2(Q_fu)|on[2020-01-03 23:30:00] - 0.25 BHKW2(Q_fu)|on[2020-01-03 23:45:00] = -0.0
+ "BHKW2(Q_fu)|flow_rate|ub": |-
+ Constraint `BHKW2(Q_fu)|flow_rate|ub`
+ [time: 288]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:00:00] - 288 BHKW2(Q_fu)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 00:15:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:15:00] - 288 BHKW2(Q_fu)|on[2020-01-01 00:15:00] ≤ -0.0
+ [2020-01-01 00:30:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:30:00] - 288 BHKW2(Q_fu)|on[2020-01-01 00:30:00] ≤ -0.0
+ [2020-01-01 00:45:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:45:00] - 288 BHKW2(Q_fu)|on[2020-01-01 00:45:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 01:00:00] - 288 BHKW2(Q_fu)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 01:15:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 01:15:00] - 288 BHKW2(Q_fu)|on[2020-01-01 01:15:00] ≤ -0.0
+ [2020-01-01 01:30:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 01:30:00] - 288 BHKW2(Q_fu)|on[2020-01-01 01:30:00] ≤ -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-03 22:15:00] - 288 BHKW2(Q_fu)|on[2020-01-03 22:15:00] ≤ -0.0
+ [2020-01-03 22:30:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-03 22:30:00] - 288 BHKW2(Q_fu)|on[2020-01-03 22:30:00] ≤ -0.0
+ [2020-01-03 22:45:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-03 22:45:00] - 288 BHKW2(Q_fu)|on[2020-01-03 22:45:00] ≤ -0.0
+ [2020-01-03 23:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-03 23:00:00] - 288 BHKW2(Q_fu)|on[2020-01-03 23:00:00] ≤ -0.0
+ [2020-01-03 23:15:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-03 23:15:00] - 288 BHKW2(Q_fu)|on[2020-01-03 23:15:00] ≤ -0.0
+ [2020-01-03 23:30:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-03 23:30:00] - 288 BHKW2(Q_fu)|on[2020-01-03 23:30:00] ≤ -0.0
+ [2020-01-03 23:45:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-03 23:45:00] - 288 BHKW2(Q_fu)|on[2020-01-03 23:45:00] ≤ -0.0
+ "BHKW2(Q_fu)|flow_rate|lb": |-
+ Constraint `BHKW2(Q_fu)|flow_rate|lb`
+ [time: 288]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:00:00] - 87 BHKW2(Q_fu)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 00:15:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:15:00] - 87 BHKW2(Q_fu)|on[2020-01-01 00:15:00] ≥ -0.0
+ [2020-01-01 00:30:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:30:00] - 87 BHKW2(Q_fu)|on[2020-01-01 00:30:00] ≥ -0.0
+ [2020-01-01 00:45:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:45:00] - 87 BHKW2(Q_fu)|on[2020-01-01 00:45:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 01:00:00] - 87 BHKW2(Q_fu)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 01:15:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 01:15:00] - 87 BHKW2(Q_fu)|on[2020-01-01 01:15:00] ≥ -0.0
+ [2020-01-01 01:30:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-01 01:30:00] - 87 BHKW2(Q_fu)|on[2020-01-01 01:30:00] ≥ -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-03 22:15:00] - 87 BHKW2(Q_fu)|on[2020-01-03 22:15:00] ≥ -0.0
+ [2020-01-03 22:30:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-03 22:30:00] - 87 BHKW2(Q_fu)|on[2020-01-03 22:30:00] ≥ -0.0
+ [2020-01-03 22:45:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-03 22:45:00] - 87 BHKW2(Q_fu)|on[2020-01-03 22:45:00] ≥ -0.0
+ [2020-01-03 23:00:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-03 23:00:00] - 87 BHKW2(Q_fu)|on[2020-01-03 23:00:00] ≥ -0.0
+ [2020-01-03 23:15:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-03 23:15:00] - 87 BHKW2(Q_fu)|on[2020-01-03 23:15:00] ≥ -0.0
+ [2020-01-03 23:30:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-03 23:30:00] - 87 BHKW2(Q_fu)|on[2020-01-03 23:30:00] ≥ -0.0
+ [2020-01-03 23:45:00]: +1 BHKW2(Q_fu)|flow_rate[2020-01-03 23:45:00] - 87 BHKW2(Q_fu)|on[2020-01-03 23:45:00] ≥ -0.0
+ "BHKW2(Q_fu)|total_flow_hours": |-
+ Constraint `BHKW2(Q_fu)|total_flow_hours`
+ -----------------------------------------
+ +1 BHKW2(Q_fu)|total_flow_hours - 0.25 BHKW2(Q_fu)|flow_rate[2020-01-01 00:00:00] - 0.25 BHKW2(Q_fu)|flow_rate[2020-01-01 00:15:00]... -0.25 BHKW2(Q_fu)|flow_rate[2020-01-03 23:15:00] - 0.25 BHKW2(Q_fu)|flow_rate[2020-01-03 23:30:00] - 0.25 BHKW2(Q_fu)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "BHKW2(Q_th)|on_hours_total": |-
+ Constraint `BHKW2(Q_th)|on_hours_total`
+ ---------------------------------------
+ +1 BHKW2(Q_th)|on_hours_total - 0.25 BHKW2(Q_th)|on[2020-01-01 00:00:00] - 0.25 BHKW2(Q_th)|on[2020-01-01 00:15:00]... -0.25 BHKW2(Q_th)|on[2020-01-03 23:15:00] - 0.25 BHKW2(Q_th)|on[2020-01-03 23:30:00] - 0.25 BHKW2(Q_th)|on[2020-01-03 23:45:00] = -0.0
+ "BHKW2(Q_th)|flow_rate|ub": |-
+ Constraint `BHKW2(Q_th)|flow_rate|ub`
+ [time: 288]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 00:00:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 00:15:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 00:15:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 00:15:00] ≤ -0.0
+ [2020-01-01 00:30:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 00:30:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 00:30:00] ≤ -0.0
+ [2020-01-01 00:45:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 00:45:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 00:45:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 01:00:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 01:15:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 01:15:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 01:15:00] ≤ -0.0
+ [2020-01-01 01:30:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 01:30:00] - 1e+07 BHKW2(Q_th)|on[2020-01-01 01:30:00] ≤ -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-03 22:15:00] - 1e+07 BHKW2(Q_th)|on[2020-01-03 22:15:00] ≤ -0.0
+ [2020-01-03 22:30:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-03 22:30:00] - 1e+07 BHKW2(Q_th)|on[2020-01-03 22:30:00] ≤ -0.0
+ [2020-01-03 22:45:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-03 22:45:00] - 1e+07 BHKW2(Q_th)|on[2020-01-03 22:45:00] ≤ -0.0
+ [2020-01-03 23:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-03 23:00:00] - 1e+07 BHKW2(Q_th)|on[2020-01-03 23:00:00] ≤ -0.0
+ [2020-01-03 23:15:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-03 23:15:00] - 1e+07 BHKW2(Q_th)|on[2020-01-03 23:15:00] ≤ -0.0
+ [2020-01-03 23:30:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-03 23:30:00] - 1e+07 BHKW2(Q_th)|on[2020-01-03 23:30:00] ≤ -0.0
+ [2020-01-03 23:45:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-03 23:45:00] - 1e+07 BHKW2(Q_th)|on[2020-01-03 23:45:00] ≤ -0.0
+ "BHKW2(Q_th)|flow_rate|lb": |-
+ Constraint `BHKW2(Q_th)|flow_rate|lb`
+ [time: 288]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 00:00:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 00:15:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 00:15:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 00:15:00] ≥ -0.0
+ [2020-01-01 00:30:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 00:30:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 00:30:00] ≥ -0.0
+ [2020-01-01 00:45:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 00:45:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 00:45:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 01:00:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 01:15:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 01:15:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 01:15:00] ≥ -0.0
+ [2020-01-01 01:30:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-01 01:30:00] - 1e-05 BHKW2(Q_th)|on[2020-01-01 01:30:00] ≥ -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-03 22:15:00] - 1e-05 BHKW2(Q_th)|on[2020-01-03 22:15:00] ≥ -0.0
+ [2020-01-03 22:30:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-03 22:30:00] - 1e-05 BHKW2(Q_th)|on[2020-01-03 22:30:00] ≥ -0.0
+ [2020-01-03 22:45:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-03 22:45:00] - 1e-05 BHKW2(Q_th)|on[2020-01-03 22:45:00] ≥ -0.0
+ [2020-01-03 23:00:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-03 23:00:00] - 1e-05 BHKW2(Q_th)|on[2020-01-03 23:00:00] ≥ -0.0
+ [2020-01-03 23:15:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-03 23:15:00] - 1e-05 BHKW2(Q_th)|on[2020-01-03 23:15:00] ≥ -0.0
+ [2020-01-03 23:30:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-03 23:30:00] - 1e-05 BHKW2(Q_th)|on[2020-01-03 23:30:00] ≥ -0.0
+ [2020-01-03 23:45:00]: +1 BHKW2(Q_th)|flow_rate[2020-01-03 23:45:00] - 1e-05 BHKW2(Q_th)|on[2020-01-03 23:45:00] ≥ -0.0
+ "BHKW2(Q_th)|total_flow_hours": |-
+ Constraint `BHKW2(Q_th)|total_flow_hours`
+ -----------------------------------------
+ +1 BHKW2(Q_th)|total_flow_hours - 0.25 BHKW2(Q_th)|flow_rate[2020-01-01 00:00:00] - 0.25 BHKW2(Q_th)|flow_rate[2020-01-01 00:15:00]... -0.25 BHKW2(Q_th)|flow_rate[2020-01-03 23:15:00] - 0.25 BHKW2(Q_th)|flow_rate[2020-01-03 23:30:00] - 0.25 BHKW2(Q_th)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "BHKW2(P_el)|on_hours_total": |-
+ Constraint `BHKW2(P_el)|on_hours_total`
+ ---------------------------------------
+ +1 BHKW2(P_el)|on_hours_total - 0.25 BHKW2(P_el)|on[2020-01-01 00:00:00] - 0.25 BHKW2(P_el)|on[2020-01-01 00:15:00]... -0.25 BHKW2(P_el)|on[2020-01-03 23:15:00] - 0.25 BHKW2(P_el)|on[2020-01-03 23:30:00] - 0.25 BHKW2(P_el)|on[2020-01-03 23:45:00] = -0.0
+ "BHKW2(P_el)|flow_rate|ub": |-
+ Constraint `BHKW2(P_el)|flow_rate|ub`
+ [time: 288]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 00:00:00] - 1e+07 BHKW2(P_el)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 00:15:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 00:15:00] - 1e+07 BHKW2(P_el)|on[2020-01-01 00:15:00] ≤ -0.0
+ [2020-01-01 00:30:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 00:30:00] - 1e+07 BHKW2(P_el)|on[2020-01-01 00:30:00] ≤ -0.0
+ [2020-01-01 00:45:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 00:45:00] - 1e+07 BHKW2(P_el)|on[2020-01-01 00:45:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 01:00:00] - 1e+07 BHKW2(P_el)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 01:15:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 01:15:00] - 1e+07 BHKW2(P_el)|on[2020-01-01 01:15:00] ≤ -0.0
+ [2020-01-01 01:30:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 01:30:00] - 1e+07 BHKW2(P_el)|on[2020-01-01 01:30:00] ≤ -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 BHKW2(P_el)|flow_rate[2020-01-03 22:15:00] - 1e+07 BHKW2(P_el)|on[2020-01-03 22:15:00] ≤ -0.0
+ [2020-01-03 22:30:00]: +1 BHKW2(P_el)|flow_rate[2020-01-03 22:30:00] - 1e+07 BHKW2(P_el)|on[2020-01-03 22:30:00] ≤ -0.0
+ [2020-01-03 22:45:00]: +1 BHKW2(P_el)|flow_rate[2020-01-03 22:45:00] - 1e+07 BHKW2(P_el)|on[2020-01-03 22:45:00] ≤ -0.0
+ [2020-01-03 23:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-03 23:00:00] - 1e+07 BHKW2(P_el)|on[2020-01-03 23:00:00] ≤ -0.0
+ [2020-01-03 23:15:00]: +1 BHKW2(P_el)|flow_rate[2020-01-03 23:15:00] - 1e+07 BHKW2(P_el)|on[2020-01-03 23:15:00] ≤ -0.0
+ [2020-01-03 23:30:00]: +1 BHKW2(P_el)|flow_rate[2020-01-03 23:30:00] - 1e+07 BHKW2(P_el)|on[2020-01-03 23:30:00] ≤ -0.0
+ [2020-01-03 23:45:00]: +1 BHKW2(P_el)|flow_rate[2020-01-03 23:45:00] - 1e+07 BHKW2(P_el)|on[2020-01-03 23:45:00] ≤ -0.0
+ "BHKW2(P_el)|flow_rate|lb": |-
+ Constraint `BHKW2(P_el)|flow_rate|lb`
+ [time: 288]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 00:00:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 00:15:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 00:15:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 00:15:00] ≥ -0.0
+ [2020-01-01 00:30:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 00:30:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 00:30:00] ≥ -0.0
+ [2020-01-01 00:45:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 00:45:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 00:45:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 01:00:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 01:15:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 01:15:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 01:15:00] ≥ -0.0
+ [2020-01-01 01:30:00]: +1 BHKW2(P_el)|flow_rate[2020-01-01 01:30:00] - 1e-05 BHKW2(P_el)|on[2020-01-01 01:30:00] ≥ -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 BHKW2(P_el)|flow_rate[2020-01-03 22:15:00] - 1e-05 BHKW2(P_el)|on[2020-01-03 22:15:00] ≥ -0.0
+ [2020-01-03 22:30:00]: +1 BHKW2(P_el)|flow_rate[2020-01-03 22:30:00] - 1e-05 BHKW2(P_el)|on[2020-01-03 22:30:00] ≥ -0.0
+ [2020-01-03 22:45:00]: +1 BHKW2(P_el)|flow_rate[2020-01-03 22:45:00] - 1e-05 BHKW2(P_el)|on[2020-01-03 22:45:00] ≥ -0.0
+ [2020-01-03 23:00:00]: +1 BHKW2(P_el)|flow_rate[2020-01-03 23:00:00] - 1e-05 BHKW2(P_el)|on[2020-01-03 23:00:00] ≥ -0.0
+ [2020-01-03 23:15:00]: +1 BHKW2(P_el)|flow_rate[2020-01-03 23:15:00] - 1e-05 BHKW2(P_el)|on[2020-01-03 23:15:00] ≥ -0.0
+ [2020-01-03 23:30:00]: +1 BHKW2(P_el)|flow_rate[2020-01-03 23:30:00] - 1e-05 BHKW2(P_el)|on[2020-01-03 23:30:00] ≥ -0.0
+ [2020-01-03 23:45:00]: +1 BHKW2(P_el)|flow_rate[2020-01-03 23:45:00] - 1e-05 BHKW2(P_el)|on[2020-01-03 23:45:00] ≥ -0.0
+ "BHKW2(P_el)|total_flow_hours": |-
+ Constraint `BHKW2(P_el)|total_flow_hours`
+ -----------------------------------------
+ +1 BHKW2(P_el)|total_flow_hours - 0.25 BHKW2(P_el)|flow_rate[2020-01-01 00:00:00] - 0.25 BHKW2(P_el)|flow_rate[2020-01-01 00:15:00]... -0.25 BHKW2(P_el)|flow_rate[2020-01-03 23:15:00] - 0.25 BHKW2(P_el)|flow_rate[2020-01-03 23:30:00] - 0.25 BHKW2(P_el)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "BHKW2|on|ub": |-
+ Constraint `BHKW2|on|ub`
+ [time: 288]:
+ -------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2|on[2020-01-01 00:00:00] - 1 BHKW2(Q_fu)|on[2020-01-01 00:00:00] - 1 BHKW2(Q_th)|on[2020-01-01 00:00:00] - 1 BHKW2(P_el)|on[2020-01-01 00:00:00] ≤ 1e-05
+ [2020-01-01 00:15:00]: +1 BHKW2|on[2020-01-01 00:15:00] - 1 BHKW2(Q_fu)|on[2020-01-01 00:15:00] - 1 BHKW2(Q_th)|on[2020-01-01 00:15:00] - 1 BHKW2(P_el)|on[2020-01-01 00:15:00] ≤ 1e-05
+ [2020-01-01 00:30:00]: +1 BHKW2|on[2020-01-01 00:30:00] - 1 BHKW2(Q_fu)|on[2020-01-01 00:30:00] - 1 BHKW2(Q_th)|on[2020-01-01 00:30:00] - 1 BHKW2(P_el)|on[2020-01-01 00:30:00] ≤ 1e-05
+ [2020-01-01 00:45:00]: +1 BHKW2|on[2020-01-01 00:45:00] - 1 BHKW2(Q_fu)|on[2020-01-01 00:45:00] - 1 BHKW2(Q_th)|on[2020-01-01 00:45:00] - 1 BHKW2(P_el)|on[2020-01-01 00:45:00] ≤ 1e-05
+ [2020-01-01 01:00:00]: +1 BHKW2|on[2020-01-01 01:00:00] - 1 BHKW2(Q_fu)|on[2020-01-01 01:00:00] - 1 BHKW2(Q_th)|on[2020-01-01 01:00:00] - 1 BHKW2(P_el)|on[2020-01-01 01:00:00] ≤ 1e-05
+ [2020-01-01 01:15:00]: +1 BHKW2|on[2020-01-01 01:15:00] - 1 BHKW2(Q_fu)|on[2020-01-01 01:15:00] - 1 BHKW2(Q_th)|on[2020-01-01 01:15:00] - 1 BHKW2(P_el)|on[2020-01-01 01:15:00] ≤ 1e-05
+ [2020-01-01 01:30:00]: +1 BHKW2|on[2020-01-01 01:30:00] - 1 BHKW2(Q_fu)|on[2020-01-01 01:30:00] - 1 BHKW2(Q_th)|on[2020-01-01 01:30:00] - 1 BHKW2(P_el)|on[2020-01-01 01:30:00] ≤ 1e-05
+ ...
+ [2020-01-03 22:15:00]: +1 BHKW2|on[2020-01-03 22:15:00] - 1 BHKW2(Q_fu)|on[2020-01-03 22:15:00] - 1 BHKW2(Q_th)|on[2020-01-03 22:15:00] - 1 BHKW2(P_el)|on[2020-01-03 22:15:00] ≤ 1e-05
+ [2020-01-03 22:30:00]: +1 BHKW2|on[2020-01-03 22:30:00] - 1 BHKW2(Q_fu)|on[2020-01-03 22:30:00] - 1 BHKW2(Q_th)|on[2020-01-03 22:30:00] - 1 BHKW2(P_el)|on[2020-01-03 22:30:00] ≤ 1e-05
+ [2020-01-03 22:45:00]: +1 BHKW2|on[2020-01-03 22:45:00] - 1 BHKW2(Q_fu)|on[2020-01-03 22:45:00] - 1 BHKW2(Q_th)|on[2020-01-03 22:45:00] - 1 BHKW2(P_el)|on[2020-01-03 22:45:00] ≤ 1e-05
+ [2020-01-03 23:00:00]: +1 BHKW2|on[2020-01-03 23:00:00] - 1 BHKW2(Q_fu)|on[2020-01-03 23:00:00] - 1 BHKW2(Q_th)|on[2020-01-03 23:00:00] - 1 BHKW2(P_el)|on[2020-01-03 23:00:00] ≤ 1e-05
+ [2020-01-03 23:15:00]: +1 BHKW2|on[2020-01-03 23:15:00] - 1 BHKW2(Q_fu)|on[2020-01-03 23:15:00] - 1 BHKW2(Q_th)|on[2020-01-03 23:15:00] - 1 BHKW2(P_el)|on[2020-01-03 23:15:00] ≤ 1e-05
+ [2020-01-03 23:30:00]: +1 BHKW2|on[2020-01-03 23:30:00] - 1 BHKW2(Q_fu)|on[2020-01-03 23:30:00] - 1 BHKW2(Q_th)|on[2020-01-03 23:30:00] - 1 BHKW2(P_el)|on[2020-01-03 23:30:00] ≤ 1e-05
+ [2020-01-03 23:45:00]: +1 BHKW2|on[2020-01-03 23:45:00] - 1 BHKW2(Q_fu)|on[2020-01-03 23:45:00] - 1 BHKW2(Q_th)|on[2020-01-03 23:45:00] - 1 BHKW2(P_el)|on[2020-01-03 23:45:00] ≤ 1e-05
+ "BHKW2|on|lb": |-
+ Constraint `BHKW2|on|lb`
+ [time: 288]:
+ -------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2|on[2020-01-01 00:00:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 00:00:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 00:00:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 00:15:00]: +1 BHKW2|on[2020-01-01 00:15:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 00:15:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 00:15:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 00:15:00] ≥ -0.0
+ [2020-01-01 00:30:00]: +1 BHKW2|on[2020-01-01 00:30:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 00:30:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 00:30:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 00:30:00] ≥ -0.0
+ [2020-01-01 00:45:00]: +1 BHKW2|on[2020-01-01 00:45:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 00:45:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 00:45:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 00:45:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2|on[2020-01-01 01:00:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 01:00:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 01:00:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 01:15:00]: +1 BHKW2|on[2020-01-01 01:15:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 01:15:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 01:15:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 01:15:00] ≥ -0.0
+ [2020-01-01 01:30:00]: +1 BHKW2|on[2020-01-01 01:30:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-01 01:30:00] - 0.3333 BHKW2(Q_th)|on[2020-01-01 01:30:00] - 0.3333 BHKW2(P_el)|on[2020-01-01 01:30:00] ≥ -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 BHKW2|on[2020-01-03 22:15:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-03 22:15:00] - 0.3333 BHKW2(Q_th)|on[2020-01-03 22:15:00] - 0.3333 BHKW2(P_el)|on[2020-01-03 22:15:00] ≥ -0.0
+ [2020-01-03 22:30:00]: +1 BHKW2|on[2020-01-03 22:30:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-03 22:30:00] - 0.3333 BHKW2(Q_th)|on[2020-01-03 22:30:00] - 0.3333 BHKW2(P_el)|on[2020-01-03 22:30:00] ≥ -0.0
+ [2020-01-03 22:45:00]: +1 BHKW2|on[2020-01-03 22:45:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-03 22:45:00] - 0.3333 BHKW2(Q_th)|on[2020-01-03 22:45:00] - 0.3333 BHKW2(P_el)|on[2020-01-03 22:45:00] ≥ -0.0
+ [2020-01-03 23:00:00]: +1 BHKW2|on[2020-01-03 23:00:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-03 23:00:00] - 0.3333 BHKW2(Q_th)|on[2020-01-03 23:00:00] - 0.3333 BHKW2(P_el)|on[2020-01-03 23:00:00] ≥ -0.0
+ [2020-01-03 23:15:00]: +1 BHKW2|on[2020-01-03 23:15:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-03 23:15:00] - 0.3333 BHKW2(Q_th)|on[2020-01-03 23:15:00] - 0.3333 BHKW2(P_el)|on[2020-01-03 23:15:00] ≥ -0.0
+ [2020-01-03 23:30:00]: +1 BHKW2|on[2020-01-03 23:30:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-03 23:30:00] - 0.3333 BHKW2(Q_th)|on[2020-01-03 23:30:00] - 0.3333 BHKW2(P_el)|on[2020-01-03 23:30:00] ≥ -0.0
+ [2020-01-03 23:45:00]: +1 BHKW2|on[2020-01-03 23:45:00] - 0.3333 BHKW2(Q_fu)|on[2020-01-03 23:45:00] - 0.3333 BHKW2(Q_th)|on[2020-01-03 23:45:00] - 0.3333 BHKW2(P_el)|on[2020-01-03 23:45:00] ≥ -0.0
+ "BHKW2|on_hours_total": |-
+ Constraint `BHKW2|on_hours_total`
+ ---------------------------------
+ +1 BHKW2|on_hours_total - 0.25 BHKW2|on[2020-01-01 00:00:00] - 0.25 BHKW2|on[2020-01-01 00:15:00]... -0.25 BHKW2|on[2020-01-03 23:15:00] - 0.25 BHKW2|on[2020-01-03 23:30:00] - 0.25 BHKW2|on[2020-01-03 23:45:00] = -0.0
+ "BHKW2|switch|transition": |-
+ Constraint `BHKW2|switch|transition`
+ [time: 287]:
+ -------------------------------------------------
+ [2020-01-01 00:15:00]: +1 BHKW2|switch|on[2020-01-01 00:15:00] - 1 BHKW2|switch|off[2020-01-01 00:15:00] - 1 BHKW2|on[2020-01-01 00:15:00] + 1 BHKW2|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:30:00]: +1 BHKW2|switch|on[2020-01-01 00:30:00] - 1 BHKW2|switch|off[2020-01-01 00:30:00] - 1 BHKW2|on[2020-01-01 00:30:00] + 1 BHKW2|on[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:45:00]: +1 BHKW2|switch|on[2020-01-01 00:45:00] - 1 BHKW2|switch|off[2020-01-01 00:45:00] - 1 BHKW2|on[2020-01-01 00:45:00] + 1 BHKW2|on[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2|switch|on[2020-01-01 01:00:00] - 1 BHKW2|switch|off[2020-01-01 01:00:00] - 1 BHKW2|on[2020-01-01 01:00:00] + 1 BHKW2|on[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:15:00]: +1 BHKW2|switch|on[2020-01-01 01:15:00] - 1 BHKW2|switch|off[2020-01-01 01:15:00] - 1 BHKW2|on[2020-01-01 01:15:00] + 1 BHKW2|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:30:00]: +1 BHKW2|switch|on[2020-01-01 01:30:00] - 1 BHKW2|switch|off[2020-01-01 01:30:00] - 1 BHKW2|on[2020-01-01 01:30:00] + 1 BHKW2|on[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:45:00]: +1 BHKW2|switch|on[2020-01-01 01:45:00] - 1 BHKW2|switch|off[2020-01-01 01:45:00] - 1 BHKW2|on[2020-01-01 01:45:00] + 1 BHKW2|on[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 BHKW2|switch|on[2020-01-03 22:15:00] - 1 BHKW2|switch|off[2020-01-03 22:15:00] - 1 BHKW2|on[2020-01-03 22:15:00] + 1 BHKW2|on[2020-01-03 22:00:00] = -0.0
+ [2020-01-03 22:30:00]: +1 BHKW2|switch|on[2020-01-03 22:30:00] - 1 BHKW2|switch|off[2020-01-03 22:30:00] - 1 BHKW2|on[2020-01-03 22:30:00] + 1 BHKW2|on[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:45:00]: +1 BHKW2|switch|on[2020-01-03 22:45:00] - 1 BHKW2|switch|off[2020-01-03 22:45:00] - 1 BHKW2|on[2020-01-03 22:45:00] + 1 BHKW2|on[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 23:00:00]: +1 BHKW2|switch|on[2020-01-03 23:00:00] - 1 BHKW2|switch|off[2020-01-03 23:00:00] - 1 BHKW2|on[2020-01-03 23:00:00] + 1 BHKW2|on[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:15:00]: +1 BHKW2|switch|on[2020-01-03 23:15:00] - 1 BHKW2|switch|off[2020-01-03 23:15:00] - 1 BHKW2|on[2020-01-03 23:15:00] + 1 BHKW2|on[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:30:00]: +1 BHKW2|switch|on[2020-01-03 23:30:00] - 1 BHKW2|switch|off[2020-01-03 23:30:00] - 1 BHKW2|on[2020-01-03 23:30:00] + 1 BHKW2|on[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:45:00]: +1 BHKW2|switch|on[2020-01-03 23:45:00] - 1 BHKW2|switch|off[2020-01-03 23:45:00] - 1 BHKW2|on[2020-01-03 23:45:00] + 1 BHKW2|on[2020-01-03 23:30:00] = -0.0
+ "BHKW2|switch|initial": |-
+ Constraint `BHKW2|switch|initial`
+ ---------------------------------
+ +1 BHKW2|switch|on[2020-01-01 00:00:00] - 1 BHKW2|switch|off[2020-01-01 00:00:00] - 1 BHKW2|on[2020-01-01 00:00:00] = -0.0
+ "BHKW2|switch|mutex": |-
+ Constraint `BHKW2|switch|mutex`
+ [time: 288]:
+ --------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2|switch|on[2020-01-01 00:00:00] + 1 BHKW2|switch|off[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 00:15:00]: +1 BHKW2|switch|on[2020-01-01 00:15:00] + 1 BHKW2|switch|off[2020-01-01 00:15:00] ≤ 1.0
+ [2020-01-01 00:30:00]: +1 BHKW2|switch|on[2020-01-01 00:30:00] + 1 BHKW2|switch|off[2020-01-01 00:30:00] ≤ 1.0
+ [2020-01-01 00:45:00]: +1 BHKW2|switch|on[2020-01-01 00:45:00] + 1 BHKW2|switch|off[2020-01-01 00:45:00] ≤ 1.0
+ [2020-01-01 01:00:00]: +1 BHKW2|switch|on[2020-01-01 01:00:00] + 1 BHKW2|switch|off[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 01:15:00]: +1 BHKW2|switch|on[2020-01-01 01:15:00] + 1 BHKW2|switch|off[2020-01-01 01:15:00] ≤ 1.0
+ [2020-01-01 01:30:00]: +1 BHKW2|switch|on[2020-01-01 01:30:00] + 1 BHKW2|switch|off[2020-01-01 01:30:00] ≤ 1.0
+ ...
+ [2020-01-03 22:15:00]: +1 BHKW2|switch|on[2020-01-03 22:15:00] + 1 BHKW2|switch|off[2020-01-03 22:15:00] ≤ 1.0
+ [2020-01-03 22:30:00]: +1 BHKW2|switch|on[2020-01-03 22:30:00] + 1 BHKW2|switch|off[2020-01-03 22:30:00] ≤ 1.0
+ [2020-01-03 22:45:00]: +1 BHKW2|switch|on[2020-01-03 22:45:00] + 1 BHKW2|switch|off[2020-01-03 22:45:00] ≤ 1.0
+ [2020-01-03 23:00:00]: +1 BHKW2|switch|on[2020-01-03 23:00:00] + 1 BHKW2|switch|off[2020-01-03 23:00:00] ≤ 1.0
+ [2020-01-03 23:15:00]: +1 BHKW2|switch|on[2020-01-03 23:15:00] + 1 BHKW2|switch|off[2020-01-03 23:15:00] ≤ 1.0
+ [2020-01-03 23:30:00]: +1 BHKW2|switch|on[2020-01-03 23:30:00] + 1 BHKW2|switch|off[2020-01-03 23:30:00] ≤ 1.0
+ [2020-01-03 23:45:00]: +1 BHKW2|switch|on[2020-01-03 23:45:00] + 1 BHKW2|switch|off[2020-01-03 23:45:00] ≤ 1.0
+ "BHKW2->costs(temporal)": |-
+ Constraint `BHKW2->costs(temporal)`
+ [time: 288]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00]: +1 BHKW2->costs(temporal)[2020-01-01 00:00:00] - 2.4e+04 BHKW2|switch|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 BHKW2->costs(temporal)[2020-01-01 00:15:00] - 2.4e+04 BHKW2|switch|on[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 BHKW2->costs(temporal)[2020-01-01 00:30:00] - 2.4e+04 BHKW2|switch|on[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 BHKW2->costs(temporal)[2020-01-01 00:45:00] - 2.4e+04 BHKW2|switch|on[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 BHKW2->costs(temporal)[2020-01-01 01:00:00] - 2.4e+04 BHKW2|switch|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 BHKW2->costs(temporal)[2020-01-01 01:15:00] - 2.4e+04 BHKW2|switch|on[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 BHKW2->costs(temporal)[2020-01-01 01:30:00] - 2.4e+04 BHKW2|switch|on[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 BHKW2->costs(temporal)[2020-01-03 22:15:00] - 2.4e+04 BHKW2|switch|on[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 BHKW2->costs(temporal)[2020-01-03 22:30:00] - 2.4e+04 BHKW2|switch|on[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 BHKW2->costs(temporal)[2020-01-03 22:45:00] - 2.4e+04 BHKW2|switch|on[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 BHKW2->costs(temporal)[2020-01-03 23:00:00] - 2.4e+04 BHKW2|switch|on[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 BHKW2->costs(temporal)[2020-01-03 23:15:00] - 2.4e+04 BHKW2|switch|on[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 BHKW2->costs(temporal)[2020-01-03 23:30:00] - 2.4e+04 BHKW2|switch|on[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 BHKW2->costs(temporal)[2020-01-03 23:45:00] - 2.4e+04 BHKW2|switch|on[2020-01-03 23:45:00] = -0.0
+ "BHKW2|conversion_0": |-
+ Constraint `BHKW2|conversion_0`
+ [time: 288]:
+ --------------------------------------------
+ [2020-01-01 00:00:00]: +0.58 BHKW2(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +0.58 BHKW2(Q_fu)|flow_rate[2020-01-01 00:15:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +0.58 BHKW2(Q_fu)|flow_rate[2020-01-01 00:30:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +0.58 BHKW2(Q_fu)|flow_rate[2020-01-01 00:45:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +0.58 BHKW2(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +0.58 BHKW2(Q_fu)|flow_rate[2020-01-01 01:15:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +0.58 BHKW2(Q_fu)|flow_rate[2020-01-01 01:30:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +0.58 BHKW2(Q_fu)|flow_rate[2020-01-03 22:15:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +0.58 BHKW2(Q_fu)|flow_rate[2020-01-03 22:30:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +0.58 BHKW2(Q_fu)|flow_rate[2020-01-03 22:45:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +0.58 BHKW2(Q_fu)|flow_rate[2020-01-03 23:00:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +0.58 BHKW2(Q_fu)|flow_rate[2020-01-03 23:15:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +0.58 BHKW2(Q_fu)|flow_rate[2020-01-03 23:30:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +0.58 BHKW2(Q_fu)|flow_rate[2020-01-03 23:45:00] - 1 BHKW2(Q_th)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "BHKW2|conversion_1": |-
+ Constraint `BHKW2|conversion_1`
+ [time: 288]:
+ --------------------------------------------
+ [2020-01-01 00:00:00]: +0.22 BHKW2(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 BHKW2(P_el)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +0.22 BHKW2(Q_fu)|flow_rate[2020-01-01 00:15:00] - 1 BHKW2(P_el)|flow_rate[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +0.22 BHKW2(Q_fu)|flow_rate[2020-01-01 00:30:00] - 1 BHKW2(P_el)|flow_rate[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +0.22 BHKW2(Q_fu)|flow_rate[2020-01-01 00:45:00] - 1 BHKW2(P_el)|flow_rate[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +0.22 BHKW2(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 BHKW2(P_el)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +0.22 BHKW2(Q_fu)|flow_rate[2020-01-01 01:15:00] - 1 BHKW2(P_el)|flow_rate[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +0.22 BHKW2(Q_fu)|flow_rate[2020-01-01 01:30:00] - 1 BHKW2(P_el)|flow_rate[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +0.22 BHKW2(Q_fu)|flow_rate[2020-01-03 22:15:00] - 1 BHKW2(P_el)|flow_rate[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +0.22 BHKW2(Q_fu)|flow_rate[2020-01-03 22:30:00] - 1 BHKW2(P_el)|flow_rate[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +0.22 BHKW2(Q_fu)|flow_rate[2020-01-03 22:45:00] - 1 BHKW2(P_el)|flow_rate[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +0.22 BHKW2(Q_fu)|flow_rate[2020-01-03 23:00:00] - 1 BHKW2(P_el)|flow_rate[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +0.22 BHKW2(Q_fu)|flow_rate[2020-01-03 23:15:00] - 1 BHKW2(P_el)|flow_rate[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +0.22 BHKW2(Q_fu)|flow_rate[2020-01-03 23:30:00] - 1 BHKW2(P_el)|flow_rate[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +0.22 BHKW2(Q_fu)|flow_rate[2020-01-03 23:45:00] - 1 BHKW2(P_el)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Speicher(Q_th_load)|on_hours_total": |-
+ Constraint `Speicher(Q_th_load)|on_hours_total`
+ -----------------------------------------------
+ +1 Speicher(Q_th_load)|on_hours_total - 0.25 Speicher(Q_th_load)|on[2020-01-01 00:00:00] - 0.25 Speicher(Q_th_load)|on[2020-01-01 00:15:00]... -0.25 Speicher(Q_th_load)|on[2020-01-03 23:15:00] - 0.25 Speicher(Q_th_load)|on[2020-01-03 23:30:00] - 0.25 Speicher(Q_th_load)|on[2020-01-03 23:45:00] = -0.0
+ "Speicher(Q_th_load)|flow_rate|ub": |-
+ Constraint `Speicher(Q_th_load)|flow_rate|ub`
+ [time: 288]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 137 Speicher(Q_th_load)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 00:15:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:15:00] - 137 Speicher(Q_th_load)|on[2020-01-01 00:15:00] ≤ -0.0
+ [2020-01-01 00:30:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:30:00] - 137 Speicher(Q_th_load)|on[2020-01-01 00:30:00] ≤ -0.0
+ [2020-01-01 00:45:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:45:00] - 137 Speicher(Q_th_load)|on[2020-01-01 00:45:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] - 137 Speicher(Q_th_load)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 01:15:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:15:00] - 137 Speicher(Q_th_load)|on[2020-01-01 01:15:00] ≤ -0.0
+ [2020-01-01 01:30:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:30:00] - 137 Speicher(Q_th_load)|on[2020-01-01 01:30:00] ≤ -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-03 22:15:00] - 137 Speicher(Q_th_load)|on[2020-01-03 22:15:00] ≤ -0.0
+ [2020-01-03 22:30:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-03 22:30:00] - 137 Speicher(Q_th_load)|on[2020-01-03 22:30:00] ≤ -0.0
+ [2020-01-03 22:45:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-03 22:45:00] - 137 Speicher(Q_th_load)|on[2020-01-03 22:45:00] ≤ -0.0
+ [2020-01-03 23:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:00:00] - 137 Speicher(Q_th_load)|on[2020-01-03 23:00:00] ≤ -0.0
+ [2020-01-03 23:15:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:15:00] - 137 Speicher(Q_th_load)|on[2020-01-03 23:15:00] ≤ -0.0
+ [2020-01-03 23:30:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:30:00] - 137 Speicher(Q_th_load)|on[2020-01-03 23:30:00] ≤ -0.0
+ [2020-01-03 23:45:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:45:00] - 137 Speicher(Q_th_load)|on[2020-01-03 23:45:00] ≤ -0.0
+ "Speicher(Q_th_load)|flow_rate|lb": |-
+ Constraint `Speicher(Q_th_load)|flow_rate|lb`
+ [time: 288]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 00:15:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:15:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 00:15:00] ≥ -0.0
+ [2020-01-01 00:30:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:30:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 00:30:00] ≥ -0.0
+ [2020-01-01 00:45:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:45:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 00:45:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 01:15:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:15:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 01:15:00] ≥ -0.0
+ [2020-01-01 01:30:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:30:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 01:30:00] ≥ -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-03 22:15:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-03 22:15:00] ≥ -0.0
+ [2020-01-03 22:30:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-03 22:30:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-03 22:30:00] ≥ -0.0
+ [2020-01-03 22:45:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-03 22:45:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-03 22:45:00] ≥ -0.0
+ [2020-01-03 23:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-03 23:00:00] ≥ -0.0
+ [2020-01-03 23:15:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:15:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-03 23:15:00] ≥ -0.0
+ [2020-01-03 23:30:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:30:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-03 23:30:00] ≥ -0.0
+ [2020-01-03 23:45:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:45:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-03 23:45:00] ≥ -0.0
+ "Speicher(Q_th_load)|total_flow_hours": |-
+ Constraint `Speicher(Q_th_load)|total_flow_hours`
+ -------------------------------------------------
+ +1 Speicher(Q_th_load)|total_flow_hours - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-01 00:15:00]... -0.25 Speicher(Q_th_load)|flow_rate[2020-01-03 23:15:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-03 23:30:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Speicher(Q_th_unload)|on_hours_total": |-
+ Constraint `Speicher(Q_th_unload)|on_hours_total`
+ -------------------------------------------------
+ +1 Speicher(Q_th_unload)|on_hours_total - 0.25 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] - 0.25 Speicher(Q_th_unload)|on[2020-01-01 00:15:00]... -0.25 Speicher(Q_th_unload)|on[2020-01-03 23:15:00] - 0.25 Speicher(Q_th_unload)|on[2020-01-03 23:30:00] - 0.25 Speicher(Q_th_unload)|on[2020-01-03 23:45:00] = -0.0
+ "Speicher(Q_th_unload)|flow_rate|ub": |-
+ Constraint `Speicher(Q_th_unload)|flow_rate|ub`
+ [time: 288]:
+ ------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 158 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 00:15:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:15:00] - 158 Speicher(Q_th_unload)|on[2020-01-01 00:15:00] ≤ -0.0
+ [2020-01-01 00:30:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:30:00] - 158 Speicher(Q_th_unload)|on[2020-01-01 00:30:00] ≤ -0.0
+ [2020-01-01 00:45:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:45:00] - 158 Speicher(Q_th_unload)|on[2020-01-01 00:45:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] - 158 Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 01:15:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:15:00] - 158 Speicher(Q_th_unload)|on[2020-01-01 01:15:00] ≤ -0.0
+ [2020-01-01 01:30:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:30:00] - 158 Speicher(Q_th_unload)|on[2020-01-01 01:30:00] ≤ -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-03 22:15:00] - 158 Speicher(Q_th_unload)|on[2020-01-03 22:15:00] ≤ -0.0
+ [2020-01-03 22:30:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-03 22:30:00] - 158 Speicher(Q_th_unload)|on[2020-01-03 22:30:00] ≤ -0.0
+ [2020-01-03 22:45:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-03 22:45:00] - 158 Speicher(Q_th_unload)|on[2020-01-03 22:45:00] ≤ -0.0
+ [2020-01-03 23:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:00:00] - 158 Speicher(Q_th_unload)|on[2020-01-03 23:00:00] ≤ -0.0
+ [2020-01-03 23:15:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:15:00] - 158 Speicher(Q_th_unload)|on[2020-01-03 23:15:00] ≤ -0.0
+ [2020-01-03 23:30:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:30:00] - 158 Speicher(Q_th_unload)|on[2020-01-03 23:30:00] ≤ -0.0
+ [2020-01-03 23:45:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:45:00] - 158 Speicher(Q_th_unload)|on[2020-01-03 23:45:00] ≤ -0.0
+ "Speicher(Q_th_unload)|flow_rate|lb": |-
+ Constraint `Speicher(Q_th_unload)|flow_rate|lb`
+ [time: 288]:
+ ------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 00:15:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:15:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 00:15:00] ≥ -0.0
+ [2020-01-01 00:30:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:30:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 00:30:00] ≥ -0.0
+ [2020-01-01 00:45:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:45:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 00:45:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 01:15:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:15:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 01:15:00] ≥ -0.0
+ [2020-01-01 01:30:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:30:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 01:30:00] ≥ -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-03 22:15:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-03 22:15:00] ≥ -0.0
+ [2020-01-03 22:30:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-03 22:30:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-03 22:30:00] ≥ -0.0
+ [2020-01-03 22:45:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-03 22:45:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-03 22:45:00] ≥ -0.0
+ [2020-01-03 23:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-03 23:00:00] ≥ -0.0
+ [2020-01-03 23:15:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:15:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-03 23:15:00] ≥ -0.0
+ [2020-01-03 23:30:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:30:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-03 23:30:00] ≥ -0.0
+ [2020-01-03 23:45:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:45:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-03 23:45:00] ≥ -0.0
+ "Speicher(Q_th_unload)|total_flow_hours": |-
+ Constraint `Speicher(Q_th_unload)|total_flow_hours`
+ ---------------------------------------------------
+ +1 Speicher(Q_th_unload)|total_flow_hours - 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:15:00]... -0.25 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:15:00] - 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:30:00] - 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Speicher|prevent_simultaneous_use": |-
+ Constraint `Speicher|prevent_simultaneous_use`
+ [time: 288]:
+ -----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 00:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 00:15:00]: +1 Speicher(Q_th_load)|on[2020-01-01 00:15:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 00:15:00] ≤ 1.0
+ [2020-01-01 00:30:00]: +1 Speicher(Q_th_load)|on[2020-01-01 00:30:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 00:30:00] ≤ 1.0
+ [2020-01-01 00:45:00]: +1 Speicher(Q_th_load)|on[2020-01-01 00:45:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 00:45:00] ≤ 1.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 01:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 01:15:00]: +1 Speicher(Q_th_load)|on[2020-01-01 01:15:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 01:15:00] ≤ 1.0
+ [2020-01-01 01:30:00]: +1 Speicher(Q_th_load)|on[2020-01-01 01:30:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 01:30:00] ≤ 1.0
+ ...
+ [2020-01-03 22:15:00]: +1 Speicher(Q_th_load)|on[2020-01-03 22:15:00] + 1 Speicher(Q_th_unload)|on[2020-01-03 22:15:00] ≤ 1.0
+ [2020-01-03 22:30:00]: +1 Speicher(Q_th_load)|on[2020-01-03 22:30:00] + 1 Speicher(Q_th_unload)|on[2020-01-03 22:30:00] ≤ 1.0
+ [2020-01-03 22:45:00]: +1 Speicher(Q_th_load)|on[2020-01-03 22:45:00] + 1 Speicher(Q_th_unload)|on[2020-01-03 22:45:00] ≤ 1.0
+ [2020-01-03 23:00:00]: +1 Speicher(Q_th_load)|on[2020-01-03 23:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-03 23:00:00] ≤ 1.0
+ [2020-01-03 23:15:00]: +1 Speicher(Q_th_load)|on[2020-01-03 23:15:00] + 1 Speicher(Q_th_unload)|on[2020-01-03 23:15:00] ≤ 1.0
+ [2020-01-03 23:30:00]: +1 Speicher(Q_th_load)|on[2020-01-03 23:30:00] + 1 Speicher(Q_th_unload)|on[2020-01-03 23:30:00] ≤ 1.0
+ [2020-01-03 23:45:00]: +1 Speicher(Q_th_load)|on[2020-01-03 23:45:00] + 1 Speicher(Q_th_unload)|on[2020-01-03 23:45:00] ≤ 1.0
+ "Speicher|netto_discharge": |-
+ Constraint `Speicher|netto_discharge`
+ [time: 288]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher|netto_discharge[2020-01-01 00:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 Speicher|netto_discharge[2020-01-01 00:15:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:15:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 Speicher|netto_discharge[2020-01-01 00:30:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:30:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 Speicher|netto_discharge[2020-01-01 00:45:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:45:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Speicher|netto_discharge[2020-01-01 01:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 Speicher|netto_discharge[2020-01-01 01:15:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:15:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 Speicher|netto_discharge[2020-01-01 01:30:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:30:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Speicher|netto_discharge[2020-01-03 22:15:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-03 22:15:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 Speicher|netto_discharge[2020-01-03 22:30:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-03 22:30:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 Speicher|netto_discharge[2020-01-03 22:45:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-03 22:45:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 Speicher|netto_discharge[2020-01-03 23:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 Speicher|netto_discharge[2020-01-03 23:15:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:15:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 Speicher|netto_discharge[2020-01-03 23:30:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:30:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 Speicher|netto_discharge[2020-01-03 23:45:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:45:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Speicher|charge_state": |-
+ Constraint `Speicher|charge_state`
+ [time: 288]:
+ -----------------------------------------------
+ [2020-01-01 00:15:00]: +1 Speicher|charge_state[2020-01-01 00:15:00] - 0.9997 Speicher|charge_state[2020-01-01 00:00:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] + 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:30:00]: +1 Speicher|charge_state[2020-01-01 00:30:00] - 0.9997 Speicher|charge_state[2020-01-01 00:15:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-01 00:15:00] + 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:45:00]: +1 Speicher|charge_state[2020-01-01 00:45:00] - 0.9997 Speicher|charge_state[2020-01-01 00:30:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-01 00:30:00] + 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Speicher|charge_state[2020-01-01 01:00:00] - 0.9997 Speicher|charge_state[2020-01-01 00:45:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-01 00:45:00] + 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:15:00]: +1 Speicher|charge_state[2020-01-01 01:15:00] - 0.9997 Speicher|charge_state[2020-01-01 01:00:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] + 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:30:00]: +1 Speicher|charge_state[2020-01-01 01:30:00] - 0.9997 Speicher|charge_state[2020-01-01 01:15:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-01 01:15:00] + 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:45:00]: +1 Speicher|charge_state[2020-01-01 01:45:00] - 0.9997 Speicher|charge_state[2020-01-01 01:30:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-01 01:30:00] + 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:30:00]: +1 Speicher|charge_state[2020-01-03 22:30:00] - 0.9997 Speicher|charge_state[2020-01-03 22:15:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-03 22:15:00] + 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:45:00]: +1 Speicher|charge_state[2020-01-03 22:45:00] - 0.9997 Speicher|charge_state[2020-01-03 22:30:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-03 22:30:00] + 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 23:00:00]: +1 Speicher|charge_state[2020-01-03 23:00:00] - 0.9997 Speicher|charge_state[2020-01-03 22:45:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-03 22:45:00] + 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:15:00]: +1 Speicher|charge_state[2020-01-03 23:15:00] - 0.9997 Speicher|charge_state[2020-01-03 23:00:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-03 23:00:00] + 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:30:00]: +1 Speicher|charge_state[2020-01-03 23:30:00] - 0.9997 Speicher|charge_state[2020-01-03 23:15:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-03 23:15:00] + 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:45:00]: +1 Speicher|charge_state[2020-01-03 23:45:00] - 0.9997 Speicher|charge_state[2020-01-03 23:30:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-03 23:30:00] + 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:30:00] = -0.0
+ [2020-01-04 00:00:00]: +1 Speicher|charge_state[2020-01-04 00:00:00] - 0.9997 Speicher|charge_state[2020-01-03 23:45:00] - 0.25 Speicher(Q_th_load)|flow_rate[2020-01-03 23:45:00] + 0.25 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:45:00] = -0.0
+ "Speicher|initial_charge_state": |-
+ Constraint `Speicher|initial_charge_state`
+ ------------------------------------------
+ +1 Speicher|charge_state[2020-01-01 00:00:00] = 137.0
+ "Speicher|final_charge_max": |-
+ Constraint `Speicher|final_charge_max`
+ --------------------------------------
+ +1 Speicher|charge_state[2020-01-04 00:00:00] ≤ 158.0
+ "Speicher|final_charge_min": |-
+ Constraint `Speicher|final_charge_min`
+ --------------------------------------
+ +1 Speicher|charge_state[2020-01-04 00:00:00] ≥ 137.0
+ "Strom|balance": |-
+ Constraint `Strom|balance`
+ [time: 288]:
+ ---------------------------------------
+ [2020-01-01 00:00:00]: +1 Stromtarif(P_el)|flow_rate[2020-01-01 00:00:00] + 1 BHKW2(P_el)|flow_rate[2020-01-01 00:00:00] - 1 Stromlast(P_el_Last)|flow_rate[2020-01-01 00:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] + 1 Strom|excess_input[2020-01-01 00:00:00] - 1 Strom|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 Stromtarif(P_el)|flow_rate[2020-01-01 00:15:00] + 1 BHKW2(P_el)|flow_rate[2020-01-01 00:15:00] - 1 Stromlast(P_el_Last)|flow_rate[2020-01-01 00:15:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:15:00] + 1 Strom|excess_input[2020-01-01 00:15:00] - 1 Strom|excess_output[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 Stromtarif(P_el)|flow_rate[2020-01-01 00:30:00] + 1 BHKW2(P_el)|flow_rate[2020-01-01 00:30:00] - 1 Stromlast(P_el_Last)|flow_rate[2020-01-01 00:30:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:30:00] + 1 Strom|excess_input[2020-01-01 00:30:00] - 1 Strom|excess_output[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 Stromtarif(P_el)|flow_rate[2020-01-01 00:45:00] + 1 BHKW2(P_el)|flow_rate[2020-01-01 00:45:00] - 1 Stromlast(P_el_Last)|flow_rate[2020-01-01 00:45:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:45:00] + 1 Strom|excess_input[2020-01-01 00:45:00] - 1 Strom|excess_output[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Stromtarif(P_el)|flow_rate[2020-01-01 01:00:00] + 1 BHKW2(P_el)|flow_rate[2020-01-01 01:00:00] - 1 Stromlast(P_el_Last)|flow_rate[2020-01-01 01:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] + 1 Strom|excess_input[2020-01-01 01:00:00] - 1 Strom|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 Stromtarif(P_el)|flow_rate[2020-01-01 01:15:00] + 1 BHKW2(P_el)|flow_rate[2020-01-01 01:15:00] - 1 Stromlast(P_el_Last)|flow_rate[2020-01-01 01:15:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:15:00] + 1 Strom|excess_input[2020-01-01 01:15:00] - 1 Strom|excess_output[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 Stromtarif(P_el)|flow_rate[2020-01-01 01:30:00] + 1 BHKW2(P_el)|flow_rate[2020-01-01 01:30:00] - 1 Stromlast(P_el_Last)|flow_rate[2020-01-01 01:30:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:30:00] + 1 Strom|excess_input[2020-01-01 01:30:00] - 1 Strom|excess_output[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Stromtarif(P_el)|flow_rate[2020-01-03 22:15:00] + 1 BHKW2(P_el)|flow_rate[2020-01-03 22:15:00] - 1 Stromlast(P_el_Last)|flow_rate[2020-01-03 22:15:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-03 22:15:00] + 1 Strom|excess_input[2020-01-03 22:15:00] - 1 Strom|excess_output[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 Stromtarif(P_el)|flow_rate[2020-01-03 22:30:00] + 1 BHKW2(P_el)|flow_rate[2020-01-03 22:30:00] - 1 Stromlast(P_el_Last)|flow_rate[2020-01-03 22:30:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-03 22:30:00] + 1 Strom|excess_input[2020-01-03 22:30:00] - 1 Strom|excess_output[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 Stromtarif(P_el)|flow_rate[2020-01-03 22:45:00] + 1 BHKW2(P_el)|flow_rate[2020-01-03 22:45:00] - 1 Stromlast(P_el_Last)|flow_rate[2020-01-03 22:45:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-03 22:45:00] + 1 Strom|excess_input[2020-01-03 22:45:00] - 1 Strom|excess_output[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 Stromtarif(P_el)|flow_rate[2020-01-03 23:00:00] + 1 BHKW2(P_el)|flow_rate[2020-01-03 23:00:00] - 1 Stromlast(P_el_Last)|flow_rate[2020-01-03 23:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-03 23:00:00] + 1 Strom|excess_input[2020-01-03 23:00:00] - 1 Strom|excess_output[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 Stromtarif(P_el)|flow_rate[2020-01-03 23:15:00] + 1 BHKW2(P_el)|flow_rate[2020-01-03 23:15:00] - 1 Stromlast(P_el_Last)|flow_rate[2020-01-03 23:15:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-03 23:15:00] + 1 Strom|excess_input[2020-01-03 23:15:00] - 1 Strom|excess_output[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 Stromtarif(P_el)|flow_rate[2020-01-03 23:30:00] + 1 BHKW2(P_el)|flow_rate[2020-01-03 23:30:00] - 1 Stromlast(P_el_Last)|flow_rate[2020-01-03 23:30:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-03 23:30:00] + 1 Strom|excess_input[2020-01-03 23:30:00] - 1 Strom|excess_output[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 Stromtarif(P_el)|flow_rate[2020-01-03 23:45:00] + 1 BHKW2(P_el)|flow_rate[2020-01-03 23:45:00] - 1 Stromlast(P_el_Last)|flow_rate[2020-01-03 23:45:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-03 23:45:00] + 1 Strom|excess_input[2020-01-03 23:45:00] - 1 Strom|excess_output[2020-01-03 23:45:00] = -0.0
+ "Strom->Penalty": |-
+ Constraint `Strom->Penalty`
+ ---------------------------
+ +1 Strom->Penalty - 2.5e+04 Strom|excess_input[2020-01-01 00:00:00] - 2.5e+04 Strom|excess_input[2020-01-01 00:15:00]... -2.5e+04 Strom|excess_output[2020-01-03 23:15:00] - 2.5e+04 Strom|excess_output[2020-01-03 23:30:00] - 2.5e+04 Strom|excess_output[2020-01-03 23:45:00] = -0.0
+ "Fernwärme|balance": |-
+ Constraint `Fernwärme|balance`
+ [time: 288]:
+ -------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 00:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] + 1 Fernwärme|excess_input[2020-01-01 00:00:00] - 1 Fernwärme|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 00:15:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 00:15:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:15:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:15:00] + 1 Fernwärme|excess_input[2020-01-01 00:15:00] - 1 Fernwärme|excess_output[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 00:30:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 00:30:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:30:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:30:00] + 1 Fernwärme|excess_input[2020-01-01 00:30:00] - 1 Fernwärme|excess_output[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 00:45:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 00:45:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:45:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:45:00] + 1 Fernwärme|excess_input[2020-01-01 00:45:00] - 1 Fernwärme|excess_output[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 01:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] + 1 Fernwärme|excess_input[2020-01-01 01:00:00] - 1 Fernwärme|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 01:15:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 01:15:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:15:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:15:00] + 1 Fernwärme|excess_input[2020-01-01 01:15:00] - 1 Fernwärme|excess_output[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 01:30:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-01 01:30:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:30:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:30:00] + 1 Fernwärme|excess_input[2020-01-01 01:30:00] - 1 Fernwärme|excess_output[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Kessel(Q_th)|flow_rate[2020-01-03 22:15:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-03 22:15:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-03 22:15:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-03 22:15:00] + 1 Fernwärme|excess_input[2020-01-03 22:15:00] - 1 Fernwärme|excess_output[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 Kessel(Q_th)|flow_rate[2020-01-03 22:30:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-03 22:30:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-03 22:30:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-03 22:30:00] + 1 Fernwärme|excess_input[2020-01-03 22:30:00] - 1 Fernwärme|excess_output[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 Kessel(Q_th)|flow_rate[2020-01-03 22:45:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-03 22:45:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-03 22:45:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-03 22:45:00] + 1 Fernwärme|excess_input[2020-01-03 22:45:00] - 1 Fernwärme|excess_output[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-03 23:00:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-03 23:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:00:00] + 1 Fernwärme|excess_input[2020-01-03 23:00:00] - 1 Fernwärme|excess_output[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 Kessel(Q_th)|flow_rate[2020-01-03 23:15:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-03 23:15:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:15:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:15:00] + 1 Fernwärme|excess_input[2020-01-03 23:15:00] - 1 Fernwärme|excess_output[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 Kessel(Q_th)|flow_rate[2020-01-03 23:30:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-03 23:30:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:30:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:30:00] + 1 Fernwärme|excess_input[2020-01-03 23:30:00] - 1 Fernwärme|excess_output[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 Kessel(Q_th)|flow_rate[2020-01-03 23:45:00] + 1 BHKW2(Q_th)|flow_rate[2020-01-03 23:45:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-03 23:45:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-03 23:45:00] + 1 Fernwärme|excess_input[2020-01-03 23:45:00] - 1 Fernwärme|excess_output[2020-01-03 23:45:00] = -0.0
+ "Fernwärme->Penalty": |-
+ Constraint `Fernwärme->Penalty`
+ -------------------------------
+ +1 Fernwärme->Penalty - 2.5e+04 Fernwärme|excess_input[2020-01-01 00:00:00] - 2.5e+04 Fernwärme|excess_input[2020-01-01 00:15:00]... -2.5e+04 Fernwärme|excess_output[2020-01-03 23:15:00] - 2.5e+04 Fernwärme|excess_output[2020-01-03 23:30:00] - 2.5e+04 Fernwärme|excess_output[2020-01-03 23:45:00] = -0.0
+ "Gas|balance": |-
+ Constraint `Gas|balance`
+ [time: 288]:
+ -------------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] + 1 Gas|excess_input[2020-01-01 00:00:00] - 1 Gas|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:15:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 00:15:00] + 1 Gas|excess_input[2020-01-01 00:15:00] - 1 Gas|excess_output[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:30:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 00:30:00] + 1 Gas|excess_input[2020-01-01 00:30:00] - 1 Gas|excess_output[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:45:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 00:45:00] + 1 Gas|excess_input[2020-01-01 00:45:00] - 1 Gas|excess_output[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] + 1 Gas|excess_input[2020-01-01 01:00:00] - 1 Gas|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:15:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 01:15:00] + 1 Gas|excess_input[2020-01-01 01:15:00] - 1 Gas|excess_output[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:30:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 01:30:00] + 1 Gas|excess_input[2020-01-01 01:30:00] - 1 Gas|excess_output[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-03 22:15:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-03 22:15:00] + 1 Gas|excess_input[2020-01-03 22:15:00] - 1 Gas|excess_output[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-03 22:30:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-03 22:30:00] + 1 Gas|excess_input[2020-01-03 22:30:00] - 1 Gas|excess_output[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-03 22:45:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-03 22:45:00] + 1 Gas|excess_input[2020-01-03 22:45:00] - 1 Gas|excess_output[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-03 23:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-03 23:00:00] + 1 Gas|excess_input[2020-01-03 23:00:00] - 1 Gas|excess_output[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-03 23:15:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-03 23:15:00] + 1 Gas|excess_input[2020-01-03 23:15:00] - 1 Gas|excess_output[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-03 23:30:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-03 23:30:00] + 1 Gas|excess_input[2020-01-03 23:30:00] - 1 Gas|excess_output[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-03 23:45:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-03 23:45:00] + 1 Gas|excess_input[2020-01-03 23:45:00] - 1 Gas|excess_output[2020-01-03 23:45:00] = -0.0
+ "Gas->Penalty": |-
+ Constraint `Gas->Penalty`
+ -------------------------
+ +1 Gas->Penalty - 2.5e+04 Gas|excess_input[2020-01-01 00:00:00] - 2.5e+04 Gas|excess_input[2020-01-01 00:15:00]... -2.5e+04 Gas|excess_output[2020-01-03 23:15:00] - 2.5e+04 Gas|excess_output[2020-01-03 23:30:00] - 2.5e+04 Gas|excess_output[2020-01-03 23:45:00] = -0.0
+ "Kohle|balance": |-
+ Constraint `Kohle|balance`
+ [time: 288]:
+ ---------------------------------------
+ [2020-01-01 00:00:00]: +1 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:00:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:00:00] + 1 Kohle|excess_input[2020-01-01 00:00:00] - 1 Kohle|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 00:15:00]: +1 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:15:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:15:00] + 1 Kohle|excess_input[2020-01-01 00:15:00] - 1 Kohle|excess_output[2020-01-01 00:15:00] = -0.0
+ [2020-01-01 00:30:00]: +1 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:30:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:30:00] + 1 Kohle|excess_input[2020-01-01 00:30:00] - 1 Kohle|excess_output[2020-01-01 00:30:00] = -0.0
+ [2020-01-01 00:45:00]: +1 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 00:45:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 00:45:00] + 1 Kohle|excess_input[2020-01-01 00:45:00] - 1 Kohle|excess_output[2020-01-01 00:45:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 01:00:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 01:00:00] + 1 Kohle|excess_input[2020-01-01 01:00:00] - 1 Kohle|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 01:15:00]: +1 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 01:15:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 01:15:00] + 1 Kohle|excess_input[2020-01-01 01:15:00] - 1 Kohle|excess_output[2020-01-01 01:15:00] = -0.0
+ [2020-01-01 01:30:00]: +1 Kohletarif(Q_Kohle)|flow_rate[2020-01-01 01:30:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-01 01:30:00] + 1 Kohle|excess_input[2020-01-01 01:30:00] - 1 Kohle|excess_output[2020-01-01 01:30:00] = -0.0
+ ...
+ [2020-01-03 22:15:00]: +1 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 22:15:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-03 22:15:00] + 1 Kohle|excess_input[2020-01-03 22:15:00] - 1 Kohle|excess_output[2020-01-03 22:15:00] = -0.0
+ [2020-01-03 22:30:00]: +1 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 22:30:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-03 22:30:00] + 1 Kohle|excess_input[2020-01-03 22:30:00] - 1 Kohle|excess_output[2020-01-03 22:30:00] = -0.0
+ [2020-01-03 22:45:00]: +1 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 22:45:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-03 22:45:00] + 1 Kohle|excess_input[2020-01-03 22:45:00] - 1 Kohle|excess_output[2020-01-03 22:45:00] = -0.0
+ [2020-01-03 23:00:00]: +1 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:00:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-03 23:00:00] + 1 Kohle|excess_input[2020-01-03 23:00:00] - 1 Kohle|excess_output[2020-01-03 23:00:00] = -0.0
+ [2020-01-03 23:15:00]: +1 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:15:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-03 23:15:00] + 1 Kohle|excess_input[2020-01-03 23:15:00] - 1 Kohle|excess_output[2020-01-03 23:15:00] = -0.0
+ [2020-01-03 23:30:00]: +1 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:30:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-03 23:30:00] + 1 Kohle|excess_input[2020-01-03 23:30:00] - 1 Kohle|excess_output[2020-01-03 23:30:00] = -0.0
+ [2020-01-03 23:45:00]: +1 Kohletarif(Q_Kohle)|flow_rate[2020-01-03 23:45:00] - 1 BHKW2(Q_fu)|flow_rate[2020-01-03 23:45:00] + 1 Kohle|excess_input[2020-01-03 23:45:00] - 1 Kohle|excess_output[2020-01-03 23:45:00] = -0.0
+ "Kohle->Penalty": |-
+ Constraint `Kohle->Penalty`
+ ---------------------------
+ +1 Kohle->Penalty - 2.5e+04 Kohle|excess_input[2020-01-01 00:00:00] - 2.5e+04 Kohle|excess_input[2020-01-01 00:15:00]... -2.5e+04 Kohle|excess_output[2020-01-03 23:15:00] - 2.5e+04 Kohle|excess_output[2020-01-03 23:30:00] - 2.5e+04 Kohle|excess_output[2020-01-03 23:45:00] = -0.0
+binaries:
+ - "Kessel(Q_fu)|on"
+ - "Kessel(Q_fu)|switch|on"
+ - "Kessel(Q_fu)|switch|off"
+ - "BHKW2(Q_fu)|on"
+ - "BHKW2(Q_th)|on"
+ - "BHKW2(P_el)|on"
+ - "BHKW2|on"
+ - "BHKW2|switch|on"
+ - "BHKW2|switch|off"
+ - "Speicher(Q_th_load)|on"
+ - "Speicher(Q_th_unload)|on"
+integers: []
+continuous:
+ - costs(periodic)
+ - costs(temporal)
+ - "costs(temporal)|per_timestep"
+ - costs
+ - CO2(periodic)
+ - CO2(temporal)
+ - "CO2(temporal)|per_timestep"
+ - CO2
+ - PE(periodic)
+ - PE(temporal)
+ - "PE(temporal)|per_timestep"
+ - PE
+ - Penalty
+ - "Wärmelast(Q_th_Last)|flow_rate"
+ - "Wärmelast(Q_th_Last)|total_flow_hours"
+ - "Stromlast(P_el_Last)|flow_rate"
+ - "Stromlast(P_el_Last)|total_flow_hours"
+ - "Kohletarif(Q_Kohle)|flow_rate"
+ - "Kohletarif(Q_Kohle)|total_flow_hours"
+ - "Kohletarif(Q_Kohle)->costs(temporal)"
+ - "Kohletarif(Q_Kohle)->CO2(temporal)"
+ - "Gastarif(Q_Gas)|flow_rate"
+ - "Gastarif(Q_Gas)|total_flow_hours"
+ - "Gastarif(Q_Gas)->costs(temporal)"
+ - "Gastarif(Q_Gas)->CO2(temporal)"
+ - "Einspeisung(P_el)|flow_rate"
+ - "Einspeisung(P_el)|total_flow_hours"
+ - "Einspeisung(P_el)->costs(temporal)"
+ - "Stromtarif(P_el)|flow_rate"
+ - "Stromtarif(P_el)|total_flow_hours"
+ - "Stromtarif(P_el)->costs(temporal)"
+ - "Stromtarif(P_el)->CO2(temporal)"
+ - "Kessel(Q_fu)|flow_rate"
+ - "Kessel(Q_fu)|on_hours_total"
+ - "Kessel(Q_fu)->costs(temporal)"
+ - "Kessel(Q_fu)|total_flow_hours"
+ - "Kessel(Q_th)|flow_rate"
+ - "Kessel(Q_th)|total_flow_hours"
+ - "BHKW2(Q_fu)|flow_rate"
+ - "BHKW2(Q_fu)|on_hours_total"
+ - "BHKW2(Q_fu)|total_flow_hours"
+ - "BHKW2(Q_th)|flow_rate"
+ - "BHKW2(Q_th)|on_hours_total"
+ - "BHKW2(Q_th)|total_flow_hours"
+ - "BHKW2(P_el)|flow_rate"
+ - "BHKW2(P_el)|on_hours_total"
+ - "BHKW2(P_el)|total_flow_hours"
+ - "BHKW2|on_hours_total"
+ - "BHKW2->costs(temporal)"
+ - "Speicher(Q_th_load)|flow_rate"
+ - "Speicher(Q_th_load)|on_hours_total"
+ - "Speicher(Q_th_load)|total_flow_hours"
+ - "Speicher(Q_th_unload)|flow_rate"
+ - "Speicher(Q_th_unload)|on_hours_total"
+ - "Speicher(Q_th_unload)|total_flow_hours"
+ - "Speicher|charge_state"
+ - "Speicher|netto_discharge"
+ - "Strom|excess_input"
+ - "Strom|excess_output"
+ - "Strom->Penalty"
+ - "Fernwärme|excess_input"
+ - "Fernwärme|excess_output"
+ - "Fernwärme->Penalty"
+ - "Gas|excess_input"
+ - "Gas|excess_output"
+ - "Gas->Penalty"
+ - "Kohle|excess_input"
+ - "Kohle|excess_output"
+ - "Kohle->Penalty"
+infeasible_constraints: ''
diff --git a/tests/ressources/v4-api/io_flow_system_long--solution.nc4 b/tests/ressources/v4-api/io_flow_system_long--solution.nc4
new file mode 100644
index 000000000..311aa66a3
Binary files /dev/null and b/tests/ressources/v4-api/io_flow_system_long--solution.nc4 differ
diff --git a/tests/ressources/v4-api/io_flow_system_long--summary.yaml b/tests/ressources/v4-api/io_flow_system_long--summary.yaml
new file mode 100644
index 000000000..dd9daded4
--- /dev/null
+++ b/tests/ressources/v4-api/io_flow_system_long--summary.yaml
@@ -0,0 +1,54 @@
+Name: io_flow_system_long
+Number of timesteps: 288
+Calculation Type: FullCalculation
+Constraints: 11557
+Variables: 13283
+Main Results:
+ Objective: 343613.3
+ Penalty: 0.0
+ Effects:
+ CO2 [kg]:
+ temporal: 7653.75
+ periodic: -0.0
+ total: 7653.75
+ costs [€]:
+ temporal: 343613.3
+ periodic: -0.0
+ total: 343613.3
+ PE [kWh_PE]:
+ temporal: -0.0
+ periodic: -0.0
+ total: 0.0
+ Invest-Decisions:
+ Invested: {}
+ Not invested: {}
+ Buses with excess: []
+Durations:
+ modeling: 0.78
+ solving: 7.57
+ saving: 0.0
+Config:
+ config_name: flixopt
+ logging:
+ level: INFO
+ file: null
+ console: false
+ max_file_size: 10485760
+ backup_count: 5
+ verbose_tracebacks: false
+ modeling:
+ big: 10000000
+ epsilon: 1.0e-05
+ big_binary_bound: 100000
+ solving:
+ mip_gap: 0.01
+ time_limit_seconds: 300
+ log_to_console: false
+ log_main_results: false
+ plotting:
+ default_show: false
+ default_engine: plotly
+ default_dpi: 300
+ default_facet_cols: 3
+ default_sequential_colorscale: turbo
+ default_qualitative_colorscale: plotly
diff --git a/tests/ressources/v4-api/io_flow_system_segments--flow_system.nc4 b/tests/ressources/v4-api/io_flow_system_segments--flow_system.nc4
new file mode 100644
index 000000000..082c37eb5
Binary files /dev/null and b/tests/ressources/v4-api/io_flow_system_segments--flow_system.nc4 differ
diff --git a/tests/ressources/v4-api/io_flow_system_segments--model_documentation.yaml b/tests/ressources/v4-api/io_flow_system_segments--model_documentation.yaml
new file mode 100644
index 000000000..2ac0e8b68
--- /dev/null
+++ b/tests/ressources/v4-api/io_flow_system_segments--model_documentation.yaml
@@ -0,0 +1,1914 @@
+objective: |-
+ Objective:
+ ----------
+ LinearExpression: +1 costs + 1 Penalty
+ Sense: min
+ Value: -11005.751896495389
+termination_condition: optimal
+status: ok
+nvars: 508
+nvarsbin: 146
+nvarscont: 362
+ncons: 590
+variables:
+ costs(periodic): |-
+ Variable
+ --------
+ costs(periodic) ∈ [-inf, inf]
+ costs(temporal): |-
+ Variable
+ --------
+ costs(temporal) ∈ [-inf, inf]
+ "costs(temporal)|per_timestep": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: costs(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: costs(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: costs(temporal)|per_timestep[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: costs(temporal)|per_timestep[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: costs(temporal)|per_timestep[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: costs(temporal)|per_timestep[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: costs(temporal)|per_timestep[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: costs(temporal)|per_timestep[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: costs(temporal)|per_timestep[2020-01-01 08:00:00] ∈ [-inf, inf]
+ costs: |-
+ Variable
+ --------
+ costs ∈ [-inf, inf]
+ CO2(periodic): |-
+ Variable
+ --------
+ CO2(periodic) ∈ [-inf, inf]
+ CO2(temporal): |-
+ Variable
+ --------
+ CO2(temporal) ∈ [-inf, inf]
+ "CO2(temporal)|per_timestep": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CO2(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: CO2(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: CO2(temporal)|per_timestep[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: CO2(temporal)|per_timestep[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: CO2(temporal)|per_timestep[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: CO2(temporal)|per_timestep[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: CO2(temporal)|per_timestep[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: CO2(temporal)|per_timestep[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: CO2(temporal)|per_timestep[2020-01-01 08:00:00] ∈ [-inf, inf]
+ CO2: |-
+ Variable
+ --------
+ CO2 ∈ [-inf, inf]
+ PE(periodic): |-
+ Variable
+ --------
+ PE(periodic) ∈ [-inf, inf]
+ PE(temporal): |-
+ Variable
+ --------
+ PE(temporal) ∈ [-inf, inf]
+ "PE(temporal)|per_timestep": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: PE(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: PE(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: PE(temporal)|per_timestep[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: PE(temporal)|per_timestep[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: PE(temporal)|per_timestep[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: PE(temporal)|per_timestep[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: PE(temporal)|per_timestep[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: PE(temporal)|per_timestep[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: PE(temporal)|per_timestep[2020-01-01 08:00:00] ∈ [-inf, inf]
+ PE: |-
+ Variable
+ --------
+ PE ∈ [-inf, 3500]
+ Penalty: |-
+ Variable
+ --------
+ Penalty ∈ [-inf, inf]
+ "CO2(temporal)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Wärmelast(Q_th_Last)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00] ∈ [30, 30]
+ [2020-01-01 01:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00] ∈ [0, 0]
+ [2020-01-01 02:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 02:00:00] ∈ [90, 90]
+ [2020-01-01 03:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 03:00:00] ∈ [110, 110]
+ [2020-01-01 04:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 04:00:00] ∈ [110, 110]
+ [2020-01-01 05:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 05:00:00] ∈ [20, 20]
+ [2020-01-01 06:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 06:00:00] ∈ [20, 20]
+ [2020-01-01 07:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00] ∈ [20, 20]
+ [2020-01-01 08:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00] ∈ [20, 20]
+ "Wärmelast(Q_th_Last)|total_flow_hours": |-
+ Variable
+ --------
+ Wärmelast(Q_th_Last)|total_flow_hours ∈ [0, inf]
+ "Gastarif(Q_Gas)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 02:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1000]
+ [2020-01-01 03:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1000]
+ [2020-01-01 04:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1000]
+ [2020-01-01 05:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1000]
+ [2020-01-01 06:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1000]
+ [2020-01-01 07:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1000]
+ [2020-01-01 08:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1000]
+ "Gastarif(Q_Gas)|total_flow_hours": |-
+ Variable
+ --------
+ Gastarif(Q_Gas)|total_flow_hours ∈ [0, inf]
+ "Gastarif(Q_Gas)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Gastarif(Q_Gas)->CO2(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Einspeisung(P_el)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+07]
+ [2020-01-01 05:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+07]
+ [2020-01-01 06:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+07]
+ "Einspeisung(P_el)|total_flow_hours": |-
+ Variable
+ --------
+ Einspeisung(P_el)|total_flow_hours ∈ [0, inf]
+ "Einspeisung(P_el)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Kessel(Q_fu)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] ∈ [0, 200]
+ [2020-01-01 01:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] ∈ [0, 200]
+ [2020-01-01 02:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 02:00:00] ∈ [0, 200]
+ [2020-01-01 03:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 03:00:00] ∈ [0, 200]
+ [2020-01-01 04:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 04:00:00] ∈ [0, 200]
+ [2020-01-01 05:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 05:00:00] ∈ [0, 200]
+ [2020-01-01 06:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] ∈ [0, 200]
+ [2020-01-01 07:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] ∈ [0, 200]
+ [2020-01-01 08:00:00]: Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] ∈ [0, 200]
+ "Kessel(Q_fu)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_fu)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_fu)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel(Q_fu)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel(Q_fu)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel(Q_fu)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel(Q_fu)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel(Q_fu)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel(Q_fu)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel(Q_fu)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel(Q_fu)|on_hours_total": |-
+ Variable
+ --------
+ Kessel(Q_fu)|on_hours_total ∈ [0, inf]
+ "Kessel(Q_fu)|total_flow_hours": |-
+ Variable
+ --------
+ Kessel(Q_fu)|total_flow_hours ∈ [0, inf]
+ "Kessel(Q_th)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] ∈ [0, 50]
+ [2020-01-01 01:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] ∈ [0, 50]
+ [2020-01-01 02:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] ∈ [0, 50]
+ [2020-01-01 03:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] ∈ [0, 50]
+ [2020-01-01 04:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] ∈ [0, 50]
+ [2020-01-01 05:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] ∈ [0, 50]
+ [2020-01-01 06:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] ∈ [0, 50]
+ [2020-01-01 07:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] ∈ [0, 50]
+ [2020-01-01 08:00:00]: Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] ∈ [0, 50]
+ "Kessel(Q_th)|size": |-
+ Variable
+ --------
+ Kessel(Q_th)|size ∈ [50, 50]
+ "Kessel(Q_th)->costs(periodic)": |-
+ Variable
+ --------
+ Kessel(Q_th)->costs(periodic) ∈ [-inf, inf]
+ "Kessel(Q_th)->PE(periodic)": |-
+ Variable
+ --------
+ Kessel(Q_th)->PE(periodic) ∈ [-inf, inf]
+ "Kessel(Q_th)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_th)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel(Q_th)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel(Q_th)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel(Q_th)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel(Q_th)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel(Q_th)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel(Q_th)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel(Q_th)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel(Q_th)|off": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|off[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_th)|off[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel(Q_th)|off[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel(Q_th)|off[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel(Q_th)|off[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel(Q_th)|off[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel(Q_th)|off[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel(Q_th)|off[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel(Q_th)|off[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel(Q_th)|on_hours_total": |-
+ Variable
+ --------
+ Kessel(Q_th)|on_hours_total ∈ [0, 1000]
+ "Kessel(Q_th)|switch|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|switch|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_th)|switch|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel(Q_th)|switch|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel(Q_th)|switch|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel(Q_th)|switch|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel(Q_th)|switch|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel(Q_th)|switch|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel(Q_th)|switch|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel(Q_th)|switch|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel(Q_th)|switch|off": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|switch|off[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel(Q_th)|switch|off[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel(Q_th)|switch|off[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel(Q_th)|switch|off[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel(Q_th)|switch|off[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel(Q_th)|switch|off[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel(Q_th)|switch|off[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel(Q_th)|switch|off[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel(Q_th)|switch|off[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel(Q_th)|switch|count": |-
+ Variable
+ --------
+ Kessel(Q_th)|switch|count ∈ [0, 1000]
+ "Kessel(Q_th)|consecutive_on_hours": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] ∈ [0, 10]
+ [2020-01-01 01:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] ∈ [0, 10]
+ [2020-01-01 02:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] ∈ [0, 10]
+ [2020-01-01 03:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] ∈ [0, 10]
+ [2020-01-01 04:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] ∈ [0, 10]
+ [2020-01-01 05:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] ∈ [0, 10]
+ [2020-01-01 06:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] ∈ [0, 10]
+ [2020-01-01 07:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] ∈ [0, 10]
+ [2020-01-01 08:00:00]: Kessel(Q_th)|consecutive_on_hours[2020-01-01 08:00:00] ∈ [0, 10]
+ "Kessel(Q_th)|consecutive_off_hours": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 00:00:00] ∈ [0, 10]
+ [2020-01-01 01:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] ∈ [0, 10]
+ [2020-01-01 02:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] ∈ [0, 10]
+ [2020-01-01 03:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] ∈ [0, 10]
+ [2020-01-01 04:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] ∈ [0, 10]
+ [2020-01-01 05:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] ∈ [0, 10]
+ [2020-01-01 06:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] ∈ [0, 10]
+ [2020-01-01 07:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] ∈ [0, 10]
+ [2020-01-01 08:00:00]: Kessel(Q_th)|consecutive_off_hours[2020-01-01 08:00:00] ∈ [0, 10]
+ "Kessel(Q_th)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Kessel(Q_th)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Kessel(Q_th)|total_flow_hours": |-
+ Variable
+ --------
+ Kessel(Q_th)|total_flow_hours ∈ [0, 1e+06]
+ "Kessel|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Kessel|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Kessel|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Kessel|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Kessel|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Kessel|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Kessel|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Kessel|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Kessel|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Kessel|on_hours_total": |-
+ Variable
+ --------
+ Kessel|on_hours_total ∈ [0, inf]
+ "Kessel->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Kessel->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Kessel->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Kessel->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Kessel->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Kessel->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Kessel->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Kessel->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Kessel->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Kessel->CO2(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Kessel->CO2(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Kessel->CO2(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Kessel->CO2(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Kessel->CO2(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Kessel->CO2(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Kessel->CO2(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Kessel->CO2(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Kessel->CO2(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Kessel->CO2(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Speicher(Q_th_load)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+04]
+ [2020-01-01 01:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+04]
+ [2020-01-01 02:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+04]
+ [2020-01-01 03:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+04]
+ [2020-01-01 04:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+04]
+ [2020-01-01 05:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+04]
+ [2020-01-01 06:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+04]
+ [2020-01-01 07:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+04]
+ [2020-01-01 08:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+04]
+ "Speicher(Q_th_load)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_load)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Speicher(Q_th_load)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Speicher(Q_th_load)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Speicher(Q_th_load)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Speicher(Q_th_load)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Speicher(Q_th_load)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Speicher(Q_th_load)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Speicher(Q_th_load)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Speicher(Q_th_load)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Speicher(Q_th_load)|on_hours_total": |-
+ Variable
+ --------
+ Speicher(Q_th_load)|on_hours_total ∈ [0, inf]
+ "Speicher(Q_th_load)|total_flow_hours": |-
+ Variable
+ --------
+ Speicher(Q_th_load)|total_flow_hours ∈ [0, inf]
+ "Speicher(Q_th_unload)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+04]
+ [2020-01-01 01:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+04]
+ [2020-01-01 02:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+04]
+ [2020-01-01 03:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+04]
+ [2020-01-01 04:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+04]
+ [2020-01-01 05:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+04]
+ [2020-01-01 06:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+04]
+ [2020-01-01 07:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+04]
+ [2020-01-01 08:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+04]
+ "Speicher(Q_th_unload)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Speicher(Q_th_unload)|on_hours_total": |-
+ Variable
+ --------
+ Speicher(Q_th_unload)|on_hours_total ∈ [0, inf]
+ "Speicher(Q_th_unload)|total_flow_hours": |-
+ Variable
+ --------
+ Speicher(Q_th_unload)|total_flow_hours ∈ [0, inf]
+ "Speicher|charge_state": |-
+ Variable (time: 10)
+ -------------------
+ [2020-01-01 00:00:00]: Speicher|charge_state[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: Speicher|charge_state[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 02:00:00]: Speicher|charge_state[2020-01-01 02:00:00] ∈ [0, 1000]
+ [2020-01-01 03:00:00]: Speicher|charge_state[2020-01-01 03:00:00] ∈ [0, 1000]
+ [2020-01-01 04:00:00]: Speicher|charge_state[2020-01-01 04:00:00] ∈ [0, 1000]
+ [2020-01-01 05:00:00]: Speicher|charge_state[2020-01-01 05:00:00] ∈ [0, 1000]
+ [2020-01-01 06:00:00]: Speicher|charge_state[2020-01-01 06:00:00] ∈ [0, 1000]
+ [2020-01-01 07:00:00]: Speicher|charge_state[2020-01-01 07:00:00] ∈ [0, 1000]
+ [2020-01-01 08:00:00]: Speicher|charge_state[2020-01-01 08:00:00] ∈ [0, 1000]
+ [2020-01-01 09:00:00]: Speicher|charge_state[2020-01-01 09:00:00] ∈ [0, 1000]
+ "Speicher|netto_discharge": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher|netto_discharge[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Speicher|netto_discharge[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Speicher|netto_discharge[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Speicher|netto_discharge[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Speicher|netto_discharge[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Speicher|netto_discharge[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Speicher|netto_discharge[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Speicher|netto_discharge[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Speicher|netto_discharge[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Speicher|size": |-
+ Variable
+ --------
+ Speicher|size ∈ [0, 1000]
+ "Speicher->costs(periodic)": |-
+ Variable
+ --------
+ Speicher->costs(periodic) ∈ [-inf, inf]
+ "Speicher->CO2(periodic)": |-
+ Variable
+ --------
+ Speicher->CO2(periodic) ∈ [-inf, inf]
+ "Speicher|PiecewiseEffects|costs": |-
+ Variable
+ --------
+ Speicher|PiecewiseEffects|costs ∈ [-inf, inf]
+ "Speicher|PiecewiseEffects|PE": |-
+ Variable
+ --------
+ Speicher|PiecewiseEffects|PE ∈ [-inf, inf]
+ "Speicher|Piece_0|inside_piece": |-
+ Variable
+ --------
+ Speicher|Piece_0|inside_piece ∈ {0, 1}
+ "Speicher|Piece_0|lambda0": |-
+ Variable
+ --------
+ Speicher|Piece_0|lambda0 ∈ [0, 1]
+ "Speicher|Piece_0|lambda1": |-
+ Variable
+ --------
+ Speicher|Piece_0|lambda1 ∈ [0, 1]
+ "Speicher|Piece_1|inside_piece": |-
+ Variable
+ --------
+ Speicher|Piece_1|inside_piece ∈ {0, 1}
+ "Speicher|Piece_1|lambda0": |-
+ Variable
+ --------
+ Speicher|Piece_1|lambda0 ∈ [0, 1]
+ "Speicher|Piece_1|lambda1": |-
+ Variable
+ --------
+ Speicher|Piece_1|lambda1 ∈ [0, 1]
+ "Speicher->PE(periodic)": |-
+ Variable
+ --------
+ Speicher->PE(periodic) ∈ [-inf, inf]
+ "KWK(Q_fu)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+07]
+ [2020-01-01 05:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+07]
+ [2020-01-01 06:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00]: KWK(Q_fu)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+07]
+ "KWK(Q_fu)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK(Q_fu)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: KWK(Q_fu)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: KWK(Q_fu)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: KWK(Q_fu)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: KWK(Q_fu)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: KWK(Q_fu)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: KWK(Q_fu)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: KWK(Q_fu)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: KWK(Q_fu)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "KWK(Q_fu)|on_hours_total": |-
+ Variable
+ --------
+ KWK(Q_fu)|on_hours_total ∈ [0, inf]
+ "KWK(Q_fu)|total_flow_hours": |-
+ Variable
+ --------
+ KWK(Q_fu)|total_flow_hours ∈ [0, inf]
+ "KWK(P_el)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK(P_el)|flow_rate[2020-01-01 00:00:00] ∈ [0, 3300]
+ [2020-01-01 01:00:00]: KWK(P_el)|flow_rate[2020-01-01 01:00:00] ∈ [0, 3300]
+ [2020-01-01 02:00:00]: KWK(P_el)|flow_rate[2020-01-01 02:00:00] ∈ [0, 3300]
+ [2020-01-01 03:00:00]: KWK(P_el)|flow_rate[2020-01-01 03:00:00] ∈ [0, 3300]
+ [2020-01-01 04:00:00]: KWK(P_el)|flow_rate[2020-01-01 04:00:00] ∈ [0, 3300]
+ [2020-01-01 05:00:00]: KWK(P_el)|flow_rate[2020-01-01 05:00:00] ∈ [0, 3300]
+ [2020-01-01 06:00:00]: KWK(P_el)|flow_rate[2020-01-01 06:00:00] ∈ [0, 3300]
+ [2020-01-01 07:00:00]: KWK(P_el)|flow_rate[2020-01-01 07:00:00] ∈ [0, 3300]
+ [2020-01-01 08:00:00]: KWK(P_el)|flow_rate[2020-01-01 08:00:00] ∈ [0, 3300]
+ "KWK(P_el)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK(P_el)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: KWK(P_el)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: KWK(P_el)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: KWK(P_el)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: KWK(P_el)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: KWK(P_el)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: KWK(P_el)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: KWK(P_el)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: KWK(P_el)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "KWK(P_el)|on_hours_total": |-
+ Variable
+ --------
+ KWK(P_el)|on_hours_total ∈ [0, inf]
+ "KWK(P_el)|total_flow_hours": |-
+ Variable
+ --------
+ KWK(P_el)|total_flow_hours ∈ [0, inf]
+ "KWK(Q_th)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK(Q_th)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: KWK(Q_th)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: KWK(Q_th)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00]: KWK(Q_th)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00]: KWK(Q_th)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+07]
+ [2020-01-01 05:00:00]: KWK(Q_th)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+07]
+ [2020-01-01 06:00:00]: KWK(Q_th)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00]: KWK(Q_th)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00]: KWK(Q_th)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+07]
+ "KWK(Q_th)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK(Q_th)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: KWK(Q_th)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: KWK(Q_th)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: KWK(Q_th)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: KWK(Q_th)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: KWK(Q_th)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: KWK(Q_th)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: KWK(Q_th)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: KWK(Q_th)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "KWK(Q_th)|on_hours_total": |-
+ Variable
+ --------
+ KWK(Q_th)|on_hours_total ∈ [0, inf]
+ "KWK(Q_th)|total_flow_hours": |-
+ Variable
+ --------
+ KWK(Q_th)|total_flow_hours ∈ [0, inf]
+ "KWK|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: KWK|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: KWK|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: KWK|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: KWK|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: KWK|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: KWK|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: KWK|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: KWK|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "KWK|on_hours_total": |-
+ Variable
+ --------
+ KWK|on_hours_total ∈ [0, inf]
+ "KWK|switch|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK|switch|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: KWK|switch|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: KWK|switch|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: KWK|switch|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: KWK|switch|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: KWK|switch|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: KWK|switch|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: KWK|switch|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: KWK|switch|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "KWK|switch|off": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK|switch|off[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: KWK|switch|off[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: KWK|switch|off[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: KWK|switch|off[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: KWK|switch|off[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: KWK|switch|off[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: KWK|switch|off[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: KWK|switch|off[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: KWK|switch|off[2020-01-01 08:00:00] ∈ {0, 1}
+ "KWK->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: KWK->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: KWK->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: KWK->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: KWK->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: KWK->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: KWK->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: KWK->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: KWK->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "KWK|Piece_0|inside_piece": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK|Piece_0|inside_piece[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: KWK|Piece_0|inside_piece[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: KWK|Piece_0|inside_piece[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: KWK|Piece_0|inside_piece[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: KWK|Piece_0|inside_piece[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: KWK|Piece_0|inside_piece[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: KWK|Piece_0|inside_piece[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: KWK|Piece_0|inside_piece[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: KWK|Piece_0|inside_piece[2020-01-01 08:00:00] ∈ {0, 1}
+ "KWK|Piece_0|lambda0": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK|Piece_0|lambda0[2020-01-01 00:00:00] ∈ [0, 1]
+ [2020-01-01 01:00:00]: KWK|Piece_0|lambda0[2020-01-01 01:00:00] ∈ [0, 1]
+ [2020-01-01 02:00:00]: KWK|Piece_0|lambda0[2020-01-01 02:00:00] ∈ [0, 1]
+ [2020-01-01 03:00:00]: KWK|Piece_0|lambda0[2020-01-01 03:00:00] ∈ [0, 1]
+ [2020-01-01 04:00:00]: KWK|Piece_0|lambda0[2020-01-01 04:00:00] ∈ [0, 1]
+ [2020-01-01 05:00:00]: KWK|Piece_0|lambda0[2020-01-01 05:00:00] ∈ [0, 1]
+ [2020-01-01 06:00:00]: KWK|Piece_0|lambda0[2020-01-01 06:00:00] ∈ [0, 1]
+ [2020-01-01 07:00:00]: KWK|Piece_0|lambda0[2020-01-01 07:00:00] ∈ [0, 1]
+ [2020-01-01 08:00:00]: KWK|Piece_0|lambda0[2020-01-01 08:00:00] ∈ [0, 1]
+ "KWK|Piece_0|lambda1": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK|Piece_0|lambda1[2020-01-01 00:00:00] ∈ [0, 1]
+ [2020-01-01 01:00:00]: KWK|Piece_0|lambda1[2020-01-01 01:00:00] ∈ [0, 1]
+ [2020-01-01 02:00:00]: KWK|Piece_0|lambda1[2020-01-01 02:00:00] ∈ [0, 1]
+ [2020-01-01 03:00:00]: KWK|Piece_0|lambda1[2020-01-01 03:00:00] ∈ [0, 1]
+ [2020-01-01 04:00:00]: KWK|Piece_0|lambda1[2020-01-01 04:00:00] ∈ [0, 1]
+ [2020-01-01 05:00:00]: KWK|Piece_0|lambda1[2020-01-01 05:00:00] ∈ [0, 1]
+ [2020-01-01 06:00:00]: KWK|Piece_0|lambda1[2020-01-01 06:00:00] ∈ [0, 1]
+ [2020-01-01 07:00:00]: KWK|Piece_0|lambda1[2020-01-01 07:00:00] ∈ [0, 1]
+ [2020-01-01 08:00:00]: KWK|Piece_0|lambda1[2020-01-01 08:00:00] ∈ [0, 1]
+ "KWK|Piece_1|inside_piece": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK|Piece_1|inside_piece[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: KWK|Piece_1|inside_piece[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: KWK|Piece_1|inside_piece[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: KWK|Piece_1|inside_piece[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: KWK|Piece_1|inside_piece[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: KWK|Piece_1|inside_piece[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: KWK|Piece_1|inside_piece[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: KWK|Piece_1|inside_piece[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: KWK|Piece_1|inside_piece[2020-01-01 08:00:00] ∈ {0, 1}
+ "KWK|Piece_1|lambda0": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK|Piece_1|lambda0[2020-01-01 00:00:00] ∈ [0, 1]
+ [2020-01-01 01:00:00]: KWK|Piece_1|lambda0[2020-01-01 01:00:00] ∈ [0, 1]
+ [2020-01-01 02:00:00]: KWK|Piece_1|lambda0[2020-01-01 02:00:00] ∈ [0, 1]
+ [2020-01-01 03:00:00]: KWK|Piece_1|lambda0[2020-01-01 03:00:00] ∈ [0, 1]
+ [2020-01-01 04:00:00]: KWK|Piece_1|lambda0[2020-01-01 04:00:00] ∈ [0, 1]
+ [2020-01-01 05:00:00]: KWK|Piece_1|lambda0[2020-01-01 05:00:00] ∈ [0, 1]
+ [2020-01-01 06:00:00]: KWK|Piece_1|lambda0[2020-01-01 06:00:00] ∈ [0, 1]
+ [2020-01-01 07:00:00]: KWK|Piece_1|lambda0[2020-01-01 07:00:00] ∈ [0, 1]
+ [2020-01-01 08:00:00]: KWK|Piece_1|lambda0[2020-01-01 08:00:00] ∈ [0, 1]
+ "KWK|Piece_1|lambda1": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: KWK|Piece_1|lambda1[2020-01-01 00:00:00] ∈ [0, 1]
+ [2020-01-01 01:00:00]: KWK|Piece_1|lambda1[2020-01-01 01:00:00] ∈ [0, 1]
+ [2020-01-01 02:00:00]: KWK|Piece_1|lambda1[2020-01-01 02:00:00] ∈ [0, 1]
+ [2020-01-01 03:00:00]: KWK|Piece_1|lambda1[2020-01-01 03:00:00] ∈ [0, 1]
+ [2020-01-01 04:00:00]: KWK|Piece_1|lambda1[2020-01-01 04:00:00] ∈ [0, 1]
+ [2020-01-01 05:00:00]: KWK|Piece_1|lambda1[2020-01-01 05:00:00] ∈ [0, 1]
+ [2020-01-01 06:00:00]: KWK|Piece_1|lambda1[2020-01-01 06:00:00] ∈ [0, 1]
+ [2020-01-01 07:00:00]: KWK|Piece_1|lambda1[2020-01-01 07:00:00] ∈ [0, 1]
+ [2020-01-01 08:00:00]: KWK|Piece_1|lambda1[2020-01-01 08:00:00] ∈ [0, 1]
+ "Strom|excess_input": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Strom|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Strom|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Strom|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Strom|excess_input[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Strom|excess_input[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Strom|excess_input[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Strom|excess_input[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Strom|excess_input[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Strom|excess_input[2020-01-01 08:00:00] ∈ [0, inf]
+ "Strom|excess_output": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Strom|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Strom|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Strom|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Strom|excess_output[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Strom|excess_output[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Strom|excess_output[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Strom|excess_output[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Strom|excess_output[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Strom|excess_output[2020-01-01 08:00:00] ∈ [0, inf]
+ "Strom->Penalty": |-
+ Variable
+ --------
+ Strom->Penalty ∈ [-inf, inf]
+ "Fernwärme|excess_input": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Fernwärme|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Fernwärme|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Fernwärme|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Fernwärme|excess_input[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Fernwärme|excess_input[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Fernwärme|excess_input[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Fernwärme|excess_input[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Fernwärme|excess_input[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Fernwärme|excess_input[2020-01-01 08:00:00] ∈ [0, inf]
+ "Fernwärme|excess_output": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Fernwärme|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Fernwärme|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Fernwärme|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Fernwärme|excess_output[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Fernwärme|excess_output[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Fernwärme|excess_output[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Fernwärme|excess_output[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Fernwärme|excess_output[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Fernwärme|excess_output[2020-01-01 08:00:00] ∈ [0, inf]
+ "Fernwärme->Penalty": |-
+ Variable
+ --------
+ Fernwärme->Penalty ∈ [-inf, inf]
+ "Gas|excess_input": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gas|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Gas|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Gas|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Gas|excess_input[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Gas|excess_input[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Gas|excess_input[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Gas|excess_input[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Gas|excess_input[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Gas|excess_input[2020-01-01 08:00:00] ∈ [0, inf]
+ "Gas|excess_output": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gas|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Gas|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Gas|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Gas|excess_output[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Gas|excess_output[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Gas|excess_output[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Gas|excess_output[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Gas|excess_output[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Gas|excess_output[2020-01-01 08:00:00] ∈ [0, inf]
+ "Gas->Penalty": |-
+ Variable
+ --------
+ Gas->Penalty ∈ [-inf, inf]
+constraints:
+ costs(periodic): |-
+ Constraint `costs(periodic)`
+ ----------------------------
+ +1 costs(periodic) - 1 Kessel(Q_th)->costs(periodic) - 1 Speicher->costs(periodic) = -0.0
+ costs(temporal): |-
+ Constraint `costs(temporal)`
+ ----------------------------
+ +1 costs(temporal) - 1 costs(temporal)|per_timestep[2020-01-01 00:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 01:00:00]... -1 costs(temporal)|per_timestep[2020-01-01 06:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 07:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "costs(temporal)|per_timestep": |-
+ Constraint `costs(temporal)|per_timestep`
+ [time: 9]:
+ ----------------------------------------------------
+ [2020-01-01 00:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 00:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 00:00:00] - 1 Kessel->costs(temporal)[2020-01-01 00:00:00] - 1 KWK->costs(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 01:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 01:00:00] - 1 Kessel->costs(temporal)[2020-01-01 01:00:00] - 1 KWK->costs(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 02:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 02:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 02:00:00] - 1 Kessel->costs(temporal)[2020-01-01 02:00:00] - 1 KWK->costs(temporal)[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 03:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 03:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 03:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 03:00:00] - 1 Kessel->costs(temporal)[2020-01-01 03:00:00] - 1 KWK->costs(temporal)[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 04:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 04:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 04:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 04:00:00] - 1 Kessel->costs(temporal)[2020-01-01 04:00:00] - 1 KWK->costs(temporal)[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 05:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 05:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 05:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 05:00:00] - 1 Kessel->costs(temporal)[2020-01-01 05:00:00] - 1 KWK->costs(temporal)[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 06:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 06:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 06:00:00] - 1 Kessel->costs(temporal)[2020-01-01 06:00:00] - 1 KWK->costs(temporal)[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 07:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 07:00:00] - 1 Kessel->costs(temporal)[2020-01-01 07:00:00] - 1 KWK->costs(temporal)[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 08:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00]... -1 Kessel(Q_th)->costs(temporal)[2020-01-01 08:00:00] - 1 Kessel->costs(temporal)[2020-01-01 08:00:00] - 1 KWK->costs(temporal)[2020-01-01 08:00:00] = -0.0
+ costs: |-
+ Constraint `costs`
+ ------------------
+ +1 costs - 1 costs(temporal) - 1 costs(periodic) = -0.0
+ CO2(periodic): |-
+ Constraint `CO2(periodic)`
+ --------------------------
+ +1 CO2(periodic) - 1 Speicher->CO2(periodic) = -0.0
+ CO2(temporal): |-
+ Constraint `CO2(temporal)`
+ --------------------------
+ +1 CO2(temporal) - 1 CO2(temporal)|per_timestep[2020-01-01 00:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 01:00:00]... -1 CO2(temporal)|per_timestep[2020-01-01 06:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 07:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "CO2(temporal)|per_timestep": |-
+ Constraint `CO2(temporal)|per_timestep`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 01:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 02:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 03:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 03:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 04:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 04:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 05:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 05:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 06:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 07:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 08:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00] - 1 Kessel->CO2(temporal)[2020-01-01 08:00:00] = -0.0
+ CO2: |-
+ Constraint `CO2`
+ ----------------
+ +1 CO2 - 1 CO2(temporal) - 1 CO2(periodic) = -0.0
+ PE(periodic): |-
+ Constraint `PE(periodic)`
+ -------------------------
+ +1 PE(periodic) - 1 Kessel(Q_th)->PE(periodic) - 1 Speicher->PE(periodic) = -0.0
+ PE(temporal): |-
+ Constraint `PE(temporal)`
+ -------------------------
+ +1 PE(temporal) - 1 PE(temporal)|per_timestep[2020-01-01 00:00:00] - 1 PE(temporal)|per_timestep[2020-01-01 01:00:00]... -1 PE(temporal)|per_timestep[2020-01-01 06:00:00] - 1 PE(temporal)|per_timestep[2020-01-01 07:00:00] - 1 PE(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "PE(temporal)|per_timestep": |-
+ Constraint `PE(temporal)|per_timestep`
+ [time: 9]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 PE(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ PE: |-
+ Constraint `PE`
+ ---------------
+ +1 PE - 1 PE(temporal) - 1 PE(periodic) = -0.0
+ Penalty: |-
+ Constraint `Penalty`
+ --------------------
+ +1 Penalty - 1 Strom->Penalty - 1 Fernwärme->Penalty - 1 Gas->Penalty = -0.0
+ "CO2(temporal)->costs(temporal)": |-
+ Constraint `CO2(temporal)->costs(temporal)`
+ [time: 9]:
+ ------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 02:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 03:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 04:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 05:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 06:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "Wärmelast(Q_th_Last)|total_flow_hours": |-
+ Constraint `Wärmelast(Q_th_Last)|total_flow_hours`
+ --------------------------------------------------
+ +1 Wärmelast(Q_th_Last)|total_flow_hours - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 06:00:00] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Gastarif(Q_Gas)|total_flow_hours": |-
+ Constraint `Gastarif(Q_Gas)|total_flow_hours`
+ ---------------------------------------------
+ +1 Gastarif(Q_Gas)|total_flow_hours - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00]... -1 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Gastarif(Q_Gas)->costs(temporal)": |-
+ Constraint `Gastarif(Q_Gas)->costs(temporal)`
+ [time: 9]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 03:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 04:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 05:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Gastarif(Q_Gas)->CO2(temporal)": |-
+ Constraint `Gastarif(Q_Gas)->CO2(temporal)`
+ [time: 9]:
+ ------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 03:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 04:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 05:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Einspeisung(P_el)|total_flow_hours": |-
+ Constraint `Einspeisung(P_el)|total_flow_hours`
+ -----------------------------------------------
+ +1 Einspeisung(P_el)|total_flow_hours - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00]... -1 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Einspeisung(P_el)->costs(temporal)": |-
+ Constraint `Einspeisung(P_el)->costs(temporal)`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 03:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 04:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 05:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00] + 40 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_fu)|on_hours_total": |-
+ Constraint `Kessel(Q_fu)|on_hours_total`
+ ----------------------------------------
+ +1 Kessel(Q_fu)|on_hours_total - 1 Kessel(Q_fu)|on[2020-01-01 00:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 01:00:00]... -1 Kessel(Q_fu)|on[2020-01-01 06:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 07:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_fu)|flow_rate|ub": |-
+ Constraint `Kessel(Q_fu)|flow_rate|ub`
+ [time: 9]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 02:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 03:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 04:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 05:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] - 200 Kessel(Q_fu)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Kessel(Q_fu)|flow_rate|lb": |-
+ Constraint `Kessel(Q_fu)|flow_rate|lb`
+ [time: 9]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1e-05 Kessel(Q_fu)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Kessel(Q_fu)|total_flow_hours": |-
+ Constraint `Kessel(Q_fu)|total_flow_hours`
+ ------------------------------------------
+ +1 Kessel(Q_fu)|total_flow_hours - 1 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00]... -1 Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_th)->costs(periodic)": |-
+ Constraint `Kessel(Q_th)->costs(periodic)`
+ ------------------------------------------
+ +1 Kessel(Q_th)->costs(periodic) - 10 Kessel(Q_th)|size = 1000.0
+ "Kessel(Q_th)->PE(periodic)": |-
+ Constraint `Kessel(Q_th)->PE(periodic)`
+ ---------------------------------------
+ +1 Kessel(Q_th)->PE(periodic) - 2 Kessel(Q_th)|size = -0.0
+ "Kessel(Q_th)|complementary": |-
+ Constraint `Kessel(Q_th)|complementary`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|on[2020-01-01 00:00:00] + 1 Kessel(Q_th)|off[2020-01-01 00:00:00] = 1.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|on[2020-01-01 01:00:00] + 1 Kessel(Q_th)|off[2020-01-01 01:00:00] = 1.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|on[2020-01-01 02:00:00] + 1 Kessel(Q_th)|off[2020-01-01 02:00:00] = 1.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|on[2020-01-01 03:00:00] + 1 Kessel(Q_th)|off[2020-01-01 03:00:00] = 1.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|on[2020-01-01 04:00:00] + 1 Kessel(Q_th)|off[2020-01-01 04:00:00] = 1.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|on[2020-01-01 05:00:00] + 1 Kessel(Q_th)|off[2020-01-01 05:00:00] = 1.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|on[2020-01-01 06:00:00] + 1 Kessel(Q_th)|off[2020-01-01 06:00:00] = 1.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|on[2020-01-01 07:00:00] + 1 Kessel(Q_th)|off[2020-01-01 07:00:00] = 1.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|on[2020-01-01 08:00:00] + 1 Kessel(Q_th)|off[2020-01-01 08:00:00] = 1.0
+ "Kessel(Q_th)|on_hours_total": |-
+ Constraint `Kessel(Q_th)|on_hours_total`
+ ----------------------------------------
+ +1 Kessel(Q_th)|on_hours_total - 1 Kessel(Q_th)|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|on[2020-01-01 01:00:00]... -1 Kessel(Q_th)|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|on[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_th)|switch|transition": |-
+ Constraint `Kessel(Q_th)|switch|transition`
+ [time: 8]:
+ ------------------------------------------------------
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 01:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 01:00:00] - 1 Kessel(Q_th)|on[2020-01-01 01:00:00] + 1 Kessel(Q_th)|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 02:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 02:00:00] - 1 Kessel(Q_th)|on[2020-01-01 02:00:00] + 1 Kessel(Q_th)|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 03:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 03:00:00] - 1 Kessel(Q_th)|on[2020-01-01 03:00:00] + 1 Kessel(Q_th)|on[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 04:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 04:00:00] - 1 Kessel(Q_th)|on[2020-01-01 04:00:00] + 1 Kessel(Q_th)|on[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 05:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 05:00:00] - 1 Kessel(Q_th)|on[2020-01-01 05:00:00] + 1 Kessel(Q_th)|on[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 06:00:00] - 1 Kessel(Q_th)|on[2020-01-01 06:00:00] + 1 Kessel(Q_th)|on[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 07:00:00] - 1 Kessel(Q_th)|on[2020-01-01 07:00:00] + 1 Kessel(Q_th)|on[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 08:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 08:00:00] - 1 Kessel(Q_th)|on[2020-01-01 08:00:00] + 1 Kessel(Q_th)|on[2020-01-01 07:00:00] = -0.0
+ "Kessel(Q_th)|switch|initial": |-
+ Constraint `Kessel(Q_th)|switch|initial`
+ ----------------------------------------
+ +1 Kessel(Q_th)|switch|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|switch|off[2020-01-01 00:00:00] - 1 Kessel(Q_th)|on[2020-01-01 00:00:00] = -1.0
+ "Kessel(Q_th)|switch|mutex": |-
+ Constraint `Kessel(Q_th)|switch|mutex`
+ [time: 9]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 00:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 01:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 02:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 03:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 04:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 05:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 06:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 07:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 07:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|switch|on[2020-01-01 08:00:00] + 1 Kessel(Q_th)|switch|off[2020-01-01 08:00:00] ≤ 1.0
+ "Kessel(Q_th)|switch|count": |-
+ Constraint `Kessel(Q_th)|switch|count`
+ --------------------------------------
+ +1 Kessel(Q_th)|switch|count - 1 Kessel(Q_th)|switch|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|switch|on[2020-01-01 01:00:00]... -1 Kessel(Q_th)|switch|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|switch|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|switch|on[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_th)|consecutive_on_hours|ub": |-
+ Constraint `Kessel(Q_th)|consecutive_on_hours|ub`
+ [time: 9]:
+ ------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] - 10 Kessel(Q_th)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] - 10 Kessel(Q_th)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] - 10 Kessel(Q_th)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] - 10 Kessel(Q_th)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] - 10 Kessel(Q_th)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] - 10 Kessel(Q_th)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] - 10 Kessel(Q_th)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] - 10 Kessel(Q_th)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 08:00:00] - 10 Kessel(Q_th)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Kessel(Q_th)|consecutive_on_hours|forward": |-
+ Constraint `Kessel(Q_th)|consecutive_on_hours|forward`
+ [time: 8]:
+ -----------------------------------------------------------------
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 08:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] ≤ 1.0
+ "Kessel(Q_th)|consecutive_on_hours|backward": |-
+ Constraint `Kessel(Q_th)|consecutive_on_hours|backward`
+ [time: 8]:
+ ------------------------------------------------------------------
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] - 10 Kessel(Q_th)|on[2020-01-01 01:00:00] ≥ -9.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] - 10 Kessel(Q_th)|on[2020-01-01 02:00:00] ≥ -9.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] - 10 Kessel(Q_th)|on[2020-01-01 03:00:00] ≥ -9.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] - 10 Kessel(Q_th)|on[2020-01-01 04:00:00] ≥ -9.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] - 10 Kessel(Q_th)|on[2020-01-01 05:00:00] ≥ -9.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] - 10 Kessel(Q_th)|on[2020-01-01 06:00:00] ≥ -9.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] - 10 Kessel(Q_th)|on[2020-01-01 07:00:00] ≥ -9.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 08:00:00] - 1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] - 10 Kessel(Q_th)|on[2020-01-01 08:00:00] ≥ -9.0
+ "Kessel(Q_th)|consecutive_on_hours|initial": |-
+ Constraint `Kessel(Q_th)|consecutive_on_hours|initial`
+ ------------------------------------------------------
+ +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] - 2 Kessel(Q_th)|on[2020-01-01 00:00:00] = -0.0
+ "Kessel(Q_th)|consecutive_on_hours|lb": |-
+ Constraint `Kessel(Q_th)|consecutive_on_hours|lb`
+ [time: 9]:
+ ------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 00:00:00] - 1 Kessel(Q_th)|on[2020-01-01 00:00:00] + 1 Kessel(Q_th)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 01:00:00] - 1 Kessel(Q_th)|on[2020-01-01 01:00:00] + 1 Kessel(Q_th)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 02:00:00] - 1 Kessel(Q_th)|on[2020-01-01 02:00:00] + 1 Kessel(Q_th)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 03:00:00] - 1 Kessel(Q_th)|on[2020-01-01 03:00:00] + 1 Kessel(Q_th)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 04:00:00] - 1 Kessel(Q_th)|on[2020-01-01 04:00:00] + 1 Kessel(Q_th)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 05:00:00] - 1 Kessel(Q_th)|on[2020-01-01 05:00:00] + 1 Kessel(Q_th)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 06:00:00] - 1 Kessel(Q_th)|on[2020-01-01 06:00:00] + 1 Kessel(Q_th)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 07:00:00] - 1 Kessel(Q_th)|on[2020-01-01 07:00:00] + 1 Kessel(Q_th)|on[2020-01-01 08:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_on_hours[2020-01-01 08:00:00] ≥ -0.0
+ "Kessel(Q_th)|consecutive_off_hours|ub": |-
+ Constraint `Kessel(Q_th)|consecutive_off_hours|ub`
+ [time: 9]:
+ -------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 00:00:00] - 9 Kessel(Q_th)|off[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] - 9 Kessel(Q_th)|off[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] - 9 Kessel(Q_th)|off[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] - 9 Kessel(Q_th)|off[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] - 9 Kessel(Q_th)|off[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] - 9 Kessel(Q_th)|off[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] - 9 Kessel(Q_th)|off[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] - 9 Kessel(Q_th)|off[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 08:00:00] - 9 Kessel(Q_th)|off[2020-01-01 08:00:00] ≤ -0.0
+ "Kessel(Q_th)|consecutive_off_hours|forward": |-
+ Constraint `Kessel(Q_th)|consecutive_off_hours|forward`
+ [time: 8]:
+ ------------------------------------------------------------------
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 08:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] ≤ 1.0
+ "Kessel(Q_th)|consecutive_off_hours|backward": |-
+ Constraint `Kessel(Q_th)|consecutive_off_hours|backward`
+ [time: 8]:
+ -------------------------------------------------------------------
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 00:00:00] - 9 Kessel(Q_th)|off[2020-01-01 01:00:00] ≥ -8.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 01:00:00] - 9 Kessel(Q_th)|off[2020-01-01 02:00:00] ≥ -8.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 02:00:00] - 9 Kessel(Q_th)|off[2020-01-01 03:00:00] ≥ -8.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 03:00:00] - 9 Kessel(Q_th)|off[2020-01-01 04:00:00] ≥ -8.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 04:00:00] - 9 Kessel(Q_th)|off[2020-01-01 05:00:00] ≥ -8.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 05:00:00] - 9 Kessel(Q_th)|off[2020-01-01 06:00:00] ≥ -8.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 06:00:00] - 9 Kessel(Q_th)|off[2020-01-01 07:00:00] ≥ -8.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 08:00:00] - 1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 07:00:00] - 9 Kessel(Q_th)|off[2020-01-01 08:00:00] ≥ -8.0
+ "Kessel(Q_th)|consecutive_off_hours|initial": |-
+ Constraint `Kessel(Q_th)|consecutive_off_hours|initial`
+ -------------------------------------------------------
+ +1 Kessel(Q_th)|consecutive_off_hours[2020-01-01 00:00:00] - 1 Kessel(Q_th)|off[2020-01-01 00:00:00] = -0.0
+ "Kessel(Q_th)->costs(temporal)": |-
+ Constraint `Kessel(Q_th)->costs(temporal)`
+ [time: 9]:
+ -----------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 00:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 01:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 02:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 03:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 04:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 05:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 06:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 07:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)->costs(temporal)[2020-01-01 08:00:00] - 0.01 Kessel(Q_th)|switch|on[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_th)|flow_rate|lb2": |-
+ Constraint `Kessel(Q_th)|flow_rate|lb2`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] - 5 Kessel(Q_th)|on[2020-01-01 00:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] - 5 Kessel(Q_th)|on[2020-01-01 01:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] - 5 Kessel(Q_th)|on[2020-01-01 02:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] - 5 Kessel(Q_th)|on[2020-01-01 03:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] - 5 Kessel(Q_th)|on[2020-01-01 04:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] - 5 Kessel(Q_th)|on[2020-01-01 05:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] - 5 Kessel(Q_th)|on[2020-01-01 06:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] - 5 Kessel(Q_th)|on[2020-01-01 07:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] - 5 Kessel(Q_th)|on[2020-01-01 08:00:00] - 0.1 Kessel(Q_th)|size ≥ -5.0
+ "Kessel(Q_th)|flow_rate|ub2": |-
+ Constraint `Kessel(Q_th)|flow_rate|ub2`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] - 1 Kessel(Q_th)|size ≤ -0.0
+ "Kessel(Q_th)|flow_rate|ub1": |-
+ Constraint `Kessel(Q_th)|flow_rate|ub1`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +50 Kessel(Q_th)|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +50 Kessel(Q_th)|on[2020-01-01 01:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +50 Kessel(Q_th)|on[2020-01-01 02:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +50 Kessel(Q_th)|on[2020-01-01 03:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +50 Kessel(Q_th)|on[2020-01-01 04:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +50 Kessel(Q_th)|on[2020-01-01 05:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +50 Kessel(Q_th)|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +50 Kessel(Q_th)|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +50 Kessel(Q_th)|on[2020-01-01 08:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] ≥ -0.0
+ "Kessel(Q_th)|flow_rate|lb1": |-
+ Constraint `Kessel(Q_th)|flow_rate|lb1`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +5 Kessel(Q_th)|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +5 Kessel(Q_th)|on[2020-01-01 01:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +5 Kessel(Q_th)|on[2020-01-01 02:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +5 Kessel(Q_th)|on[2020-01-01 03:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +5 Kessel(Q_th)|on[2020-01-01 04:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +5 Kessel(Q_th)|on[2020-01-01 05:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +5 Kessel(Q_th)|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +5 Kessel(Q_th)|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +5 Kessel(Q_th)|on[2020-01-01 08:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] ≤ -0.0
+ "Kessel(Q_th)|total_flow_hours": |-
+ Constraint `Kessel(Q_th)|total_flow_hours`
+ ------------------------------------------
+ +1 Kessel(Q_th)|total_flow_hours - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00]... -1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Kessel(Q_th)|load_factor_max": |-
+ Constraint `Kessel(Q_th)|load_factor_max`
+ -----------------------------------------
+ +1 Kessel(Q_th)|total_flow_hours - 9 Kessel(Q_th)|size ≤ -0.0
+ "Kessel(Q_th)|load_factor_min": |-
+ Constraint `Kessel(Q_th)|load_factor_min`
+ -----------------------------------------
+ +1 Kessel(Q_th)|total_flow_hours - 0.9 Kessel(Q_th)|size ≥ -0.0
+ "Kessel|on|ub": |-
+ Constraint `Kessel|on|ub`
+ [time: 9]:
+ ------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel|on[2020-01-01 00:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 00:00:00] - 1 Kessel(Q_th)|on[2020-01-01 00:00:00] ≤ 1e-05
+ [2020-01-01 01:00:00]: +1 Kessel|on[2020-01-01 01:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 01:00:00] - 1 Kessel(Q_th)|on[2020-01-01 01:00:00] ≤ 1e-05
+ [2020-01-01 02:00:00]: +1 Kessel|on[2020-01-01 02:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 02:00:00] - 1 Kessel(Q_th)|on[2020-01-01 02:00:00] ≤ 1e-05
+ [2020-01-01 03:00:00]: +1 Kessel|on[2020-01-01 03:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 03:00:00] - 1 Kessel(Q_th)|on[2020-01-01 03:00:00] ≤ 1e-05
+ [2020-01-01 04:00:00]: +1 Kessel|on[2020-01-01 04:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 04:00:00] - 1 Kessel(Q_th)|on[2020-01-01 04:00:00] ≤ 1e-05
+ [2020-01-01 05:00:00]: +1 Kessel|on[2020-01-01 05:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 05:00:00] - 1 Kessel(Q_th)|on[2020-01-01 05:00:00] ≤ 1e-05
+ [2020-01-01 06:00:00]: +1 Kessel|on[2020-01-01 06:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 06:00:00] - 1 Kessel(Q_th)|on[2020-01-01 06:00:00] ≤ 1e-05
+ [2020-01-01 07:00:00]: +1 Kessel|on[2020-01-01 07:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 07:00:00] - 1 Kessel(Q_th)|on[2020-01-01 07:00:00] ≤ 1e-05
+ [2020-01-01 08:00:00]: +1 Kessel|on[2020-01-01 08:00:00] - 1 Kessel(Q_fu)|on[2020-01-01 08:00:00] - 1 Kessel(Q_th)|on[2020-01-01 08:00:00] ≤ 1e-05
+ "Kessel|on|lb": |-
+ Constraint `Kessel|on|lb`
+ [time: 9]:
+ ------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel|on[2020-01-01 00:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 00:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Kessel|on[2020-01-01 01:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 01:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Kessel|on[2020-01-01 02:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 02:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Kessel|on[2020-01-01 03:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 03:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Kessel|on[2020-01-01 04:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 04:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Kessel|on[2020-01-01 05:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 05:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Kessel|on[2020-01-01 06:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 06:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Kessel|on[2020-01-01 07:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 07:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Kessel|on[2020-01-01 08:00:00] - 0.5 Kessel(Q_fu)|on[2020-01-01 08:00:00] - 0.5 Kessel(Q_th)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Kessel|on_hours_total": |-
+ Constraint `Kessel|on_hours_total`
+ ----------------------------------
+ +1 Kessel|on_hours_total - 1 Kessel|on[2020-01-01 00:00:00] - 1 Kessel|on[2020-01-01 01:00:00]... -1 Kessel|on[2020-01-01 06:00:00] - 1 Kessel|on[2020-01-01 07:00:00] - 1 Kessel|on[2020-01-01 08:00:00] = -0.0
+ "Kessel->costs(temporal)": |-
+ Constraint `Kessel->costs(temporal)`
+ [time: 9]:
+ -----------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel->costs(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kessel->costs(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Kessel->costs(temporal)[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Kessel->costs(temporal)[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Kessel->costs(temporal)[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Kessel->costs(temporal)[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Kessel->costs(temporal)[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Kessel->costs(temporal)[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Kessel->costs(temporal)[2020-01-01 08:00:00] = -0.0
+ "Kessel->CO2(temporal)": |-
+ Constraint `Kessel->CO2(temporal)`
+ [time: 9]:
+ ---------------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 00:00:00] - 1000 Kessel|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 01:00:00] - 1000 Kessel|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 02:00:00] - 1000 Kessel|on[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 03:00:00] - 1000 Kessel|on[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 04:00:00] - 1000 Kessel|on[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 05:00:00] - 1000 Kessel|on[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 06:00:00] - 1000 Kessel|on[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 07:00:00] - 1000 Kessel|on[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Kessel->CO2(temporal)[2020-01-01 08:00:00] - 1000 Kessel|on[2020-01-01 08:00:00] = -0.0
+ "Kessel|conversion_0": |-
+ Constraint `Kessel|conversion_0`
+ [time: 9]:
+ -------------------------------------------
+ [2020-01-01 00:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +0.5 Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_load)|on_hours_total": |-
+ Constraint `Speicher(Q_th_load)|on_hours_total`
+ -----------------------------------------------
+ +1 Speicher(Q_th_load)|on_hours_total - 1 Speicher(Q_th_load)|on[2020-01-01 00:00:00] - 1 Speicher(Q_th_load)|on[2020-01-01 01:00:00]... -1 Speicher(Q_th_load)|on[2020-01-01 06:00:00] - 1 Speicher(Q_th_load)|on[2020-01-01 07:00:00] - 1 Speicher(Q_th_load)|on[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_load)|flow_rate|ub": |-
+ Constraint `Speicher(Q_th_load)|flow_rate|ub`
+ [time: 9]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Speicher(Q_th_load)|flow_rate|lb": |-
+ Constraint `Speicher(Q_th_load)|flow_rate|lb`
+ [time: 9]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Speicher(Q_th_load)|total_flow_hours": |-
+ Constraint `Speicher(Q_th_load)|total_flow_hours`
+ -------------------------------------------------
+ +1 Speicher(Q_th_load)|total_flow_hours - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_unload)|on_hours_total": |-
+ Constraint `Speicher(Q_th_unload)|on_hours_total`
+ -------------------------------------------------
+ +1 Speicher(Q_th_unload)|on_hours_total - 1 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] - 1 Speicher(Q_th_unload)|on[2020-01-01 01:00:00]... -1 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] - 1 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] - 1 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_unload)|flow_rate|ub": |-
+ Constraint `Speicher(Q_th_unload)|flow_rate|ub`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Speicher(Q_th_unload)|flow_rate|lb": |-
+ Constraint `Speicher(Q_th_unload)|flow_rate|lb`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Speicher(Q_th_unload)|total_flow_hours": |-
+ Constraint `Speicher(Q_th_unload)|total_flow_hours`
+ ---------------------------------------------------
+ +1 Speicher(Q_th_unload)|total_flow_hours - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00]... -1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher|prevent_simultaneous_use": |-
+ Constraint `Speicher|prevent_simultaneous_use`
+ [time: 9]:
+ ---------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 00:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 01:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 02:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 03:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 04:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 05:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 06:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 07:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 08:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ≤ 1.0
+ "Speicher|netto_discharge": |-
+ Constraint `Speicher|netto_discharge`
+ [time: 9]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher|netto_discharge[2020-01-01 00:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Speicher|netto_discharge[2020-01-01 01:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|netto_discharge[2020-01-01 02:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|netto_discharge[2020-01-01 03:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|netto_discharge[2020-01-01 04:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|netto_discharge[2020-01-01 05:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|netto_discharge[2020-01-01 06:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|netto_discharge[2020-01-01 07:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|netto_discharge[2020-01-01 08:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher|charge_state": |-
+ Constraint `Speicher|charge_state`
+ [time: 9]:
+ ---------------------------------------------
+ [2020-01-01 01:00:00]: +1 Speicher|charge_state[2020-01-01 01:00:00] - 0.92 Speicher|charge_state[2020-01-01 00:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|charge_state[2020-01-01 02:00:00] - 0.92 Speicher|charge_state[2020-01-01 01:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|charge_state[2020-01-01 03:00:00] - 0.92 Speicher|charge_state[2020-01-01 02:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|charge_state[2020-01-01 04:00:00] - 0.92 Speicher|charge_state[2020-01-01 03:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|charge_state[2020-01-01 05:00:00] - 0.92 Speicher|charge_state[2020-01-01 04:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|charge_state[2020-01-01 06:00:00] - 0.92 Speicher|charge_state[2020-01-01 05:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|charge_state[2020-01-01 07:00:00] - 0.92 Speicher|charge_state[2020-01-01 06:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|charge_state[2020-01-01 08:00:00] - 0.92 Speicher|charge_state[2020-01-01 07:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 09:00:00]: +1 Speicher|charge_state[2020-01-01 09:00:00] - 0.92 Speicher|charge_state[2020-01-01 08:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher->costs(periodic)": |-
+ Constraint `Speicher->costs(periodic)`
+ --------------------------------------
+ +1 Speicher->costs(periodic) - 0.01 Speicher|size - 1 Speicher|PiecewiseEffects|costs = -0.0
+ "Speicher->CO2(periodic)": |-
+ Constraint `Speicher->CO2(periodic)`
+ ------------------------------------
+ +1 Speicher->CO2(periodic) - 0.01 Speicher|size = -0.0
+ "Speicher|Piece_0|inside_piece": |-
+ Constraint `Speicher|Piece_0|inside_piece`
+ ------------------------------------------
+ +1 Speicher|Piece_0|inside_piece - 1 Speicher|Piece_0|lambda0 - 1 Speicher|Piece_0|lambda1 = -0.0
+ "Speicher|Piece_1|inside_piece": |-
+ Constraint `Speicher|Piece_1|inside_piece`
+ ------------------------------------------
+ +1 Speicher|Piece_1|inside_piece - 1 Speicher|Piece_1|lambda0 - 1 Speicher|Piece_1|lambda1 = -0.0
+ "Speicher|PiecewiseEffects|Speicher|size|lambda": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|size|lambda`
+ -----------------------------------------------------------
+ +1 Speicher|size - 5 Speicher|Piece_0|lambda0 - 25 Speicher|Piece_0|lambda1 - 25 Speicher|Piece_1|lambda0 - 100 Speicher|Piece_1|lambda1 = -0.0
+ "Speicher|PiecewiseEffects|Speicher|size|single_segment": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|size|single_segment`
+ -------------------------------------------------------------------
+ +1 Speicher|Piece_0|inside_piece + 1 Speicher|Piece_1|inside_piece ≤ 1.0
+ "Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|costs|lambda": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|costs|lambda`
+ -----------------------------------------------------------------------------
+ +1 Speicher|PiecewiseEffects|costs - 50 Speicher|Piece_0|lambda0 - 250 Speicher|Piece_0|lambda1 - 250 Speicher|Piece_1|lambda0 - 800 Speicher|Piece_1|lambda1 = -0.0
+ "Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|costs|single_segment": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|costs|single_segment`
+ -------------------------------------------------------------------------------------
+ +1 Speicher|Piece_0|inside_piece + 1 Speicher|Piece_1|inside_piece ≤ 1.0
+ "Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|PE|lambda": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|PE|lambda`
+ --------------------------------------------------------------------------
+ +1 Speicher|PiecewiseEffects|PE - 5 Speicher|Piece_0|lambda0 - 25 Speicher|Piece_0|lambda1 - 25 Speicher|Piece_1|lambda0 - 100 Speicher|Piece_1|lambda1 = -0.0
+ "Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|PE|single_segment": |-
+ Constraint `Speicher|PiecewiseEffects|Speicher|PiecewiseEffects|PE|single_segment`
+ ----------------------------------------------------------------------------------
+ +1 Speicher|Piece_0|inside_piece + 1 Speicher|Piece_1|inside_piece ≤ 1.0
+ "Speicher->PE(periodic)": |-
+ Constraint `Speicher->PE(periodic)`
+ -----------------------------------
+ +1 Speicher->PE(periodic) - 1 Speicher|PiecewiseEffects|PE = -0.0
+ "Speicher|charge_state|ub": |-
+ Constraint `Speicher|charge_state|ub`
+ [time: 10]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher|charge_state[2020-01-01 00:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher|charge_state[2020-01-01 01:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|charge_state[2020-01-01 02:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|charge_state[2020-01-01 03:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|charge_state[2020-01-01 04:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|charge_state[2020-01-01 05:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|charge_state[2020-01-01 06:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|charge_state[2020-01-01 07:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|charge_state[2020-01-01 08:00:00] - 1 Speicher|size ≤ -0.0
+ [2020-01-01 09:00:00]: +1 Speicher|charge_state[2020-01-01 09:00:00] - 1 Speicher|size ≤ -0.0
+ "Speicher|charge_state|lb": |-
+ Constraint `Speicher|charge_state|lb`
+ [time: 10]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher|charge_state[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher|charge_state[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|charge_state[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|charge_state[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|charge_state[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|charge_state[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|charge_state[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|charge_state[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|charge_state[2020-01-01 08:00:00] ≥ -0.0
+ [2020-01-01 09:00:00]: +1 Speicher|charge_state[2020-01-01 09:00:00] ≥ -0.0
+ "Speicher|initial_charge_state": |-
+ Constraint `Speicher|initial_charge_state`
+ ------------------------------------------
+ +1 Speicher|charge_state[2020-01-01 00:00:00] = -0.0
+ "Speicher|final_charge_max": |-
+ Constraint `Speicher|final_charge_max`
+ --------------------------------------
+ +1 Speicher|charge_state[2020-01-01 09:00:00] ≤ 10.0
+ "KWK(Q_fu)|on_hours_total": |-
+ Constraint `KWK(Q_fu)|on_hours_total`
+ -------------------------------------
+ +1 KWK(Q_fu)|on_hours_total - 1 KWK(Q_fu)|on[2020-01-01 00:00:00] - 1 KWK(Q_fu)|on[2020-01-01 01:00:00]... -1 KWK(Q_fu)|on[2020-01-01 06:00:00] - 1 KWK(Q_fu)|on[2020-01-01 07:00:00] - 1 KWK(Q_fu)|on[2020-01-01 08:00:00] = -0.0
+ "KWK(Q_fu)|flow_rate|ub": |-
+ Constraint `KWK(Q_fu)|flow_rate|ub`
+ [time: 9]:
+ ----------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1e+07 KWK(Q_fu)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1e+07 KWK(Q_fu)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1e+07 KWK(Q_fu)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1e+07 KWK(Q_fu)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1e+07 KWK(Q_fu)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1e+07 KWK(Q_fu)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1e+07 KWK(Q_fu)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1e+07 KWK(Q_fu)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1e+07 KWK(Q_fu)|on[2020-01-01 08:00:00] ≤ -0.0
+ "KWK(Q_fu)|flow_rate|lb": |-
+ Constraint `KWK(Q_fu)|flow_rate|lb`
+ [time: 9]:
+ ----------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1e-05 KWK(Q_fu)|on[2020-01-01 08:00:00] ≥ -0.0
+ "KWK(Q_fu)|total_flow_hours": |-
+ Constraint `KWK(Q_fu)|total_flow_hours`
+ ---------------------------------------
+ +1 KWK(Q_fu)|total_flow_hours - 1 KWK(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 01:00:00]... -1 KWK(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "KWK(P_el)|on_hours_total": |-
+ Constraint `KWK(P_el)|on_hours_total`
+ -------------------------------------
+ +1 KWK(P_el)|on_hours_total - 1 KWK(P_el)|on[2020-01-01 00:00:00] - 1 KWK(P_el)|on[2020-01-01 01:00:00]... -1 KWK(P_el)|on[2020-01-01 06:00:00] - 1 KWK(P_el)|on[2020-01-01 07:00:00] - 1 KWK(P_el)|on[2020-01-01 08:00:00] = -0.0
+ "KWK(P_el)|flow_rate|ub": |-
+ Constraint `KWK(P_el)|flow_rate|ub`
+ [time: 9]:
+ ----------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 00:00:00] - 3300 KWK(P_el)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 01:00:00] - 3300 KWK(P_el)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 02:00:00] - 3300 KWK(P_el)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 03:00:00] - 3300 KWK(P_el)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 04:00:00] - 3300 KWK(P_el)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 05:00:00] - 3300 KWK(P_el)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 06:00:00] - 3300 KWK(P_el)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 07:00:00] - 3300 KWK(P_el)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 08:00:00] - 3300 KWK(P_el)|on[2020-01-01 08:00:00] ≤ -0.0
+ "KWK(P_el)|flow_rate|lb": |-
+ Constraint `KWK(P_el)|flow_rate|lb`
+ [time: 9]:
+ ----------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 00:00:00] - 1e-05 KWK(P_el)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 01:00:00] - 1e-05 KWK(P_el)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 02:00:00] - 1e-05 KWK(P_el)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 03:00:00] - 1e-05 KWK(P_el)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 04:00:00] - 1e-05 KWK(P_el)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 05:00:00] - 1e-05 KWK(P_el)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 06:00:00] - 1e-05 KWK(P_el)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 07:00:00] - 1e-05 KWK(P_el)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 08:00:00] - 1e-05 KWK(P_el)|on[2020-01-01 08:00:00] ≥ -0.0
+ "KWK(P_el)|total_flow_hours": |-
+ Constraint `KWK(P_el)|total_flow_hours`
+ ---------------------------------------
+ +1 KWK(P_el)|total_flow_hours - 1 KWK(P_el)|flow_rate[2020-01-01 00:00:00] - 1 KWK(P_el)|flow_rate[2020-01-01 01:00:00]... -1 KWK(P_el)|flow_rate[2020-01-01 06:00:00] - 1 KWK(P_el)|flow_rate[2020-01-01 07:00:00] - 1 KWK(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "KWK(Q_th)|on_hours_total": |-
+ Constraint `KWK(Q_th)|on_hours_total`
+ -------------------------------------
+ +1 KWK(Q_th)|on_hours_total - 1 KWK(Q_th)|on[2020-01-01 00:00:00] - 1 KWK(Q_th)|on[2020-01-01 01:00:00]... -1 KWK(Q_th)|on[2020-01-01 06:00:00] - 1 KWK(Q_th)|on[2020-01-01 07:00:00] - 1 KWK(Q_th)|on[2020-01-01 08:00:00] = -0.0
+ "KWK(Q_th)|flow_rate|ub": |-
+ Constraint `KWK(Q_th)|flow_rate|ub`
+ [time: 9]:
+ ----------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 00:00:00] - 1e+07 KWK(Q_th)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 01:00:00] - 1e+07 KWK(Q_th)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 02:00:00] - 1e+07 KWK(Q_th)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 03:00:00] - 1e+07 KWK(Q_th)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 04:00:00] - 1e+07 KWK(Q_th)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 05:00:00] - 1e+07 KWK(Q_th)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 06:00:00] - 1e+07 KWK(Q_th)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 07:00:00] - 1e+07 KWK(Q_th)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 08:00:00] - 1e+07 KWK(Q_th)|on[2020-01-01 08:00:00] ≤ -0.0
+ "KWK(Q_th)|flow_rate|lb": |-
+ Constraint `KWK(Q_th)|flow_rate|lb`
+ [time: 9]:
+ ----------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 00:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 01:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 02:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 03:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 04:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 05:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 06:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 07:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 08:00:00] - 1e-05 KWK(Q_th)|on[2020-01-01 08:00:00] ≥ -0.0
+ "KWK(Q_th)|total_flow_hours": |-
+ Constraint `KWK(Q_th)|total_flow_hours`
+ ---------------------------------------
+ +1 KWK(Q_th)|total_flow_hours - 1 KWK(Q_th)|flow_rate[2020-01-01 00:00:00] - 1 KWK(Q_th)|flow_rate[2020-01-01 01:00:00]... -1 KWK(Q_th)|flow_rate[2020-01-01 06:00:00] - 1 KWK(Q_th)|flow_rate[2020-01-01 07:00:00] - 1 KWK(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "KWK|on|ub": |-
+ Constraint `KWK|on|ub`
+ [time: 9]:
+ ---------------------------------
+ [2020-01-01 00:00:00]: +1 KWK|on[2020-01-01 00:00:00] - 1 KWK(Q_fu)|on[2020-01-01 00:00:00] - 1 KWK(P_el)|on[2020-01-01 00:00:00] - 1 KWK(Q_th)|on[2020-01-01 00:00:00] ≤ 1e-05
+ [2020-01-01 01:00:00]: +1 KWK|on[2020-01-01 01:00:00] - 1 KWK(Q_fu)|on[2020-01-01 01:00:00] - 1 KWK(P_el)|on[2020-01-01 01:00:00] - 1 KWK(Q_th)|on[2020-01-01 01:00:00] ≤ 1e-05
+ [2020-01-01 02:00:00]: +1 KWK|on[2020-01-01 02:00:00] - 1 KWK(Q_fu)|on[2020-01-01 02:00:00] - 1 KWK(P_el)|on[2020-01-01 02:00:00] - 1 KWK(Q_th)|on[2020-01-01 02:00:00] ≤ 1e-05
+ [2020-01-01 03:00:00]: +1 KWK|on[2020-01-01 03:00:00] - 1 KWK(Q_fu)|on[2020-01-01 03:00:00] - 1 KWK(P_el)|on[2020-01-01 03:00:00] - 1 KWK(Q_th)|on[2020-01-01 03:00:00] ≤ 1e-05
+ [2020-01-01 04:00:00]: +1 KWK|on[2020-01-01 04:00:00] - 1 KWK(Q_fu)|on[2020-01-01 04:00:00] - 1 KWK(P_el)|on[2020-01-01 04:00:00] - 1 KWK(Q_th)|on[2020-01-01 04:00:00] ≤ 1e-05
+ [2020-01-01 05:00:00]: +1 KWK|on[2020-01-01 05:00:00] - 1 KWK(Q_fu)|on[2020-01-01 05:00:00] - 1 KWK(P_el)|on[2020-01-01 05:00:00] - 1 KWK(Q_th)|on[2020-01-01 05:00:00] ≤ 1e-05
+ [2020-01-01 06:00:00]: +1 KWK|on[2020-01-01 06:00:00] - 1 KWK(Q_fu)|on[2020-01-01 06:00:00] - 1 KWK(P_el)|on[2020-01-01 06:00:00] - 1 KWK(Q_th)|on[2020-01-01 06:00:00] ≤ 1e-05
+ [2020-01-01 07:00:00]: +1 KWK|on[2020-01-01 07:00:00] - 1 KWK(Q_fu)|on[2020-01-01 07:00:00] - 1 KWK(P_el)|on[2020-01-01 07:00:00] - 1 KWK(Q_th)|on[2020-01-01 07:00:00] ≤ 1e-05
+ [2020-01-01 08:00:00]: +1 KWK|on[2020-01-01 08:00:00] - 1 KWK(Q_fu)|on[2020-01-01 08:00:00] - 1 KWK(P_el)|on[2020-01-01 08:00:00] - 1 KWK(Q_th)|on[2020-01-01 08:00:00] ≤ 1e-05
+ "KWK|on|lb": |-
+ Constraint `KWK|on|lb`
+ [time: 9]:
+ ---------------------------------
+ [2020-01-01 00:00:00]: +1 KWK|on[2020-01-01 00:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 00:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 00:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 KWK|on[2020-01-01 01:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 01:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 01:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 KWK|on[2020-01-01 02:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 02:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 02:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 KWK|on[2020-01-01 03:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 03:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 03:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 KWK|on[2020-01-01 04:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 04:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 04:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 KWK|on[2020-01-01 05:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 05:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 05:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 KWK|on[2020-01-01 06:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 06:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 06:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 KWK|on[2020-01-01 07:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 07:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 07:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 KWK|on[2020-01-01 08:00:00] - 0.3333 KWK(Q_fu)|on[2020-01-01 08:00:00] - 0.3333 KWK(P_el)|on[2020-01-01 08:00:00] - 0.3333 KWK(Q_th)|on[2020-01-01 08:00:00] ≥ -0.0
+ "KWK|on_hours_total": |-
+ Constraint `KWK|on_hours_total`
+ -------------------------------
+ +1 KWK|on_hours_total - 1 KWK|on[2020-01-01 00:00:00] - 1 KWK|on[2020-01-01 01:00:00]... -1 KWK|on[2020-01-01 06:00:00] - 1 KWK|on[2020-01-01 07:00:00] - 1 KWK|on[2020-01-01 08:00:00] = -0.0
+ "KWK|switch|transition": |-
+ Constraint `KWK|switch|transition`
+ [time: 8]:
+ ---------------------------------------------
+ [2020-01-01 01:00:00]: +1 KWK|switch|on[2020-01-01 01:00:00] - 1 KWK|switch|off[2020-01-01 01:00:00] - 1 KWK|on[2020-01-01 01:00:00] + 1 KWK|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 KWK|switch|on[2020-01-01 02:00:00] - 1 KWK|switch|off[2020-01-01 02:00:00] - 1 KWK|on[2020-01-01 02:00:00] + 1 KWK|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 KWK|switch|on[2020-01-01 03:00:00] - 1 KWK|switch|off[2020-01-01 03:00:00] - 1 KWK|on[2020-01-01 03:00:00] + 1 KWK|on[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 KWK|switch|on[2020-01-01 04:00:00] - 1 KWK|switch|off[2020-01-01 04:00:00] - 1 KWK|on[2020-01-01 04:00:00] + 1 KWK|on[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 KWK|switch|on[2020-01-01 05:00:00] - 1 KWK|switch|off[2020-01-01 05:00:00] - 1 KWK|on[2020-01-01 05:00:00] + 1 KWK|on[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 KWK|switch|on[2020-01-01 06:00:00] - 1 KWK|switch|off[2020-01-01 06:00:00] - 1 KWK|on[2020-01-01 06:00:00] + 1 KWK|on[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 KWK|switch|on[2020-01-01 07:00:00] - 1 KWK|switch|off[2020-01-01 07:00:00] - 1 KWK|on[2020-01-01 07:00:00] + 1 KWK|on[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 KWK|switch|on[2020-01-01 08:00:00] - 1 KWK|switch|off[2020-01-01 08:00:00] - 1 KWK|on[2020-01-01 08:00:00] + 1 KWK|on[2020-01-01 07:00:00] = -0.0
+ "KWK|switch|initial": |-
+ Constraint `KWK|switch|initial`
+ -------------------------------
+ +1 KWK|switch|on[2020-01-01 00:00:00] - 1 KWK|switch|off[2020-01-01 00:00:00] - 1 KWK|on[2020-01-01 00:00:00] = -1.0
+ "KWK|switch|mutex": |-
+ Constraint `KWK|switch|mutex`
+ [time: 9]:
+ ----------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK|switch|on[2020-01-01 00:00:00] + 1 KWK|switch|off[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 01:00:00]: +1 KWK|switch|on[2020-01-01 01:00:00] + 1 KWK|switch|off[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 KWK|switch|on[2020-01-01 02:00:00] + 1 KWK|switch|off[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 KWK|switch|on[2020-01-01 03:00:00] + 1 KWK|switch|off[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 KWK|switch|on[2020-01-01 04:00:00] + 1 KWK|switch|off[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 KWK|switch|on[2020-01-01 05:00:00] + 1 KWK|switch|off[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 KWK|switch|on[2020-01-01 06:00:00] + 1 KWK|switch|off[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 KWK|switch|on[2020-01-01 07:00:00] + 1 KWK|switch|off[2020-01-01 07:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 KWK|switch|on[2020-01-01 08:00:00] + 1 KWK|switch|off[2020-01-01 08:00:00] ≤ 1.0
+ "KWK->costs(temporal)": |-
+ Constraint `KWK->costs(temporal)`
+ [time: 9]:
+ --------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK->costs(temporal)[2020-01-01 00:00:00] - 0.01 KWK|switch|on[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 KWK->costs(temporal)[2020-01-01 01:00:00] - 0.01 KWK|switch|on[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 KWK->costs(temporal)[2020-01-01 02:00:00] - 0.01 KWK|switch|on[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 KWK->costs(temporal)[2020-01-01 03:00:00] - 0.01 KWK|switch|on[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 KWK->costs(temporal)[2020-01-01 04:00:00] - 0.01 KWK|switch|on[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 KWK->costs(temporal)[2020-01-01 05:00:00] - 0.01 KWK|switch|on[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 KWK->costs(temporal)[2020-01-01 06:00:00] - 0.01 KWK|switch|on[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 KWK->costs(temporal)[2020-01-01 07:00:00] - 0.01 KWK|switch|on[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 KWK->costs(temporal)[2020-01-01 08:00:00] - 0.01 KWK|switch|on[2020-01-01 08:00:00] = -0.0
+ "KWK|Piece_0|inside_piece": |-
+ Constraint `KWK|Piece_0|inside_piece`
+ [time: 9]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 00:00:00] - 1 KWK|Piece_0|lambda0[2020-01-01 00:00:00] - 1 KWK|Piece_0|lambda1[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 01:00:00] - 1 KWK|Piece_0|lambda0[2020-01-01 01:00:00] - 1 KWK|Piece_0|lambda1[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 02:00:00] - 1 KWK|Piece_0|lambda0[2020-01-01 02:00:00] - 1 KWK|Piece_0|lambda1[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 03:00:00] - 1 KWK|Piece_0|lambda0[2020-01-01 03:00:00] - 1 KWK|Piece_0|lambda1[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 04:00:00] - 1 KWK|Piece_0|lambda0[2020-01-01 04:00:00] - 1 KWK|Piece_0|lambda1[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 05:00:00] - 1 KWK|Piece_0|lambda0[2020-01-01 05:00:00] - 1 KWK|Piece_0|lambda1[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 06:00:00] - 1 KWK|Piece_0|lambda0[2020-01-01 06:00:00] - 1 KWK|Piece_0|lambda1[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 07:00:00] - 1 KWK|Piece_0|lambda0[2020-01-01 07:00:00] - 1 KWK|Piece_0|lambda1[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 08:00:00] - 1 KWK|Piece_0|lambda0[2020-01-01 08:00:00] - 1 KWK|Piece_0|lambda1[2020-01-01 08:00:00] = -0.0
+ "KWK|Piece_1|inside_piece": |-
+ Constraint `KWK|Piece_1|inside_piece`
+ [time: 9]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK|Piece_1|inside_piece[2020-01-01 00:00:00] - 1 KWK|Piece_1|lambda0[2020-01-01 00:00:00] - 1 KWK|Piece_1|lambda1[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 KWK|Piece_1|inside_piece[2020-01-01 01:00:00] - 1 KWK|Piece_1|lambda0[2020-01-01 01:00:00] - 1 KWK|Piece_1|lambda1[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 KWK|Piece_1|inside_piece[2020-01-01 02:00:00] - 1 KWK|Piece_1|lambda0[2020-01-01 02:00:00] - 1 KWK|Piece_1|lambda1[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 KWK|Piece_1|inside_piece[2020-01-01 03:00:00] - 1 KWK|Piece_1|lambda0[2020-01-01 03:00:00] - 1 KWK|Piece_1|lambda1[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 KWK|Piece_1|inside_piece[2020-01-01 04:00:00] - 1 KWK|Piece_1|lambda0[2020-01-01 04:00:00] - 1 KWK|Piece_1|lambda1[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 KWK|Piece_1|inside_piece[2020-01-01 05:00:00] - 1 KWK|Piece_1|lambda0[2020-01-01 05:00:00] - 1 KWK|Piece_1|lambda1[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 KWK|Piece_1|inside_piece[2020-01-01 06:00:00] - 1 KWK|Piece_1|lambda0[2020-01-01 06:00:00] - 1 KWK|Piece_1|lambda1[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 KWK|Piece_1|inside_piece[2020-01-01 07:00:00] - 1 KWK|Piece_1|lambda0[2020-01-01 07:00:00] - 1 KWK|Piece_1|lambda1[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 KWK|Piece_1|inside_piece[2020-01-01 08:00:00] - 1 KWK|Piece_1|lambda0[2020-01-01 08:00:00] - 1 KWK|Piece_1|lambda1[2020-01-01 08:00:00] = -0.0
+ "KWK|KWK(P_el)|flow_rate|lambda": |-
+ Constraint `KWK|KWK(P_el)|flow_rate|lambda`
+ [time: 9]:
+ ------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 00:00:00] - 5 KWK|Piece_0|lambda0[2020-01-01 00:00:00] - 30 KWK|Piece_0|lambda1[2020-01-01 00:00:00] - 40 KWK|Piece_1|lambda0[2020-01-01 00:00:00] - 60 KWK|Piece_1|lambda1[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 01:00:00] - 5.125 KWK|Piece_0|lambda0[2020-01-01 01:00:00] - 30 KWK|Piece_0|lambda1[2020-01-01 01:00:00] - 40 KWK|Piece_1|lambda0[2020-01-01 01:00:00] - 61.25 KWK|Piece_1|lambda1[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 02:00:00] - 5.25 KWK|Piece_0|lambda0[2020-01-01 02:00:00] - 30 KWK|Piece_0|lambda1[2020-01-01 02:00:00] - 40 KWK|Piece_1|lambda0[2020-01-01 02:00:00] - 62.5 KWK|Piece_1|lambda1[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 03:00:00] - 5.375 KWK|Piece_0|lambda0[2020-01-01 03:00:00] - 30 KWK|Piece_0|lambda1[2020-01-01 03:00:00] - 40 KWK|Piece_1|lambda0[2020-01-01 03:00:00] - 63.75 KWK|Piece_1|lambda1[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 04:00:00] - 5.5 KWK|Piece_0|lambda0[2020-01-01 04:00:00] - 30 KWK|Piece_0|lambda1[2020-01-01 04:00:00] - 40 KWK|Piece_1|lambda0[2020-01-01 04:00:00] - 65 KWK|Piece_1|lambda1[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 05:00:00] - 5.625 KWK|Piece_0|lambda0[2020-01-01 05:00:00] - 30 KWK|Piece_0|lambda1[2020-01-01 05:00:00] - 40 KWK|Piece_1|lambda0[2020-01-01 05:00:00] - 66.25 KWK|Piece_1|lambda1[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 06:00:00] - 5.75 KWK|Piece_0|lambda0[2020-01-01 06:00:00] - 30 KWK|Piece_0|lambda1[2020-01-01 06:00:00] - 40 KWK|Piece_1|lambda0[2020-01-01 06:00:00] - 67.5 KWK|Piece_1|lambda1[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 07:00:00] - 5.875 KWK|Piece_0|lambda0[2020-01-01 07:00:00] - 30 KWK|Piece_0|lambda1[2020-01-01 07:00:00] - 40 KWK|Piece_1|lambda0[2020-01-01 07:00:00] - 68.75 KWK|Piece_1|lambda1[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 08:00:00] - 6 KWK|Piece_0|lambda0[2020-01-01 08:00:00] - 30 KWK|Piece_0|lambda1[2020-01-01 08:00:00] - 40 KWK|Piece_1|lambda0[2020-01-01 08:00:00] - 70 KWK|Piece_1|lambda1[2020-01-01 08:00:00] = -0.0
+ "KWK|KWK(P_el)|flow_rate|single_segment": |-
+ Constraint `KWK|KWK(P_el)|flow_rate|single_segment`
+ [time: 9]:
+ --------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 00:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 00:00:00] - 1 KWK|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 01:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 01:00:00] - 1 KWK|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 02:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 02:00:00] - 1 KWK|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 03:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 03:00:00] - 1 KWK|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 04:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 04:00:00] - 1 KWK|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 05:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 05:00:00] - 1 KWK|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 06:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 06:00:00] - 1 KWK|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 07:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 07:00:00] - 1 KWK|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 08:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 08:00:00] - 1 KWK|on[2020-01-01 08:00:00] ≤ -0.0
+ "KWK|KWK(Q_th)|flow_rate|lambda": |-
+ Constraint `KWK|KWK(Q_th)|flow_rate|lambda`
+ [time: 9]:
+ ------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 00:00:00] - 6 KWK|Piece_0|lambda0[2020-01-01 00:00:00] - 35 KWK|Piece_0|lambda1[2020-01-01 00:00:00] - 45 KWK|Piece_1|lambda0[2020-01-01 00:00:00] - 100 KWK|Piece_1|lambda1[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 01:00:00] - 6 KWK|Piece_0|lambda0[2020-01-01 01:00:00] - 35 KWK|Piece_0|lambda1[2020-01-01 01:00:00] - 45 KWK|Piece_1|lambda0[2020-01-01 01:00:00] - 100 KWK|Piece_1|lambda1[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 02:00:00] - 6 KWK|Piece_0|lambda0[2020-01-01 02:00:00] - 35 KWK|Piece_0|lambda1[2020-01-01 02:00:00] - 45 KWK|Piece_1|lambda0[2020-01-01 02:00:00] - 100 KWK|Piece_1|lambda1[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 03:00:00] - 6 KWK|Piece_0|lambda0[2020-01-01 03:00:00] - 35 KWK|Piece_0|lambda1[2020-01-01 03:00:00] - 45 KWK|Piece_1|lambda0[2020-01-01 03:00:00] - 100 KWK|Piece_1|lambda1[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 04:00:00] - 6 KWK|Piece_0|lambda0[2020-01-01 04:00:00] - 35 KWK|Piece_0|lambda1[2020-01-01 04:00:00] - 45 KWK|Piece_1|lambda0[2020-01-01 04:00:00] - 100 KWK|Piece_1|lambda1[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 05:00:00] - 6 KWK|Piece_0|lambda0[2020-01-01 05:00:00] - 35 KWK|Piece_0|lambda1[2020-01-01 05:00:00] - 45 KWK|Piece_1|lambda0[2020-01-01 05:00:00] - 100 KWK|Piece_1|lambda1[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 06:00:00] - 6 KWK|Piece_0|lambda0[2020-01-01 06:00:00] - 35 KWK|Piece_0|lambda1[2020-01-01 06:00:00] - 45 KWK|Piece_1|lambda0[2020-01-01 06:00:00] - 100 KWK|Piece_1|lambda1[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 07:00:00] - 6 KWK|Piece_0|lambda0[2020-01-01 07:00:00] - 35 KWK|Piece_0|lambda1[2020-01-01 07:00:00] - 45 KWK|Piece_1|lambda0[2020-01-01 07:00:00] - 100 KWK|Piece_1|lambda1[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 KWK(Q_th)|flow_rate[2020-01-01 08:00:00] - 6 KWK|Piece_0|lambda0[2020-01-01 08:00:00] - 35 KWK|Piece_0|lambda1[2020-01-01 08:00:00] - 45 KWK|Piece_1|lambda0[2020-01-01 08:00:00] - 100 KWK|Piece_1|lambda1[2020-01-01 08:00:00] = -0.0
+ "KWK|KWK(Q_th)|flow_rate|single_segment": |-
+ Constraint `KWK|KWK(Q_th)|flow_rate|single_segment`
+ [time: 9]:
+ --------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 00:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 00:00:00] - 1 KWK|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 01:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 01:00:00] - 1 KWK|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 02:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 02:00:00] - 1 KWK|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 03:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 03:00:00] - 1 KWK|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 04:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 04:00:00] - 1 KWK|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 05:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 05:00:00] - 1 KWK|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 06:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 06:00:00] - 1 KWK|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 07:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 07:00:00] - 1 KWK|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 08:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 08:00:00] - 1 KWK|on[2020-01-01 08:00:00] ≤ -0.0
+ "KWK|KWK(Q_fu)|flow_rate|lambda": |-
+ Constraint `KWK|KWK(Q_fu)|flow_rate|lambda`
+ [time: 9]:
+ ------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 00:00:00] - 12 KWK|Piece_0|lambda0[2020-01-01 00:00:00] - 70 KWK|Piece_0|lambda1[2020-01-01 00:00:00] - 90 KWK|Piece_1|lambda0[2020-01-01 00:00:00] - 200 KWK|Piece_1|lambda1[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 01:00:00] - 12 KWK|Piece_0|lambda0[2020-01-01 01:00:00] - 70 KWK|Piece_0|lambda1[2020-01-01 01:00:00] - 90 KWK|Piece_1|lambda0[2020-01-01 01:00:00] - 200 KWK|Piece_1|lambda1[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 02:00:00] - 12 KWK|Piece_0|lambda0[2020-01-01 02:00:00] - 70 KWK|Piece_0|lambda1[2020-01-01 02:00:00] - 90 KWK|Piece_1|lambda0[2020-01-01 02:00:00] - 200 KWK|Piece_1|lambda1[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 03:00:00] - 12 KWK|Piece_0|lambda0[2020-01-01 03:00:00] - 70 KWK|Piece_0|lambda1[2020-01-01 03:00:00] - 90 KWK|Piece_1|lambda0[2020-01-01 03:00:00] - 200 KWK|Piece_1|lambda1[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 04:00:00] - 12 KWK|Piece_0|lambda0[2020-01-01 04:00:00] - 70 KWK|Piece_0|lambda1[2020-01-01 04:00:00] - 90 KWK|Piece_1|lambda0[2020-01-01 04:00:00] - 200 KWK|Piece_1|lambda1[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 05:00:00] - 12 KWK|Piece_0|lambda0[2020-01-01 05:00:00] - 70 KWK|Piece_0|lambda1[2020-01-01 05:00:00] - 90 KWK|Piece_1|lambda0[2020-01-01 05:00:00] - 200 KWK|Piece_1|lambda1[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 06:00:00] - 12 KWK|Piece_0|lambda0[2020-01-01 06:00:00] - 70 KWK|Piece_0|lambda1[2020-01-01 06:00:00] - 90 KWK|Piece_1|lambda0[2020-01-01 06:00:00] - 200 KWK|Piece_1|lambda1[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 07:00:00] - 12 KWK|Piece_0|lambda0[2020-01-01 07:00:00] - 70 KWK|Piece_0|lambda1[2020-01-01 07:00:00] - 90 KWK|Piece_1|lambda0[2020-01-01 07:00:00] - 200 KWK|Piece_1|lambda1[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 KWK(Q_fu)|flow_rate[2020-01-01 08:00:00] - 12 KWK|Piece_0|lambda0[2020-01-01 08:00:00] - 70 KWK|Piece_0|lambda1[2020-01-01 08:00:00] - 90 KWK|Piece_1|lambda0[2020-01-01 08:00:00] - 200 KWK|Piece_1|lambda1[2020-01-01 08:00:00] = -0.0
+ "KWK|KWK(Q_fu)|flow_rate|single_segment": |-
+ Constraint `KWK|KWK(Q_fu)|flow_rate|single_segment`
+ [time: 9]:
+ --------------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 00:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 00:00:00] - 1 KWK|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 01:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 01:00:00] - 1 KWK|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 02:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 02:00:00] - 1 KWK|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 03:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 03:00:00] - 1 KWK|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 04:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 04:00:00] - 1 KWK|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 05:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 05:00:00] - 1 KWK|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 06:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 06:00:00] - 1 KWK|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 07:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 07:00:00] - 1 KWK|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 KWK|Piece_0|inside_piece[2020-01-01 08:00:00] + 1 KWK|Piece_1|inside_piece[2020-01-01 08:00:00] - 1 KWK|on[2020-01-01 08:00:00] ≤ -0.0
+ "Strom|balance": |-
+ Constraint `Strom|balance`
+ [time: 9]:
+ -------------------------------------
+ [2020-01-01 00:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 00:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] + 1 Strom|excess_input[2020-01-01 00:00:00] - 1 Strom|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 01:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] + 1 Strom|excess_input[2020-01-01 01:00:00] - 1 Strom|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 02:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00] + 1 Strom|excess_input[2020-01-01 02:00:00] - 1 Strom|excess_output[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 03:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 03:00:00] + 1 Strom|excess_input[2020-01-01 03:00:00] - 1 Strom|excess_output[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 04:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 04:00:00] + 1 Strom|excess_input[2020-01-01 04:00:00] - 1 Strom|excess_output[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 05:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 05:00:00] + 1 Strom|excess_input[2020-01-01 05:00:00] - 1 Strom|excess_output[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 06:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] + 1 Strom|excess_input[2020-01-01 06:00:00] - 1 Strom|excess_output[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 07:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] + 1 Strom|excess_input[2020-01-01 07:00:00] - 1 Strom|excess_output[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 KWK(P_el)|flow_rate[2020-01-01 08:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] + 1 Strom|excess_input[2020-01-01 08:00:00] - 1 Strom|excess_output[2020-01-01 08:00:00] = -0.0
+ "Strom->Penalty": |-
+ Constraint `Strom->Penalty`
+ ---------------------------
+ +1 Strom->Penalty - 1e+05 Strom|excess_input[2020-01-01 00:00:00] - 1e+05 Strom|excess_input[2020-01-01 01:00:00]... -1e+05 Strom|excess_output[2020-01-01 06:00:00] - 1e+05 Strom|excess_output[2020-01-01 07:00:00] - 1e+05 Strom|excess_output[2020-01-01 08:00:00] = -0.0
+ "Fernwärme|balance": |-
+ Constraint `Fernwärme|balance`
+ [time: 9]:
+ -----------------------------------------
+ [2020-01-01 00:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 00:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 00:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] + 1 Fernwärme|excess_input[2020-01-01 00:00:00] - 1 Fernwärme|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 01:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 01:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] + 1 Fernwärme|excess_input[2020-01-01 01:00:00] - 1 Fernwärme|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 02:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 02:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] + 1 Fernwärme|excess_input[2020-01-01 02:00:00] - 1 Fernwärme|excess_output[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 03:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 03:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] + 1 Fernwärme|excess_input[2020-01-01 03:00:00] - 1 Fernwärme|excess_output[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 04:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 04:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] + 1 Fernwärme|excess_input[2020-01-01 04:00:00] - 1 Fernwärme|excess_output[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 05:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 05:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] + 1 Fernwärme|excess_input[2020-01-01 05:00:00] - 1 Fernwärme|excess_output[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 06:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 06:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] + 1 Fernwärme|excess_input[2020-01-01 06:00:00] - 1 Fernwärme|excess_output[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 07:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 07:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] + 1 Fernwärme|excess_input[2020-01-01 07:00:00] - 1 Fernwärme|excess_output[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Kessel(Q_th)|flow_rate[2020-01-01 08:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] + 1 KWK(Q_th)|flow_rate[2020-01-01 08:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] + 1 Fernwärme|excess_input[2020-01-01 08:00:00] - 1 Fernwärme|excess_output[2020-01-01 08:00:00] = -0.0
+ "Fernwärme->Penalty": |-
+ Constraint `Fernwärme->Penalty`
+ -------------------------------
+ +1 Fernwärme->Penalty - 1e+05 Fernwärme|excess_input[2020-01-01 00:00:00] - 1e+05 Fernwärme|excess_input[2020-01-01 01:00:00]... -1e+05 Fernwärme|excess_output[2020-01-01 06:00:00] - 1e+05 Fernwärme|excess_output[2020-01-01 07:00:00] - 1e+05 Fernwärme|excess_output[2020-01-01 08:00:00] = -0.0
+ "Gas|balance": |-
+ Constraint `Gas|balance`
+ [time: 9]:
+ -----------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 00:00:00] + 1 Gas|excess_input[2020-01-01 00:00:00] - 1 Gas|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 01:00:00] + 1 Gas|excess_input[2020-01-01 01:00:00] - 1 Gas|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 02:00:00] + 1 Gas|excess_input[2020-01-01 02:00:00] - 1 Gas|excess_output[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 03:00:00] + 1 Gas|excess_input[2020-01-01 03:00:00] - 1 Gas|excess_output[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 04:00:00] + 1 Gas|excess_input[2020-01-01 04:00:00] - 1 Gas|excess_output[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 05:00:00] + 1 Gas|excess_input[2020-01-01 05:00:00] - 1 Gas|excess_output[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 06:00:00] + 1 Gas|excess_input[2020-01-01 06:00:00] - 1 Gas|excess_output[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 07:00:00] + 1 Gas|excess_input[2020-01-01 07:00:00] - 1 Gas|excess_output[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] - 1 Kessel(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 KWK(Q_fu)|flow_rate[2020-01-01 08:00:00] + 1 Gas|excess_input[2020-01-01 08:00:00] - 1 Gas|excess_output[2020-01-01 08:00:00] = -0.0
+ "Gas->Penalty": |-
+ Constraint `Gas->Penalty`
+ -------------------------
+ +1 Gas->Penalty - 1e+05 Gas|excess_input[2020-01-01 00:00:00] - 1e+05 Gas|excess_input[2020-01-01 01:00:00]... -1e+05 Gas|excess_output[2020-01-01 06:00:00] - 1e+05 Gas|excess_output[2020-01-01 07:00:00] - 1e+05 Gas|excess_output[2020-01-01 08:00:00] = -0.0
+binaries:
+ - "Kessel(Q_fu)|on"
+ - "Kessel(Q_th)|on"
+ - "Kessel(Q_th)|off"
+ - "Kessel(Q_th)|switch|on"
+ - "Kessel(Q_th)|switch|off"
+ - "Kessel|on"
+ - "Speicher(Q_th_load)|on"
+ - "Speicher(Q_th_unload)|on"
+ - "Speicher|Piece_0|inside_piece"
+ - "Speicher|Piece_1|inside_piece"
+ - "KWK(Q_fu)|on"
+ - "KWK(P_el)|on"
+ - "KWK(Q_th)|on"
+ - "KWK|on"
+ - "KWK|switch|on"
+ - "KWK|switch|off"
+ - "KWK|Piece_0|inside_piece"
+ - "KWK|Piece_1|inside_piece"
+integers: []
+continuous:
+ - costs(periodic)
+ - costs(temporal)
+ - "costs(temporal)|per_timestep"
+ - costs
+ - CO2(periodic)
+ - CO2(temporal)
+ - "CO2(temporal)|per_timestep"
+ - CO2
+ - PE(periodic)
+ - PE(temporal)
+ - "PE(temporal)|per_timestep"
+ - PE
+ - Penalty
+ - "CO2(temporal)->costs(temporal)"
+ - "Wärmelast(Q_th_Last)|flow_rate"
+ - "Wärmelast(Q_th_Last)|total_flow_hours"
+ - "Gastarif(Q_Gas)|flow_rate"
+ - "Gastarif(Q_Gas)|total_flow_hours"
+ - "Gastarif(Q_Gas)->costs(temporal)"
+ - "Gastarif(Q_Gas)->CO2(temporal)"
+ - "Einspeisung(P_el)|flow_rate"
+ - "Einspeisung(P_el)|total_flow_hours"
+ - "Einspeisung(P_el)->costs(temporal)"
+ - "Kessel(Q_fu)|flow_rate"
+ - "Kessel(Q_fu)|on_hours_total"
+ - "Kessel(Q_fu)|total_flow_hours"
+ - "Kessel(Q_th)|flow_rate"
+ - "Kessel(Q_th)|size"
+ - "Kessel(Q_th)->costs(periodic)"
+ - "Kessel(Q_th)->PE(periodic)"
+ - "Kessel(Q_th)|on_hours_total"
+ - "Kessel(Q_th)|switch|count"
+ - "Kessel(Q_th)|consecutive_on_hours"
+ - "Kessel(Q_th)|consecutive_off_hours"
+ - "Kessel(Q_th)->costs(temporal)"
+ - "Kessel(Q_th)|total_flow_hours"
+ - "Kessel|on_hours_total"
+ - "Kessel->costs(temporal)"
+ - "Kessel->CO2(temporal)"
+ - "Speicher(Q_th_load)|flow_rate"
+ - "Speicher(Q_th_load)|on_hours_total"
+ - "Speicher(Q_th_load)|total_flow_hours"
+ - "Speicher(Q_th_unload)|flow_rate"
+ - "Speicher(Q_th_unload)|on_hours_total"
+ - "Speicher(Q_th_unload)|total_flow_hours"
+ - "Speicher|charge_state"
+ - "Speicher|netto_discharge"
+ - "Speicher|size"
+ - "Speicher->costs(periodic)"
+ - "Speicher->CO2(periodic)"
+ - "Speicher|PiecewiseEffects|costs"
+ - "Speicher|PiecewiseEffects|PE"
+ - "Speicher|Piece_0|lambda0"
+ - "Speicher|Piece_0|lambda1"
+ - "Speicher|Piece_1|lambda0"
+ - "Speicher|Piece_1|lambda1"
+ - "Speicher->PE(periodic)"
+ - "KWK(Q_fu)|flow_rate"
+ - "KWK(Q_fu)|on_hours_total"
+ - "KWK(Q_fu)|total_flow_hours"
+ - "KWK(P_el)|flow_rate"
+ - "KWK(P_el)|on_hours_total"
+ - "KWK(P_el)|total_flow_hours"
+ - "KWK(Q_th)|flow_rate"
+ - "KWK(Q_th)|on_hours_total"
+ - "KWK(Q_th)|total_flow_hours"
+ - "KWK|on_hours_total"
+ - "KWK->costs(temporal)"
+ - "KWK|Piece_0|lambda0"
+ - "KWK|Piece_0|lambda1"
+ - "KWK|Piece_1|lambda0"
+ - "KWK|Piece_1|lambda1"
+ - "Strom|excess_input"
+ - "Strom|excess_output"
+ - "Strom->Penalty"
+ - "Fernwärme|excess_input"
+ - "Fernwärme|excess_output"
+ - "Fernwärme->Penalty"
+ - "Gas|excess_input"
+ - "Gas|excess_output"
+ - "Gas->Penalty"
+infeasible_constraints: ''
diff --git a/tests/ressources/v4-api/io_flow_system_segments--solution.nc4 b/tests/ressources/v4-api/io_flow_system_segments--solution.nc4
new file mode 100644
index 000000000..06a36cb99
Binary files /dev/null and b/tests/ressources/v4-api/io_flow_system_segments--solution.nc4 differ
diff --git a/tests/ressources/v4-api/io_flow_system_segments--summary.yaml b/tests/ressources/v4-api/io_flow_system_segments--summary.yaml
new file mode 100644
index 000000000..9e46ae138
--- /dev/null
+++ b/tests/ressources/v4-api/io_flow_system_segments--summary.yaml
@@ -0,0 +1,56 @@
+Name: io_flow_system_segments
+Number of timesteps: 9
+Calculation Type: FullCalculation
+Constraints: 590
+Variables: 508
+Main Results:
+ Objective: -11005.75
+ Penalty: 0.0
+ Effects:
+ CO2 [kg]:
+ temporal: 1277.95
+ periodic: 0.53
+ total: 1278.48
+ costs [€]:
+ temporal: -12961.03
+ periodic: 1955.28
+ total: -11005.75
+ PE [kWh_PE]:
+ temporal: -0.0
+ periodic: 152.92
+ total: 152.92
+ Invest-Decisions:
+ Invested:
+ Kessel(Q_th): 50.0
+ Speicher: 52.92
+ Not invested: {}
+ Buses with excess: []
+Durations:
+ modeling: 1.1
+ solving: 0.83
+ saving: 0.0
+Config:
+ config_name: flixopt
+ logging:
+ level: INFO
+ file: null
+ console: false
+ max_file_size: 10485760
+ backup_count: 5
+ verbose_tracebacks: false
+ modeling:
+ big: 10000000
+ epsilon: 1.0e-05
+ big_binary_bound: 100000
+ solving:
+ mip_gap: 0.01
+ time_limit_seconds: 300
+ log_to_console: false
+ log_main_results: false
+ plotting:
+ default_show: false
+ default_engine: plotly
+ default_dpi: 300
+ default_facet_cols: 3
+ default_sequential_colorscale: turbo
+ default_qualitative_colorscale: plotly
diff --git a/tests/ressources/v4-api/io_simple_flow_system--flow_system.nc4 b/tests/ressources/v4-api/io_simple_flow_system--flow_system.nc4
new file mode 100644
index 000000000..0bb604858
Binary files /dev/null and b/tests/ressources/v4-api/io_simple_flow_system--flow_system.nc4 differ
diff --git a/tests/ressources/v4-api/io_simple_flow_system--model_documentation.yaml b/tests/ressources/v4-api/io_simple_flow_system--model_documentation.yaml
new file mode 100644
index 000000000..af47d3d6c
--- /dev/null
+++ b/tests/ressources/v4-api/io_simple_flow_system--model_documentation.yaml
@@ -0,0 +1,944 @@
+objective: |-
+ Objective:
+ ----------
+ LinearExpression: +1 costs + 1 Penalty
+ Sense: min
+ Value: 81.88394666666667
+termination_condition: optimal
+status: ok
+nvars: 279
+nvarsbin: 36
+nvarscont: 243
+ncons: 253
+variables:
+ costs(periodic): |-
+ Variable
+ --------
+ costs(periodic) ∈ [-inf, inf]
+ costs(temporal): |-
+ Variable
+ --------
+ costs(temporal) ∈ [-inf, inf]
+ "costs(temporal)|per_timestep": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: costs(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: costs(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: costs(temporal)|per_timestep[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: costs(temporal)|per_timestep[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: costs(temporal)|per_timestep[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: costs(temporal)|per_timestep[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: costs(temporal)|per_timestep[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: costs(temporal)|per_timestep[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: costs(temporal)|per_timestep[2020-01-01 08:00:00] ∈ [-inf, inf]
+ costs: |-
+ Variable
+ --------
+ costs ∈ [-inf, inf]
+ CO2(periodic): |-
+ Variable
+ --------
+ CO2(periodic) ∈ [-inf, inf]
+ CO2(temporal): |-
+ Variable
+ --------
+ CO2(temporal) ∈ [-inf, inf]
+ "CO2(temporal)|per_timestep": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CO2(temporal)|per_timestep[2020-01-01 00:00:00] ∈ [-inf, 1000]
+ [2020-01-01 01:00:00]: CO2(temporal)|per_timestep[2020-01-01 01:00:00] ∈ [-inf, 1000]
+ [2020-01-01 02:00:00]: CO2(temporal)|per_timestep[2020-01-01 02:00:00] ∈ [-inf, 1000]
+ [2020-01-01 03:00:00]: CO2(temporal)|per_timestep[2020-01-01 03:00:00] ∈ [-inf, 1000]
+ [2020-01-01 04:00:00]: CO2(temporal)|per_timestep[2020-01-01 04:00:00] ∈ [-inf, 1000]
+ [2020-01-01 05:00:00]: CO2(temporal)|per_timestep[2020-01-01 05:00:00] ∈ [-inf, 1000]
+ [2020-01-01 06:00:00]: CO2(temporal)|per_timestep[2020-01-01 06:00:00] ∈ [-inf, 1000]
+ [2020-01-01 07:00:00]: CO2(temporal)|per_timestep[2020-01-01 07:00:00] ∈ [-inf, 1000]
+ [2020-01-01 08:00:00]: CO2(temporal)|per_timestep[2020-01-01 08:00:00] ∈ [-inf, 1000]
+ CO2: |-
+ Variable
+ --------
+ CO2 ∈ [-inf, inf]
+ Penalty: |-
+ Variable
+ --------
+ Penalty ∈ [-inf, inf]
+ "CO2(temporal)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: CO2(temporal)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Speicher(Q_th_load)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+04]
+ [2020-01-01 01:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+04]
+ [2020-01-01 02:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+04]
+ [2020-01-01 03:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+04]
+ [2020-01-01 04:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+04]
+ [2020-01-01 05:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+04]
+ [2020-01-01 06:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+04]
+ [2020-01-01 07:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+04]
+ [2020-01-01 08:00:00]: Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+04]
+ "Speicher(Q_th_load)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_load)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Speicher(Q_th_load)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Speicher(Q_th_load)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Speicher(Q_th_load)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Speicher(Q_th_load)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Speicher(Q_th_load)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Speicher(Q_th_load)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Speicher(Q_th_load)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Speicher(Q_th_load)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Speicher(Q_th_load)|on_hours_total": |-
+ Variable
+ --------
+ Speicher(Q_th_load)|on_hours_total ∈ [0, inf]
+ "Speicher(Q_th_load)|total_flow_hours": |-
+ Variable
+ --------
+ Speicher(Q_th_load)|total_flow_hours ∈ [0, inf]
+ "Speicher(Q_th_unload)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+04]
+ [2020-01-01 01:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+04]
+ [2020-01-01 02:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+04]
+ [2020-01-01 03:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+04]
+ [2020-01-01 04:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+04]
+ [2020-01-01 05:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+04]
+ [2020-01-01 06:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+04]
+ [2020-01-01 07:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+04]
+ [2020-01-01 08:00:00]: Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+04]
+ "Speicher(Q_th_unload)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Speicher(Q_th_unload)|on_hours_total": |-
+ Variable
+ --------
+ Speicher(Q_th_unload)|on_hours_total ∈ [0, inf]
+ "Speicher(Q_th_unload)|total_flow_hours": |-
+ Variable
+ --------
+ Speicher(Q_th_unload)|total_flow_hours ∈ [0, inf]
+ "Speicher|charge_state": |-
+ Variable (time: 10)
+ -------------------
+ [2020-01-01 00:00:00]: Speicher|charge_state[2020-01-01 00:00:00] ∈ [0, 8e+06]
+ [2020-01-01 01:00:00]: Speicher|charge_state[2020-01-01 01:00:00] ∈ [0, 7e+06]
+ [2020-01-01 02:00:00]: Speicher|charge_state[2020-01-01 02:00:00] ∈ [0, 8e+06]
+ [2020-01-01 03:00:00]: Speicher|charge_state[2020-01-01 03:00:00] ∈ [0, 8e+06]
+ [2020-01-01 04:00:00]: Speicher|charge_state[2020-01-01 04:00:00] ∈ [0, 8e+06]
+ [2020-01-01 05:00:00]: Speicher|charge_state[2020-01-01 05:00:00] ∈ [0, 8e+06]
+ [2020-01-01 06:00:00]: Speicher|charge_state[2020-01-01 06:00:00] ∈ [0, 8e+06]
+ [2020-01-01 07:00:00]: Speicher|charge_state[2020-01-01 07:00:00] ∈ [0, 8e+06]
+ [2020-01-01 08:00:00]: Speicher|charge_state[2020-01-01 08:00:00] ∈ [0, 8e+06]
+ [2020-01-01 09:00:00]: Speicher|charge_state[2020-01-01 09:00:00] ∈ [0, 8e+06]
+ "Speicher|netto_discharge": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Speicher|netto_discharge[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Speicher|netto_discharge[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Speicher|netto_discharge[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Speicher|netto_discharge[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Speicher|netto_discharge[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Speicher|netto_discharge[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Speicher|netto_discharge[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Speicher|netto_discharge[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Speicher|netto_discharge[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Speicher|size": |-
+ Variable
+ --------
+ Speicher|size ∈ [30, 30]
+ "Speicher->costs(periodic)": |-
+ Variable
+ --------
+ Speicher->costs(periodic) ∈ [-inf, inf]
+ "Boiler(Q_fu)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+07]
+ [2020-01-01 05:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+07]
+ [2020-01-01 06:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00]: Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+07]
+ "Boiler(Q_fu)|total_flow_hours": |-
+ Variable
+ --------
+ Boiler(Q_fu)|total_flow_hours ∈ [0, inf]
+ "Boiler(Q_th)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 00:00:00] ∈ [0, 50]
+ [2020-01-01 01:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 01:00:00] ∈ [0, 50]
+ [2020-01-01 02:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 02:00:00] ∈ [0, 50]
+ [2020-01-01 03:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 03:00:00] ∈ [0, 50]
+ [2020-01-01 04:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 04:00:00] ∈ [0, 50]
+ [2020-01-01 05:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 05:00:00] ∈ [0, 50]
+ [2020-01-01 06:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 06:00:00] ∈ [0, 50]
+ [2020-01-01 07:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 07:00:00] ∈ [0, 50]
+ [2020-01-01 08:00:00]: Boiler(Q_th)|flow_rate[2020-01-01 08:00:00] ∈ [0, 50]
+ "Boiler(Q_th)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Boiler(Q_th)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: Boiler(Q_th)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: Boiler(Q_th)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: Boiler(Q_th)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: Boiler(Q_th)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: Boiler(Q_th)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: Boiler(Q_th)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: Boiler(Q_th)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: Boiler(Q_th)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "Boiler(Q_th)|on_hours_total": |-
+ Variable
+ --------
+ Boiler(Q_th)|on_hours_total ∈ [0, inf]
+ "Boiler(Q_th)|total_flow_hours": |-
+ Variable
+ --------
+ Boiler(Q_th)|total_flow_hours ∈ [0, inf]
+ "Wärmelast(Q_th_Last)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00] ∈ [30, 30]
+ [2020-01-01 01:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00] ∈ [0, 0]
+ [2020-01-01 02:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 02:00:00] ∈ [90, 90]
+ [2020-01-01 03:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 03:00:00] ∈ [110, 110]
+ [2020-01-01 04:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 04:00:00] ∈ [110, 110]
+ [2020-01-01 05:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 05:00:00] ∈ [20, 20]
+ [2020-01-01 06:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 06:00:00] ∈ [20, 20]
+ [2020-01-01 07:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00] ∈ [20, 20]
+ [2020-01-01 08:00:00]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00] ∈ [20, 20]
+ "Wärmelast(Q_th_Last)|total_flow_hours": |-
+ Variable
+ --------
+ Wärmelast(Q_th_Last)|total_flow_hours ∈ [0, inf]
+ "Gastarif(Q_Gas)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1000]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1000]
+ [2020-01-01 02:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1000]
+ [2020-01-01 03:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1000]
+ [2020-01-01 04:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1000]
+ [2020-01-01 05:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1000]
+ [2020-01-01 06:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1000]
+ [2020-01-01 07:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1000]
+ [2020-01-01 08:00:00]: Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1000]
+ "Gastarif(Q_Gas)|total_flow_hours": |-
+ Variable
+ --------
+ Gastarif(Q_Gas)|total_flow_hours ∈ [0, inf]
+ "Gastarif(Q_Gas)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Gastarif(Q_Gas)->CO2(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "Einspeisung(P_el)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+07]
+ [2020-01-01 05:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+07]
+ [2020-01-01 06:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00]: Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+07]
+ "Einspeisung(P_el)|total_flow_hours": |-
+ Variable
+ --------
+ Einspeisung(P_el)|total_flow_hours ∈ [0, inf]
+ "Einspeisung(P_el)->costs(temporal)": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00] ∈ [-inf, inf]
+ [2020-01-01 01:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00] ∈ [-inf, inf]
+ [2020-01-01 02:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00] ∈ [-inf, inf]
+ [2020-01-01 03:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 03:00:00] ∈ [-inf, inf]
+ [2020-01-01 04:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 04:00:00] ∈ [-inf, inf]
+ [2020-01-01 05:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 05:00:00] ∈ [-inf, inf]
+ [2020-01-01 06:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00] ∈ [-inf, inf]
+ [2020-01-01 07:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00] ∈ [-inf, inf]
+ [2020-01-01 08:00:00]: Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00] ∈ [-inf, inf]
+ "CHP_unit(Q_fu)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: CHP_unit(Q_fu)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00]: CHP_unit(Q_fu)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00]: CHP_unit(Q_fu)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+07]
+ [2020-01-01 05:00:00]: CHP_unit(Q_fu)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+07]
+ [2020-01-01 06:00:00]: CHP_unit(Q_fu)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00]: CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00]: CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+07]
+ "CHP_unit(Q_fu)|total_flow_hours": |-
+ Variable
+ --------
+ CHP_unit(Q_fu)|total_flow_hours ∈ [0, inf]
+ "CHP_unit(Q_th)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00]: CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00]: CHP_unit(Q_th)|flow_rate[2020-01-01 02:00:00] ∈ [0, 1e+07]
+ [2020-01-01 03:00:00]: CHP_unit(Q_th)|flow_rate[2020-01-01 03:00:00] ∈ [0, 1e+07]
+ [2020-01-01 04:00:00]: CHP_unit(Q_th)|flow_rate[2020-01-01 04:00:00] ∈ [0, 1e+07]
+ [2020-01-01 05:00:00]: CHP_unit(Q_th)|flow_rate[2020-01-01 05:00:00] ∈ [0, 1e+07]
+ [2020-01-01 06:00:00]: CHP_unit(Q_th)|flow_rate[2020-01-01 06:00:00] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00]: CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00]: CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00] ∈ [0, 1e+07]
+ "CHP_unit(Q_th)|total_flow_hours": |-
+ Variable
+ --------
+ CHP_unit(Q_th)|total_flow_hours ∈ [0, inf]
+ "CHP_unit(P_el)|flow_rate": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00] ∈ [0, 60]
+ [2020-01-01 01:00:00]: CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00] ∈ [0, 60]
+ [2020-01-01 02:00:00]: CHP_unit(P_el)|flow_rate[2020-01-01 02:00:00] ∈ [0, 60]
+ [2020-01-01 03:00:00]: CHP_unit(P_el)|flow_rate[2020-01-01 03:00:00] ∈ [0, 60]
+ [2020-01-01 04:00:00]: CHP_unit(P_el)|flow_rate[2020-01-01 04:00:00] ∈ [0, 60]
+ [2020-01-01 05:00:00]: CHP_unit(P_el)|flow_rate[2020-01-01 05:00:00] ∈ [0, 60]
+ [2020-01-01 06:00:00]: CHP_unit(P_el)|flow_rate[2020-01-01 06:00:00] ∈ [0, 60]
+ [2020-01-01 07:00:00]: CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00] ∈ [0, 60]
+ [2020-01-01 08:00:00]: CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00] ∈ [0, 60]
+ "CHP_unit(P_el)|on": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: CHP_unit(P_el)|on[2020-01-01 00:00:00] ∈ {0, 1}
+ [2020-01-01 01:00:00]: CHP_unit(P_el)|on[2020-01-01 01:00:00] ∈ {0, 1}
+ [2020-01-01 02:00:00]: CHP_unit(P_el)|on[2020-01-01 02:00:00] ∈ {0, 1}
+ [2020-01-01 03:00:00]: CHP_unit(P_el)|on[2020-01-01 03:00:00] ∈ {0, 1}
+ [2020-01-01 04:00:00]: CHP_unit(P_el)|on[2020-01-01 04:00:00] ∈ {0, 1}
+ [2020-01-01 05:00:00]: CHP_unit(P_el)|on[2020-01-01 05:00:00] ∈ {0, 1}
+ [2020-01-01 06:00:00]: CHP_unit(P_el)|on[2020-01-01 06:00:00] ∈ {0, 1}
+ [2020-01-01 07:00:00]: CHP_unit(P_el)|on[2020-01-01 07:00:00] ∈ {0, 1}
+ [2020-01-01 08:00:00]: CHP_unit(P_el)|on[2020-01-01 08:00:00] ∈ {0, 1}
+ "CHP_unit(P_el)|on_hours_total": |-
+ Variable
+ --------
+ CHP_unit(P_el)|on_hours_total ∈ [0, inf]
+ "CHP_unit(P_el)|total_flow_hours": |-
+ Variable
+ --------
+ CHP_unit(P_el)|total_flow_hours ∈ [0, inf]
+ "Strom|excess_input": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Strom|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Strom|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Strom|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Strom|excess_input[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Strom|excess_input[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Strom|excess_input[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Strom|excess_input[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Strom|excess_input[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Strom|excess_input[2020-01-01 08:00:00] ∈ [0, inf]
+ "Strom|excess_output": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Strom|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Strom|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Strom|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Strom|excess_output[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Strom|excess_output[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Strom|excess_output[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Strom|excess_output[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Strom|excess_output[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Strom|excess_output[2020-01-01 08:00:00] ∈ [0, inf]
+ "Strom->Penalty": |-
+ Variable
+ --------
+ Strom->Penalty ∈ [-inf, inf]
+ "Fernwärme|excess_input": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Fernwärme|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Fernwärme|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Fernwärme|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Fernwärme|excess_input[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Fernwärme|excess_input[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Fernwärme|excess_input[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Fernwärme|excess_input[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Fernwärme|excess_input[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Fernwärme|excess_input[2020-01-01 08:00:00] ∈ [0, inf]
+ "Fernwärme|excess_output": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Fernwärme|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Fernwärme|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Fernwärme|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Fernwärme|excess_output[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Fernwärme|excess_output[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Fernwärme|excess_output[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Fernwärme|excess_output[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Fernwärme|excess_output[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Fernwärme|excess_output[2020-01-01 08:00:00] ∈ [0, inf]
+ "Fernwärme->Penalty": |-
+ Variable
+ --------
+ Fernwärme->Penalty ∈ [-inf, inf]
+ "Gas|excess_input": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gas|excess_input[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Gas|excess_input[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Gas|excess_input[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Gas|excess_input[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Gas|excess_input[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Gas|excess_input[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Gas|excess_input[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Gas|excess_input[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Gas|excess_input[2020-01-01 08:00:00] ∈ [0, inf]
+ "Gas|excess_output": |-
+ Variable (time: 9)
+ ------------------
+ [2020-01-01 00:00:00]: Gas|excess_output[2020-01-01 00:00:00] ∈ [0, inf]
+ [2020-01-01 01:00:00]: Gas|excess_output[2020-01-01 01:00:00] ∈ [0, inf]
+ [2020-01-01 02:00:00]: Gas|excess_output[2020-01-01 02:00:00] ∈ [0, inf]
+ [2020-01-01 03:00:00]: Gas|excess_output[2020-01-01 03:00:00] ∈ [0, inf]
+ [2020-01-01 04:00:00]: Gas|excess_output[2020-01-01 04:00:00] ∈ [0, inf]
+ [2020-01-01 05:00:00]: Gas|excess_output[2020-01-01 05:00:00] ∈ [0, inf]
+ [2020-01-01 06:00:00]: Gas|excess_output[2020-01-01 06:00:00] ∈ [0, inf]
+ [2020-01-01 07:00:00]: Gas|excess_output[2020-01-01 07:00:00] ∈ [0, inf]
+ [2020-01-01 08:00:00]: Gas|excess_output[2020-01-01 08:00:00] ∈ [0, inf]
+ "Gas->Penalty": |-
+ Variable
+ --------
+ Gas->Penalty ∈ [-inf, inf]
+constraints:
+ costs(periodic): |-
+ Constraint `costs(periodic)`
+ ----------------------------
+ +1 costs(periodic) - 1 Speicher->costs(periodic) = -0.0
+ costs(temporal): |-
+ Constraint `costs(temporal)`
+ ----------------------------
+ +1 costs(temporal) - 1 costs(temporal)|per_timestep[2020-01-01 00:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 01:00:00]... -1 costs(temporal)|per_timestep[2020-01-01 06:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 07:00:00] - 1 costs(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "costs(temporal)|per_timestep": |-
+ Constraint `costs(temporal)|per_timestep`
+ [time: 9]:
+ ----------------------------------------------------
+ [2020-01-01 00:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 00:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 01:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 02:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 02:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 03:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 03:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 03:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 04:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 04:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 04:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 05:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 05:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 05:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 06:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 06:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 07:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 costs(temporal)|per_timestep[2020-01-01 08:00:00] - 1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00] = -0.0
+ costs: |-
+ Constraint `costs`
+ ------------------
+ +1 costs - 1 costs(temporal) - 1 costs(periodic) = -0.0
+ CO2(periodic): |-
+ Constraint `CO2(periodic)`
+ --------------------------
+ +1 CO2(periodic) = -0.0
+ CO2(temporal): |-
+ Constraint `CO2(temporal)`
+ --------------------------
+ +1 CO2(temporal) - 1 CO2(temporal)|per_timestep[2020-01-01 00:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 01:00:00]... -1 CO2(temporal)|per_timestep[2020-01-01 06:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 07:00:00] - 1 CO2(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "CO2(temporal)|per_timestep": |-
+ Constraint `CO2(temporal)|per_timestep`
+ [time: 9]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 01:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 02:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 03:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 04:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 05:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 06:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 07:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 CO2(temporal)|per_timestep[2020-01-01 08:00:00] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00] = -0.0
+ CO2: |-
+ Constraint `CO2`
+ ----------------
+ +1 CO2 - 1 CO2(temporal) - 1 CO2(periodic) = -0.0
+ Penalty: |-
+ Constraint `Penalty`
+ --------------------
+ +1 Penalty - 1 Strom->Penalty - 1 Fernwärme->Penalty - 1 Gas->Penalty = -0.0
+ "CO2(temporal)->costs(temporal)": |-
+ Constraint `CO2(temporal)->costs(temporal)`
+ [time: 9]:
+ ------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 02:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 03:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 04:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 05:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 06:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00] - 0.2 CO2(temporal)|per_timestep[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_load)|on_hours_total": |-
+ Constraint `Speicher(Q_th_load)|on_hours_total`
+ -----------------------------------------------
+ +1 Speicher(Q_th_load)|on_hours_total - 1 Speicher(Q_th_load)|on[2020-01-01 00:00:00] - 1 Speicher(Q_th_load)|on[2020-01-01 01:00:00]... -1 Speicher(Q_th_load)|on[2020-01-01 06:00:00] - 1 Speicher(Q_th_load)|on[2020-01-01 07:00:00] - 1 Speicher(Q_th_load)|on[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_load)|flow_rate|ub": |-
+ Constraint `Speicher(Q_th_load)|flow_rate|ub`
+ [time: 9]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Speicher(Q_th_load)|flow_rate|lb": |-
+ Constraint `Speicher(Q_th_load)|flow_rate|lb`
+ [time: 9]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Speicher(Q_th_load)|total_flow_hours": |-
+ Constraint `Speicher(Q_th_load)|total_flow_hours`
+ -------------------------------------------------
+ +1 Speicher(Q_th_load)|total_flow_hours - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_unload)|on_hours_total": |-
+ Constraint `Speicher(Q_th_unload)|on_hours_total`
+ -------------------------------------------------
+ +1 Speicher(Q_th_unload)|on_hours_total - 1 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] - 1 Speicher(Q_th_unload)|on[2020-01-01 01:00:00]... -1 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] - 1 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] - 1 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] = -0.0
+ "Speicher(Q_th_unload)|flow_rate|ub": |-
+ Constraint `Speicher(Q_th_unload)|flow_rate|ub`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Speicher(Q_th_unload)|flow_rate|lb": |-
+ Constraint `Speicher(Q_th_unload)|flow_rate|lb`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Speicher(Q_th_unload)|total_flow_hours": |-
+ Constraint `Speicher(Q_th_unload)|total_flow_hours`
+ ---------------------------------------------------
+ +1 Speicher(Q_th_unload)|total_flow_hours - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00]... -1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher|prevent_simultaneous_use": |-
+ Constraint `Speicher|prevent_simultaneous_use`
+ [time: 9]:
+ ---------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 00:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 00:00:00] ≤ 1.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 01:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 01:00:00] ≤ 1.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 02:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 02:00:00] ≤ 1.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 03:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 03:00:00] ≤ 1.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 04:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 04:00:00] ≤ 1.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 05:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 05:00:00] ≤ 1.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 06:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 06:00:00] ≤ 1.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 07:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 07:00:00] ≤ 1.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_load)|on[2020-01-01 08:00:00] + 1 Speicher(Q_th_unload)|on[2020-01-01 08:00:00] ≤ 1.0
+ "Speicher|netto_discharge": |-
+ Constraint `Speicher|netto_discharge`
+ [time: 9]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher|netto_discharge[2020-01-01 00:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Speicher|netto_discharge[2020-01-01 01:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|netto_discharge[2020-01-01 02:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|netto_discharge[2020-01-01 03:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|netto_discharge[2020-01-01 04:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|netto_discharge[2020-01-01 05:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|netto_discharge[2020-01-01 06:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|netto_discharge[2020-01-01 07:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|netto_discharge[2020-01-01 08:00:00] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher|charge_state": |-
+ Constraint `Speicher|charge_state`
+ [time: 9]:
+ ---------------------------------------------
+ [2020-01-01 01:00:00]: +1 Speicher|charge_state[2020-01-01 01:00:00] - 0.92 Speicher|charge_state[2020-01-01 00:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|charge_state[2020-01-01 02:00:00] - 0.92 Speicher|charge_state[2020-01-01 01:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|charge_state[2020-01-01 03:00:00] - 0.92 Speicher|charge_state[2020-01-01 02:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|charge_state[2020-01-01 04:00:00] - 0.92 Speicher|charge_state[2020-01-01 03:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 03:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|charge_state[2020-01-01 05:00:00] - 0.92 Speicher|charge_state[2020-01-01 04:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 04:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|charge_state[2020-01-01 06:00:00] - 0.92 Speicher|charge_state[2020-01-01 05:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 05:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|charge_state[2020-01-01 07:00:00] - 0.92 Speicher|charge_state[2020-01-01 06:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|charge_state[2020-01-01 08:00:00] - 0.92 Speicher|charge_state[2020-01-01 07:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 09:00:00]: +1 Speicher|charge_state[2020-01-01 09:00:00] - 0.92 Speicher|charge_state[2020-01-01 08:00:00] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Speicher->costs(periodic)": |-
+ Constraint `Speicher->costs(periodic)`
+ --------------------------------------
+ +1 Speicher->costs(periodic) = 20.0
+ "Speicher|charge_state|ub": |-
+ Constraint `Speicher|charge_state|ub`
+ [time: 10]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher|charge_state[2020-01-01 00:00:00] - 0.8 Speicher|size ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher|charge_state[2020-01-01 01:00:00] - 0.7 Speicher|size ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|charge_state[2020-01-01 02:00:00] - 0.8 Speicher|size ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|charge_state[2020-01-01 03:00:00] - 0.8 Speicher|size ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|charge_state[2020-01-01 04:00:00] - 0.8 Speicher|size ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|charge_state[2020-01-01 05:00:00] - 0.8 Speicher|size ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|charge_state[2020-01-01 06:00:00] - 0.8 Speicher|size ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|charge_state[2020-01-01 07:00:00] - 0.8 Speicher|size ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|charge_state[2020-01-01 08:00:00] - 0.8 Speicher|size ≤ -0.0
+ [2020-01-01 09:00:00]: +1 Speicher|charge_state[2020-01-01 09:00:00] - 0.8 Speicher|size ≤ -0.0
+ "Speicher|charge_state|lb": |-
+ Constraint `Speicher|charge_state|lb`
+ [time: 10]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher|charge_state[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Speicher|charge_state[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Speicher|charge_state[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Speicher|charge_state[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Speicher|charge_state[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Speicher|charge_state[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Speicher|charge_state[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Speicher|charge_state[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Speicher|charge_state[2020-01-01 08:00:00] ≥ -0.0
+ [2020-01-01 09:00:00]: +1 Speicher|charge_state[2020-01-01 09:00:00] ≥ -0.0
+ "Speicher|initial_charge_state": |-
+ Constraint `Speicher|initial_charge_state`
+ ------------------------------------------
+ +1 Speicher|charge_state[2020-01-01 00:00:00] = -0.0
+ "Boiler(Q_fu)|total_flow_hours": |-
+ Constraint `Boiler(Q_fu)|total_flow_hours`
+ ------------------------------------------
+ +1 Boiler(Q_fu)|total_flow_hours - 1 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00]... -1 Boiler(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Boiler(Q_th)|on_hours_total": |-
+ Constraint `Boiler(Q_th)|on_hours_total`
+ ----------------------------------------
+ +1 Boiler(Q_th)|on_hours_total - 1 Boiler(Q_th)|on[2020-01-01 00:00:00] - 1 Boiler(Q_th)|on[2020-01-01 01:00:00]... -1 Boiler(Q_th)|on[2020-01-01 06:00:00] - 1 Boiler(Q_th)|on[2020-01-01 07:00:00] - 1 Boiler(Q_th)|on[2020-01-01 08:00:00] = -0.0
+ "Boiler(Q_th)|flow_rate|ub": |-
+ Constraint `Boiler(Q_th)|flow_rate|ub`
+ [time: 9]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00] - 50 Boiler(Q_th)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00] - 50 Boiler(Q_th)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00] - 50 Boiler(Q_th)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 03:00:00] - 50 Boiler(Q_th)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 04:00:00] - 50 Boiler(Q_th)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 05:00:00] - 50 Boiler(Q_th)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 06:00:00] - 50 Boiler(Q_th)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00] - 50 Boiler(Q_th)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00] - 50 Boiler(Q_th)|on[2020-01-01 08:00:00] ≤ -0.0
+ "Boiler(Q_th)|flow_rate|lb": |-
+ Constraint `Boiler(Q_th)|flow_rate|lb`
+ [time: 9]:
+ -------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00] - 5 Boiler(Q_th)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00] - 5 Boiler(Q_th)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00] - 5 Boiler(Q_th)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 03:00:00] - 5 Boiler(Q_th)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 04:00:00] - 5 Boiler(Q_th)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 05:00:00] - 5 Boiler(Q_th)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 06:00:00] - 5 Boiler(Q_th)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00] - 5 Boiler(Q_th)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00] - 5 Boiler(Q_th)|on[2020-01-01 08:00:00] ≥ -0.0
+ "Boiler(Q_th)|total_flow_hours": |-
+ Constraint `Boiler(Q_th)|total_flow_hours`
+ ------------------------------------------
+ +1 Boiler(Q_th)|total_flow_hours - 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00]... -1 Boiler(Q_th)|flow_rate[2020-01-01 06:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Boiler|conversion_0": |-
+ Constraint `Boiler|conversion_0`
+ [time: 9]:
+ -------------------------------------------
+ [2020-01-01 00:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Wärmelast(Q_th_Last)|total_flow_hours": |-
+ Constraint `Wärmelast(Q_th_Last)|total_flow_hours`
+ --------------------------------------------------
+ +1 Wärmelast(Q_th_Last)|total_flow_hours - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 06:00:00] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Gastarif(Q_Gas)|total_flow_hours": |-
+ Constraint `Gastarif(Q_Gas)|total_flow_hours`
+ ---------------------------------------------
+ +1 Gastarif(Q_Gas)|total_flow_hours - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00]... -1 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Gastarif(Q_Gas)->costs(temporal)": |-
+ Constraint `Gastarif(Q_Gas)->costs(temporal)`
+ [time: 9]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 03:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 04:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 05:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Gastarif(Q_Gas)->CO2(temporal)": |-
+ Constraint `Gastarif(Q_Gas)->CO2(temporal)`
+ [time: 9]:
+ ------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 03:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 04:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 05:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Einspeisung(P_el)|total_flow_hours": |-
+ Constraint `Einspeisung(P_el)|total_flow_hours`
+ -----------------------------------------------
+ +1 Einspeisung(P_el)|total_flow_hours - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00]... -1 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Einspeisung(P_el)->costs(temporal)": |-
+ Constraint `Einspeisung(P_el)->costs(temporal)`
+ [time: 9]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 03:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 04:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 05:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "CHP_unit(Q_fu)|total_flow_hours": |-
+ Constraint `CHP_unit(Q_fu)|total_flow_hours`
+ --------------------------------------------
+ +1 CHP_unit(Q_fu)|total_flow_hours - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00]... -1 CHP_unit(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "CHP_unit(Q_th)|total_flow_hours": |-
+ Constraint `CHP_unit(Q_th)|total_flow_hours`
+ --------------------------------------------
+ +1 CHP_unit(Q_th)|total_flow_hours - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00]... -1 CHP_unit(Q_th)|flow_rate[2020-01-01 06:00:00] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "CHP_unit(P_el)|on_hours_total": |-
+ Constraint `CHP_unit(P_el)|on_hours_total`
+ ------------------------------------------
+ +1 CHP_unit(P_el)|on_hours_total - 1 CHP_unit(P_el)|on[2020-01-01 00:00:00] - 1 CHP_unit(P_el)|on[2020-01-01 01:00:00]... -1 CHP_unit(P_el)|on[2020-01-01 06:00:00] - 1 CHP_unit(P_el)|on[2020-01-01 07:00:00] - 1 CHP_unit(P_el)|on[2020-01-01 08:00:00] = -0.0
+ "CHP_unit(P_el)|flow_rate|ub": |-
+ Constraint `CHP_unit(P_el)|flow_rate|ub`
+ [time: 9]:
+ ---------------------------------------------------
+ [2020-01-01 00:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00] - 60 CHP_unit(P_el)|on[2020-01-01 00:00:00] ≤ -0.0
+ [2020-01-01 01:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00] - 60 CHP_unit(P_el)|on[2020-01-01 01:00:00] ≤ -0.0
+ [2020-01-01 02:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 02:00:00] - 60 CHP_unit(P_el)|on[2020-01-01 02:00:00] ≤ -0.0
+ [2020-01-01 03:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 03:00:00] - 60 CHP_unit(P_el)|on[2020-01-01 03:00:00] ≤ -0.0
+ [2020-01-01 04:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 04:00:00] - 60 CHP_unit(P_el)|on[2020-01-01 04:00:00] ≤ -0.0
+ [2020-01-01 05:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 05:00:00] - 60 CHP_unit(P_el)|on[2020-01-01 05:00:00] ≤ -0.0
+ [2020-01-01 06:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 06:00:00] - 60 CHP_unit(P_el)|on[2020-01-01 06:00:00] ≤ -0.0
+ [2020-01-01 07:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00] - 60 CHP_unit(P_el)|on[2020-01-01 07:00:00] ≤ -0.0
+ [2020-01-01 08:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00] - 60 CHP_unit(P_el)|on[2020-01-01 08:00:00] ≤ -0.0
+ "CHP_unit(P_el)|flow_rate|lb": |-
+ Constraint `CHP_unit(P_el)|flow_rate|lb`
+ [time: 9]:
+ ---------------------------------------------------
+ [2020-01-01 00:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00] - 5 CHP_unit(P_el)|on[2020-01-01 00:00:00] ≥ -0.0
+ [2020-01-01 01:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00] - 5 CHP_unit(P_el)|on[2020-01-01 01:00:00] ≥ -0.0
+ [2020-01-01 02:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 02:00:00] - 5 CHP_unit(P_el)|on[2020-01-01 02:00:00] ≥ -0.0
+ [2020-01-01 03:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 03:00:00] - 5 CHP_unit(P_el)|on[2020-01-01 03:00:00] ≥ -0.0
+ [2020-01-01 04:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 04:00:00] - 5 CHP_unit(P_el)|on[2020-01-01 04:00:00] ≥ -0.0
+ [2020-01-01 05:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 05:00:00] - 5 CHP_unit(P_el)|on[2020-01-01 05:00:00] ≥ -0.0
+ [2020-01-01 06:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 06:00:00] - 5 CHP_unit(P_el)|on[2020-01-01 06:00:00] ≥ -0.0
+ [2020-01-01 07:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00] - 5 CHP_unit(P_el)|on[2020-01-01 07:00:00] ≥ -0.0
+ [2020-01-01 08:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00] - 5 CHP_unit(P_el)|on[2020-01-01 08:00:00] ≥ -0.0
+ "CHP_unit(P_el)|total_flow_hours": |-
+ Constraint `CHP_unit(P_el)|total_flow_hours`
+ --------------------------------------------
+ +1 CHP_unit(P_el)|total_flow_hours - 1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00]... -1 CHP_unit(P_el)|flow_rate[2020-01-01 06:00:00] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "CHP_unit|conversion_0": |-
+ Constraint `CHP_unit|conversion_0`
+ [time: 9]:
+ ---------------------------------------------
+ [2020-01-01 00:00:00]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "CHP_unit|conversion_1": |-
+ Constraint `CHP_unit|conversion_1`
+ [time: 9]:
+ ---------------------------------------------
+ [2020-01-01 00:00:00]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00] = -0.0
+ "Strom|balance": |-
+ Constraint `Strom|balance`
+ [time: 9]:
+ -------------------------------------
+ [2020-01-01 00:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00] + 1 Strom|excess_input[2020-01-01 00:00:00] - 1 Strom|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00] + 1 Strom|excess_input[2020-01-01 01:00:00] - 1 Strom|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 02:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00] + 1 Strom|excess_input[2020-01-01 02:00:00] - 1 Strom|excess_output[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 03:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 03:00:00] + 1 Strom|excess_input[2020-01-01 03:00:00] - 1 Strom|excess_output[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 04:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 04:00:00] + 1 Strom|excess_input[2020-01-01 04:00:00] - 1 Strom|excess_output[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 05:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 05:00:00] + 1 Strom|excess_input[2020-01-01 05:00:00] - 1 Strom|excess_output[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 06:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00] + 1 Strom|excess_input[2020-01-01 06:00:00] - 1 Strom|excess_output[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00] + 1 Strom|excess_input[2020-01-01 07:00:00] - 1 Strom|excess_output[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00] + 1 Strom|excess_input[2020-01-01 08:00:00] - 1 Strom|excess_output[2020-01-01 08:00:00] = -0.0
+ "Strom->Penalty": |-
+ Constraint `Strom->Penalty`
+ ---------------------------
+ +1 Strom->Penalty - 1e+05 Strom|excess_input[2020-01-01 00:00:00] - 1e+05 Strom|excess_input[2020-01-01 01:00:00]... -1e+05 Strom|excess_output[2020-01-01 06:00:00] - 1e+05 Strom|excess_output[2020-01-01 07:00:00] - 1e+05 Strom|excess_output[2020-01-01 08:00:00] = -0.0
+ "Fernwärme|balance": |-
+ Constraint `Fernwärme|balance`
+ [time: 9]:
+ -----------------------------------------
+ [2020-01-01 00:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00] + 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00] + 1 Fernwärme|excess_input[2020-01-01 00:00:00] - 1 Fernwärme|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00] + 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00] + 1 Fernwärme|excess_input[2020-01-01 01:00:00] - 1 Fernwärme|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00] + 1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 02:00:00]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 02:00:00] + 1 Fernwärme|excess_input[2020-01-01 02:00:00] - 1 Fernwärme|excess_output[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 03:00:00] + 1 Boiler(Q_th)|flow_rate[2020-01-01 03:00:00] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 03:00:00]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 03:00:00] + 1 Fernwärme|excess_input[2020-01-01 03:00:00] - 1 Fernwärme|excess_output[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 04:00:00] + 1 Boiler(Q_th)|flow_rate[2020-01-01 04:00:00] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 04:00:00]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 04:00:00] + 1 Fernwärme|excess_input[2020-01-01 04:00:00] - 1 Fernwärme|excess_output[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 05:00:00] + 1 Boiler(Q_th)|flow_rate[2020-01-01 05:00:00] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 05:00:00]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 05:00:00] + 1 Fernwärme|excess_input[2020-01-01 05:00:00] - 1 Fernwärme|excess_output[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00] + 1 Boiler(Q_th)|flow_rate[2020-01-01 06:00:00] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 06:00:00]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 06:00:00] + 1 Fernwärme|excess_input[2020-01-01 06:00:00] - 1 Fernwärme|excess_output[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00] + 1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00] + 1 Fernwärme|excess_input[2020-01-01 07:00:00] - 1 Fernwärme|excess_output[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00] + 1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00] + 1 Fernwärme|excess_input[2020-01-01 08:00:00] - 1 Fernwärme|excess_output[2020-01-01 08:00:00] = -0.0
+ "Fernwärme->Penalty": |-
+ Constraint `Fernwärme->Penalty`
+ -------------------------------
+ +1 Fernwärme->Penalty - 1e+05 Fernwärme|excess_input[2020-01-01 00:00:00] - 1e+05 Fernwärme|excess_input[2020-01-01 01:00:00]... -1e+05 Fernwärme|excess_output[2020-01-01 06:00:00] - 1e+05 Fernwärme|excess_output[2020-01-01 07:00:00] - 1e+05 Fernwärme|excess_output[2020-01-01 08:00:00] = -0.0
+ "Gas|balance": |-
+ Constraint `Gas|balance`
+ [time: 9]:
+ -----------------------------------
+ [2020-01-01 00:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00] + 1 Gas|excess_input[2020-01-01 00:00:00] - 1 Gas|excess_output[2020-01-01 00:00:00] = -0.0
+ [2020-01-01 01:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00] + 1 Gas|excess_input[2020-01-01 01:00:00] - 1 Gas|excess_output[2020-01-01 01:00:00] = -0.0
+ [2020-01-01 02:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 02:00:00] + 1 Gas|excess_input[2020-01-01 02:00:00] - 1 Gas|excess_output[2020-01-01 02:00:00] = -0.0
+ [2020-01-01 03:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 03:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 03:00:00] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 03:00:00] + 1 Gas|excess_input[2020-01-01 03:00:00] - 1 Gas|excess_output[2020-01-01 03:00:00] = -0.0
+ [2020-01-01 04:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 04:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 04:00:00] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 04:00:00] + 1 Gas|excess_input[2020-01-01 04:00:00] - 1 Gas|excess_output[2020-01-01 04:00:00] = -0.0
+ [2020-01-01 05:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 05:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 05:00:00] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 05:00:00] + 1 Gas|excess_input[2020-01-01 05:00:00] - 1 Gas|excess_output[2020-01-01 05:00:00] = -0.0
+ [2020-01-01 06:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 06:00:00] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 06:00:00] + 1 Gas|excess_input[2020-01-01 06:00:00] - 1 Gas|excess_output[2020-01-01 06:00:00] = -0.0
+ [2020-01-01 07:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00] + 1 Gas|excess_input[2020-01-01 07:00:00] - 1 Gas|excess_output[2020-01-01 07:00:00] = -0.0
+ [2020-01-01 08:00:00]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00] + 1 Gas|excess_input[2020-01-01 08:00:00] - 1 Gas|excess_output[2020-01-01 08:00:00] = -0.0
+ "Gas->Penalty": |-
+ Constraint `Gas->Penalty`
+ -------------------------
+ +1 Gas->Penalty - 1e+05 Gas|excess_input[2020-01-01 00:00:00] - 1e+05 Gas|excess_input[2020-01-01 01:00:00]... -1e+05 Gas|excess_output[2020-01-01 06:00:00] - 1e+05 Gas|excess_output[2020-01-01 07:00:00] - 1e+05 Gas|excess_output[2020-01-01 08:00:00] = -0.0
+binaries:
+ - "Speicher(Q_th_load)|on"
+ - "Speicher(Q_th_unload)|on"
+ - "Boiler(Q_th)|on"
+ - "CHP_unit(P_el)|on"
+integers: []
+continuous:
+ - costs(periodic)
+ - costs(temporal)
+ - "costs(temporal)|per_timestep"
+ - costs
+ - CO2(periodic)
+ - CO2(temporal)
+ - "CO2(temporal)|per_timestep"
+ - CO2
+ - Penalty
+ - "CO2(temporal)->costs(temporal)"
+ - "Speicher(Q_th_load)|flow_rate"
+ - "Speicher(Q_th_load)|on_hours_total"
+ - "Speicher(Q_th_load)|total_flow_hours"
+ - "Speicher(Q_th_unload)|flow_rate"
+ - "Speicher(Q_th_unload)|on_hours_total"
+ - "Speicher(Q_th_unload)|total_flow_hours"
+ - "Speicher|charge_state"
+ - "Speicher|netto_discharge"
+ - "Speicher|size"
+ - "Speicher->costs(periodic)"
+ - "Boiler(Q_fu)|flow_rate"
+ - "Boiler(Q_fu)|total_flow_hours"
+ - "Boiler(Q_th)|flow_rate"
+ - "Boiler(Q_th)|on_hours_total"
+ - "Boiler(Q_th)|total_flow_hours"
+ - "Wärmelast(Q_th_Last)|flow_rate"
+ - "Wärmelast(Q_th_Last)|total_flow_hours"
+ - "Gastarif(Q_Gas)|flow_rate"
+ - "Gastarif(Q_Gas)|total_flow_hours"
+ - "Gastarif(Q_Gas)->costs(temporal)"
+ - "Gastarif(Q_Gas)->CO2(temporal)"
+ - "Einspeisung(P_el)|flow_rate"
+ - "Einspeisung(P_el)|total_flow_hours"
+ - "Einspeisung(P_el)->costs(temporal)"
+ - "CHP_unit(Q_fu)|flow_rate"
+ - "CHP_unit(Q_fu)|total_flow_hours"
+ - "CHP_unit(Q_th)|flow_rate"
+ - "CHP_unit(Q_th)|total_flow_hours"
+ - "CHP_unit(P_el)|flow_rate"
+ - "CHP_unit(P_el)|on_hours_total"
+ - "CHP_unit(P_el)|total_flow_hours"
+ - "Strom|excess_input"
+ - "Strom|excess_output"
+ - "Strom->Penalty"
+ - "Fernwärme|excess_input"
+ - "Fernwärme|excess_output"
+ - "Fernwärme->Penalty"
+ - "Gas|excess_input"
+ - "Gas|excess_output"
+ - "Gas->Penalty"
+infeasible_constraints: ''
diff --git a/tests/ressources/v4-api/io_simple_flow_system--solution.nc4 b/tests/ressources/v4-api/io_simple_flow_system--solution.nc4
new file mode 100644
index 000000000..1c189d522
Binary files /dev/null and b/tests/ressources/v4-api/io_simple_flow_system--solution.nc4 differ
diff --git a/tests/ressources/v4-api/io_simple_flow_system--summary.yaml b/tests/ressources/v4-api/io_simple_flow_system--summary.yaml
new file mode 100644
index 000000000..b8cc09b01
--- /dev/null
+++ b/tests/ressources/v4-api/io_simple_flow_system--summary.yaml
@@ -0,0 +1,51 @@
+Name: io_simple_flow_system
+Number of timesteps: 9
+Calculation Type: FullCalculation
+Constraints: 253
+Variables: 279
+Main Results:
+ Objective: 81.88
+ Penalty: 0.0
+ Effects:
+ CO2 [kg]:
+ temporal: 255.09
+ periodic: -0.0
+ total: 255.09
+ costs [€]:
+ temporal: 61.88
+ periodic: 20.0
+ total: 81.88
+ Invest-Decisions:
+ Invested:
+ Speicher: 30.0
+ Not invested: {}
+ Buses with excess: []
+Durations:
+ modeling: 0.52
+ solving: 0.34
+ saving: 0.0
+Config:
+ config_name: flixopt
+ logging:
+ level: INFO
+ file: null
+ console: false
+ max_file_size: 10485760
+ backup_count: 5
+ verbose_tracebacks: false
+ modeling:
+ big: 10000000
+ epsilon: 1.0e-05
+ big_binary_bound: 100000
+ solving:
+ mip_gap: 0.01
+ time_limit_seconds: 300
+ log_to_console: false
+ log_main_results: false
+ plotting:
+ default_show: false
+ default_engine: plotly
+ default_dpi: 300
+ default_facet_cols: 3
+ default_sequential_colorscale: turbo
+ default_qualitative_colorscale: plotly
diff --git a/tests/ressources/v4-api/io_simple_flow_system_scenarios--flow_system.nc4 b/tests/ressources/v4-api/io_simple_flow_system_scenarios--flow_system.nc4
new file mode 100644
index 000000000..af8160c46
Binary files /dev/null and b/tests/ressources/v4-api/io_simple_flow_system_scenarios--flow_system.nc4 differ
diff --git a/tests/ressources/v4-api/io_simple_flow_system_scenarios--model_documentation.yaml b/tests/ressources/v4-api/io_simple_flow_system_scenarios--model_documentation.yaml
new file mode 100644
index 000000000..c14f18133
--- /dev/null
+++ b/tests/ressources/v4-api/io_simple_flow_system_scenarios--model_documentation.yaml
@@ -0,0 +1,1375 @@
+objective: |-
+ Objective:
+ ----------
+ LinearExpression: +0.5 costs[A] + 0.25 costs[B] + 0.25 costs[C] + 1 Penalty
+ Sense: min
+ Value: 75.37394666666668
+termination_condition: optimal
+status: ok
+nvars: 829
+nvarsbin: 108
+nvarscont: 721
+ncons: 753
+variables:
+ costs(periodic): |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: costs(periodic)[A] ∈ [-inf, inf]
+ [B]: costs(periodic)[B] ∈ [-inf, inf]
+ [C]: costs(periodic)[C] ∈ [-inf, inf]
+ costs(temporal): |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: costs(temporal)[A] ∈ [-inf, inf]
+ [B]: costs(temporal)[B] ∈ [-inf, inf]
+ [C]: costs(temporal)[C] ∈ [-inf, inf]
+ "costs(temporal)|per_timestep": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: costs(temporal)|per_timestep[2020-01-01 00:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 00:00:00, B]: costs(temporal)|per_timestep[2020-01-01 00:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 00:00:00, C]: costs(temporal)|per_timestep[2020-01-01 00:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, A]: costs(temporal)|per_timestep[2020-01-01 01:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, B]: costs(temporal)|per_timestep[2020-01-01 01:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, C]: costs(temporal)|per_timestep[2020-01-01 01:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 02:00:00, A]: costs(temporal)|per_timestep[2020-01-01 02:00:00, A] ∈ [-inf, inf]
+ ...
+ [2020-01-01 06:00:00, C]: costs(temporal)|per_timestep[2020-01-01 06:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, A]: costs(temporal)|per_timestep[2020-01-01 07:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, B]: costs(temporal)|per_timestep[2020-01-01 07:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, C]: costs(temporal)|per_timestep[2020-01-01 07:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, A]: costs(temporal)|per_timestep[2020-01-01 08:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, B]: costs(temporal)|per_timestep[2020-01-01 08:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, C]: costs(temporal)|per_timestep[2020-01-01 08:00:00, C] ∈ [-inf, inf]
+ costs: |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: costs[A] ∈ [-inf, inf]
+ [B]: costs[B] ∈ [-inf, inf]
+ [C]: costs[C] ∈ [-inf, inf]
+ CO2(periodic): |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: CO2(periodic)[A] ∈ [-inf, inf]
+ [B]: CO2(periodic)[B] ∈ [-inf, inf]
+ [C]: CO2(periodic)[C] ∈ [-inf, inf]
+ CO2(temporal): |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: CO2(temporal)[A] ∈ [-inf, inf]
+ [B]: CO2(temporal)[B] ∈ [-inf, inf]
+ [C]: CO2(temporal)[C] ∈ [-inf, inf]
+ "CO2(temporal)|per_timestep": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: CO2(temporal)|per_timestep[2020-01-01 00:00:00, A] ∈ [-inf, 1000]
+ [2020-01-01 00:00:00, B]: CO2(temporal)|per_timestep[2020-01-01 00:00:00, B] ∈ [-inf, 1000]
+ [2020-01-01 00:00:00, C]: CO2(temporal)|per_timestep[2020-01-01 00:00:00, C] ∈ [-inf, 1000]
+ [2020-01-01 01:00:00, A]: CO2(temporal)|per_timestep[2020-01-01 01:00:00, A] ∈ [-inf, 1000]
+ [2020-01-01 01:00:00, B]: CO2(temporal)|per_timestep[2020-01-01 01:00:00, B] ∈ [-inf, 1000]
+ [2020-01-01 01:00:00, C]: CO2(temporal)|per_timestep[2020-01-01 01:00:00, C] ∈ [-inf, 1000]
+ [2020-01-01 02:00:00, A]: CO2(temporal)|per_timestep[2020-01-01 02:00:00, A] ∈ [-inf, 1000]
+ ...
+ [2020-01-01 06:00:00, C]: CO2(temporal)|per_timestep[2020-01-01 06:00:00, C] ∈ [-inf, 1000]
+ [2020-01-01 07:00:00, A]: CO2(temporal)|per_timestep[2020-01-01 07:00:00, A] ∈ [-inf, 1000]
+ [2020-01-01 07:00:00, B]: CO2(temporal)|per_timestep[2020-01-01 07:00:00, B] ∈ [-inf, 1000]
+ [2020-01-01 07:00:00, C]: CO2(temporal)|per_timestep[2020-01-01 07:00:00, C] ∈ [-inf, 1000]
+ [2020-01-01 08:00:00, A]: CO2(temporal)|per_timestep[2020-01-01 08:00:00, A] ∈ [-inf, 1000]
+ [2020-01-01 08:00:00, B]: CO2(temporal)|per_timestep[2020-01-01 08:00:00, B] ∈ [-inf, 1000]
+ [2020-01-01 08:00:00, C]: CO2(temporal)|per_timestep[2020-01-01 08:00:00, C] ∈ [-inf, 1000]
+ CO2: |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: CO2[A] ∈ [-inf, inf]
+ [B]: CO2[B] ∈ [-inf, inf]
+ [C]: CO2[C] ∈ [-inf, inf]
+ Penalty: |-
+ Variable
+ --------
+ Penalty ∈ [-inf, inf]
+ "CO2(temporal)->costs(temporal)": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: CO2(temporal)->costs(temporal)[2020-01-01 00:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 00:00:00, B]: CO2(temporal)->costs(temporal)[2020-01-01 00:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 00:00:00, C]: CO2(temporal)->costs(temporal)[2020-01-01 00:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, A]: CO2(temporal)->costs(temporal)[2020-01-01 01:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, B]: CO2(temporal)->costs(temporal)[2020-01-01 01:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, C]: CO2(temporal)->costs(temporal)[2020-01-01 01:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 02:00:00, A]: CO2(temporal)->costs(temporal)[2020-01-01 02:00:00, A] ∈ [-inf, inf]
+ ...
+ [2020-01-01 06:00:00, C]: CO2(temporal)->costs(temporal)[2020-01-01 06:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, A]: CO2(temporal)->costs(temporal)[2020-01-01 07:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, B]: CO2(temporal)->costs(temporal)[2020-01-01 07:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, C]: CO2(temporal)->costs(temporal)[2020-01-01 07:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, A]: CO2(temporal)->costs(temporal)[2020-01-01 08:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, B]: CO2(temporal)->costs(temporal)[2020-01-01 08:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, C]: CO2(temporal)->costs(temporal)[2020-01-01 08:00:00, C] ∈ [-inf, inf]
+ "Speicher(Q_th_load)|flow_rate": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, A] ∈ [0, 1e+04]
+ [2020-01-01 00:00:00, B]: Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, B] ∈ [0, 1e+04]
+ [2020-01-01 00:00:00, C]: Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, C] ∈ [0, 1e+04]
+ [2020-01-01 01:00:00, A]: Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, A] ∈ [0, 1e+04]
+ [2020-01-01 01:00:00, B]: Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, B] ∈ [0, 1e+04]
+ [2020-01-01 01:00:00, C]: Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, C] ∈ [0, 1e+04]
+ [2020-01-01 02:00:00, A]: Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00, A] ∈ [0, 1e+04]
+ ...
+ [2020-01-01 06:00:00, C]: Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00, C] ∈ [0, 1e+04]
+ [2020-01-01 07:00:00, A]: Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, A] ∈ [0, 1e+04]
+ [2020-01-01 07:00:00, B]: Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, B] ∈ [0, 1e+04]
+ [2020-01-01 07:00:00, C]: Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, C] ∈ [0, 1e+04]
+ [2020-01-01 08:00:00, A]: Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, A] ∈ [0, 1e+04]
+ [2020-01-01 08:00:00, B]: Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, B] ∈ [0, 1e+04]
+ [2020-01-01 08:00:00, C]: Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, C] ∈ [0, 1e+04]
+ "Speicher(Q_th_load)|on": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Speicher(Q_th_load)|on[2020-01-01 00:00:00, A] ∈ {0, 1}
+ [2020-01-01 00:00:00, B]: Speicher(Q_th_load)|on[2020-01-01 00:00:00, B] ∈ {0, 1}
+ [2020-01-01 00:00:00, C]: Speicher(Q_th_load)|on[2020-01-01 00:00:00, C] ∈ {0, 1}
+ [2020-01-01 01:00:00, A]: Speicher(Q_th_load)|on[2020-01-01 01:00:00, A] ∈ {0, 1}
+ [2020-01-01 01:00:00, B]: Speicher(Q_th_load)|on[2020-01-01 01:00:00, B] ∈ {0, 1}
+ [2020-01-01 01:00:00, C]: Speicher(Q_th_load)|on[2020-01-01 01:00:00, C] ∈ {0, 1}
+ [2020-01-01 02:00:00, A]: Speicher(Q_th_load)|on[2020-01-01 02:00:00, A] ∈ {0, 1}
+ ...
+ [2020-01-01 06:00:00, C]: Speicher(Q_th_load)|on[2020-01-01 06:00:00, C] ∈ {0, 1}
+ [2020-01-01 07:00:00, A]: Speicher(Q_th_load)|on[2020-01-01 07:00:00, A] ∈ {0, 1}
+ [2020-01-01 07:00:00, B]: Speicher(Q_th_load)|on[2020-01-01 07:00:00, B] ∈ {0, 1}
+ [2020-01-01 07:00:00, C]: Speicher(Q_th_load)|on[2020-01-01 07:00:00, C] ∈ {0, 1}
+ [2020-01-01 08:00:00, A]: Speicher(Q_th_load)|on[2020-01-01 08:00:00, A] ∈ {0, 1}
+ [2020-01-01 08:00:00, B]: Speicher(Q_th_load)|on[2020-01-01 08:00:00, B] ∈ {0, 1}
+ [2020-01-01 08:00:00, C]: Speicher(Q_th_load)|on[2020-01-01 08:00:00, C] ∈ {0, 1}
+ "Speicher(Q_th_load)|on_hours_total": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: Speicher(Q_th_load)|on_hours_total[A] ∈ [0, inf]
+ [B]: Speicher(Q_th_load)|on_hours_total[B] ∈ [0, inf]
+ [C]: Speicher(Q_th_load)|on_hours_total[C] ∈ [0, inf]
+ "Speicher(Q_th_load)|total_flow_hours": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: Speicher(Q_th_load)|total_flow_hours[A] ∈ [0, inf]
+ [B]: Speicher(Q_th_load)|total_flow_hours[B] ∈ [0, inf]
+ [C]: Speicher(Q_th_load)|total_flow_hours[C] ∈ [0, inf]
+ "Speicher(Q_th_unload)|flow_rate": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, A] ∈ [0, 1e+04]
+ [2020-01-01 00:00:00, B]: Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, B] ∈ [0, 1e+04]
+ [2020-01-01 00:00:00, C]: Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, C] ∈ [0, 1e+04]
+ [2020-01-01 01:00:00, A]: Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, A] ∈ [0, 1e+04]
+ [2020-01-01 01:00:00, B]: Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, B] ∈ [0, 1e+04]
+ [2020-01-01 01:00:00, C]: Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, C] ∈ [0, 1e+04]
+ [2020-01-01 02:00:00, A]: Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00, A] ∈ [0, 1e+04]
+ ...
+ [2020-01-01 06:00:00, C]: Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00, C] ∈ [0, 1e+04]
+ [2020-01-01 07:00:00, A]: Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, A] ∈ [0, 1e+04]
+ [2020-01-01 07:00:00, B]: Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, B] ∈ [0, 1e+04]
+ [2020-01-01 07:00:00, C]: Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, C] ∈ [0, 1e+04]
+ [2020-01-01 08:00:00, A]: Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, A] ∈ [0, 1e+04]
+ [2020-01-01 08:00:00, B]: Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, B] ∈ [0, 1e+04]
+ [2020-01-01 08:00:00, C]: Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, C] ∈ [0, 1e+04]
+ "Speicher(Q_th_unload)|on": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Speicher(Q_th_unload)|on[2020-01-01 00:00:00, A] ∈ {0, 1}
+ [2020-01-01 00:00:00, B]: Speicher(Q_th_unload)|on[2020-01-01 00:00:00, B] ∈ {0, 1}
+ [2020-01-01 00:00:00, C]: Speicher(Q_th_unload)|on[2020-01-01 00:00:00, C] ∈ {0, 1}
+ [2020-01-01 01:00:00, A]: Speicher(Q_th_unload)|on[2020-01-01 01:00:00, A] ∈ {0, 1}
+ [2020-01-01 01:00:00, B]: Speicher(Q_th_unload)|on[2020-01-01 01:00:00, B] ∈ {0, 1}
+ [2020-01-01 01:00:00, C]: Speicher(Q_th_unload)|on[2020-01-01 01:00:00, C] ∈ {0, 1}
+ [2020-01-01 02:00:00, A]: Speicher(Q_th_unload)|on[2020-01-01 02:00:00, A] ∈ {0, 1}
+ ...
+ [2020-01-01 06:00:00, C]: Speicher(Q_th_unload)|on[2020-01-01 06:00:00, C] ∈ {0, 1}
+ [2020-01-01 07:00:00, A]: Speicher(Q_th_unload)|on[2020-01-01 07:00:00, A] ∈ {0, 1}
+ [2020-01-01 07:00:00, B]: Speicher(Q_th_unload)|on[2020-01-01 07:00:00, B] ∈ {0, 1}
+ [2020-01-01 07:00:00, C]: Speicher(Q_th_unload)|on[2020-01-01 07:00:00, C] ∈ {0, 1}
+ [2020-01-01 08:00:00, A]: Speicher(Q_th_unload)|on[2020-01-01 08:00:00, A] ∈ {0, 1}
+ [2020-01-01 08:00:00, B]: Speicher(Q_th_unload)|on[2020-01-01 08:00:00, B] ∈ {0, 1}
+ [2020-01-01 08:00:00, C]: Speicher(Q_th_unload)|on[2020-01-01 08:00:00, C] ∈ {0, 1}
+ "Speicher(Q_th_unload)|on_hours_total": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: Speicher(Q_th_unload)|on_hours_total[A] ∈ [0, inf]
+ [B]: Speicher(Q_th_unload)|on_hours_total[B] ∈ [0, inf]
+ [C]: Speicher(Q_th_unload)|on_hours_total[C] ∈ [0, inf]
+ "Speicher(Q_th_unload)|total_flow_hours": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: Speicher(Q_th_unload)|total_flow_hours[A] ∈ [0, inf]
+ [B]: Speicher(Q_th_unload)|total_flow_hours[B] ∈ [0, inf]
+ [C]: Speicher(Q_th_unload)|total_flow_hours[C] ∈ [0, inf]
+ "Speicher|charge_state": |-
+ Variable (time: 10, scenario: 3)
+ --------------------------------
+ [2020-01-01 00:00:00, A]: Speicher|charge_state[2020-01-01 00:00:00, A] ∈ [0, 8e+06]
+ [2020-01-01 00:00:00, B]: Speicher|charge_state[2020-01-01 00:00:00, B] ∈ [0, 8e+06]
+ [2020-01-01 00:00:00, C]: Speicher|charge_state[2020-01-01 00:00:00, C] ∈ [0, 8e+06]
+ [2020-01-01 01:00:00, A]: Speicher|charge_state[2020-01-01 01:00:00, A] ∈ [0, 7e+06]
+ [2020-01-01 01:00:00, B]: Speicher|charge_state[2020-01-01 01:00:00, B] ∈ [0, 7e+06]
+ [2020-01-01 01:00:00, C]: Speicher|charge_state[2020-01-01 01:00:00, C] ∈ [0, 7e+06]
+ [2020-01-01 02:00:00, A]: Speicher|charge_state[2020-01-01 02:00:00, A] ∈ [0, 8e+06]
+ ...
+ [2020-01-01 07:00:00, C]: Speicher|charge_state[2020-01-01 07:00:00, C] ∈ [0, 8e+06]
+ [2020-01-01 08:00:00, A]: Speicher|charge_state[2020-01-01 08:00:00, A] ∈ [0, 8e+06]
+ [2020-01-01 08:00:00, B]: Speicher|charge_state[2020-01-01 08:00:00, B] ∈ [0, 8e+06]
+ [2020-01-01 08:00:00, C]: Speicher|charge_state[2020-01-01 08:00:00, C] ∈ [0, 8e+06]
+ [2020-01-01 09:00:00, A]: Speicher|charge_state[2020-01-01 09:00:00, A] ∈ [0, 8e+06]
+ [2020-01-01 09:00:00, B]: Speicher|charge_state[2020-01-01 09:00:00, B] ∈ [0, 8e+06]
+ [2020-01-01 09:00:00, C]: Speicher|charge_state[2020-01-01 09:00:00, C] ∈ [0, 8e+06]
+ "Speicher|netto_discharge": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Speicher|netto_discharge[2020-01-01 00:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 00:00:00, B]: Speicher|netto_discharge[2020-01-01 00:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 00:00:00, C]: Speicher|netto_discharge[2020-01-01 00:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, A]: Speicher|netto_discharge[2020-01-01 01:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, B]: Speicher|netto_discharge[2020-01-01 01:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, C]: Speicher|netto_discharge[2020-01-01 01:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 02:00:00, A]: Speicher|netto_discharge[2020-01-01 02:00:00, A] ∈ [-inf, inf]
+ ...
+ [2020-01-01 06:00:00, C]: Speicher|netto_discharge[2020-01-01 06:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, A]: Speicher|netto_discharge[2020-01-01 07:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, B]: Speicher|netto_discharge[2020-01-01 07:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, C]: Speicher|netto_discharge[2020-01-01 07:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, A]: Speicher|netto_discharge[2020-01-01 08:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, B]: Speicher|netto_discharge[2020-01-01 08:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, C]: Speicher|netto_discharge[2020-01-01 08:00:00, C] ∈ [-inf, inf]
+ "Speicher|size": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: Speicher|size[A] ∈ [30, 30]
+ [B]: Speicher|size[B] ∈ [30, 30]
+ [C]: Speicher|size[C] ∈ [30, 30]
+ "Speicher->costs(periodic)": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: Speicher->costs(periodic)[A] ∈ [-inf, inf]
+ [B]: Speicher->costs(periodic)[B] ∈ [-inf, inf]
+ [C]: Speicher->costs(periodic)[C] ∈ [-inf, inf]
+ "Boiler(Q_fu)|flow_rate": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 00:00:00, B]: Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 00:00:00, C]: Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00, A]: Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00, B]: Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00, C]: Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00, A]: Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00, A] ∈ [0, 1e+07]
+ ...
+ [2020-01-01 06:00:00, C]: Boiler(Q_fu)|flow_rate[2020-01-01 06:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00, A]: Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00, B]: Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00, C]: Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00, A]: Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00, B]: Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00, C]: Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00, C] ∈ [0, 1e+07]
+ "Boiler(Q_fu)|total_flow_hours": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: Boiler(Q_fu)|total_flow_hours[A] ∈ [0, inf]
+ [B]: Boiler(Q_fu)|total_flow_hours[B] ∈ [0, inf]
+ [C]: Boiler(Q_fu)|total_flow_hours[C] ∈ [0, inf]
+ "Boiler(Q_th)|flow_rate": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, A] ∈ [0, 50]
+ [2020-01-01 00:00:00, B]: Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, B] ∈ [0, 50]
+ [2020-01-01 00:00:00, C]: Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, C] ∈ [0, 50]
+ [2020-01-01 01:00:00, A]: Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, A] ∈ [0, 50]
+ [2020-01-01 01:00:00, B]: Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, B] ∈ [0, 50]
+ [2020-01-01 01:00:00, C]: Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, C] ∈ [0, 50]
+ [2020-01-01 02:00:00, A]: Boiler(Q_th)|flow_rate[2020-01-01 02:00:00, A] ∈ [0, 50]
+ ...
+ [2020-01-01 06:00:00, C]: Boiler(Q_th)|flow_rate[2020-01-01 06:00:00, C] ∈ [0, 50]
+ [2020-01-01 07:00:00, A]: Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, A] ∈ [0, 50]
+ [2020-01-01 07:00:00, B]: Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, B] ∈ [0, 50]
+ [2020-01-01 07:00:00, C]: Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, C] ∈ [0, 50]
+ [2020-01-01 08:00:00, A]: Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, A] ∈ [0, 50]
+ [2020-01-01 08:00:00, B]: Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, B] ∈ [0, 50]
+ [2020-01-01 08:00:00, C]: Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, C] ∈ [0, 50]
+ "Boiler(Q_th)|on": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Boiler(Q_th)|on[2020-01-01 00:00:00, A] ∈ {0, 1}
+ [2020-01-01 00:00:00, B]: Boiler(Q_th)|on[2020-01-01 00:00:00, B] ∈ {0, 1}
+ [2020-01-01 00:00:00, C]: Boiler(Q_th)|on[2020-01-01 00:00:00, C] ∈ {0, 1}
+ [2020-01-01 01:00:00, A]: Boiler(Q_th)|on[2020-01-01 01:00:00, A] ∈ {0, 1}
+ [2020-01-01 01:00:00, B]: Boiler(Q_th)|on[2020-01-01 01:00:00, B] ∈ {0, 1}
+ [2020-01-01 01:00:00, C]: Boiler(Q_th)|on[2020-01-01 01:00:00, C] ∈ {0, 1}
+ [2020-01-01 02:00:00, A]: Boiler(Q_th)|on[2020-01-01 02:00:00, A] ∈ {0, 1}
+ ...
+ [2020-01-01 06:00:00, C]: Boiler(Q_th)|on[2020-01-01 06:00:00, C] ∈ {0, 1}
+ [2020-01-01 07:00:00, A]: Boiler(Q_th)|on[2020-01-01 07:00:00, A] ∈ {0, 1}
+ [2020-01-01 07:00:00, B]: Boiler(Q_th)|on[2020-01-01 07:00:00, B] ∈ {0, 1}
+ [2020-01-01 07:00:00, C]: Boiler(Q_th)|on[2020-01-01 07:00:00, C] ∈ {0, 1}
+ [2020-01-01 08:00:00, A]: Boiler(Q_th)|on[2020-01-01 08:00:00, A] ∈ {0, 1}
+ [2020-01-01 08:00:00, B]: Boiler(Q_th)|on[2020-01-01 08:00:00, B] ∈ {0, 1}
+ [2020-01-01 08:00:00, C]: Boiler(Q_th)|on[2020-01-01 08:00:00, C] ∈ {0, 1}
+ "Boiler(Q_th)|on_hours_total": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: Boiler(Q_th)|on_hours_total[A] ∈ [0, inf]
+ [B]: Boiler(Q_th)|on_hours_total[B] ∈ [0, inf]
+ [C]: Boiler(Q_th)|on_hours_total[C] ∈ [0, inf]
+ "Boiler(Q_th)|total_flow_hours": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: Boiler(Q_th)|total_flow_hours[A] ∈ [0, inf]
+ [B]: Boiler(Q_th)|total_flow_hours[B] ∈ [0, inf]
+ [C]: Boiler(Q_th)|total_flow_hours[C] ∈ [0, inf]
+ "Wärmelast(Q_th_Last)|flow_rate": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00, A] ∈ [30, 30]
+ [2020-01-01 00:00:00, B]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00, B] ∈ [30, 30]
+ [2020-01-01 00:00:00, C]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00, C] ∈ [30, 30]
+ [2020-01-01 01:00:00, A]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00, A] ∈ [0, 0]
+ [2020-01-01 01:00:00, B]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00, B] ∈ [0, 0]
+ [2020-01-01 01:00:00, C]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00, C] ∈ [0, 0]
+ [2020-01-01 02:00:00, A]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 02:00:00, A] ∈ [90, 90]
+ ...
+ [2020-01-01 06:00:00, C]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 06:00:00, C] ∈ [20, 20]
+ [2020-01-01 07:00:00, A]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00, A] ∈ [20, 20]
+ [2020-01-01 07:00:00, B]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00, B] ∈ [20, 20]
+ [2020-01-01 07:00:00, C]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00, C] ∈ [20, 20]
+ [2020-01-01 08:00:00, A]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00, A] ∈ [20, 20]
+ [2020-01-01 08:00:00, B]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00, B] ∈ [20, 20]
+ [2020-01-01 08:00:00, C]: Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00, C] ∈ [20, 20]
+ "Wärmelast(Q_th_Last)|total_flow_hours": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: Wärmelast(Q_th_Last)|total_flow_hours[A] ∈ [0, inf]
+ [B]: Wärmelast(Q_th_Last)|total_flow_hours[B] ∈ [0, inf]
+ [C]: Wärmelast(Q_th_Last)|total_flow_hours[C] ∈ [0, inf]
+ "Gastarif(Q_Gas)|flow_rate": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00, A] ∈ [0, 1000]
+ [2020-01-01 00:00:00, B]: Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00, B] ∈ [0, 1000]
+ [2020-01-01 00:00:00, C]: Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00, C] ∈ [0, 1000]
+ [2020-01-01 01:00:00, A]: Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00, A] ∈ [0, 1000]
+ [2020-01-01 01:00:00, B]: Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00, B] ∈ [0, 1000]
+ [2020-01-01 01:00:00, C]: Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00, C] ∈ [0, 1000]
+ [2020-01-01 02:00:00, A]: Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00, A] ∈ [0, 1000]
+ ...
+ [2020-01-01 06:00:00, C]: Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00, C] ∈ [0, 1000]
+ [2020-01-01 07:00:00, A]: Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00, A] ∈ [0, 1000]
+ [2020-01-01 07:00:00, B]: Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00, B] ∈ [0, 1000]
+ [2020-01-01 07:00:00, C]: Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00, C] ∈ [0, 1000]
+ [2020-01-01 08:00:00, A]: Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00, A] ∈ [0, 1000]
+ [2020-01-01 08:00:00, B]: Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00, B] ∈ [0, 1000]
+ [2020-01-01 08:00:00, C]: Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00, C] ∈ [0, 1000]
+ "Gastarif(Q_Gas)|total_flow_hours": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: Gastarif(Q_Gas)|total_flow_hours[A] ∈ [0, inf]
+ [B]: Gastarif(Q_Gas)|total_flow_hours[B] ∈ [0, inf]
+ [C]: Gastarif(Q_Gas)|total_flow_hours[C] ∈ [0, inf]
+ "Gastarif(Q_Gas)->costs(temporal)": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 00:00:00, B]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 00:00:00, C]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, A]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, B]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, C]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 02:00:00, A]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00, A] ∈ [-inf, inf]
+ ...
+ [2020-01-01 06:00:00, C]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, A]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, B]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, C]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, A]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, B]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, C]: Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00, C] ∈ [-inf, inf]
+ "Gastarif(Q_Gas)->CO2(temporal)": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 00:00:00, B]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 00:00:00, C]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, A]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, B]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, C]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 02:00:00, A]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00, A] ∈ [-inf, inf]
+ ...
+ [2020-01-01 06:00:00, C]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, A]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, B]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, C]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, A]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, B]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, C]: Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00, C] ∈ [-inf, inf]
+ "Einspeisung(P_el)|flow_rate": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 00:00:00, B]: Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 00:00:00, C]: Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00, A]: Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00, B]: Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00, C]: Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00, A]: Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00, A] ∈ [0, 1e+07]
+ ...
+ [2020-01-01 06:00:00, C]: Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00, A]: Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00, B]: Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00, C]: Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00, A]: Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00, B]: Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00, C]: Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00, C] ∈ [0, 1e+07]
+ "Einspeisung(P_el)|total_flow_hours": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: Einspeisung(P_el)|total_flow_hours[A] ∈ [0, inf]
+ [B]: Einspeisung(P_el)|total_flow_hours[B] ∈ [0, inf]
+ [C]: Einspeisung(P_el)|total_flow_hours[C] ∈ [0, inf]
+ "Einspeisung(P_el)->costs(temporal)": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 00:00:00, B]: Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 00:00:00, C]: Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, A]: Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, B]: Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 01:00:00, C]: Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 02:00:00, A]: Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00, A] ∈ [-inf, inf]
+ ...
+ [2020-01-01 06:00:00, C]: Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, A]: Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, B]: Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 07:00:00, C]: Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00, C] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, A]: Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00, A] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, B]: Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00, B] ∈ [-inf, inf]
+ [2020-01-01 08:00:00, C]: Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00, C] ∈ [-inf, inf]
+ "CHP_unit(Q_fu)|flow_rate": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 00:00:00, B]: CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 00:00:00, C]: CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00, A]: CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00, B]: CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00, C]: CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00, A]: CHP_unit(Q_fu)|flow_rate[2020-01-01 02:00:00, A] ∈ [0, 1e+07]
+ ...
+ [2020-01-01 06:00:00, C]: CHP_unit(Q_fu)|flow_rate[2020-01-01 06:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00, A]: CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00, B]: CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00, C]: CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00, A]: CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00, B]: CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00, C]: CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00, C] ∈ [0, 1e+07]
+ "CHP_unit(Q_fu)|total_flow_hours": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: CHP_unit(Q_fu)|total_flow_hours[A] ∈ [0, inf]
+ [B]: CHP_unit(Q_fu)|total_flow_hours[B] ∈ [0, inf]
+ [C]: CHP_unit(Q_fu)|total_flow_hours[C] ∈ [0, inf]
+ "CHP_unit(Q_th)|flow_rate": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 00:00:00, B]: CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 00:00:00, C]: CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00, A]: CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00, B]: CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 01:00:00, C]: CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 02:00:00, A]: CHP_unit(Q_th)|flow_rate[2020-01-01 02:00:00, A] ∈ [0, 1e+07]
+ ...
+ [2020-01-01 06:00:00, C]: CHP_unit(Q_th)|flow_rate[2020-01-01 06:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00, A]: CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00, B]: CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 07:00:00, C]: CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00, C] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00, A]: CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00, A] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00, B]: CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00, B] ∈ [0, 1e+07]
+ [2020-01-01 08:00:00, C]: CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00, C] ∈ [0, 1e+07]
+ "CHP_unit(Q_th)|total_flow_hours": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: CHP_unit(Q_th)|total_flow_hours[A] ∈ [0, inf]
+ [B]: CHP_unit(Q_th)|total_flow_hours[B] ∈ [0, inf]
+ [C]: CHP_unit(Q_th)|total_flow_hours[C] ∈ [0, inf]
+ "CHP_unit(P_el)|flow_rate": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, A] ∈ [0, 60]
+ [2020-01-01 00:00:00, B]: CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, B] ∈ [0, 60]
+ [2020-01-01 00:00:00, C]: CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, C] ∈ [0, 60]
+ [2020-01-01 01:00:00, A]: CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, A] ∈ [0, 60]
+ [2020-01-01 01:00:00, B]: CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, B] ∈ [0, 60]
+ [2020-01-01 01:00:00, C]: CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, C] ∈ [0, 60]
+ [2020-01-01 02:00:00, A]: CHP_unit(P_el)|flow_rate[2020-01-01 02:00:00, A] ∈ [0, 60]
+ ...
+ [2020-01-01 06:00:00, C]: CHP_unit(P_el)|flow_rate[2020-01-01 06:00:00, C] ∈ [0, 60]
+ [2020-01-01 07:00:00, A]: CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, A] ∈ [0, 60]
+ [2020-01-01 07:00:00, B]: CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, B] ∈ [0, 60]
+ [2020-01-01 07:00:00, C]: CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, C] ∈ [0, 60]
+ [2020-01-01 08:00:00, A]: CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, A] ∈ [0, 60]
+ [2020-01-01 08:00:00, B]: CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, B] ∈ [0, 60]
+ [2020-01-01 08:00:00, C]: CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, C] ∈ [0, 60]
+ "CHP_unit(P_el)|on": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: CHP_unit(P_el)|on[2020-01-01 00:00:00, A] ∈ {0, 1}
+ [2020-01-01 00:00:00, B]: CHP_unit(P_el)|on[2020-01-01 00:00:00, B] ∈ {0, 1}
+ [2020-01-01 00:00:00, C]: CHP_unit(P_el)|on[2020-01-01 00:00:00, C] ∈ {0, 1}
+ [2020-01-01 01:00:00, A]: CHP_unit(P_el)|on[2020-01-01 01:00:00, A] ∈ {0, 1}
+ [2020-01-01 01:00:00, B]: CHP_unit(P_el)|on[2020-01-01 01:00:00, B] ∈ {0, 1}
+ [2020-01-01 01:00:00, C]: CHP_unit(P_el)|on[2020-01-01 01:00:00, C] ∈ {0, 1}
+ [2020-01-01 02:00:00, A]: CHP_unit(P_el)|on[2020-01-01 02:00:00, A] ∈ {0, 1}
+ ...
+ [2020-01-01 06:00:00, C]: CHP_unit(P_el)|on[2020-01-01 06:00:00, C] ∈ {0, 1}
+ [2020-01-01 07:00:00, A]: CHP_unit(P_el)|on[2020-01-01 07:00:00, A] ∈ {0, 1}
+ [2020-01-01 07:00:00, B]: CHP_unit(P_el)|on[2020-01-01 07:00:00, B] ∈ {0, 1}
+ [2020-01-01 07:00:00, C]: CHP_unit(P_el)|on[2020-01-01 07:00:00, C] ∈ {0, 1}
+ [2020-01-01 08:00:00, A]: CHP_unit(P_el)|on[2020-01-01 08:00:00, A] ∈ {0, 1}
+ [2020-01-01 08:00:00, B]: CHP_unit(P_el)|on[2020-01-01 08:00:00, B] ∈ {0, 1}
+ [2020-01-01 08:00:00, C]: CHP_unit(P_el)|on[2020-01-01 08:00:00, C] ∈ {0, 1}
+ "CHP_unit(P_el)|on_hours_total": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: CHP_unit(P_el)|on_hours_total[A] ∈ [0, inf]
+ [B]: CHP_unit(P_el)|on_hours_total[B] ∈ [0, inf]
+ [C]: CHP_unit(P_el)|on_hours_total[C] ∈ [0, inf]
+ "CHP_unit(P_el)|total_flow_hours": |-
+ Variable (scenario: 3)
+ ----------------------
+ [A]: CHP_unit(P_el)|total_flow_hours[A] ∈ [0, inf]
+ [B]: CHP_unit(P_el)|total_flow_hours[B] ∈ [0, inf]
+ [C]: CHP_unit(P_el)|total_flow_hours[C] ∈ [0, inf]
+ "Strom|excess_input": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Strom|excess_input[2020-01-01 00:00:00, A] ∈ [0, inf]
+ [2020-01-01 00:00:00, B]: Strom|excess_input[2020-01-01 00:00:00, B] ∈ [0, inf]
+ [2020-01-01 00:00:00, C]: Strom|excess_input[2020-01-01 00:00:00, C] ∈ [0, inf]
+ [2020-01-01 01:00:00, A]: Strom|excess_input[2020-01-01 01:00:00, A] ∈ [0, inf]
+ [2020-01-01 01:00:00, B]: Strom|excess_input[2020-01-01 01:00:00, B] ∈ [0, inf]
+ [2020-01-01 01:00:00, C]: Strom|excess_input[2020-01-01 01:00:00, C] ∈ [0, inf]
+ [2020-01-01 02:00:00, A]: Strom|excess_input[2020-01-01 02:00:00, A] ∈ [0, inf]
+ ...
+ [2020-01-01 06:00:00, C]: Strom|excess_input[2020-01-01 06:00:00, C] ∈ [0, inf]
+ [2020-01-01 07:00:00, A]: Strom|excess_input[2020-01-01 07:00:00, A] ∈ [0, inf]
+ [2020-01-01 07:00:00, B]: Strom|excess_input[2020-01-01 07:00:00, B] ∈ [0, inf]
+ [2020-01-01 07:00:00, C]: Strom|excess_input[2020-01-01 07:00:00, C] ∈ [0, inf]
+ [2020-01-01 08:00:00, A]: Strom|excess_input[2020-01-01 08:00:00, A] ∈ [0, inf]
+ [2020-01-01 08:00:00, B]: Strom|excess_input[2020-01-01 08:00:00, B] ∈ [0, inf]
+ [2020-01-01 08:00:00, C]: Strom|excess_input[2020-01-01 08:00:00, C] ∈ [0, inf]
+ "Strom|excess_output": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Strom|excess_output[2020-01-01 00:00:00, A] ∈ [0, inf]
+ [2020-01-01 00:00:00, B]: Strom|excess_output[2020-01-01 00:00:00, B] ∈ [0, inf]
+ [2020-01-01 00:00:00, C]: Strom|excess_output[2020-01-01 00:00:00, C] ∈ [0, inf]
+ [2020-01-01 01:00:00, A]: Strom|excess_output[2020-01-01 01:00:00, A] ∈ [0, inf]
+ [2020-01-01 01:00:00, B]: Strom|excess_output[2020-01-01 01:00:00, B] ∈ [0, inf]
+ [2020-01-01 01:00:00, C]: Strom|excess_output[2020-01-01 01:00:00, C] ∈ [0, inf]
+ [2020-01-01 02:00:00, A]: Strom|excess_output[2020-01-01 02:00:00, A] ∈ [0, inf]
+ ...
+ [2020-01-01 06:00:00, C]: Strom|excess_output[2020-01-01 06:00:00, C] ∈ [0, inf]
+ [2020-01-01 07:00:00, A]: Strom|excess_output[2020-01-01 07:00:00, A] ∈ [0, inf]
+ [2020-01-01 07:00:00, B]: Strom|excess_output[2020-01-01 07:00:00, B] ∈ [0, inf]
+ [2020-01-01 07:00:00, C]: Strom|excess_output[2020-01-01 07:00:00, C] ∈ [0, inf]
+ [2020-01-01 08:00:00, A]: Strom|excess_output[2020-01-01 08:00:00, A] ∈ [0, inf]
+ [2020-01-01 08:00:00, B]: Strom|excess_output[2020-01-01 08:00:00, B] ∈ [0, inf]
+ [2020-01-01 08:00:00, C]: Strom|excess_output[2020-01-01 08:00:00, C] ∈ [0, inf]
+ "Strom->Penalty": |-
+ Variable
+ --------
+ Strom->Penalty ∈ [-inf, inf]
+ "Fernwärme|excess_input": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Fernwärme|excess_input[2020-01-01 00:00:00, A] ∈ [0, inf]
+ [2020-01-01 00:00:00, B]: Fernwärme|excess_input[2020-01-01 00:00:00, B] ∈ [0, inf]
+ [2020-01-01 00:00:00, C]: Fernwärme|excess_input[2020-01-01 00:00:00, C] ∈ [0, inf]
+ [2020-01-01 01:00:00, A]: Fernwärme|excess_input[2020-01-01 01:00:00, A] ∈ [0, inf]
+ [2020-01-01 01:00:00, B]: Fernwärme|excess_input[2020-01-01 01:00:00, B] ∈ [0, inf]
+ [2020-01-01 01:00:00, C]: Fernwärme|excess_input[2020-01-01 01:00:00, C] ∈ [0, inf]
+ [2020-01-01 02:00:00, A]: Fernwärme|excess_input[2020-01-01 02:00:00, A] ∈ [0, inf]
+ ...
+ [2020-01-01 06:00:00, C]: Fernwärme|excess_input[2020-01-01 06:00:00, C] ∈ [0, inf]
+ [2020-01-01 07:00:00, A]: Fernwärme|excess_input[2020-01-01 07:00:00, A] ∈ [0, inf]
+ [2020-01-01 07:00:00, B]: Fernwärme|excess_input[2020-01-01 07:00:00, B] ∈ [0, inf]
+ [2020-01-01 07:00:00, C]: Fernwärme|excess_input[2020-01-01 07:00:00, C] ∈ [0, inf]
+ [2020-01-01 08:00:00, A]: Fernwärme|excess_input[2020-01-01 08:00:00, A] ∈ [0, inf]
+ [2020-01-01 08:00:00, B]: Fernwärme|excess_input[2020-01-01 08:00:00, B] ∈ [0, inf]
+ [2020-01-01 08:00:00, C]: Fernwärme|excess_input[2020-01-01 08:00:00, C] ∈ [0, inf]
+ "Fernwärme|excess_output": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Fernwärme|excess_output[2020-01-01 00:00:00, A] ∈ [0, inf]
+ [2020-01-01 00:00:00, B]: Fernwärme|excess_output[2020-01-01 00:00:00, B] ∈ [0, inf]
+ [2020-01-01 00:00:00, C]: Fernwärme|excess_output[2020-01-01 00:00:00, C] ∈ [0, inf]
+ [2020-01-01 01:00:00, A]: Fernwärme|excess_output[2020-01-01 01:00:00, A] ∈ [0, inf]
+ [2020-01-01 01:00:00, B]: Fernwärme|excess_output[2020-01-01 01:00:00, B] ∈ [0, inf]
+ [2020-01-01 01:00:00, C]: Fernwärme|excess_output[2020-01-01 01:00:00, C] ∈ [0, inf]
+ [2020-01-01 02:00:00, A]: Fernwärme|excess_output[2020-01-01 02:00:00, A] ∈ [0, inf]
+ ...
+ [2020-01-01 06:00:00, C]: Fernwärme|excess_output[2020-01-01 06:00:00, C] ∈ [0, inf]
+ [2020-01-01 07:00:00, A]: Fernwärme|excess_output[2020-01-01 07:00:00, A] ∈ [0, inf]
+ [2020-01-01 07:00:00, B]: Fernwärme|excess_output[2020-01-01 07:00:00, B] ∈ [0, inf]
+ [2020-01-01 07:00:00, C]: Fernwärme|excess_output[2020-01-01 07:00:00, C] ∈ [0, inf]
+ [2020-01-01 08:00:00, A]: Fernwärme|excess_output[2020-01-01 08:00:00, A] ∈ [0, inf]
+ [2020-01-01 08:00:00, B]: Fernwärme|excess_output[2020-01-01 08:00:00, B] ∈ [0, inf]
+ [2020-01-01 08:00:00, C]: Fernwärme|excess_output[2020-01-01 08:00:00, C] ∈ [0, inf]
+ "Fernwärme->Penalty": |-
+ Variable
+ --------
+ Fernwärme->Penalty ∈ [-inf, inf]
+ "Gas|excess_input": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Gas|excess_input[2020-01-01 00:00:00, A] ∈ [0, inf]
+ [2020-01-01 00:00:00, B]: Gas|excess_input[2020-01-01 00:00:00, B] ∈ [0, inf]
+ [2020-01-01 00:00:00, C]: Gas|excess_input[2020-01-01 00:00:00, C] ∈ [0, inf]
+ [2020-01-01 01:00:00, A]: Gas|excess_input[2020-01-01 01:00:00, A] ∈ [0, inf]
+ [2020-01-01 01:00:00, B]: Gas|excess_input[2020-01-01 01:00:00, B] ∈ [0, inf]
+ [2020-01-01 01:00:00, C]: Gas|excess_input[2020-01-01 01:00:00, C] ∈ [0, inf]
+ [2020-01-01 02:00:00, A]: Gas|excess_input[2020-01-01 02:00:00, A] ∈ [0, inf]
+ ...
+ [2020-01-01 06:00:00, C]: Gas|excess_input[2020-01-01 06:00:00, C] ∈ [0, inf]
+ [2020-01-01 07:00:00, A]: Gas|excess_input[2020-01-01 07:00:00, A] ∈ [0, inf]
+ [2020-01-01 07:00:00, B]: Gas|excess_input[2020-01-01 07:00:00, B] ∈ [0, inf]
+ [2020-01-01 07:00:00, C]: Gas|excess_input[2020-01-01 07:00:00, C] ∈ [0, inf]
+ [2020-01-01 08:00:00, A]: Gas|excess_input[2020-01-01 08:00:00, A] ∈ [0, inf]
+ [2020-01-01 08:00:00, B]: Gas|excess_input[2020-01-01 08:00:00, B] ∈ [0, inf]
+ [2020-01-01 08:00:00, C]: Gas|excess_input[2020-01-01 08:00:00, C] ∈ [0, inf]
+ "Gas|excess_output": |-
+ Variable (time: 9, scenario: 3)
+ -------------------------------
+ [2020-01-01 00:00:00, A]: Gas|excess_output[2020-01-01 00:00:00, A] ∈ [0, inf]
+ [2020-01-01 00:00:00, B]: Gas|excess_output[2020-01-01 00:00:00, B] ∈ [0, inf]
+ [2020-01-01 00:00:00, C]: Gas|excess_output[2020-01-01 00:00:00, C] ∈ [0, inf]
+ [2020-01-01 01:00:00, A]: Gas|excess_output[2020-01-01 01:00:00, A] ∈ [0, inf]
+ [2020-01-01 01:00:00, B]: Gas|excess_output[2020-01-01 01:00:00, B] ∈ [0, inf]
+ [2020-01-01 01:00:00, C]: Gas|excess_output[2020-01-01 01:00:00, C] ∈ [0, inf]
+ [2020-01-01 02:00:00, A]: Gas|excess_output[2020-01-01 02:00:00, A] ∈ [0, inf]
+ ...
+ [2020-01-01 06:00:00, C]: Gas|excess_output[2020-01-01 06:00:00, C] ∈ [0, inf]
+ [2020-01-01 07:00:00, A]: Gas|excess_output[2020-01-01 07:00:00, A] ∈ [0, inf]
+ [2020-01-01 07:00:00, B]: Gas|excess_output[2020-01-01 07:00:00, B] ∈ [0, inf]
+ [2020-01-01 07:00:00, C]: Gas|excess_output[2020-01-01 07:00:00, C] ∈ [0, inf]
+ [2020-01-01 08:00:00, A]: Gas|excess_output[2020-01-01 08:00:00, A] ∈ [0, inf]
+ [2020-01-01 08:00:00, B]: Gas|excess_output[2020-01-01 08:00:00, B] ∈ [0, inf]
+ [2020-01-01 08:00:00, C]: Gas|excess_output[2020-01-01 08:00:00, C] ∈ [0, inf]
+ "Gas->Penalty": |-
+ Variable
+ --------
+ Gas->Penalty ∈ [-inf, inf]
+constraints:
+ costs(periodic): |-
+ Constraint `costs(periodic)`
+ [scenario: 3]:
+ -------------------------------------------
+ [A]: +1 costs(periodic)[A] - 1 Speicher->costs(periodic)[A] = -0.0
+ [B]: +1 costs(periodic)[B] - 1 Speicher->costs(periodic)[B] = -0.0
+ [C]: +1 costs(periodic)[C] - 1 Speicher->costs(periodic)[C] = -0.0
+ costs(temporal): |-
+ Constraint `costs(temporal)`
+ [scenario: 3]:
+ -------------------------------------------
+ [A]: +1 costs(temporal)[A] - 1 costs(temporal)|per_timestep[2020-01-01 00:00:00, A] - 1 costs(temporal)|per_timestep[2020-01-01 01:00:00, A]... -1 costs(temporal)|per_timestep[2020-01-01 06:00:00, A] - 1 costs(temporal)|per_timestep[2020-01-01 07:00:00, A] - 1 costs(temporal)|per_timestep[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 costs(temporal)[B] - 1 costs(temporal)|per_timestep[2020-01-01 00:00:00, B] - 1 costs(temporal)|per_timestep[2020-01-01 01:00:00, B]... -1 costs(temporal)|per_timestep[2020-01-01 06:00:00, B] - 1 costs(temporal)|per_timestep[2020-01-01 07:00:00, B] - 1 costs(temporal)|per_timestep[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 costs(temporal)[C] - 1 costs(temporal)|per_timestep[2020-01-01 00:00:00, C] - 1 costs(temporal)|per_timestep[2020-01-01 01:00:00, C]... -1 costs(temporal)|per_timestep[2020-01-01 06:00:00, C] - 1 costs(temporal)|per_timestep[2020-01-01 07:00:00, C] - 1 costs(temporal)|per_timestep[2020-01-01 08:00:00, C] = -0.0
+ "costs(temporal)|per_timestep": |-
+ Constraint `costs(temporal)|per_timestep`
+ [time: 9, scenario: 3]:
+ -----------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 costs(temporal)|per_timestep[2020-01-01 00:00:00, A] - 1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00, A] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00, A] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00, A] = -0.0
+ [2020-01-01 00:00:00, B]: +1 costs(temporal)|per_timestep[2020-01-01 00:00:00, B] - 1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00, B] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00, B] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00, B] = -0.0
+ [2020-01-01 00:00:00, C]: +1 costs(temporal)|per_timestep[2020-01-01 00:00:00, C] - 1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00, C] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00, C] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00, C] = -0.0
+ [2020-01-01 01:00:00, A]: +1 costs(temporal)|per_timestep[2020-01-01 01:00:00, A] - 1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00, A] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00, A] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00, A] = -0.0
+ [2020-01-01 01:00:00, B]: +1 costs(temporal)|per_timestep[2020-01-01 01:00:00, B] - 1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00, B] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00, B] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00, B] = -0.0
+ [2020-01-01 01:00:00, C]: +1 costs(temporal)|per_timestep[2020-01-01 01:00:00, C] - 1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00, C] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00, C] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00, C] = -0.0
+ [2020-01-01 02:00:00, A]: +1 costs(temporal)|per_timestep[2020-01-01 02:00:00, A] - 1 CO2(temporal)->costs(temporal)[2020-01-01 02:00:00, A] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00, A] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00, A] = -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 costs(temporal)|per_timestep[2020-01-01 06:00:00, C] - 1 CO2(temporal)->costs(temporal)[2020-01-01 06:00:00, C] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00, C] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00, C] = -0.0
+ [2020-01-01 07:00:00, A]: +1 costs(temporal)|per_timestep[2020-01-01 07:00:00, A] - 1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00, A] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00, A] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00, A] = -0.0
+ [2020-01-01 07:00:00, B]: +1 costs(temporal)|per_timestep[2020-01-01 07:00:00, B] - 1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00, B] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00, B] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00, B] = -0.0
+ [2020-01-01 07:00:00, C]: +1 costs(temporal)|per_timestep[2020-01-01 07:00:00, C] - 1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00, C] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00, C] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00, C] = -0.0
+ [2020-01-01 08:00:00, A]: +1 costs(temporal)|per_timestep[2020-01-01 08:00:00, A] - 1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00, A] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00, A] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00, A] = -0.0
+ [2020-01-01 08:00:00, B]: +1 costs(temporal)|per_timestep[2020-01-01 08:00:00, B] - 1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00, B] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00, B] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00, B] = -0.0
+ [2020-01-01 08:00:00, C]: +1 costs(temporal)|per_timestep[2020-01-01 08:00:00, C] - 1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00, C] - 1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00, C] - 1 Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00, C] = -0.0
+ costs: |-
+ Constraint `costs`
+ [scenario: 3]:
+ ---------------------------------
+ [A]: +1 costs[A] - 1 costs(temporal)[A] - 1 costs(periodic)[A] = -0.0
+ [B]: +1 costs[B] - 1 costs(temporal)[B] - 1 costs(periodic)[B] = -0.0
+ [C]: +1 costs[C] - 1 costs(temporal)[C] - 1 costs(periodic)[C] = -0.0
+ CO2(periodic): |-
+ Constraint `CO2(periodic)`
+ [scenario: 3]:
+ -----------------------------------------
+ [A]: +1 CO2(periodic)[A] = -0.0
+ [B]: +1 CO2(periodic)[B] = -0.0
+ [C]: +1 CO2(periodic)[C] = -0.0
+ CO2(temporal): |-
+ Constraint `CO2(temporal)`
+ [scenario: 3]:
+ -----------------------------------------
+ [A]: +1 CO2(temporal)[A] - 1 CO2(temporal)|per_timestep[2020-01-01 00:00:00, A] - 1 CO2(temporal)|per_timestep[2020-01-01 01:00:00, A]... -1 CO2(temporal)|per_timestep[2020-01-01 06:00:00, A] - 1 CO2(temporal)|per_timestep[2020-01-01 07:00:00, A] - 1 CO2(temporal)|per_timestep[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 CO2(temporal)[B] - 1 CO2(temporal)|per_timestep[2020-01-01 00:00:00, B] - 1 CO2(temporal)|per_timestep[2020-01-01 01:00:00, B]... -1 CO2(temporal)|per_timestep[2020-01-01 06:00:00, B] - 1 CO2(temporal)|per_timestep[2020-01-01 07:00:00, B] - 1 CO2(temporal)|per_timestep[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 CO2(temporal)[C] - 1 CO2(temporal)|per_timestep[2020-01-01 00:00:00, C] - 1 CO2(temporal)|per_timestep[2020-01-01 01:00:00, C]... -1 CO2(temporal)|per_timestep[2020-01-01 06:00:00, C] - 1 CO2(temporal)|per_timestep[2020-01-01 07:00:00, C] - 1 CO2(temporal)|per_timestep[2020-01-01 08:00:00, C] = -0.0
+ "CO2(temporal)|per_timestep": |-
+ Constraint `CO2(temporal)|per_timestep`
+ [time: 9, scenario: 3]:
+ ---------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 CO2(temporal)|per_timestep[2020-01-01 00:00:00, A] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00, A] = -0.0
+ [2020-01-01 00:00:00, B]: +1 CO2(temporal)|per_timestep[2020-01-01 00:00:00, B] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00, B] = -0.0
+ [2020-01-01 00:00:00, C]: +1 CO2(temporal)|per_timestep[2020-01-01 00:00:00, C] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00, C] = -0.0
+ [2020-01-01 01:00:00, A]: +1 CO2(temporal)|per_timestep[2020-01-01 01:00:00, A] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00, A] = -0.0
+ [2020-01-01 01:00:00, B]: +1 CO2(temporal)|per_timestep[2020-01-01 01:00:00, B] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00, B] = -0.0
+ [2020-01-01 01:00:00, C]: +1 CO2(temporal)|per_timestep[2020-01-01 01:00:00, C] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00, C] = -0.0
+ [2020-01-01 02:00:00, A]: +1 CO2(temporal)|per_timestep[2020-01-01 02:00:00, A] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00, A] = -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 CO2(temporal)|per_timestep[2020-01-01 06:00:00, C] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00, C] = -0.0
+ [2020-01-01 07:00:00, A]: +1 CO2(temporal)|per_timestep[2020-01-01 07:00:00, A] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00, A] = -0.0
+ [2020-01-01 07:00:00, B]: +1 CO2(temporal)|per_timestep[2020-01-01 07:00:00, B] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00, B] = -0.0
+ [2020-01-01 07:00:00, C]: +1 CO2(temporal)|per_timestep[2020-01-01 07:00:00, C] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00, C] = -0.0
+ [2020-01-01 08:00:00, A]: +1 CO2(temporal)|per_timestep[2020-01-01 08:00:00, A] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00, A] = -0.0
+ [2020-01-01 08:00:00, B]: +1 CO2(temporal)|per_timestep[2020-01-01 08:00:00, B] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00, B] = -0.0
+ [2020-01-01 08:00:00, C]: +1 CO2(temporal)|per_timestep[2020-01-01 08:00:00, C] - 1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00, C] = -0.0
+ CO2: |-
+ Constraint `CO2`
+ [scenario: 3]:
+ -------------------------------
+ [A]: +1 CO2[A] - 1 CO2(temporal)[A] - 1 CO2(periodic)[A] = -0.0
+ [B]: +1 CO2[B] - 1 CO2(temporal)[B] - 1 CO2(periodic)[B] = -0.0
+ [C]: +1 CO2[C] - 1 CO2(temporal)[C] - 1 CO2(periodic)[C] = -0.0
+ Penalty: |-
+ Constraint `Penalty`
+ --------------------
+ +1 Penalty - 1 Strom->Penalty - 1 Fernwärme->Penalty - 1 Gas->Penalty = -0.0
+ "CO2(temporal)->costs(temporal)": |-
+ Constraint `CO2(temporal)->costs(temporal)`
+ [time: 9, scenario: 3]:
+ -------------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00, A] - 0.2 CO2(temporal)|per_timestep[2020-01-01 00:00:00, A] = -0.0
+ [2020-01-01 00:00:00, B]: +1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00, B] - 0.2 CO2(temporal)|per_timestep[2020-01-01 00:00:00, B] = -0.0
+ [2020-01-01 00:00:00, C]: +1 CO2(temporal)->costs(temporal)[2020-01-01 00:00:00, C] - 0.2 CO2(temporal)|per_timestep[2020-01-01 00:00:00, C] = -0.0
+ [2020-01-01 01:00:00, A]: +1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00, A] - 0.2 CO2(temporal)|per_timestep[2020-01-01 01:00:00, A] = -0.0
+ [2020-01-01 01:00:00, B]: +1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00, B] - 0.2 CO2(temporal)|per_timestep[2020-01-01 01:00:00, B] = -0.0
+ [2020-01-01 01:00:00, C]: +1 CO2(temporal)->costs(temporal)[2020-01-01 01:00:00, C] - 0.2 CO2(temporal)|per_timestep[2020-01-01 01:00:00, C] = -0.0
+ [2020-01-01 02:00:00, A]: +1 CO2(temporal)->costs(temporal)[2020-01-01 02:00:00, A] - 0.2 CO2(temporal)|per_timestep[2020-01-01 02:00:00, A] = -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 CO2(temporal)->costs(temporal)[2020-01-01 06:00:00, C] - 0.2 CO2(temporal)|per_timestep[2020-01-01 06:00:00, C] = -0.0
+ [2020-01-01 07:00:00, A]: +1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00, A] - 0.2 CO2(temporal)|per_timestep[2020-01-01 07:00:00, A] = -0.0
+ [2020-01-01 07:00:00, B]: +1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00, B] - 0.2 CO2(temporal)|per_timestep[2020-01-01 07:00:00, B] = -0.0
+ [2020-01-01 07:00:00, C]: +1 CO2(temporal)->costs(temporal)[2020-01-01 07:00:00, C] - 0.2 CO2(temporal)|per_timestep[2020-01-01 07:00:00, C] = -0.0
+ [2020-01-01 08:00:00, A]: +1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00, A] - 0.2 CO2(temporal)|per_timestep[2020-01-01 08:00:00, A] = -0.0
+ [2020-01-01 08:00:00, B]: +1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00, B] - 0.2 CO2(temporal)|per_timestep[2020-01-01 08:00:00, B] = -0.0
+ [2020-01-01 08:00:00, C]: +1 CO2(temporal)->costs(temporal)[2020-01-01 08:00:00, C] - 0.2 CO2(temporal)|per_timestep[2020-01-01 08:00:00, C] = -0.0
+ "Speicher(Q_th_load)|on_hours_total": |-
+ Constraint `Speicher(Q_th_load)|on_hours_total`
+ [scenario: 3]:
+ --------------------------------------------------------------
+ [A]: +1 Speicher(Q_th_load)|on_hours_total[A] - 1 Speicher(Q_th_load)|on[2020-01-01 00:00:00, A] - 1 Speicher(Q_th_load)|on[2020-01-01 01:00:00, A]... -1 Speicher(Q_th_load)|on[2020-01-01 06:00:00, A] - 1 Speicher(Q_th_load)|on[2020-01-01 07:00:00, A] - 1 Speicher(Q_th_load)|on[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 Speicher(Q_th_load)|on_hours_total[B] - 1 Speicher(Q_th_load)|on[2020-01-01 00:00:00, B] - 1 Speicher(Q_th_load)|on[2020-01-01 01:00:00, B]... -1 Speicher(Q_th_load)|on[2020-01-01 06:00:00, B] - 1 Speicher(Q_th_load)|on[2020-01-01 07:00:00, B] - 1 Speicher(Q_th_load)|on[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 Speicher(Q_th_load)|on_hours_total[C] - 1 Speicher(Q_th_load)|on[2020-01-01 00:00:00, C] - 1 Speicher(Q_th_load)|on[2020-01-01 01:00:00, C]... -1 Speicher(Q_th_load)|on[2020-01-01 06:00:00, C] - 1 Speicher(Q_th_load)|on[2020-01-01 07:00:00, C] - 1 Speicher(Q_th_load)|on[2020-01-01 08:00:00, C] = -0.0
+ "Speicher(Q_th_load)|flow_rate|ub": |-
+ Constraint `Speicher(Q_th_load)|flow_rate|ub`
+ [time: 9, scenario: 3]:
+ ---------------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, A] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 00:00:00, A] ≤ -0.0
+ [2020-01-01 00:00:00, B]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, B] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 00:00:00, B] ≤ -0.0
+ [2020-01-01 00:00:00, C]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, C] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 00:00:00, C] ≤ -0.0
+ [2020-01-01 01:00:00, A]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, A] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 01:00:00, A] ≤ -0.0
+ [2020-01-01 01:00:00, B]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, B] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 01:00:00, B] ≤ -0.0
+ [2020-01-01 01:00:00, C]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, C] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 01:00:00, C] ≤ -0.0
+ [2020-01-01 02:00:00, A]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00, A] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 02:00:00, A] ≤ -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00, C] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 06:00:00, C] ≤ -0.0
+ [2020-01-01 07:00:00, A]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, A] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 07:00:00, A] ≤ -0.0
+ [2020-01-01 07:00:00, B]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, B] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 07:00:00, B] ≤ -0.0
+ [2020-01-01 07:00:00, C]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, C] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 07:00:00, C] ≤ -0.0
+ [2020-01-01 08:00:00, A]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, A] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 08:00:00, A] ≤ -0.0
+ [2020-01-01 08:00:00, B]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, B] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 08:00:00, B] ≤ -0.0
+ [2020-01-01 08:00:00, C]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, C] - 1e+04 Speicher(Q_th_load)|on[2020-01-01 08:00:00, C] ≤ -0.0
+ "Speicher(Q_th_load)|flow_rate|lb": |-
+ Constraint `Speicher(Q_th_load)|flow_rate|lb`
+ [time: 9, scenario: 3]:
+ ---------------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, A] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 00:00:00, A] ≥ -0.0
+ [2020-01-01 00:00:00, B]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, B] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 00:00:00, B] ≥ -0.0
+ [2020-01-01 00:00:00, C]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, C] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 00:00:00, C] ≥ -0.0
+ [2020-01-01 01:00:00, A]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, A] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 01:00:00, A] ≥ -0.0
+ [2020-01-01 01:00:00, B]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, B] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 01:00:00, B] ≥ -0.0
+ [2020-01-01 01:00:00, C]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, C] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 01:00:00, C] ≥ -0.0
+ [2020-01-01 02:00:00, A]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00, A] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 02:00:00, A] ≥ -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00, C] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 06:00:00, C] ≥ -0.0
+ [2020-01-01 07:00:00, A]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, A] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 07:00:00, A] ≥ -0.0
+ [2020-01-01 07:00:00, B]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, B] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 07:00:00, B] ≥ -0.0
+ [2020-01-01 07:00:00, C]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, C] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 07:00:00, C] ≥ -0.0
+ [2020-01-01 08:00:00, A]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, A] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 08:00:00, A] ≥ -0.0
+ [2020-01-01 08:00:00, B]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, B] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 08:00:00, B] ≥ -0.0
+ [2020-01-01 08:00:00, C]: +1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, C] - 1e-05 Speicher(Q_th_load)|on[2020-01-01 08:00:00, C] ≥ -0.0
+ "Speicher(Q_th_load)|total_flow_hours": |-
+ Constraint `Speicher(Q_th_load)|total_flow_hours`
+ [scenario: 3]:
+ ----------------------------------------------------------------
+ [A]: +1 Speicher(Q_th_load)|total_flow_hours[A] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, A] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, A]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00, A] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, A] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 Speicher(Q_th_load)|total_flow_hours[B] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, B] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, B]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00, B] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, B] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 Speicher(Q_th_load)|total_flow_hours[C] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, C] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, C]... -1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00, C] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, C] - 1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "Speicher(Q_th_unload)|on_hours_total": |-
+ Constraint `Speicher(Q_th_unload)|on_hours_total`
+ [scenario: 3]:
+ ----------------------------------------------------------------
+ [A]: +1 Speicher(Q_th_unload)|on_hours_total[A] - 1 Speicher(Q_th_unload)|on[2020-01-01 00:00:00, A] - 1 Speicher(Q_th_unload)|on[2020-01-01 01:00:00, A]... -1 Speicher(Q_th_unload)|on[2020-01-01 06:00:00, A] - 1 Speicher(Q_th_unload)|on[2020-01-01 07:00:00, A] - 1 Speicher(Q_th_unload)|on[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 Speicher(Q_th_unload)|on_hours_total[B] - 1 Speicher(Q_th_unload)|on[2020-01-01 00:00:00, B] - 1 Speicher(Q_th_unload)|on[2020-01-01 01:00:00, B]... -1 Speicher(Q_th_unload)|on[2020-01-01 06:00:00, B] - 1 Speicher(Q_th_unload)|on[2020-01-01 07:00:00, B] - 1 Speicher(Q_th_unload)|on[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 Speicher(Q_th_unload)|on_hours_total[C] - 1 Speicher(Q_th_unload)|on[2020-01-01 00:00:00, C] - 1 Speicher(Q_th_unload)|on[2020-01-01 01:00:00, C]... -1 Speicher(Q_th_unload)|on[2020-01-01 06:00:00, C] - 1 Speicher(Q_th_unload)|on[2020-01-01 07:00:00, C] - 1 Speicher(Q_th_unload)|on[2020-01-01 08:00:00, C] = -0.0
+ "Speicher(Q_th_unload)|flow_rate|ub": |-
+ Constraint `Speicher(Q_th_unload)|flow_rate|ub`
+ [time: 9, scenario: 3]:
+ -----------------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, A] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 00:00:00, A] ≤ -0.0
+ [2020-01-01 00:00:00, B]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, B] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 00:00:00, B] ≤ -0.0
+ [2020-01-01 00:00:00, C]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, C] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 00:00:00, C] ≤ -0.0
+ [2020-01-01 01:00:00, A]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, A] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 01:00:00, A] ≤ -0.0
+ [2020-01-01 01:00:00, B]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, B] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 01:00:00, B] ≤ -0.0
+ [2020-01-01 01:00:00, C]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, C] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 01:00:00, C] ≤ -0.0
+ [2020-01-01 02:00:00, A]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00, A] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 02:00:00, A] ≤ -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00, C] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 06:00:00, C] ≤ -0.0
+ [2020-01-01 07:00:00, A]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, A] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 07:00:00, A] ≤ -0.0
+ [2020-01-01 07:00:00, B]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, B] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 07:00:00, B] ≤ -0.0
+ [2020-01-01 07:00:00, C]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, C] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 07:00:00, C] ≤ -0.0
+ [2020-01-01 08:00:00, A]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, A] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 08:00:00, A] ≤ -0.0
+ [2020-01-01 08:00:00, B]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, B] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 08:00:00, B] ≤ -0.0
+ [2020-01-01 08:00:00, C]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, C] - 1e+04 Speicher(Q_th_unload)|on[2020-01-01 08:00:00, C] ≤ -0.0
+ "Speicher(Q_th_unload)|flow_rate|lb": |-
+ Constraint `Speicher(Q_th_unload)|flow_rate|lb`
+ [time: 9, scenario: 3]:
+ -----------------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, A] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 00:00:00, A] ≥ -0.0
+ [2020-01-01 00:00:00, B]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, B] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 00:00:00, B] ≥ -0.0
+ [2020-01-01 00:00:00, C]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, C] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 00:00:00, C] ≥ -0.0
+ [2020-01-01 01:00:00, A]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, A] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 01:00:00, A] ≥ -0.0
+ [2020-01-01 01:00:00, B]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, B] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 01:00:00, B] ≥ -0.0
+ [2020-01-01 01:00:00, C]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, C] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 01:00:00, C] ≥ -0.0
+ [2020-01-01 02:00:00, A]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00, A] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 02:00:00, A] ≥ -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00, C] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 06:00:00, C] ≥ -0.0
+ [2020-01-01 07:00:00, A]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, A] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 07:00:00, A] ≥ -0.0
+ [2020-01-01 07:00:00, B]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, B] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 07:00:00, B] ≥ -0.0
+ [2020-01-01 07:00:00, C]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, C] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 07:00:00, C] ≥ -0.0
+ [2020-01-01 08:00:00, A]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, A] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 08:00:00, A] ≥ -0.0
+ [2020-01-01 08:00:00, B]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, B] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 08:00:00, B] ≥ -0.0
+ [2020-01-01 08:00:00, C]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, C] - 1e-05 Speicher(Q_th_unload)|on[2020-01-01 08:00:00, C] ≥ -0.0
+ "Speicher(Q_th_unload)|total_flow_hours": |-
+ Constraint `Speicher(Q_th_unload)|total_flow_hours`
+ [scenario: 3]:
+ ------------------------------------------------------------------
+ [A]: +1 Speicher(Q_th_unload)|total_flow_hours[A] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, A] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, A]... -1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00, A] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, A] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 Speicher(Q_th_unload)|total_flow_hours[B] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, B] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, B]... -1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00, B] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, B] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 Speicher(Q_th_unload)|total_flow_hours[C] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, C] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, C]... -1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00, C] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, C] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "Speicher|prevent_simultaneous_use": |-
+ Constraint `Speicher|prevent_simultaneous_use`
+ [time: 9, scenario: 3]:
+ ----------------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 Speicher(Q_th_load)|on[2020-01-01 00:00:00, A] + 1 Speicher(Q_th_unload)|on[2020-01-01 00:00:00, A] ≤ 1.0
+ [2020-01-01 00:00:00, B]: +1 Speicher(Q_th_load)|on[2020-01-01 00:00:00, B] + 1 Speicher(Q_th_unload)|on[2020-01-01 00:00:00, B] ≤ 1.0
+ [2020-01-01 00:00:00, C]: +1 Speicher(Q_th_load)|on[2020-01-01 00:00:00, C] + 1 Speicher(Q_th_unload)|on[2020-01-01 00:00:00, C] ≤ 1.0
+ [2020-01-01 01:00:00, A]: +1 Speicher(Q_th_load)|on[2020-01-01 01:00:00, A] + 1 Speicher(Q_th_unload)|on[2020-01-01 01:00:00, A] ≤ 1.0
+ [2020-01-01 01:00:00, B]: +1 Speicher(Q_th_load)|on[2020-01-01 01:00:00, B] + 1 Speicher(Q_th_unload)|on[2020-01-01 01:00:00, B] ≤ 1.0
+ [2020-01-01 01:00:00, C]: +1 Speicher(Q_th_load)|on[2020-01-01 01:00:00, C] + 1 Speicher(Q_th_unload)|on[2020-01-01 01:00:00, C] ≤ 1.0
+ [2020-01-01 02:00:00, A]: +1 Speicher(Q_th_load)|on[2020-01-01 02:00:00, A] + 1 Speicher(Q_th_unload)|on[2020-01-01 02:00:00, A] ≤ 1.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 Speicher(Q_th_load)|on[2020-01-01 06:00:00, C] + 1 Speicher(Q_th_unload)|on[2020-01-01 06:00:00, C] ≤ 1.0
+ [2020-01-01 07:00:00, A]: +1 Speicher(Q_th_load)|on[2020-01-01 07:00:00, A] + 1 Speicher(Q_th_unload)|on[2020-01-01 07:00:00, A] ≤ 1.0
+ [2020-01-01 07:00:00, B]: +1 Speicher(Q_th_load)|on[2020-01-01 07:00:00, B] + 1 Speicher(Q_th_unload)|on[2020-01-01 07:00:00, B] ≤ 1.0
+ [2020-01-01 07:00:00, C]: +1 Speicher(Q_th_load)|on[2020-01-01 07:00:00, C] + 1 Speicher(Q_th_unload)|on[2020-01-01 07:00:00, C] ≤ 1.0
+ [2020-01-01 08:00:00, A]: +1 Speicher(Q_th_load)|on[2020-01-01 08:00:00, A] + 1 Speicher(Q_th_unload)|on[2020-01-01 08:00:00, A] ≤ 1.0
+ [2020-01-01 08:00:00, B]: +1 Speicher(Q_th_load)|on[2020-01-01 08:00:00, B] + 1 Speicher(Q_th_unload)|on[2020-01-01 08:00:00, B] ≤ 1.0
+ [2020-01-01 08:00:00, C]: +1 Speicher(Q_th_load)|on[2020-01-01 08:00:00, C] + 1 Speicher(Q_th_unload)|on[2020-01-01 08:00:00, C] ≤ 1.0
+ "Speicher|netto_discharge": |-
+ Constraint `Speicher|netto_discharge`
+ [time: 9, scenario: 3]:
+ -------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 Speicher|netto_discharge[2020-01-01 00:00:00, A] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, A] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, A] = -0.0
+ [2020-01-01 00:00:00, B]: +1 Speicher|netto_discharge[2020-01-01 00:00:00, B] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, B] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, B] = -0.0
+ [2020-01-01 00:00:00, C]: +1 Speicher|netto_discharge[2020-01-01 00:00:00, C] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, C] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, C] = -0.0
+ [2020-01-01 01:00:00, A]: +1 Speicher|netto_discharge[2020-01-01 01:00:00, A] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, A] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, A] = -0.0
+ [2020-01-01 01:00:00, B]: +1 Speicher|netto_discharge[2020-01-01 01:00:00, B] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, B] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, B] = -0.0
+ [2020-01-01 01:00:00, C]: +1 Speicher|netto_discharge[2020-01-01 01:00:00, C] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, C] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, C] = -0.0
+ [2020-01-01 02:00:00, A]: +1 Speicher|netto_discharge[2020-01-01 02:00:00, A] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00, A] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00, A] = -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 Speicher|netto_discharge[2020-01-01 06:00:00, C] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00, C] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00, C] = -0.0
+ [2020-01-01 07:00:00, A]: +1 Speicher|netto_discharge[2020-01-01 07:00:00, A] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, A] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, A] = -0.0
+ [2020-01-01 07:00:00, B]: +1 Speicher|netto_discharge[2020-01-01 07:00:00, B] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, B] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, B] = -0.0
+ [2020-01-01 07:00:00, C]: +1 Speicher|netto_discharge[2020-01-01 07:00:00, C] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, C] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, C] = -0.0
+ [2020-01-01 08:00:00, A]: +1 Speicher|netto_discharge[2020-01-01 08:00:00, A] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, A] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [2020-01-01 08:00:00, B]: +1 Speicher|netto_discharge[2020-01-01 08:00:00, B] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, B] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [2020-01-01 08:00:00, C]: +1 Speicher|netto_discharge[2020-01-01 08:00:00, C] - 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, C] + 1 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "Speicher|charge_state": |-
+ Constraint `Speicher|charge_state`
+ [time: 9, scenario: 3]:
+ ----------------------------------------------------------
+ [2020-01-01 01:00:00, A]: +1 Speicher|charge_state[2020-01-01 01:00:00, A] - 0.92 Speicher|charge_state[2020-01-01 00:00:00, A] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, A] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, A] = -0.0
+ [2020-01-01 01:00:00, B]: +1 Speicher|charge_state[2020-01-01 01:00:00, B] - 0.92 Speicher|charge_state[2020-01-01 00:00:00, B] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, B] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, B] = -0.0
+ [2020-01-01 01:00:00, C]: +1 Speicher|charge_state[2020-01-01 01:00:00, C] - 0.92 Speicher|charge_state[2020-01-01 00:00:00, C] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 00:00:00, C] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, C] = -0.0
+ [2020-01-01 02:00:00, A]: +1 Speicher|charge_state[2020-01-01 02:00:00, A] - 0.92 Speicher|charge_state[2020-01-01 01:00:00, A] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, A] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, A] = -0.0
+ [2020-01-01 02:00:00, B]: +1 Speicher|charge_state[2020-01-01 02:00:00, B] - 0.92 Speicher|charge_state[2020-01-01 01:00:00, B] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, B] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, B] = -0.0
+ [2020-01-01 02:00:00, C]: +1 Speicher|charge_state[2020-01-01 02:00:00, C] - 0.92 Speicher|charge_state[2020-01-01 01:00:00, C] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 01:00:00, C] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, C] = -0.0
+ [2020-01-01 03:00:00, A]: +1 Speicher|charge_state[2020-01-01 03:00:00, A] - 0.92 Speicher|charge_state[2020-01-01 02:00:00, A] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 02:00:00, A] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00, A] = -0.0
+ ...
+ [2020-01-01 07:00:00, C]: +1 Speicher|charge_state[2020-01-01 07:00:00, C] - 0.92 Speicher|charge_state[2020-01-01 06:00:00, C] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 06:00:00, C] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00, C] = -0.0
+ [2020-01-01 08:00:00, A]: +1 Speicher|charge_state[2020-01-01 08:00:00, A] - 0.92 Speicher|charge_state[2020-01-01 07:00:00, A] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, A] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, A] = -0.0
+ [2020-01-01 08:00:00, B]: +1 Speicher|charge_state[2020-01-01 08:00:00, B] - 0.92 Speicher|charge_state[2020-01-01 07:00:00, B] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, B] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, B] = -0.0
+ [2020-01-01 08:00:00, C]: +1 Speicher|charge_state[2020-01-01 08:00:00, C] - 0.92 Speicher|charge_state[2020-01-01 07:00:00, C] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 07:00:00, C] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, C] = -0.0
+ [2020-01-01 09:00:00, A]: +1 Speicher|charge_state[2020-01-01 09:00:00, A] - 0.92 Speicher|charge_state[2020-01-01 08:00:00, A] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, A] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [2020-01-01 09:00:00, B]: +1 Speicher|charge_state[2020-01-01 09:00:00, B] - 0.92 Speicher|charge_state[2020-01-01 08:00:00, B] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, B] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [2020-01-01 09:00:00, C]: +1 Speicher|charge_state[2020-01-01 09:00:00, C] - 0.92 Speicher|charge_state[2020-01-01 08:00:00, C] - 0.9 Speicher(Q_th_load)|flow_rate[2020-01-01 08:00:00, C] + 1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "Speicher->costs(periodic)": |-
+ Constraint `Speicher->costs(periodic)`
+ [scenario: 3]:
+ -----------------------------------------------------
+ [A]: +1 Speicher->costs(periodic)[A] = 20.0
+ [B]: +1 Speicher->costs(periodic)[B] = 20.0
+ [C]: +1 Speicher->costs(periodic)[C] = 20.0
+ "Speicher|charge_state|ub": |-
+ Constraint `Speicher|charge_state|ub`
+ [time: 10, scenario: 3]:
+ --------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 Speicher|charge_state[2020-01-01 00:00:00, A] - 0.8 Speicher|size[A] ≤ -0.0
+ [2020-01-01 00:00:00, B]: +1 Speicher|charge_state[2020-01-01 00:00:00, B] - 0.8 Speicher|size[B] ≤ -0.0
+ [2020-01-01 00:00:00, C]: +1 Speicher|charge_state[2020-01-01 00:00:00, C] - 0.8 Speicher|size[C] ≤ -0.0
+ [2020-01-01 01:00:00, A]: +1 Speicher|charge_state[2020-01-01 01:00:00, A] - 0.7 Speicher|size[A] ≤ -0.0
+ [2020-01-01 01:00:00, B]: +1 Speicher|charge_state[2020-01-01 01:00:00, B] - 0.7 Speicher|size[B] ≤ -0.0
+ [2020-01-01 01:00:00, C]: +1 Speicher|charge_state[2020-01-01 01:00:00, C] - 0.7 Speicher|size[C] ≤ -0.0
+ [2020-01-01 02:00:00, A]: +1 Speicher|charge_state[2020-01-01 02:00:00, A] - 0.8 Speicher|size[A] ≤ -0.0
+ ...
+ [2020-01-01 07:00:00, C]: +1 Speicher|charge_state[2020-01-01 07:00:00, C] - 0.8 Speicher|size[C] ≤ -0.0
+ [2020-01-01 08:00:00, A]: +1 Speicher|charge_state[2020-01-01 08:00:00, A] - 0.8 Speicher|size[A] ≤ -0.0
+ [2020-01-01 08:00:00, B]: +1 Speicher|charge_state[2020-01-01 08:00:00, B] - 0.8 Speicher|size[B] ≤ -0.0
+ [2020-01-01 08:00:00, C]: +1 Speicher|charge_state[2020-01-01 08:00:00, C] - 0.8 Speicher|size[C] ≤ -0.0
+ [2020-01-01 09:00:00, A]: +1 Speicher|charge_state[2020-01-01 09:00:00, A] - 0.8 Speicher|size[A] ≤ -0.0
+ [2020-01-01 09:00:00, B]: +1 Speicher|charge_state[2020-01-01 09:00:00, B] - 0.8 Speicher|size[B] ≤ -0.0
+ [2020-01-01 09:00:00, C]: +1 Speicher|charge_state[2020-01-01 09:00:00, C] - 0.8 Speicher|size[C] ≤ -0.0
+ "Speicher|charge_state|lb": |-
+ Constraint `Speicher|charge_state|lb`
+ [time: 10, scenario: 3]:
+ --------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 Speicher|charge_state[2020-01-01 00:00:00, A] ≥ -0.0
+ [2020-01-01 00:00:00, B]: +1 Speicher|charge_state[2020-01-01 00:00:00, B] ≥ -0.0
+ [2020-01-01 00:00:00, C]: +1 Speicher|charge_state[2020-01-01 00:00:00, C] ≥ -0.0
+ [2020-01-01 01:00:00, A]: +1 Speicher|charge_state[2020-01-01 01:00:00, A] ≥ -0.0
+ [2020-01-01 01:00:00, B]: +1 Speicher|charge_state[2020-01-01 01:00:00, B] ≥ -0.0
+ [2020-01-01 01:00:00, C]: +1 Speicher|charge_state[2020-01-01 01:00:00, C] ≥ -0.0
+ [2020-01-01 02:00:00, A]: +1 Speicher|charge_state[2020-01-01 02:00:00, A] ≥ -0.0
+ ...
+ [2020-01-01 07:00:00, C]: +1 Speicher|charge_state[2020-01-01 07:00:00, C] ≥ -0.0
+ [2020-01-01 08:00:00, A]: +1 Speicher|charge_state[2020-01-01 08:00:00, A] ≥ -0.0
+ [2020-01-01 08:00:00, B]: +1 Speicher|charge_state[2020-01-01 08:00:00, B] ≥ -0.0
+ [2020-01-01 08:00:00, C]: +1 Speicher|charge_state[2020-01-01 08:00:00, C] ≥ -0.0
+ [2020-01-01 09:00:00, A]: +1 Speicher|charge_state[2020-01-01 09:00:00, A] ≥ -0.0
+ [2020-01-01 09:00:00, B]: +1 Speicher|charge_state[2020-01-01 09:00:00, B] ≥ -0.0
+ [2020-01-01 09:00:00, C]: +1 Speicher|charge_state[2020-01-01 09:00:00, C] ≥ -0.0
+ "Speicher|initial_charge_state": |-
+ Constraint `Speicher|initial_charge_state`
+ [scenario: 3]:
+ ---------------------------------------------------------
+ [A]: +1 Speicher|charge_state[2020-01-01 00:00:00, A] = -0.0
+ [B]: +1 Speicher|charge_state[2020-01-01 00:00:00, B] = -0.0
+ [C]: +1 Speicher|charge_state[2020-01-01 00:00:00, C] = -0.0
+ "Boiler(Q_fu)|total_flow_hours": |-
+ Constraint `Boiler(Q_fu)|total_flow_hours`
+ [scenario: 3]:
+ ---------------------------------------------------------
+ [A]: +1 Boiler(Q_fu)|total_flow_hours[A] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, A] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, A]... -1 Boiler(Q_fu)|flow_rate[2020-01-01 06:00:00, A] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00, A] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 Boiler(Q_fu)|total_flow_hours[B] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, B] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, B]... -1 Boiler(Q_fu)|flow_rate[2020-01-01 06:00:00, B] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00, B] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 Boiler(Q_fu)|total_flow_hours[C] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, C] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, C]... -1 Boiler(Q_fu)|flow_rate[2020-01-01 06:00:00, C] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00, C] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "Boiler(Q_th)|on_hours_total": |-
+ Constraint `Boiler(Q_th)|on_hours_total`
+ [scenario: 3]:
+ -------------------------------------------------------
+ [A]: +1 Boiler(Q_th)|on_hours_total[A] - 1 Boiler(Q_th)|on[2020-01-01 00:00:00, A] - 1 Boiler(Q_th)|on[2020-01-01 01:00:00, A]... -1 Boiler(Q_th)|on[2020-01-01 06:00:00, A] - 1 Boiler(Q_th)|on[2020-01-01 07:00:00, A] - 1 Boiler(Q_th)|on[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 Boiler(Q_th)|on_hours_total[B] - 1 Boiler(Q_th)|on[2020-01-01 00:00:00, B] - 1 Boiler(Q_th)|on[2020-01-01 01:00:00, B]... -1 Boiler(Q_th)|on[2020-01-01 06:00:00, B] - 1 Boiler(Q_th)|on[2020-01-01 07:00:00, B] - 1 Boiler(Q_th)|on[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 Boiler(Q_th)|on_hours_total[C] - 1 Boiler(Q_th)|on[2020-01-01 00:00:00, C] - 1 Boiler(Q_th)|on[2020-01-01 01:00:00, C]... -1 Boiler(Q_th)|on[2020-01-01 06:00:00, C] - 1 Boiler(Q_th)|on[2020-01-01 07:00:00, C] - 1 Boiler(Q_th)|on[2020-01-01 08:00:00, C] = -0.0
+ "Boiler(Q_th)|flow_rate|ub": |-
+ Constraint `Boiler(Q_th)|flow_rate|ub`
+ [time: 9, scenario: 3]:
+ --------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, A] - 50 Boiler(Q_th)|on[2020-01-01 00:00:00, A] ≤ -0.0
+ [2020-01-01 00:00:00, B]: +1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, B] - 50 Boiler(Q_th)|on[2020-01-01 00:00:00, B] ≤ -0.0
+ [2020-01-01 00:00:00, C]: +1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, C] - 50 Boiler(Q_th)|on[2020-01-01 00:00:00, C] ≤ -0.0
+ [2020-01-01 01:00:00, A]: +1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, A] - 50 Boiler(Q_th)|on[2020-01-01 01:00:00, A] ≤ -0.0
+ [2020-01-01 01:00:00, B]: +1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, B] - 50 Boiler(Q_th)|on[2020-01-01 01:00:00, B] ≤ -0.0
+ [2020-01-01 01:00:00, C]: +1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, C] - 50 Boiler(Q_th)|on[2020-01-01 01:00:00, C] ≤ -0.0
+ [2020-01-01 02:00:00, A]: +1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00, A] - 50 Boiler(Q_th)|on[2020-01-01 02:00:00, A] ≤ -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 Boiler(Q_th)|flow_rate[2020-01-01 06:00:00, C] - 50 Boiler(Q_th)|on[2020-01-01 06:00:00, C] ≤ -0.0
+ [2020-01-01 07:00:00, A]: +1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, A] - 50 Boiler(Q_th)|on[2020-01-01 07:00:00, A] ≤ -0.0
+ [2020-01-01 07:00:00, B]: +1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, B] - 50 Boiler(Q_th)|on[2020-01-01 07:00:00, B] ≤ -0.0
+ [2020-01-01 07:00:00, C]: +1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, C] - 50 Boiler(Q_th)|on[2020-01-01 07:00:00, C] ≤ -0.0
+ [2020-01-01 08:00:00, A]: +1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, A] - 50 Boiler(Q_th)|on[2020-01-01 08:00:00, A] ≤ -0.0
+ [2020-01-01 08:00:00, B]: +1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, B] - 50 Boiler(Q_th)|on[2020-01-01 08:00:00, B] ≤ -0.0
+ [2020-01-01 08:00:00, C]: +1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, C] - 50 Boiler(Q_th)|on[2020-01-01 08:00:00, C] ≤ -0.0
+ "Boiler(Q_th)|flow_rate|lb": |-
+ Constraint `Boiler(Q_th)|flow_rate|lb`
+ [time: 9, scenario: 3]:
+ --------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, A] - 5 Boiler(Q_th)|on[2020-01-01 00:00:00, A] ≥ -0.0
+ [2020-01-01 00:00:00, B]: +1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, B] - 5 Boiler(Q_th)|on[2020-01-01 00:00:00, B] ≥ -0.0
+ [2020-01-01 00:00:00, C]: +1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, C] - 5 Boiler(Q_th)|on[2020-01-01 00:00:00, C] ≥ -0.0
+ [2020-01-01 01:00:00, A]: +1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, A] - 5 Boiler(Q_th)|on[2020-01-01 01:00:00, A] ≥ -0.0
+ [2020-01-01 01:00:00, B]: +1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, B] - 5 Boiler(Q_th)|on[2020-01-01 01:00:00, B] ≥ -0.0
+ [2020-01-01 01:00:00, C]: +1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, C] - 5 Boiler(Q_th)|on[2020-01-01 01:00:00, C] ≥ -0.0
+ [2020-01-01 02:00:00, A]: +1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00, A] - 5 Boiler(Q_th)|on[2020-01-01 02:00:00, A] ≥ -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 Boiler(Q_th)|flow_rate[2020-01-01 06:00:00, C] - 5 Boiler(Q_th)|on[2020-01-01 06:00:00, C] ≥ -0.0
+ [2020-01-01 07:00:00, A]: +1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, A] - 5 Boiler(Q_th)|on[2020-01-01 07:00:00, A] ≥ -0.0
+ [2020-01-01 07:00:00, B]: +1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, B] - 5 Boiler(Q_th)|on[2020-01-01 07:00:00, B] ≥ -0.0
+ [2020-01-01 07:00:00, C]: +1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, C] - 5 Boiler(Q_th)|on[2020-01-01 07:00:00, C] ≥ -0.0
+ [2020-01-01 08:00:00, A]: +1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, A] - 5 Boiler(Q_th)|on[2020-01-01 08:00:00, A] ≥ -0.0
+ [2020-01-01 08:00:00, B]: +1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, B] - 5 Boiler(Q_th)|on[2020-01-01 08:00:00, B] ≥ -0.0
+ [2020-01-01 08:00:00, C]: +1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, C] - 5 Boiler(Q_th)|on[2020-01-01 08:00:00, C] ≥ -0.0
+ "Boiler(Q_th)|total_flow_hours": |-
+ Constraint `Boiler(Q_th)|total_flow_hours`
+ [scenario: 3]:
+ ---------------------------------------------------------
+ [A]: +1 Boiler(Q_th)|total_flow_hours[A] - 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, A] - 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, A]... -1 Boiler(Q_th)|flow_rate[2020-01-01 06:00:00, A] - 1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, A] - 1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 Boiler(Q_th)|total_flow_hours[B] - 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, B] - 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, B]... -1 Boiler(Q_th)|flow_rate[2020-01-01 06:00:00, B] - 1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, B] - 1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 Boiler(Q_th)|total_flow_hours[C] - 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, C] - 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, C]... -1 Boiler(Q_th)|flow_rate[2020-01-01 06:00:00, C] - 1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, C] - 1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "Boiler|conversion_0": |-
+ Constraint `Boiler|conversion_0`
+ [time: 9, scenario: 3]:
+ --------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, A] - 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, A] = -0.0
+ [2020-01-01 00:00:00, B]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, B] - 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, B] = -0.0
+ [2020-01-01 00:00:00, C]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, C] - 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, C] = -0.0
+ [2020-01-01 01:00:00, A]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, A] - 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, A] = -0.0
+ [2020-01-01 01:00:00, B]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, B] - 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, B] = -0.0
+ [2020-01-01 01:00:00, C]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, C] - 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, C] = -0.0
+ [2020-01-01 02:00:00, A]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00, A] - 1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00, A] = -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 06:00:00, C] - 1 Boiler(Q_th)|flow_rate[2020-01-01 06:00:00, C] = -0.0
+ [2020-01-01 07:00:00, A]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00, A] - 1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, A] = -0.0
+ [2020-01-01 07:00:00, B]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00, B] - 1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, B] = -0.0
+ [2020-01-01 07:00:00, C]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00, C] - 1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, C] = -0.0
+ [2020-01-01 08:00:00, A]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00, A] - 1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [2020-01-01 08:00:00, B]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00, B] - 1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [2020-01-01 08:00:00, C]: +0.5 Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00, C] - 1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "Wärmelast(Q_th_Last)|total_flow_hours": |-
+ Constraint `Wärmelast(Q_th_Last)|total_flow_hours`
+ [scenario: 3]:
+ -----------------------------------------------------------------
+ [A]: +1 Wärmelast(Q_th_Last)|total_flow_hours[A] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00, A] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00, A]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 06:00:00, A] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00, A] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 Wärmelast(Q_th_Last)|total_flow_hours[B] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00, B] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00, B]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 06:00:00, B] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00, B] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 Wärmelast(Q_th_Last)|total_flow_hours[C] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00, C] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00, C]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 06:00:00, C] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00, C] - 1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "Gastarif(Q_Gas)|total_flow_hours": |-
+ Constraint `Gastarif(Q_Gas)|total_flow_hours`
+ [scenario: 3]:
+ ------------------------------------------------------------
+ [A]: +1 Gastarif(Q_Gas)|total_flow_hours[A] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00, A] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00, A]... -1 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00, A] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00, A] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 Gastarif(Q_Gas)|total_flow_hours[B] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00, B] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00, B]... -1 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00, B] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00, B] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 Gastarif(Q_Gas)|total_flow_hours[C] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00, C] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00, C]... -1 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00, C] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00, C] - 1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "Gastarif(Q_Gas)->costs(temporal)": |-
+ Constraint `Gastarif(Q_Gas)->costs(temporal)`
+ [time: 9, scenario: 3]:
+ ---------------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00, A] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00, A] = -0.0
+ [2020-01-01 00:00:00, B]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00, B] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00, B] = -0.0
+ [2020-01-01 00:00:00, C]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 00:00:00, C] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00, C] = -0.0
+ [2020-01-01 01:00:00, A]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00, A] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00, A] = -0.0
+ [2020-01-01 01:00:00, B]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00, B] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00, B] = -0.0
+ [2020-01-01 01:00:00, C]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 01:00:00, C] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00, C] = -0.0
+ [2020-01-01 02:00:00, A]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 02:00:00, A] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00, A] = -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 06:00:00, C] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00, C] = -0.0
+ [2020-01-01 07:00:00, A]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00, A] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00, A] = -0.0
+ [2020-01-01 07:00:00, B]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00, B] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00, B] = -0.0
+ [2020-01-01 07:00:00, C]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 07:00:00, C] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00, C] = -0.0
+ [2020-01-01 08:00:00, A]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00, A] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [2020-01-01 08:00:00, B]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00, B] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [2020-01-01 08:00:00, C]: +1 Gastarif(Q_Gas)->costs(temporal)[2020-01-01 08:00:00, C] - 0.04 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "Gastarif(Q_Gas)->CO2(temporal)": |-
+ Constraint `Gastarif(Q_Gas)->CO2(temporal)`
+ [time: 9, scenario: 3]:
+ -------------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00, A] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00, A] = -0.0
+ [2020-01-01 00:00:00, B]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00, B] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00, B] = -0.0
+ [2020-01-01 00:00:00, C]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 00:00:00, C] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00, C] = -0.0
+ [2020-01-01 01:00:00, A]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00, A] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00, A] = -0.0
+ [2020-01-01 01:00:00, B]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00, B] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00, B] = -0.0
+ [2020-01-01 01:00:00, C]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 01:00:00, C] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00, C] = -0.0
+ [2020-01-01 02:00:00, A]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 02:00:00, A] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00, A] = -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 06:00:00, C] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00, C] = -0.0
+ [2020-01-01 07:00:00, A]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00, A] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00, A] = -0.0
+ [2020-01-01 07:00:00, B]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00, B] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00, B] = -0.0
+ [2020-01-01 07:00:00, C]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 07:00:00, C] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00, C] = -0.0
+ [2020-01-01 08:00:00, A]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00, A] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [2020-01-01 08:00:00, B]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00, B] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [2020-01-01 08:00:00, C]: +1 Gastarif(Q_Gas)->CO2(temporal)[2020-01-01 08:00:00, C] - 0.3 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "Einspeisung(P_el)|total_flow_hours": |-
+ Constraint `Einspeisung(P_el)|total_flow_hours`
+ [scenario: 3]:
+ --------------------------------------------------------------
+ [A]: +1 Einspeisung(P_el)|total_flow_hours[A] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00, A] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00, A]... -1 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00, A] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00, A] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 Einspeisung(P_el)|total_flow_hours[B] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00, B] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00, B]... -1 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00, B] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00, B] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 Einspeisung(P_el)|total_flow_hours[C] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00, C] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00, C]... -1 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00, C] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00, C] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "Einspeisung(P_el)->costs(temporal)": |-
+ Constraint `Einspeisung(P_el)->costs(temporal)`
+ [time: 9, scenario: 3]:
+ -----------------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00, A] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00, A] = -0.0
+ [2020-01-01 00:00:00, B]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00, B] + 0.1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00, B] = -0.0
+ [2020-01-01 00:00:00, C]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 00:00:00, C] + 0.15 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00, C] = -0.0
+ [2020-01-01 01:00:00, A]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00, A] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00, A] = -0.0
+ [2020-01-01 01:00:00, B]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00, B] + 0.1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00, B] = -0.0
+ [2020-01-01 01:00:00, C]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 01:00:00, C] + 0.15 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00, C] = -0.0
+ [2020-01-01 02:00:00, A]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 02:00:00, A] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00, A] = -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 06:00:00, C] + 0.15 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00, C] = -0.0
+ [2020-01-01 07:00:00, A]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00, A] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00, A] = -0.0
+ [2020-01-01 07:00:00, B]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00, B] + 0.1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00, B] = -0.0
+ [2020-01-01 07:00:00, C]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 07:00:00, C] + 0.15 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00, C] = -0.0
+ [2020-01-01 08:00:00, A]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00, A] + 0.08 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [2020-01-01 08:00:00, B]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00, B] + 0.1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [2020-01-01 08:00:00, C]: +1 Einspeisung(P_el)->costs(temporal)[2020-01-01 08:00:00, C] + 0.15 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "CHP_unit(Q_fu)|total_flow_hours": |-
+ Constraint `CHP_unit(Q_fu)|total_flow_hours`
+ [scenario: 3]:
+ -----------------------------------------------------------
+ [A]: +1 CHP_unit(Q_fu)|total_flow_hours[A] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00, A] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00, A]... -1 CHP_unit(Q_fu)|flow_rate[2020-01-01 06:00:00, A] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00, A] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 CHP_unit(Q_fu)|total_flow_hours[B] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00, B] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00, B]... -1 CHP_unit(Q_fu)|flow_rate[2020-01-01 06:00:00, B] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00, B] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 CHP_unit(Q_fu)|total_flow_hours[C] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00, C] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00, C]... -1 CHP_unit(Q_fu)|flow_rate[2020-01-01 06:00:00, C] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00, C] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "CHP_unit(Q_th)|total_flow_hours": |-
+ Constraint `CHP_unit(Q_th)|total_flow_hours`
+ [scenario: 3]:
+ -----------------------------------------------------------
+ [A]: +1 CHP_unit(Q_th)|total_flow_hours[A] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00, A] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00, A]... -1 CHP_unit(Q_th)|flow_rate[2020-01-01 06:00:00, A] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00, A] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 CHP_unit(Q_th)|total_flow_hours[B] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00, B] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00, B]... -1 CHP_unit(Q_th)|flow_rate[2020-01-01 06:00:00, B] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00, B] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 CHP_unit(Q_th)|total_flow_hours[C] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00, C] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00, C]... -1 CHP_unit(Q_th)|flow_rate[2020-01-01 06:00:00, C] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00, C] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "CHP_unit(P_el)|on_hours_total": |-
+ Constraint `CHP_unit(P_el)|on_hours_total`
+ [scenario: 3]:
+ ---------------------------------------------------------
+ [A]: +1 CHP_unit(P_el)|on_hours_total[A] - 1 CHP_unit(P_el)|on[2020-01-01 00:00:00, A] - 1 CHP_unit(P_el)|on[2020-01-01 01:00:00, A]... -1 CHP_unit(P_el)|on[2020-01-01 06:00:00, A] - 1 CHP_unit(P_el)|on[2020-01-01 07:00:00, A] - 1 CHP_unit(P_el)|on[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 CHP_unit(P_el)|on_hours_total[B] - 1 CHP_unit(P_el)|on[2020-01-01 00:00:00, B] - 1 CHP_unit(P_el)|on[2020-01-01 01:00:00, B]... -1 CHP_unit(P_el)|on[2020-01-01 06:00:00, B] - 1 CHP_unit(P_el)|on[2020-01-01 07:00:00, B] - 1 CHP_unit(P_el)|on[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 CHP_unit(P_el)|on_hours_total[C] - 1 CHP_unit(P_el)|on[2020-01-01 00:00:00, C] - 1 CHP_unit(P_el)|on[2020-01-01 01:00:00, C]... -1 CHP_unit(P_el)|on[2020-01-01 06:00:00, C] - 1 CHP_unit(P_el)|on[2020-01-01 07:00:00, C] - 1 CHP_unit(P_el)|on[2020-01-01 08:00:00, C] = -0.0
+ "CHP_unit(P_el)|flow_rate|ub": |-
+ Constraint `CHP_unit(P_el)|flow_rate|ub`
+ [time: 9, scenario: 3]:
+ ----------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, A] - 60 CHP_unit(P_el)|on[2020-01-01 00:00:00, A] ≤ -0.0
+ [2020-01-01 00:00:00, B]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, B] - 60 CHP_unit(P_el)|on[2020-01-01 00:00:00, B] ≤ -0.0
+ [2020-01-01 00:00:00, C]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, C] - 60 CHP_unit(P_el)|on[2020-01-01 00:00:00, C] ≤ -0.0
+ [2020-01-01 01:00:00, A]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, A] - 60 CHP_unit(P_el)|on[2020-01-01 01:00:00, A] ≤ -0.0
+ [2020-01-01 01:00:00, B]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, B] - 60 CHP_unit(P_el)|on[2020-01-01 01:00:00, B] ≤ -0.0
+ [2020-01-01 01:00:00, C]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, C] - 60 CHP_unit(P_el)|on[2020-01-01 01:00:00, C] ≤ -0.0
+ [2020-01-01 02:00:00, A]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 02:00:00, A] - 60 CHP_unit(P_el)|on[2020-01-01 02:00:00, A] ≤ -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 06:00:00, C] - 60 CHP_unit(P_el)|on[2020-01-01 06:00:00, C] ≤ -0.0
+ [2020-01-01 07:00:00, A]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, A] - 60 CHP_unit(P_el)|on[2020-01-01 07:00:00, A] ≤ -0.0
+ [2020-01-01 07:00:00, B]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, B] - 60 CHP_unit(P_el)|on[2020-01-01 07:00:00, B] ≤ -0.0
+ [2020-01-01 07:00:00, C]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, C] - 60 CHP_unit(P_el)|on[2020-01-01 07:00:00, C] ≤ -0.0
+ [2020-01-01 08:00:00, A]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, A] - 60 CHP_unit(P_el)|on[2020-01-01 08:00:00, A] ≤ -0.0
+ [2020-01-01 08:00:00, B]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, B] - 60 CHP_unit(P_el)|on[2020-01-01 08:00:00, B] ≤ -0.0
+ [2020-01-01 08:00:00, C]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, C] - 60 CHP_unit(P_el)|on[2020-01-01 08:00:00, C] ≤ -0.0
+ "CHP_unit(P_el)|flow_rate|lb": |-
+ Constraint `CHP_unit(P_el)|flow_rate|lb`
+ [time: 9, scenario: 3]:
+ ----------------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, A] - 5 CHP_unit(P_el)|on[2020-01-01 00:00:00, A] ≥ -0.0
+ [2020-01-01 00:00:00, B]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, B] - 5 CHP_unit(P_el)|on[2020-01-01 00:00:00, B] ≥ -0.0
+ [2020-01-01 00:00:00, C]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, C] - 5 CHP_unit(P_el)|on[2020-01-01 00:00:00, C] ≥ -0.0
+ [2020-01-01 01:00:00, A]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, A] - 5 CHP_unit(P_el)|on[2020-01-01 01:00:00, A] ≥ -0.0
+ [2020-01-01 01:00:00, B]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, B] - 5 CHP_unit(P_el)|on[2020-01-01 01:00:00, B] ≥ -0.0
+ [2020-01-01 01:00:00, C]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, C] - 5 CHP_unit(P_el)|on[2020-01-01 01:00:00, C] ≥ -0.0
+ [2020-01-01 02:00:00, A]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 02:00:00, A] - 5 CHP_unit(P_el)|on[2020-01-01 02:00:00, A] ≥ -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 06:00:00, C] - 5 CHP_unit(P_el)|on[2020-01-01 06:00:00, C] ≥ -0.0
+ [2020-01-01 07:00:00, A]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, A] - 5 CHP_unit(P_el)|on[2020-01-01 07:00:00, A] ≥ -0.0
+ [2020-01-01 07:00:00, B]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, B] - 5 CHP_unit(P_el)|on[2020-01-01 07:00:00, B] ≥ -0.0
+ [2020-01-01 07:00:00, C]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, C] - 5 CHP_unit(P_el)|on[2020-01-01 07:00:00, C] ≥ -0.0
+ [2020-01-01 08:00:00, A]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, A] - 5 CHP_unit(P_el)|on[2020-01-01 08:00:00, A] ≥ -0.0
+ [2020-01-01 08:00:00, B]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, B] - 5 CHP_unit(P_el)|on[2020-01-01 08:00:00, B] ≥ -0.0
+ [2020-01-01 08:00:00, C]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, C] - 5 CHP_unit(P_el)|on[2020-01-01 08:00:00, C] ≥ -0.0
+ "CHP_unit(P_el)|total_flow_hours": |-
+ Constraint `CHP_unit(P_el)|total_flow_hours`
+ [scenario: 3]:
+ -----------------------------------------------------------
+ [A]: +1 CHP_unit(P_el)|total_flow_hours[A] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, A] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, A]... -1 CHP_unit(P_el)|flow_rate[2020-01-01 06:00:00, A] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, A] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [B]: +1 CHP_unit(P_el)|total_flow_hours[B] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, B] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, B]... -1 CHP_unit(P_el)|flow_rate[2020-01-01 06:00:00, B] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, B] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [C]: +1 CHP_unit(P_el)|total_flow_hours[C] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, C] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, C]... -1 CHP_unit(P_el)|flow_rate[2020-01-01 06:00:00, C] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, C] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "CHP_unit|conversion_0": |-
+ Constraint `CHP_unit|conversion_0`
+ [time: 9, scenario: 3]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00, A] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00, A] = -0.0
+ [2020-01-01 00:00:00, B]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00, B] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00, B] = -0.0
+ [2020-01-01 00:00:00, C]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00, C] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00, C] = -0.0
+ [2020-01-01 01:00:00, A]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00, A] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00, A] = -0.0
+ [2020-01-01 01:00:00, B]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00, B] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00, B] = -0.0
+ [2020-01-01 01:00:00, C]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00, C] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00, C] = -0.0
+ [2020-01-01 02:00:00, A]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 02:00:00, A] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 02:00:00, A] = -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 06:00:00, C] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 06:00:00, C] = -0.0
+ [2020-01-01 07:00:00, A]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00, A] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00, A] = -0.0
+ [2020-01-01 07:00:00, B]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00, B] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00, B] = -0.0
+ [2020-01-01 07:00:00, C]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00, C] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00, C] = -0.0
+ [2020-01-01 08:00:00, A]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00, A] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [2020-01-01 08:00:00, B]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00, B] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [2020-01-01 08:00:00, C]: +0.5 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00, C] - 1 CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "CHP_unit|conversion_1": |-
+ Constraint `CHP_unit|conversion_1`
+ [time: 9, scenario: 3]:
+ ----------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00, A] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, A] = -0.0
+ [2020-01-01 00:00:00, B]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00, B] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, B] = -0.0
+ [2020-01-01 00:00:00, C]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00, C] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, C] = -0.0
+ [2020-01-01 01:00:00, A]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00, A] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, A] = -0.0
+ [2020-01-01 01:00:00, B]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00, B] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, B] = -0.0
+ [2020-01-01 01:00:00, C]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00, C] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, C] = -0.0
+ [2020-01-01 02:00:00, A]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 02:00:00, A] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 02:00:00, A] = -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 06:00:00, C] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 06:00:00, C] = -0.0
+ [2020-01-01 07:00:00, A]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00, A] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, A] = -0.0
+ [2020-01-01 07:00:00, B]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00, B] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, B] = -0.0
+ [2020-01-01 07:00:00, C]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00, C] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, C] = -0.0
+ [2020-01-01 08:00:00, A]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00, A] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, A] = -0.0
+ [2020-01-01 08:00:00, B]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00, B] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, B] = -0.0
+ [2020-01-01 08:00:00, C]: +0.4 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00, C] - 1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, C] = -0.0
+ "Strom|balance": |-
+ Constraint `Strom|balance`
+ [time: 9, scenario: 3]:
+ --------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, A] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00, A] + 1 Strom|excess_input[2020-01-01 00:00:00, A] - 1 Strom|excess_output[2020-01-01 00:00:00, A] = -0.0
+ [2020-01-01 00:00:00, B]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, B] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00, B] + 1 Strom|excess_input[2020-01-01 00:00:00, B] - 1 Strom|excess_output[2020-01-01 00:00:00, B] = -0.0
+ [2020-01-01 00:00:00, C]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 00:00:00, C] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 00:00:00, C] + 1 Strom|excess_input[2020-01-01 00:00:00, C] - 1 Strom|excess_output[2020-01-01 00:00:00, C] = -0.0
+ [2020-01-01 01:00:00, A]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, A] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00, A] + 1 Strom|excess_input[2020-01-01 01:00:00, A] - 1 Strom|excess_output[2020-01-01 01:00:00, A] = -0.0
+ [2020-01-01 01:00:00, B]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, B] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00, B] + 1 Strom|excess_input[2020-01-01 01:00:00, B] - 1 Strom|excess_output[2020-01-01 01:00:00, B] = -0.0
+ [2020-01-01 01:00:00, C]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 01:00:00, C] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 01:00:00, C] + 1 Strom|excess_input[2020-01-01 01:00:00, C] - 1 Strom|excess_output[2020-01-01 01:00:00, C] = -0.0
+ [2020-01-01 02:00:00, A]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 02:00:00, A] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 02:00:00, A] + 1 Strom|excess_input[2020-01-01 02:00:00, A] - 1 Strom|excess_output[2020-01-01 02:00:00, A] = -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 06:00:00, C] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 06:00:00, C] + 1 Strom|excess_input[2020-01-01 06:00:00, C] - 1 Strom|excess_output[2020-01-01 06:00:00, C] = -0.0
+ [2020-01-01 07:00:00, A]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, A] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00, A] + 1 Strom|excess_input[2020-01-01 07:00:00, A] - 1 Strom|excess_output[2020-01-01 07:00:00, A] = -0.0
+ [2020-01-01 07:00:00, B]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, B] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00, B] + 1 Strom|excess_input[2020-01-01 07:00:00, B] - 1 Strom|excess_output[2020-01-01 07:00:00, B] = -0.0
+ [2020-01-01 07:00:00, C]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 07:00:00, C] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 07:00:00, C] + 1 Strom|excess_input[2020-01-01 07:00:00, C] - 1 Strom|excess_output[2020-01-01 07:00:00, C] = -0.0
+ [2020-01-01 08:00:00, A]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, A] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00, A] + 1 Strom|excess_input[2020-01-01 08:00:00, A] - 1 Strom|excess_output[2020-01-01 08:00:00, A] = -0.0
+ [2020-01-01 08:00:00, B]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, B] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00, B] + 1 Strom|excess_input[2020-01-01 08:00:00, B] - 1 Strom|excess_output[2020-01-01 08:00:00, B] = -0.0
+ [2020-01-01 08:00:00, C]: +1 CHP_unit(P_el)|flow_rate[2020-01-01 08:00:00, C] - 1 Einspeisung(P_el)|flow_rate[2020-01-01 08:00:00, C] + 1 Strom|excess_input[2020-01-01 08:00:00, C] - 1 Strom|excess_output[2020-01-01 08:00:00, C] = -0.0
+ "Strom->Penalty": |-
+ Constraint `Strom->Penalty`
+ ---------------------------
+ +1 Strom->Penalty - 1e+05 Strom|excess_input[2020-01-01 00:00:00, A] - 1e+05 Strom|excess_input[2020-01-01 00:00:00, B]... -1e+05 Strom|excess_output[2020-01-01 08:00:00, A] - 1e+05 Strom|excess_output[2020-01-01 08:00:00, B] - 1e+05 Strom|excess_output[2020-01-01 08:00:00, C] = -0.0
+ "Fernwärme|balance": |-
+ Constraint `Fernwärme|balance`
+ [time: 9, scenario: 3]:
+ ------------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, A] + 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, A] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00, A]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00, A] + 1 Fernwärme|excess_input[2020-01-01 00:00:00, A] - 1 Fernwärme|excess_output[2020-01-01 00:00:00, A] = -0.0
+ [2020-01-01 00:00:00, B]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, B] + 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, B] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00, B]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00, B] + 1 Fernwärme|excess_input[2020-01-01 00:00:00, B] - 1 Fernwärme|excess_output[2020-01-01 00:00:00, B] = -0.0
+ [2020-01-01 00:00:00, C]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 00:00:00, C] + 1 Boiler(Q_th)|flow_rate[2020-01-01 00:00:00, C] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 00:00:00, C]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 00:00:00, C] + 1 Fernwärme|excess_input[2020-01-01 00:00:00, C] - 1 Fernwärme|excess_output[2020-01-01 00:00:00, C] = -0.0
+ [2020-01-01 01:00:00, A]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, A] + 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, A] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00, A]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00, A] + 1 Fernwärme|excess_input[2020-01-01 01:00:00, A] - 1 Fernwärme|excess_output[2020-01-01 01:00:00, A] = -0.0
+ [2020-01-01 01:00:00, B]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, B] + 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, B] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00, B]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00, B] + 1 Fernwärme|excess_input[2020-01-01 01:00:00, B] - 1 Fernwärme|excess_output[2020-01-01 01:00:00, B] = -0.0
+ [2020-01-01 01:00:00, C]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 01:00:00, C] + 1 Boiler(Q_th)|flow_rate[2020-01-01 01:00:00, C] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 01:00:00, C]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 01:00:00, C] + 1 Fernwärme|excess_input[2020-01-01 01:00:00, C] - 1 Fernwärme|excess_output[2020-01-01 01:00:00, C] = -0.0
+ [2020-01-01 02:00:00, A]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 02:00:00, A] + 1 Boiler(Q_th)|flow_rate[2020-01-01 02:00:00, A] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 02:00:00, A]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 02:00:00, A] + 1 Fernwärme|excess_input[2020-01-01 02:00:00, A] - 1 Fernwärme|excess_output[2020-01-01 02:00:00, A] = -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 06:00:00, C] + 1 Boiler(Q_th)|flow_rate[2020-01-01 06:00:00, C] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 06:00:00, C]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 06:00:00, C] + 1 Fernwärme|excess_input[2020-01-01 06:00:00, C] - 1 Fernwärme|excess_output[2020-01-01 06:00:00, C] = -0.0
+ [2020-01-01 07:00:00, A]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, A] + 1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, A] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00, A]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00, A] + 1 Fernwärme|excess_input[2020-01-01 07:00:00, A] - 1 Fernwärme|excess_output[2020-01-01 07:00:00, A] = -0.0
+ [2020-01-01 07:00:00, B]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, B] + 1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, B] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00, B]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00, B] + 1 Fernwärme|excess_input[2020-01-01 07:00:00, B] - 1 Fernwärme|excess_output[2020-01-01 07:00:00, B] = -0.0
+ [2020-01-01 07:00:00, C]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 07:00:00, C] + 1 Boiler(Q_th)|flow_rate[2020-01-01 07:00:00, C] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 07:00:00, C]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 07:00:00, C] + 1 Fernwärme|excess_input[2020-01-01 07:00:00, C] - 1 Fernwärme|excess_output[2020-01-01 07:00:00, C] = -0.0
+ [2020-01-01 08:00:00, A]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, A] + 1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, A] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00, A]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00, A] + 1 Fernwärme|excess_input[2020-01-01 08:00:00, A] - 1 Fernwärme|excess_output[2020-01-01 08:00:00, A] = -0.0
+ [2020-01-01 08:00:00, B]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, B] + 1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, B] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00, B]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00, B] + 1 Fernwärme|excess_input[2020-01-01 08:00:00, B] - 1 Fernwärme|excess_output[2020-01-01 08:00:00, B] = -0.0
+ [2020-01-01 08:00:00, C]: +1 Speicher(Q_th_unload)|flow_rate[2020-01-01 08:00:00, C] + 1 Boiler(Q_th)|flow_rate[2020-01-01 08:00:00, C] + 1 CHP_unit(Q_th)|flow_rate[2020-01-01 08:00:00, C]... -1 Wärmelast(Q_th_Last)|flow_rate[2020-01-01 08:00:00, C] + 1 Fernwärme|excess_input[2020-01-01 08:00:00, C] - 1 Fernwärme|excess_output[2020-01-01 08:00:00, C] = -0.0
+ "Fernwärme->Penalty": |-
+ Constraint `Fernwärme->Penalty`
+ -------------------------------
+ +1 Fernwärme->Penalty - 1e+05 Fernwärme|excess_input[2020-01-01 00:00:00, A] - 1e+05 Fernwärme|excess_input[2020-01-01 00:00:00, B]... -1e+05 Fernwärme|excess_output[2020-01-01 08:00:00, A] - 1e+05 Fernwärme|excess_output[2020-01-01 08:00:00, B] - 1e+05 Fernwärme|excess_output[2020-01-01 08:00:00, C] = -0.0
+ "Gas|balance": |-
+ Constraint `Gas|balance`
+ [time: 9, scenario: 3]:
+ ------------------------------------------------
+ [2020-01-01 00:00:00, A]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00, A] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, A] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00, A] + 1 Gas|excess_input[2020-01-01 00:00:00, A] - 1 Gas|excess_output[2020-01-01 00:00:00, A] = -0.0
+ [2020-01-01 00:00:00, B]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00, B] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, B] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00, B] + 1 Gas|excess_input[2020-01-01 00:00:00, B] - 1 Gas|excess_output[2020-01-01 00:00:00, B] = -0.0
+ [2020-01-01 00:00:00, C]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 00:00:00, C] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 00:00:00, C] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 00:00:00, C] + 1 Gas|excess_input[2020-01-01 00:00:00, C] - 1 Gas|excess_output[2020-01-01 00:00:00, C] = -0.0
+ [2020-01-01 01:00:00, A]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00, A] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, A] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00, A] + 1 Gas|excess_input[2020-01-01 01:00:00, A] - 1 Gas|excess_output[2020-01-01 01:00:00, A] = -0.0
+ [2020-01-01 01:00:00, B]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00, B] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, B] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00, B] + 1 Gas|excess_input[2020-01-01 01:00:00, B] - 1 Gas|excess_output[2020-01-01 01:00:00, B] = -0.0
+ [2020-01-01 01:00:00, C]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 01:00:00, C] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 01:00:00, C] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 01:00:00, C] + 1 Gas|excess_input[2020-01-01 01:00:00, C] - 1 Gas|excess_output[2020-01-01 01:00:00, C] = -0.0
+ [2020-01-01 02:00:00, A]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 02:00:00, A] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 02:00:00, A] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 02:00:00, A] + 1 Gas|excess_input[2020-01-01 02:00:00, A] - 1 Gas|excess_output[2020-01-01 02:00:00, A] = -0.0
+ ...
+ [2020-01-01 06:00:00, C]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 06:00:00, C] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 06:00:00, C] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 06:00:00, C] + 1 Gas|excess_input[2020-01-01 06:00:00, C] - 1 Gas|excess_output[2020-01-01 06:00:00, C] = -0.0
+ [2020-01-01 07:00:00, A]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00, A] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00, A] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00, A] + 1 Gas|excess_input[2020-01-01 07:00:00, A] - 1 Gas|excess_output[2020-01-01 07:00:00, A] = -0.0
+ [2020-01-01 07:00:00, B]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00, B] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00, B] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00, B] + 1 Gas|excess_input[2020-01-01 07:00:00, B] - 1 Gas|excess_output[2020-01-01 07:00:00, B] = -0.0
+ [2020-01-01 07:00:00, C]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 07:00:00, C] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 07:00:00, C] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 07:00:00, C] + 1 Gas|excess_input[2020-01-01 07:00:00, C] - 1 Gas|excess_output[2020-01-01 07:00:00, C] = -0.0
+ [2020-01-01 08:00:00, A]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00, A] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00, A] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00, A] + 1 Gas|excess_input[2020-01-01 08:00:00, A] - 1 Gas|excess_output[2020-01-01 08:00:00, A] = -0.0
+ [2020-01-01 08:00:00, B]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00, B] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00, B] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00, B] + 1 Gas|excess_input[2020-01-01 08:00:00, B] - 1 Gas|excess_output[2020-01-01 08:00:00, B] = -0.0
+ [2020-01-01 08:00:00, C]: +1 Gastarif(Q_Gas)|flow_rate[2020-01-01 08:00:00, C] - 1 Boiler(Q_fu)|flow_rate[2020-01-01 08:00:00, C] - 1 CHP_unit(Q_fu)|flow_rate[2020-01-01 08:00:00, C] + 1 Gas|excess_input[2020-01-01 08:00:00, C] - 1 Gas|excess_output[2020-01-01 08:00:00, C] = -0.0
+ "Gas->Penalty": |-
+ Constraint `Gas->Penalty`
+ -------------------------
+ +1 Gas->Penalty - 1e+05 Gas|excess_input[2020-01-01 00:00:00, A] - 1e+05 Gas|excess_input[2020-01-01 00:00:00, B]... -1e+05 Gas|excess_output[2020-01-01 08:00:00, A] - 1e+05 Gas|excess_output[2020-01-01 08:00:00, B] - 1e+05 Gas|excess_output[2020-01-01 08:00:00, C] = -0.0
+ "Speicher|size|scenario_independent": |-
+ Constraint `Speicher|size|scenario_independent`
+ [scenario: 2]:
+ --------------------------------------------------------------
+ [B]: +1 Speicher|size[A] - 1 Speicher|size[B] = -0.0
+ [C]: +1 Speicher|size[A] - 1 Speicher|size[C] = -0.0
+binaries:
+ - "Speicher(Q_th_load)|on"
+ - "Speicher(Q_th_unload)|on"
+ - "Boiler(Q_th)|on"
+ - "CHP_unit(P_el)|on"
+integers: []
+continuous:
+ - costs(periodic)
+ - costs(temporal)
+ - "costs(temporal)|per_timestep"
+ - costs
+ - CO2(periodic)
+ - CO2(temporal)
+ - "CO2(temporal)|per_timestep"
+ - CO2
+ - Penalty
+ - "CO2(temporal)->costs(temporal)"
+ - "Speicher(Q_th_load)|flow_rate"
+ - "Speicher(Q_th_load)|on_hours_total"
+ - "Speicher(Q_th_load)|total_flow_hours"
+ - "Speicher(Q_th_unload)|flow_rate"
+ - "Speicher(Q_th_unload)|on_hours_total"
+ - "Speicher(Q_th_unload)|total_flow_hours"
+ - "Speicher|charge_state"
+ - "Speicher|netto_discharge"
+ - "Speicher|size"
+ - "Speicher->costs(periodic)"
+ - "Boiler(Q_fu)|flow_rate"
+ - "Boiler(Q_fu)|total_flow_hours"
+ - "Boiler(Q_th)|flow_rate"
+ - "Boiler(Q_th)|on_hours_total"
+ - "Boiler(Q_th)|total_flow_hours"
+ - "Wärmelast(Q_th_Last)|flow_rate"
+ - "Wärmelast(Q_th_Last)|total_flow_hours"
+ - "Gastarif(Q_Gas)|flow_rate"
+ - "Gastarif(Q_Gas)|total_flow_hours"
+ - "Gastarif(Q_Gas)->costs(temporal)"
+ - "Gastarif(Q_Gas)->CO2(temporal)"
+ - "Einspeisung(P_el)|flow_rate"
+ - "Einspeisung(P_el)|total_flow_hours"
+ - "Einspeisung(P_el)->costs(temporal)"
+ - "CHP_unit(Q_fu)|flow_rate"
+ - "CHP_unit(Q_fu)|total_flow_hours"
+ - "CHP_unit(Q_th)|flow_rate"
+ - "CHP_unit(Q_th)|total_flow_hours"
+ - "CHP_unit(P_el)|flow_rate"
+ - "CHP_unit(P_el)|on_hours_total"
+ - "CHP_unit(P_el)|total_flow_hours"
+ - "Strom|excess_input"
+ - "Strom|excess_output"
+ - "Strom->Penalty"
+ - "Fernwärme|excess_input"
+ - "Fernwärme|excess_output"
+ - "Fernwärme->Penalty"
+ - "Gas|excess_input"
+ - "Gas|excess_output"
+ - "Gas->Penalty"
+infeasible_constraints: ''
diff --git a/tests/ressources/v4-api/io_simple_flow_system_scenarios--solution.nc4 b/tests/ressources/v4-api/io_simple_flow_system_scenarios--solution.nc4
new file mode 100644
index 000000000..c626f2dd9
Binary files /dev/null and b/tests/ressources/v4-api/io_simple_flow_system_scenarios--solution.nc4 differ
diff --git a/tests/ressources/v4-api/io_simple_flow_system_scenarios--summary.yaml b/tests/ressources/v4-api/io_simple_flow_system_scenarios--summary.yaml
new file mode 100644
index 000000000..b2b663a0a
--- /dev/null
+++ b/tests/ressources/v4-api/io_simple_flow_system_scenarios--summary.yaml
@@ -0,0 +1,51 @@
+Name: io_simple_flow_system_scenarios
+Number of timesteps: 9
+Calculation Type: FullCalculation
+Constraints: 753
+Variables: 829
+Main Results:
+ Objective: 75.37
+ Penalty: 0.0
+ Effects:
+ CO2 [kg]:
+ temporal: [255.09, 255.09, 255.09]
+ periodic: [-0.0, -0.0, -0.0]
+ total: [255.09, 255.09, 255.09]
+ costs [€]:
+ temporal: [61.88, 56.1, 41.63]
+ periodic: [20.0, 20.0, 20.0]
+ total: [81.88, 76.1, 61.63]
+ Invest-Decisions:
+ Invested:
+ Speicher: [30.0, 30.0, 30.0]
+ Not invested: {}
+ Buses with excess: []
+Durations:
+ modeling: 0.68
+ solving: 0.46
+ saving: 0.0
+Config:
+ config_name: flixopt
+ logging:
+ level: INFO
+ file: null
+ console: false
+ max_file_size: 10485760
+ backup_count: 5
+ verbose_tracebacks: false
+ modeling:
+ big: 10000000
+ epsilon: 1.0e-05
+ big_binary_bound: 100000
+ solving:
+ mip_gap: 0.01
+ time_limit_seconds: 300
+ log_to_console: false
+ log_main_results: false
+ plotting:
+ default_show: false
+ default_engine: plotly
+ default_dpi: 300
+ default_facet_cols: 3
+ default_sequential_colorscale: turbo
+ default_qualitative_colorscale: plotly
diff --git a/tests/test_component.py b/tests/test_component.py
index 41d39b12a..66d09aaee 100644
--- a/tests/test_component.py
+++ b/tests/test_component.py
@@ -10,7 +10,6 @@
assert_sets_equal,
assert_var_equal,
create_linopy_model,
- create_optimization_and_solve,
)
@@ -32,12 +31,12 @@ def test_component(self, basic_flow_system_linopy_coords, coords_config):
"""Test that flow model constraints are correctly generated."""
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
inputs = [
- fx.Flow('In1', 'Fernwärme', relative_minimum=np.ones(10) * 0.1),
- fx.Flow('In2', 'Fernwärme', relative_minimum=np.ones(10) * 0.1),
+ fx.Flow('In1', 'Fernwärme', size=100, relative_minimum=np.ones(10) * 0.1),
+ fx.Flow('In2', 'Fernwärme', size=100, relative_minimum=np.ones(10) * 0.1),
]
outputs = [
- fx.Flow('Out1', 'Gas', relative_minimum=np.ones(10) * 0.01),
- fx.Flow('Out2', 'Gas', relative_minimum=np.ones(10) * 0.01),
+ fx.Flow('Out1', 'Gas', size=100, relative_minimum=np.ones(10) * 0.01),
+ fx.Flow('Out2', 'Gas', size=100, relative_minimum=np.ones(10) * 0.01),
]
comp = flixopt.elements.Component('TestComponent', inputs=inputs, outputs=outputs)
flow_system.add_elements(comp)
@@ -442,18 +441,18 @@ def test_transmission_basic(self, basic_flow_system, highs_solver):
flow_system.add_elements(transmission, boiler)
- _ = create_optimization_and_solve(flow_system, highs_solver, 'test_transmission_basic')
+ flow_system.optimize(highs_solver)
- # Assertions
+ # Assertions using new API (flow_system.solution)
assert_almost_equal_numeric(
- transmission.in1.submodel.status.status.solution.values,
+ flow_system.solution['Rohr(Rohr1)|status'].values,
np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1]),
'Status does not work properly',
)
assert_almost_equal_numeric(
- transmission.in1.submodel.flow_rate.solution.values * 0.8 - 20,
- transmission.out1.submodel.flow_rate.solution.values,
+ flow_system.solution['Rohr(Rohr1)|flow_rate'].values * 0.8 - 20,
+ flow_system.solution['Rohr(Rohr2)|flow_rate'].values,
'Losses are not computed correctly',
)
@@ -465,7 +464,9 @@ def test_transmission_balanced(self, basic_flow_system, highs_solver):
boiler = fx.linear_converters.Boiler(
'Boiler_Standard',
thermal_efficiency=0.9,
- thermal_flow=fx.Flow('Q_th', bus='Fernwärme', relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])),
+ thermal_flow=fx.Flow(
+ 'Q_th', bus='Fernwärme', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])
+ ),
fuel_flow=fx.Flow('Q_fu', bus='Gas'),
)
@@ -499,38 +500,34 @@ def test_transmission_balanced(self, basic_flow_system, highs_solver):
size=fx.InvestParameters(effects_of_investment_per_size=5, maximum_size=1000),
),
out1=fx.Flow('Rohr1b', 'Fernwärme', size=1000),
- in2=fx.Flow('Rohr2a', 'Fernwärme', size=fx.InvestParameters()),
+ in2=fx.Flow('Rohr2a', 'Fernwärme', size=fx.InvestParameters(maximum_size=1000)),
out2=fx.Flow('Rohr2b', bus='Wärme lokal', size=1000),
balanced=True,
)
flow_system.add_elements(transmission, boiler, boiler2, last2)
- optimization = create_optimization_and_solve(flow_system, highs_solver, 'test_transmission_advanced')
+ flow_system.optimize(highs_solver)
- # Assertions
+ # Assertions using new API (flow_system.solution)
assert_almost_equal_numeric(
- transmission.in1.submodel.status.status.solution.values,
+ flow_system.solution['Rohr(Rohr1a)|status'].values,
np.array([1, 1, 1, 0, 0, 0, 0, 0, 0, 0]),
'Status does not work properly',
)
+ # Verify output flow matches input flow minus losses (relative 20% + absolute 20)
+ in1_flow = flow_system.solution['Rohr(Rohr1a)|flow_rate'].values
+ expected_out1_flow = in1_flow * 0.8 - np.array([20 if val > 0.1 else 0 for val in in1_flow])
assert_almost_equal_numeric(
- optimization.results.model.variables['Rohr(Rohr1b)|flow_rate'].solution.values,
- transmission.out1.submodel.flow_rate.solution.values,
- 'Flow rate of Rohr__Rohr1b is not correct',
- )
-
- assert_almost_equal_numeric(
- transmission.in1.submodel.flow_rate.solution.values * 0.8
- - np.array([20 if val > 0.1 else 0 for val in transmission.in1.submodel.flow_rate.solution.values]),
- transmission.out1.submodel.flow_rate.solution.values,
+ flow_system.solution['Rohr(Rohr1b)|flow_rate'].values,
+ expected_out1_flow,
'Losses are not computed correctly',
)
assert_almost_equal_numeric(
- transmission.in1.submodel._investment.size.solution.item(),
- transmission.in2.submodel._investment.size.solution.item(),
+ flow_system.solution['Rohr(Rohr1a)|size'].item(),
+ flow_system.solution['Rohr(Rohr2a)|size'].item(),
'The Investments are not equated correctly',
)
@@ -542,7 +539,9 @@ def test_transmission_unbalanced(self, basic_flow_system, highs_solver):
boiler = fx.linear_converters.Boiler(
'Boiler_Standard',
thermal_efficiency=0.9,
- thermal_flow=fx.Flow('Q_th', bus='Fernwärme', relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])),
+ thermal_flow=fx.Flow(
+ 'Q_th', bus='Fernwärme', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])
+ ),
fuel_flow=fx.Flow('Q_fu', bus='Gas'),
)
@@ -579,7 +578,9 @@ def test_transmission_unbalanced(self, basic_flow_system, highs_solver):
in2=fx.Flow(
'Rohr2a',
'Fernwärme',
- size=fx.InvestParameters(effects_of_investment_per_size=100, minimum_size=10, mandatory=True),
+ size=fx.InvestParameters(
+ effects_of_investment_per_size=100, minimum_size=10, maximum_size=1000, mandatory=True
+ ),
),
out2=fx.Flow('Rohr2b', bus='Wärme lokal', size=1000),
balanced=False,
@@ -587,32 +588,28 @@ def test_transmission_unbalanced(self, basic_flow_system, highs_solver):
flow_system.add_elements(transmission, boiler, boiler2, last2)
- optimization = create_optimization_and_solve(flow_system, highs_solver, 'test_transmission_advanced')
+ flow_system.optimize(highs_solver)
- # Assertions
+ # Assertions using new API (flow_system.solution)
assert_almost_equal_numeric(
- transmission.in1.submodel.status.status.solution.values,
+ flow_system.solution['Rohr(Rohr1a)|status'].values,
np.array([1, 1, 1, 0, 0, 0, 0, 0, 0, 0]),
'Status does not work properly',
)
+ # Verify output flow matches input flow minus losses (relative 20% + absolute 20)
+ in1_flow = flow_system.solution['Rohr(Rohr1a)|flow_rate'].values
+ expected_out1_flow = in1_flow * 0.8 - np.array([20 if val > 0.1 else 0 for val in in1_flow])
assert_almost_equal_numeric(
- optimization.results.model.variables['Rohr(Rohr1b)|flow_rate'].solution.values,
- transmission.out1.submodel.flow_rate.solution.values,
- 'Flow rate of Rohr__Rohr1b is not correct',
- )
-
- assert_almost_equal_numeric(
- transmission.in1.submodel.flow_rate.solution.values * 0.8
- - np.array([20 if val > 0.1 else 0 for val in transmission.in1.submodel.flow_rate.solution.values]),
- transmission.out1.submodel.flow_rate.solution.values,
+ flow_system.solution['Rohr(Rohr1b)|flow_rate'].values,
+ expected_out1_flow,
'Losses are not computed correctly',
)
- assert transmission.in1.submodel._investment.size.solution.item() > 11
+ assert flow_system.solution['Rohr(Rohr1a)|size'].item() > 11
assert_almost_equal_numeric(
- transmission.in2.submodel._investment.size.solution.item(),
+ flow_system.solution['Rohr(Rohr2a)|size'].item(),
10,
'Sizing does not work properly',
)
diff --git a/tests/test_effect.py b/tests/test_effect.py
index 33ce59f9e..015e054eb 100644
--- a/tests/test_effect.py
+++ b/tests/test_effect.py
@@ -1,4 +1,5 @@
import numpy as np
+import pytest
import xarray as xr
import flixopt as fx
@@ -8,7 +9,6 @@
assert_sets_equal,
assert_var_equal,
create_linopy_model,
- create_optimization_and_solve,
)
@@ -224,8 +224,8 @@ def test_shares(self, basic_flow_system_linopy_coords, coords_config):
class TestEffectResults:
- def test_shares(self, basic_flow_system_linopy_coords, coords_config):
- flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
+ def test_shares(self, basic_flow_system_linopy_coords, coords_config, highs_solver):
+ flow_system = basic_flow_system_linopy_coords
effect1 = fx.Effect('Effect1', '€', 'Testing Effect', share_from_temporal={'costs': 0.5})
effect2 = fx.Effect(
'Effect2',
@@ -251,13 +251,18 @@ def test_shares(self, basic_flow_system_linopy_coords, coords_config):
thermal_flow=fx.Flow(
'Q_th',
bus='Fernwärme',
- size=fx.InvestParameters(effects_of_investment_per_size=10, minimum_size=20, mandatory=True),
+ size=fx.InvestParameters(
+ effects_of_investment_per_size=10, minimum_size=20, maximum_size=200, mandatory=True
+ ),
),
fuel_flow=fx.Flow('Q_fu', bus='Gas'),
),
)
- results = create_optimization_and_solve(flow_system, fx.solvers.HighsSolver(0.01, 60), 'Sim1').results
+ flow_system.optimize(highs_solver)
+
+ # Use the new statistics accessor
+ statistics = flow_system.statistics
effect_share_factors = {
'temporal': {
@@ -274,71 +279,72 @@ def test_shares(self, basic_flow_system_linopy_coords, coords_config):
},
}
for key, value in effect_share_factors['temporal'].items():
- np.testing.assert_allclose(results.effect_share_factors['temporal'][key].values, value)
+ np.testing.assert_allclose(statistics.effect_share_factors['temporal'][key].values, value)
for key, value in effect_share_factors['periodic'].items():
- np.testing.assert_allclose(results.effect_share_factors['periodic'][key].values, value)
+ np.testing.assert_allclose(statistics.effect_share_factors['periodic'][key].values, value)
+ # Temporal effects checks using new API
xr.testing.assert_allclose(
- results.effects_per_component['temporal'].sum('component').sel(effect='costs', drop=True),
- results.solution['costs(temporal)|per_timestep'].fillna(0),
+ statistics.temporal_effects['costs'].sum('contributor'),
+ flow_system.solution['costs(temporal)|per_timestep'].fillna(0),
)
xr.testing.assert_allclose(
- results.effects_per_component['temporal'].sum('component').sel(effect='Effect1', drop=True),
- results.solution['Effect1(temporal)|per_timestep'].fillna(0),
+ statistics.temporal_effects['Effect1'].sum('contributor'),
+ flow_system.solution['Effect1(temporal)|per_timestep'].fillna(0),
)
xr.testing.assert_allclose(
- results.effects_per_component['temporal'].sum('component').sel(effect='Effect2', drop=True),
- results.solution['Effect2(temporal)|per_timestep'].fillna(0),
+ statistics.temporal_effects['Effect2'].sum('contributor'),
+ flow_system.solution['Effect2(temporal)|per_timestep'].fillna(0),
)
xr.testing.assert_allclose(
- results.effects_per_component['temporal'].sum('component').sel(effect='Effect3', drop=True),
- results.solution['Effect3(temporal)|per_timestep'].fillna(0),
+ statistics.temporal_effects['Effect3'].sum('contributor'),
+ flow_system.solution['Effect3(temporal)|per_timestep'].fillna(0),
)
- # periodic mode checks
+ # Periodic effects checks using new API
xr.testing.assert_allclose(
- results.effects_per_component['periodic'].sum('component').sel(effect='costs', drop=True),
- results.solution['costs(periodic)'],
+ statistics.periodic_effects['costs'].sum('contributor'),
+ flow_system.solution['costs(periodic)'],
)
xr.testing.assert_allclose(
- results.effects_per_component['periodic'].sum('component').sel(effect='Effect1', drop=True),
- results.solution['Effect1(periodic)'],
+ statistics.periodic_effects['Effect1'].sum('contributor'),
+ flow_system.solution['Effect1(periodic)'],
)
xr.testing.assert_allclose(
- results.effects_per_component['periodic'].sum('component').sel(effect='Effect2', drop=True),
- results.solution['Effect2(periodic)'],
+ statistics.periodic_effects['Effect2'].sum('contributor'),
+ flow_system.solution['Effect2(periodic)'],
)
xr.testing.assert_allclose(
- results.effects_per_component['periodic'].sum('component').sel(effect='Effect3', drop=True),
- results.solution['Effect3(periodic)'],
+ statistics.periodic_effects['Effect3'].sum('contributor'),
+ flow_system.solution['Effect3(periodic)'],
)
- # Total mode checks
+ # Total effects checks using new API
xr.testing.assert_allclose(
- results.effects_per_component['total'].sum('component').sel(effect='costs', drop=True),
- results.solution['costs'],
+ statistics.total_effects['costs'].sum('contributor'),
+ flow_system.solution['costs'],
)
xr.testing.assert_allclose(
- results.effects_per_component['total'].sum('component').sel(effect='Effect1', drop=True),
- results.solution['Effect1'],
+ statistics.total_effects['Effect1'].sum('contributor'),
+ flow_system.solution['Effect1'],
)
xr.testing.assert_allclose(
- results.effects_per_component['total'].sum('component').sel(effect='Effect2', drop=True),
- results.solution['Effect2'],
+ statistics.total_effects['Effect2'].sum('contributor'),
+ flow_system.solution['Effect2'],
)
xr.testing.assert_allclose(
- results.effects_per_component['total'].sum('component').sel(effect='Effect3', drop=True),
- results.solution['Effect3'],
+ statistics.total_effects['Effect3'].sum('contributor'),
+ flow_system.solution['Effect3'],
)
@@ -347,7 +353,6 @@ class TestPenaltyAsObjective:
def test_penalty_cannot_be_created_as_objective(self):
"""Test that creating a Penalty effect with is_objective=True raises ValueError."""
- import pytest
with pytest.raises(ValueError, match='Penalty.*cannot be set as the objective'):
fx.Effect('Penalty', '€', 'Test Penalty', is_objective=True)
@@ -355,7 +360,6 @@ def test_penalty_cannot_be_created_as_objective(self):
def test_penalty_cannot_be_set_as_objective_via_setter(self):
"""Test that setting Penalty as objective via setter raises ValueError."""
import pandas as pd
- import pytest
# Create a fresh flow system without pre-existing objective
flow_system = fx.FlowSystem(timesteps=pd.date_range('2020-01-01', periods=10, freq='h'))
diff --git a/tests/test_flow.py b/tests/test_flow.py
index 0a1a03341..594bc1fbb 100644
--- a/tests/test_flow.py
+++ b/tests/test_flow.py
@@ -593,6 +593,7 @@ def test_effects_per_active_hour(self, basic_flow_system_linopy_coords, coords_c
flow = fx.Flow(
'Wärme',
bus='Fernwärme',
+ size=100,
status_parameters=fx.StatusParameters(
effects_per_active_hour={'costs': costs_per_running_hour, 'CO2': co2_per_running_hour}
),
diff --git a/tests/test_flow_system_locking.py b/tests/test_flow_system_locking.py
new file mode 100644
index 000000000..68d3ec010
--- /dev/null
+++ b/tests/test_flow_system_locking.py
@@ -0,0 +1,403 @@
+"""
+Tests for FlowSystem locking behavior (read-only after optimization).
+
+A FlowSystem becomes locked (read-only) when it has a solution.
+This prevents accidental modifications to a system that has already been optimized.
+"""
+
+import copy
+import warnings
+
+import pytest
+
+import flixopt as fx
+
+# Note: We use simple_flow_system fixture from conftest.py
+
+
+class TestIsLocked:
+ """Test the is_locked property."""
+
+ def test_not_locked_initially(self, simple_flow_system):
+ """A new FlowSystem should not be locked."""
+ assert simple_flow_system.is_locked is False
+
+ def test_not_locked_after_build_model(self, simple_flow_system):
+ """FlowSystem should not be locked after build_model (no solution yet)."""
+ simple_flow_system.build_model()
+ assert simple_flow_system.is_locked is False
+
+ def test_locked_after_optimization(self, simple_flow_system, highs_solver):
+ """FlowSystem should be locked after optimization."""
+ simple_flow_system.optimize(highs_solver)
+ assert simple_flow_system.is_locked is True
+
+ def test_not_locked_after_reset(self, simple_flow_system, highs_solver):
+ """FlowSystem should not be locked after reset."""
+ simple_flow_system.optimize(highs_solver)
+ assert simple_flow_system.is_locked is True
+
+ simple_flow_system.reset()
+ assert simple_flow_system.is_locked is False
+
+
+class TestAddElementsLocking:
+ """Test that add_elements respects locking."""
+
+ def test_add_elements_before_optimization(self, simple_flow_system):
+ """Should be able to add elements before optimization."""
+ new_bus = fx.Bus('NewBus')
+ simple_flow_system.add_elements(new_bus)
+ assert 'NewBus' in simple_flow_system.buses
+
+ def test_add_elements_raises_when_locked(self, simple_flow_system, highs_solver):
+ """Should raise RuntimeError when adding elements to a locked FlowSystem."""
+ simple_flow_system.optimize(highs_solver)
+
+ new_bus = fx.Bus('NewBus')
+ with pytest.raises(RuntimeError, match='Cannot add elements.*reset\\(\\)'):
+ simple_flow_system.add_elements(new_bus)
+
+ def test_add_elements_after_reset(self, simple_flow_system, highs_solver):
+ """Should be able to add elements after reset."""
+ simple_flow_system.optimize(highs_solver)
+ simple_flow_system.reset()
+
+ new_bus = fx.Bus('NewBus')
+ simple_flow_system.add_elements(new_bus)
+ assert 'NewBus' in simple_flow_system.buses
+
+ def test_add_elements_invalidates_model(self, simple_flow_system):
+ """Adding elements to a FlowSystem with a model should invalidate the model."""
+ simple_flow_system.build_model()
+ assert simple_flow_system.model is not None
+
+ new_bus = fx.Bus('NewBus')
+ with warnings.catch_warnings(record=True) as w:
+ warnings.simplefilter('always')
+ simple_flow_system.add_elements(new_bus)
+ assert len(w) == 1
+ assert 'model will be invalidated' in str(w[0].message)
+
+ assert simple_flow_system.model is None
+
+
+class TestAddCarriersLocking:
+ """Test that add_carriers respects locking."""
+
+ def test_add_carriers_before_optimization(self, simple_flow_system):
+ """Should be able to add carriers before optimization."""
+ carrier = fx.Carrier('biogas', '#00FF00', 'kW')
+ simple_flow_system.add_carriers(carrier)
+ assert 'biogas' in simple_flow_system.carriers
+
+ def test_add_carriers_raises_when_locked(self, simple_flow_system, highs_solver):
+ """Should raise RuntimeError when adding carriers to a locked FlowSystem."""
+ simple_flow_system.optimize(highs_solver)
+
+ carrier = fx.Carrier('biogas', '#00FF00', 'kW')
+ with pytest.raises(RuntimeError, match='Cannot add carriers.*reset\\(\\)'):
+ simple_flow_system.add_carriers(carrier)
+
+ def test_add_carriers_after_reset(self, simple_flow_system, highs_solver):
+ """Should be able to add carriers after reset."""
+ simple_flow_system.optimize(highs_solver)
+ simple_flow_system.reset()
+
+ carrier = fx.Carrier('biogas', '#00FF00', 'kW')
+ simple_flow_system.add_carriers(carrier)
+ assert 'biogas' in simple_flow_system.carriers
+
+ def test_add_carriers_invalidates_model(self, simple_flow_system):
+ """Adding carriers to a FlowSystem with a model should invalidate the model."""
+ simple_flow_system.build_model()
+ assert simple_flow_system.model is not None
+
+ carrier = fx.Carrier('biogas', '#00FF00', 'kW')
+ with warnings.catch_warnings(record=True) as w:
+ warnings.simplefilter('always')
+ simple_flow_system.add_carriers(carrier)
+ assert len(w) == 1
+ assert 'model will be invalidated' in str(w[0].message)
+
+ assert simple_flow_system.model is None
+
+
+class TestReset:
+ """Test the reset method."""
+
+ def test_reset_clears_solution(self, simple_flow_system, highs_solver):
+ """Reset should clear the solution."""
+ simple_flow_system.optimize(highs_solver)
+ assert simple_flow_system.solution is not None
+
+ simple_flow_system.reset()
+ assert simple_flow_system.solution is None
+
+ def test_reset_clears_model(self, simple_flow_system, highs_solver):
+ """Reset should clear the model."""
+ simple_flow_system.optimize(highs_solver)
+ assert simple_flow_system.model is not None
+
+ simple_flow_system.reset()
+ assert simple_flow_system.model is None
+
+ def test_reset_clears_element_submodels(self, simple_flow_system, highs_solver):
+ """Reset should clear element submodels."""
+ simple_flow_system.optimize(highs_solver)
+
+ # Check that elements have submodels after optimization
+ boiler = simple_flow_system.components['Boiler']
+ assert boiler.submodel is not None
+ assert len(boiler._variable_names) > 0
+
+ simple_flow_system.reset()
+
+ # Check that submodels are cleared
+ assert boiler.submodel is None
+ assert len(boiler._variable_names) == 0
+
+ def test_reset_returns_self(self, simple_flow_system, highs_solver):
+ """Reset should return self for method chaining."""
+ simple_flow_system.optimize(highs_solver)
+ result = simple_flow_system.reset()
+ assert result is simple_flow_system
+
+ def test_reset_allows_reoptimization(self, simple_flow_system, highs_solver):
+ """After reset, FlowSystem can be optimized again."""
+ simple_flow_system.optimize(highs_solver)
+ original_cost = simple_flow_system.solution['costs'].item()
+
+ simple_flow_system.reset()
+ simple_flow_system.optimize(highs_solver)
+
+ assert simple_flow_system.solution is not None
+ # Cost should be the same since system structure didn't change
+ assert simple_flow_system.solution['costs'].item() == pytest.approx(original_cost)
+
+
+class TestCopy:
+ """Test the copy method."""
+
+ def test_copy_creates_new_instance(self, simple_flow_system):
+ """Copy should create a new FlowSystem instance."""
+ copy_fs = simple_flow_system.copy()
+ assert copy_fs is not simple_flow_system
+
+ def test_copy_preserves_elements(self, simple_flow_system):
+ """Copy should preserve all elements."""
+ copy_fs = simple_flow_system.copy()
+
+ assert set(copy_fs.components.keys()) == set(simple_flow_system.components.keys())
+ assert set(copy_fs.buses.keys()) == set(simple_flow_system.buses.keys())
+
+ def test_copy_does_not_copy_solution(self, simple_flow_system, highs_solver):
+ """Copy should not include the solution."""
+ simple_flow_system.optimize(highs_solver)
+ assert simple_flow_system.solution is not None
+
+ copy_fs = simple_flow_system.copy()
+ assert copy_fs.solution is None
+
+ def test_copy_does_not_copy_model(self, simple_flow_system, highs_solver):
+ """Copy should not include the model."""
+ simple_flow_system.optimize(highs_solver)
+ assert simple_flow_system.model is not None
+
+ copy_fs = simple_flow_system.copy()
+ assert copy_fs.model is None
+
+ def test_copy_is_not_locked(self, simple_flow_system, highs_solver):
+ """Copy should not be locked even if original is."""
+ simple_flow_system.optimize(highs_solver)
+ assert simple_flow_system.is_locked is True
+
+ copy_fs = simple_flow_system.copy()
+ assert copy_fs.is_locked is False
+
+ def test_copy_can_be_modified(self, simple_flow_system, highs_solver):
+ """Copy should be modifiable even if original is locked."""
+ simple_flow_system.optimize(highs_solver)
+
+ copy_fs = simple_flow_system.copy()
+ new_bus = fx.Bus('NewBus')
+ copy_fs.add_elements(new_bus) # Should not raise
+ assert 'NewBus' in copy_fs.buses
+
+ def test_copy_can_be_optimized_independently(self, simple_flow_system, highs_solver):
+ """Copy can be optimized independently of original."""
+ simple_flow_system.optimize(highs_solver)
+ original_cost = simple_flow_system.solution['costs'].item()
+
+ copy_fs = simple_flow_system.copy()
+ copy_fs.optimize(highs_solver)
+
+ # Both should have solutions
+ assert simple_flow_system.solution is not None
+ assert copy_fs.solution is not None
+
+ # Costs should be equal (same system)
+ assert copy_fs.solution['costs'].item() == pytest.approx(original_cost)
+
+ def test_python_copy_uses_copy_method(self, simple_flow_system, highs_solver):
+ """copy.copy() should use the custom copy method."""
+ simple_flow_system.optimize(highs_solver)
+
+ copy_fs = copy.copy(simple_flow_system)
+ assert copy_fs.solution is None
+ assert copy_fs.is_locked is False
+
+ def test_python_deepcopy_uses_copy_method(self, simple_flow_system, highs_solver):
+ """copy.deepcopy() should use the custom copy method."""
+ simple_flow_system.optimize(highs_solver)
+
+ copy_fs = copy.deepcopy(simple_flow_system)
+ assert copy_fs.solution is None
+ assert copy_fs.is_locked is False
+
+
+class TestLoadedFlowSystem:
+ """Test that loaded FlowSystems respect locking."""
+
+ def test_loaded_fs_with_solution_is_locked(self, simple_flow_system, highs_solver, tmp_path):
+ """A FlowSystem loaded from file with solution should be locked."""
+ simple_flow_system.optimize(highs_solver)
+ filepath = tmp_path / 'test_fs.nc'
+ simple_flow_system.to_netcdf(filepath)
+
+ loaded_fs = fx.FlowSystem.from_netcdf(filepath)
+ assert loaded_fs.is_locked is True
+
+ def test_loaded_fs_can_be_reset(self, simple_flow_system, highs_solver, tmp_path):
+ """A loaded FlowSystem can be reset to allow modifications."""
+ simple_flow_system.optimize(highs_solver)
+ filepath = tmp_path / 'test_fs.nc'
+ simple_flow_system.to_netcdf(filepath)
+
+ loaded_fs = fx.FlowSystem.from_netcdf(filepath)
+ loaded_fs.reset()
+
+ assert loaded_fs.is_locked is False
+ new_bus = fx.Bus('NewBus')
+ loaded_fs.add_elements(new_bus) # Should not raise
+
+
+class TestInvalidate:
+ """Test the invalidate method for manual model invalidation."""
+
+ def test_invalidate_resets_connected_and_transformed(self, simple_flow_system):
+ """Invalidate should reset the connected_and_transformed flag."""
+ simple_flow_system.connect_and_transform()
+ assert simple_flow_system.connected_and_transformed is True
+
+ simple_flow_system.invalidate()
+ assert simple_flow_system.connected_and_transformed is False
+
+ def test_invalidate_clears_model(self, simple_flow_system):
+ """Invalidate should clear the model."""
+ simple_flow_system.build_model()
+ assert simple_flow_system.model is not None
+
+ simple_flow_system.invalidate()
+ assert simple_flow_system.model is None
+
+ def test_invalidate_raises_when_locked(self, simple_flow_system, highs_solver):
+ """Invalidate should raise RuntimeError when FlowSystem has a solution."""
+ simple_flow_system.optimize(highs_solver)
+
+ with pytest.raises(RuntimeError, match='Cannot invalidate.*reset\\(\\)'):
+ simple_flow_system.invalidate()
+
+ def test_invalidate_returns_self(self, simple_flow_system):
+ """Invalidate should return self for method chaining."""
+ simple_flow_system.connect_and_transform()
+ result = simple_flow_system.invalidate()
+ assert result is simple_flow_system
+
+ def test_invalidate_allows_retransformation(self, simple_flow_system, highs_solver):
+ """After invalidate, connect_and_transform should run again."""
+ simple_flow_system.connect_and_transform()
+ assert simple_flow_system.connected_and_transformed is True
+
+ simple_flow_system.invalidate()
+ assert simple_flow_system.connected_and_transformed is False
+
+ # Should be able to connect_and_transform again
+ simple_flow_system.connect_and_transform()
+ assert simple_flow_system.connected_and_transformed is True
+
+ def test_modify_element_and_invalidate(self, simple_flow_system, highs_solver):
+ """Test the workflow: optimize -> reset -> modify -> invalidate -> re-optimize."""
+ # First optimization
+ simple_flow_system.optimize(highs_solver)
+ original_cost = simple_flow_system.solution['costs'].item()
+
+ # Reset to unlock
+ simple_flow_system.reset()
+
+ # Modify an element attribute (increase gas price, which should increase costs)
+ gas_tariff = simple_flow_system.components['Gastarif']
+ original_effects = gas_tariff.outputs[0].effects_per_flow_hour
+ # Double the cost effect
+ gas_tariff.outputs[0].effects_per_flow_hour = {effect: value * 2 for effect, value in original_effects.items()}
+
+ # Invalidate to trigger re-transformation
+ simple_flow_system.invalidate()
+
+ # Re-optimize
+ simple_flow_system.optimize(highs_solver)
+ new_cost = simple_flow_system.solution['costs'].item()
+
+ # Cost should have increased due to higher gas price
+ assert new_cost > original_cost
+
+ def test_invalidate_needed_after_transform_before_optimize(self, simple_flow_system, highs_solver):
+ """Invalidate is needed to apply changes made after connect_and_transform but before optimize."""
+ # Connect and transform (but don't optimize yet)
+ simple_flow_system.connect_and_transform()
+
+ # Modify an attribute - double the gas costs
+ gas_tariff = simple_flow_system.components['Gastarif']
+ original_effects = gas_tariff.outputs[0].effects_per_flow_hour
+ gas_tariff.outputs[0].effects_per_flow_hour = {effect: value * 2 for effect, value in original_effects.items()}
+
+ # Call invalidate to ensure re-transformation
+ simple_flow_system.invalidate()
+ assert simple_flow_system.connected_and_transformed is False
+
+ # Now optimize - the doubled values should take effect
+ simple_flow_system.optimize(highs_solver)
+ cost_with_doubled = simple_flow_system.solution['costs'].item()
+
+ # Reset and use original values
+ simple_flow_system.reset()
+ gas_tariff.outputs[0].effects_per_flow_hour = {
+ effect: value / 2 for effect, value in gas_tariff.outputs[0].effects_per_flow_hour.items()
+ }
+ simple_flow_system.optimize(highs_solver)
+ cost_with_original = simple_flow_system.solution['costs'].item()
+
+ # The doubled costs should result in higher total cost
+ assert cost_with_doubled > cost_with_original
+
+ def test_reset_already_invalidates(self, simple_flow_system, highs_solver):
+ """Reset already invalidates, so modifications after reset take effect."""
+ # First optimization
+ simple_flow_system.optimize(highs_solver)
+ original_cost = simple_flow_system.solution['costs'].item()
+
+ # Reset - this already calls _invalidate_model()
+ simple_flow_system.reset()
+ assert simple_flow_system.connected_and_transformed is False
+
+ # Modify an element attribute
+ gas_tariff = simple_flow_system.components['Gastarif']
+ original_effects = gas_tariff.outputs[0].effects_per_flow_hour
+ gas_tariff.outputs[0].effects_per_flow_hour = {effect: value * 2 for effect, value in original_effects.items()}
+
+ # Re-optimize - changes take effect because reset already invalidated
+ simple_flow_system.optimize(highs_solver)
+ new_cost = simple_flow_system.solution['costs'].item()
+
+ # Cost should have increased
+ assert new_cost > original_cost
diff --git a/tests/test_flow_system_resample.py b/tests/test_flow_system_resample.py
index 9ddf4d5e4..7486b173c 100644
--- a/tests/test_flow_system_resample.py
+++ b/tests/test_flow_system_resample.py
@@ -206,11 +206,10 @@ def test_modeling(with_dim):
)
fs_r = fs.resample('4h', method='mean')
- calc = fx.Optimization('test', fs_r)
- calc.do_modeling()
+ fs_r.build_model()
- assert calc.model is not None
- assert len(calc.model.variables) > 0
+ assert fs_r.model is not None
+ assert len(fs_r.model.variables) > 0
def test_model_structure_preserved():
@@ -225,22 +224,18 @@ def test_model_structure_preserved():
fx.Source(label='s', outputs=[fx.Flow(label='out', bus='h', size=100, effects_per_flow_hour={'costs': 0.05})]),
)
- calc_orig = fx.Optimization('orig', fs)
- calc_orig.do_modeling()
+ fs.build_model()
fs_r = fs.resample('4h', method='mean')
- calc_r = fx.Optimization('resamp', fs_r)
- calc_r.do_modeling()
+ fs_r.build_model()
# Same number of variable/constraint types
- assert len(calc_orig.model.variables) == len(calc_r.model.variables)
- assert len(calc_orig.model.constraints) == len(calc_r.model.constraints)
+ assert len(fs.model.variables) == len(fs_r.model.variables)
+ assert len(fs.model.constraints) == len(fs_r.model.constraints)
# Same names
- assert set(calc_orig.model.variables.labels.data_vars.keys()) == set(calc_r.model.variables.labels.data_vars.keys())
- assert set(calc_orig.model.constraints.labels.data_vars.keys()) == set(
- calc_r.model.constraints.labels.data_vars.keys()
- )
+ assert set(fs.model.variables.labels.data_vars.keys()) == set(fs_r.model.variables.labels.data_vars.keys())
+ assert set(fs.model.constraints.labels.data_vars.keys()) == set(fs_r.model.constraints.labels.data_vars.keys())
# === Advanced Features ===
@@ -276,8 +271,8 @@ def test_frequencies(freq, exp_len):
assert len(fs.resample(freq, method='mean').timesteps) == exp_len
-def test_irregular_timesteps():
- """Test irregular timesteps."""
+def test_irregular_timesteps_error():
+ """Test that resampling irregular timesteps to finer resolution raises error without fill_gaps."""
ts = pd.DatetimeIndex(['2023-01-01 00:00', '2023-01-01 01:00', '2023-01-01 03:00'], name='time')
fs = fx.FlowSystem(ts)
fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
@@ -285,8 +280,26 @@ def test_irregular_timesteps():
fx.Sink(label='s', inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.ones(3), size=1)])
)
- fs_r = fs.resample('1h', method='mean')
- assert len(fs_r.timesteps) > 0
+ with pytest.raises(ValueError, match='Resampling created gaps'):
+ fs.transform.resample('1h', method='mean')
+
+
+def test_irregular_timesteps_with_fill_gaps():
+ """Test that resampling irregular timesteps works with explicit fill_gaps strategy."""
+ ts = pd.DatetimeIndex(['2023-01-01 00:00', '2023-01-01 01:00', '2023-01-01 03:00'], name='time')
+ fs = fx.FlowSystem(ts)
+ fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add_elements(
+ fx.Sink(
+ label='s', inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.array([1.0, 2.0, 4.0]), size=1)]
+ )
+ )
+
+ # Test with ffill
+ fs_r = fs.transform.resample('1h', method='mean', fill_gaps='ffill')
+ assert len(fs_r.timesteps) == 4
+ # Gap at 02:00 should be filled with previous value (2.0)
+ assert_allclose(fs_r.flows['s(in)'].fixed_relative_profile.values, [1.0, 2.0, 2.0, 4.0])
if __name__ == '__main__':
diff --git a/tests/test_functional.py b/tests/test_functional.py
index f351deef5..6d0f8a8fc 100644
--- a/tests/test_functional.py
+++ b/tests/test_functional.py
@@ -93,11 +93,10 @@ def flow_system_minimal(timesteps) -> fx.FlowSystem:
return flow_system
-def solve_and_load(flow_system: fx.FlowSystem, solver) -> fx.results.Results:
- optimization = fx.Optimization('Calculation', flow_system)
- optimization.do_modeling()
- optimization.solve(solver)
- return optimization.results
+def solve_and_load(flow_system: fx.FlowSystem, solver) -> fx.FlowSystem:
+ """Optimize the flow system and return it with the solution."""
+ flow_system.optimize(solver)
+ return flow_system
@pytest.fixture
@@ -106,30 +105,31 @@ def time_steps_fixture(request):
def test_solve_and_load(solver_fixture, time_steps_fixture):
- results = solve_and_load(flow_system_minimal(time_steps_fixture), solver_fixture)
- assert results is not None
+ flow_system = solve_and_load(flow_system_minimal(time_steps_fixture), solver_fixture)
+ assert flow_system.solution is not None
def test_minimal_model(solver_fixture, time_steps_fixture):
- results = solve_and_load(flow_system_minimal(time_steps_fixture), solver_fixture)
- assert_allclose(results.model.variables['costs'].solution.values, 80, rtol=1e-5, atol=1e-10)
+ flow_system = solve_and_load(flow_system_minimal(time_steps_fixture), solver_fixture)
+
+ assert_allclose(flow_system.solution['costs'].values, 80, rtol=1e-5, atol=1e-10)
assert_allclose(
- results.model.variables['Boiler(Q_th)|flow_rate'].solution.values,
+ flow_system.solution['Boiler(Q_th)|flow_rate'].values[:-1],
[-0.0, 10.0, 20.0, -0.0, 10.0],
rtol=1e-5,
atol=1e-10,
)
assert_allclose(
- results.model.variables['costs(temporal)|per_timestep'].solution.values,
+ flow_system.solution['costs(temporal)|per_timestep'].values[:-1],
[-0.0, 20.0, 40.0, -0.0, 20.0],
rtol=1e-5,
atol=1e-10,
)
assert_allclose(
- results.model.variables['Gastarif(Gas)->costs(temporal)'].solution.values,
+ flow_system.solution['Gastarif(Gas)->costs(temporal)'].values[:-1],
[-0.0, 20.0, 40.0, -0.0, 20.0],
rtol=1e-5,
atol=1e-10,
@@ -152,24 +152,22 @@ def test_fixed_size(solver_fixture, time_steps_fixture):
)
solve_and_load(flow_system, solver_fixture)
- boiler = flow_system['Boiler']
- costs = flow_system.effects['costs']
assert_allclose(
- costs.submodel.total.solution.item(),
+ flow_system.solution['costs'].item(),
80 + 1000 * 1 + 10,
rtol=1e-5,
atol=1e-10,
err_msg='The total costs does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.investment.size.solution.item(),
+ flow_system.solution['Boiler(Q_th)|size'].item(),
1000,
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler__Q_th__Investment_size" does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.investment.invested.solution.item(),
+ flow_system.solution['Boiler(Q_th)|invested'].item(),
1,
rtol=1e-5,
atol=1e-10,
@@ -187,30 +185,28 @@ def test_optimize_size(solver_fixture, time_steps_fixture):
thermal_flow=fx.Flow(
'Q_th',
bus='Fernwärme',
- size=fx.InvestParameters(effects_of_investment=10, effects_of_investment_per_size=1),
+ size=fx.InvestParameters(effects_of_investment=10, effects_of_investment_per_size=1, maximum_size=100),
),
)
)
solve_and_load(flow_system, solver_fixture)
- boiler = flow_system['Boiler']
- costs = flow_system.effects['costs']
assert_allclose(
- costs.submodel.total.solution.item(),
+ flow_system.solution['costs'].item(),
80 + 20 * 1 + 10,
rtol=1e-5,
atol=1e-10,
err_msg='The total costs does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.investment.size.solution.item(),
+ flow_system.solution['Boiler(Q_th)|size'].item(),
20,
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler__Q_th__Investment_size" does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.investment.invested.solution.item(),
+ flow_system.solution['Boiler(Q_th)|invested'].item(),
1,
rtol=1e-5,
atol=1e-10,
@@ -228,30 +224,30 @@ def test_size_bounds(solver_fixture, time_steps_fixture):
thermal_flow=fx.Flow(
'Q_th',
bus='Fernwärme',
- size=fx.InvestParameters(minimum_size=40, effects_of_investment=10, effects_of_investment_per_size=1),
+ size=fx.InvestParameters(
+ minimum_size=40, maximum_size=100, effects_of_investment=10, effects_of_investment_per_size=1
+ ),
),
)
)
solve_and_load(flow_system, solver_fixture)
- boiler = flow_system['Boiler']
- costs = flow_system.effects['costs']
assert_allclose(
- costs.submodel.total.solution.item(),
+ flow_system.solution['costs'].item(),
80 + 40 * 1 + 10,
rtol=1e-5,
atol=1e-10,
err_msg='The total costs does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.investment.size.solution.item(),
+ flow_system.solution['Boiler(Q_th)|size'].item(),
40,
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler__Q_th__Investment_size" does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.investment.invested.solution.item(),
+ flow_system.solution['Boiler(Q_th)|invested'].item(),
1,
rtol=1e-5,
atol=1e-10,
@@ -270,7 +266,11 @@ def test_optional_invest(solver_fixture, time_steps_fixture):
'Q_th',
bus='Fernwärme',
size=fx.InvestParameters(
- mandatory=False, minimum_size=40, effects_of_investment=10, effects_of_investment_per_size=1
+ mandatory=False,
+ minimum_size=40,
+ maximum_size=100,
+ effects_of_investment=10,
+ effects_of_investment_per_size=1,
),
),
),
@@ -282,32 +282,33 @@ def test_optional_invest(solver_fixture, time_steps_fixture):
'Q_th',
bus='Fernwärme',
size=fx.InvestParameters(
- mandatory=False, minimum_size=50, effects_of_investment=10, effects_of_investment_per_size=1
+ mandatory=False,
+ minimum_size=50,
+ maximum_size=100,
+ effects_of_investment=10,
+ effects_of_investment_per_size=1,
),
),
),
)
solve_and_load(flow_system, solver_fixture)
- boiler = flow_system['Boiler']
- boiler_optional = flow_system['Boiler_optional']
- costs = flow_system.effects['costs']
assert_allclose(
- costs.submodel.total.solution.item(),
+ flow_system.solution['costs'].item(),
80 + 40 * 1 + 10,
rtol=1e-5,
atol=1e-10,
err_msg='The total costs does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.investment.size.solution.item(),
+ flow_system.solution['Boiler(Q_th)|size'].item(),
40,
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler__Q_th__Investment_size" does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.investment.invested.solution.item(),
+ flow_system.solution['Boiler(Q_th)|invested'].item(),
1,
rtol=1e-5,
atol=1e-10,
@@ -315,14 +316,14 @@ def test_optional_invest(solver_fixture, time_steps_fixture):
)
assert_allclose(
- boiler_optional.thermal_flow.submodel.investment.size.solution.item(),
+ flow_system.solution['Boiler_optional(Q_th)|size'].item(),
0,
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler__Q_th__Investment_size" does not have the right value',
)
assert_allclose(
- boiler_optional.thermal_flow.submodel.investment.invested.solution.item(),
+ flow_system.solution['Boiler_optional(Q_th)|invested'].item(),
0,
rtol=1e-5,
atol=1e-10,
@@ -343,10 +344,8 @@ def test_on(solver_fixture, time_steps_fixture):
)
solve_and_load(flow_system, solver_fixture)
- boiler = flow_system['Boiler']
- costs = flow_system.effects['costs']
assert_allclose(
- costs.submodel.total.solution.item(),
+ flow_system.solution['costs'].item(),
80,
rtol=1e-5,
atol=1e-10,
@@ -354,14 +353,14 @@ def test_on(solver_fixture, time_steps_fixture):
)
assert_allclose(
- boiler.thermal_flow.submodel.status.status.solution.values,
+ flow_system.solution['Boiler(Q_th)|status'].values[:-1],
[0, 1, 1, 0, 1],
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler__Q_th__on" does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.flow_rate.solution.values,
+ flow_system.solution['Boiler(Q_th)|flow_rate'].values[:-1],
[0, 10, 20, 0, 10],
rtol=1e-5,
atol=1e-10,
@@ -387,10 +386,8 @@ def test_off(solver_fixture, time_steps_fixture):
)
solve_and_load(flow_system, solver_fixture)
- boiler = flow_system['Boiler']
- costs = flow_system.effects['costs']
assert_allclose(
- costs.submodel.total.solution.item(),
+ flow_system.solution['costs'].item(),
80,
rtol=1e-5,
atol=1e-10,
@@ -398,21 +395,21 @@ def test_off(solver_fixture, time_steps_fixture):
)
assert_allclose(
- boiler.thermal_flow.submodel.status.status.solution.values,
+ flow_system.solution['Boiler(Q_th)|status'].values[:-1],
[0, 1, 1, 0, 1],
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler__Q_th__on" does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.status.inactive.solution.values,
- 1 - boiler.thermal_flow.submodel.status.status.solution.values,
+ flow_system.solution['Boiler(Q_th)|inactive'].values[:-1],
+ 1 - flow_system.solution['Boiler(Q_th)|status'].values[:-1],
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler__Q_th__off" does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.flow_rate.solution.values,
+ flow_system.solution['Boiler(Q_th)|flow_rate'].values[:-1],
[0, 10, 20, 0, 10],
rtol=1e-5,
atol=1e-10,
@@ -438,10 +435,8 @@ def test_startup_shutdown(solver_fixture, time_steps_fixture):
)
solve_and_load(flow_system, solver_fixture)
- boiler = flow_system['Boiler']
- costs = flow_system.effects['costs']
assert_allclose(
- costs.submodel.total.solution.item(),
+ flow_system.solution['costs'].item(),
80,
rtol=1e-5,
atol=1e-10,
@@ -449,28 +444,28 @@ def test_startup_shutdown(solver_fixture, time_steps_fixture):
)
assert_allclose(
- boiler.thermal_flow.submodel.status.status.solution.values,
+ flow_system.solution['Boiler(Q_th)|status'].values[:-1],
[0, 1, 1, 0, 1],
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler__Q_th__on" does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.status.startup.solution.values,
+ flow_system.solution['Boiler(Q_th)|startup'].values[:-1],
[0, 1, 0, 0, 1],
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler__Q_th__switch_on" does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.status.shutdown.solution.values,
+ flow_system.solution['Boiler(Q_th)|shutdown'].values[:-1],
[0, 0, 0, 1, 0],
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler__Q_th__switch_on" does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.flow_rate.solution.values,
+ flow_system.solution['Boiler(Q_th)|flow_rate'].values[:-1],
[0, 10, 20, 0, 10],
rtol=1e-5,
atol=1e-10,
@@ -502,10 +497,8 @@ def test_on_total_max(solver_fixture, time_steps_fixture):
)
solve_and_load(flow_system, solver_fixture)
- boiler = flow_system['Boiler']
- costs = flow_system.effects['costs']
assert_allclose(
- costs.submodel.total.solution.item(),
+ flow_system.solution['costs'].item(),
140,
rtol=1e-5,
atol=1e-10,
@@ -513,14 +506,14 @@ def test_on_total_max(solver_fixture, time_steps_fixture):
)
assert_allclose(
- boiler.thermal_flow.submodel.status.status.solution.values,
+ flow_system.solution['Boiler(Q_th)|status'].values[:-1],
[0, 0, 1, 0, 0],
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler__Q_th__on" does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.flow_rate.solution.values,
+ flow_system.solution['Boiler(Q_th)|flow_rate'].values[:-1],
[0, 0, 20, 0, 0],
rtol=1e-5,
atol=1e-10,
@@ -560,11 +553,8 @@ def test_on_total_bounds(solver_fixture, time_steps_fixture):
) # Else its non deterministic
solve_and_load(flow_system, solver_fixture)
- boiler = flow_system['Boiler']
- boiler_backup = flow_system['Boiler_backup']
- costs = flow_system.effects['costs']
assert_allclose(
- costs.submodel.total.solution.item(),
+ flow_system.solution['costs'].item(),
114,
rtol=1e-5,
atol=1e-10,
@@ -572,14 +562,14 @@ def test_on_total_bounds(solver_fixture, time_steps_fixture):
)
assert_allclose(
- boiler.thermal_flow.submodel.status.status.solution.values,
+ flow_system.solution['Boiler(Q_th)|status'].values[:-1],
[0, 0, 1, 0, 1],
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler__Q_th__on" does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.flow_rate.solution.values,
+ flow_system.solution['Boiler(Q_th)|flow_rate'].values[:-1],
[0, 0, 20, 0, 12 - 1e-5],
rtol=1e-5,
atol=1e-10,
@@ -587,14 +577,14 @@ def test_on_total_bounds(solver_fixture, time_steps_fixture):
)
assert_allclose(
- sum(boiler_backup.thermal_flow.submodel.status.status.solution.values),
+ sum(flow_system.solution['Boiler_backup(Q_th)|status'].values[:-1]),
3,
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler_backup__Q_th__on" does not have the right value',
)
assert_allclose(
- boiler_backup.thermal_flow.submodel.flow_rate.solution.values,
+ flow_system.solution['Boiler_backup(Q_th)|flow_rate'].values[:-1],
[0, 10, 1.0e-05, 0, 1.0e-05],
rtol=1e-5,
atol=1e-10,
@@ -628,11 +618,8 @@ def test_consecutive_uptime_downtime(solver_fixture, time_steps_fixture):
# Else its non deterministic
solve_and_load(flow_system, solver_fixture)
- boiler = flow_system['Boiler']
- boiler_backup = flow_system['Boiler_backup']
- costs = flow_system.effects['costs']
assert_allclose(
- costs.submodel.total.solution.item(),
+ flow_system.solution['costs'].item(),
190,
rtol=1e-5,
atol=1e-10,
@@ -640,14 +627,14 @@ def test_consecutive_uptime_downtime(solver_fixture, time_steps_fixture):
)
assert_allclose(
- boiler.thermal_flow.submodel.status.status.solution.values,
+ flow_system.solution['Boiler(Q_th)|status'].values[:-1],
[1, 1, 0, 1, 1],
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler__Q_th__on" does not have the right value',
)
assert_allclose(
- boiler.thermal_flow.submodel.flow_rate.solution.values,
+ flow_system.solution['Boiler(Q_th)|flow_rate'].values[:-1],
[5, 10, 0, 18, 12],
rtol=1e-5,
atol=1e-10,
@@ -655,7 +642,7 @@ def test_consecutive_uptime_downtime(solver_fixture, time_steps_fixture):
)
assert_allclose(
- boiler_backup.thermal_flow.submodel.flow_rate.solution.values,
+ flow_system.solution['Boiler_backup(Q_th)|flow_rate'].values[:-1],
[0, 0, 20, 0, 0],
rtol=1e-5,
atol=1e-10,
@@ -691,11 +678,8 @@ def test_consecutive_off(solver_fixture, time_steps_fixture):
) # Else its non deterministic
solve_and_load(flow_system, solver_fixture)
- boiler = flow_system['Boiler']
- boiler_backup = flow_system['Boiler_backup']
- costs = flow_system.effects['costs']
assert_allclose(
- costs.submodel.total.solution.item(),
+ flow_system.solution['costs'].item(),
110,
rtol=1e-5,
atol=1e-10,
@@ -703,21 +687,21 @@ def test_consecutive_off(solver_fixture, time_steps_fixture):
)
assert_allclose(
- boiler_backup.thermal_flow.submodel.status.status.solution.values,
+ flow_system.solution['Boiler_backup(Q_th)|status'].values[:-1],
[0, 0, 1, 0, 0],
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler_backup__Q_th__on" does not have the right value',
)
assert_allclose(
- boiler_backup.thermal_flow.submodel.status.inactive.solution.values,
+ flow_system.solution['Boiler_backup(Q_th)|inactive'].values[:-1],
[1, 1, 0, 1, 1],
rtol=1e-5,
atol=1e-10,
err_msg='"Boiler_backup__Q_th__off" does not have the right value',
)
assert_allclose(
- boiler_backup.thermal_flow.submodel.flow_rate.solution.values,
+ flow_system.solution['Boiler_backup(Q_th)|flow_rate'].values[:-1],
[0, 0, 1e-5, 0, 0],
rtol=1e-5,
atol=1e-10,
@@ -725,7 +709,7 @@ def test_consecutive_off(solver_fixture, time_steps_fixture):
)
assert_allclose(
- boiler.thermal_flow.submodel.flow_rate.solution.values,
+ flow_system.solution['Boiler(Q_th)|flow_rate'].values[:-1],
[5, 0, 20 - 1e-5, 18, 12],
rtol=1e-5,
atol=1e-10,
diff --git a/tests/test_integration.py b/tests/test_integration.py
index 35b2fa641..d33bb54e8 100644
--- a/tests/test_integration.py
+++ b/tests/test_integration.py
@@ -1,10 +1,7 @@
import pytest
-import flixopt as fx
-
from .conftest import (
assert_almost_equal_numeric,
- create_optimization_and_solve,
)
@@ -13,75 +10,52 @@ def test_simple_flow_system(self, simple_flow_system, highs_solver):
"""
Test the effects of the simple energy system model
"""
- optimization = create_optimization_and_solve(simple_flow_system, highs_solver, 'test_simple_flow_system')
-
- effects = optimization.flow_system.effects
+ simple_flow_system.optimize(highs_solver)
- # Cost assertions
+ # Cost assertions using new API (flow_system.solution)
assert_almost_equal_numeric(
- effects['costs'].submodel.total.solution.item(), 81.88394666666667, 'costs doesnt match expected value'
+ simple_flow_system.solution['costs'].item(), 81.88394666666667, 'costs doesnt match expected value'
)
# CO2 assertions
assert_almost_equal_numeric(
- effects['CO2'].submodel.total.solution.item(), 255.09184, 'CO2 doesnt match expected value'
+ simple_flow_system.solution['CO2'].item(), 255.09184, 'CO2 doesnt match expected value'
)
def test_model_components(self, simple_flow_system, highs_solver):
"""
Test the component flows of the simple energy system model
"""
- optimization = create_optimization_and_solve(simple_flow_system, highs_solver, 'test_model_components')
- comps = optimization.flow_system.components
+ simple_flow_system.optimize(highs_solver)
- # Boiler assertions
+ # Boiler assertions using new API
assert_almost_equal_numeric(
- comps['Boiler'].thermal_flow.submodel.flow_rate.solution.values,
+ simple_flow_system.solution['Boiler(Q_th)|flow_rate'].values,
[0, 0, 0, 28.4864, 35, 0, 0, 0, 0],
'Q_th doesnt match expected value',
)
- # CHP unit assertions
+ # CHP unit assertions using new API
assert_almost_equal_numeric(
- comps['CHP_unit'].thermal_flow.submodel.flow_rate.solution.values,
+ simple_flow_system.solution['CHP_unit(Q_th)|flow_rate'].values,
[30.0, 26.66666667, 75.0, 75.0, 75.0, 20.0, 20.0, 20.0, 20.0],
'Q_th doesnt match expected value',
)
- def test_results_persistence(self, simple_flow_system, highs_solver):
- """
- Test saving and loading results
- """
- # Save results to file
- optimization = create_optimization_and_solve(simple_flow_system, highs_solver, 'test_model_components')
-
- optimization.results.to_file()
-
- # Load results from file
- results = fx.results.Results.from_file(optimization.folder, optimization.name)
-
- # Verify key variables from loaded results
- assert_almost_equal_numeric(
- results.solution['costs'].values,
- 81.88394666666667,
- 'costs doesnt match expected value',
- )
- assert_almost_equal_numeric(results.solution['CO2'].values, 255.09184, 'CO2 doesnt match expected value')
-
class TestComplex:
def test_basic_flow_system(self, flow_system_base, highs_solver):
- optimization = create_optimization_and_solve(flow_system_base, highs_solver, 'test_basic_flow_system')
+ flow_system_base.optimize(highs_solver)
- # Assertions
+ # Assertions using flow_system.solution (the new API)
assert_almost_equal_numeric(
- optimization.results.model['costs'].solution.item(),
+ flow_system_base.solution['costs'].item(),
-11597.873624489237,
'costs doesnt match expected value',
)
assert_almost_equal_numeric(
- optimization.results.model['costs(temporal)|per_timestep'].solution.values,
+ flow_system_base.solution['costs(temporal)|per_timestep'].values,
[
-2.38500000e03,
-2.21681333e03,
@@ -97,66 +71,66 @@ def test_basic_flow_system(self, flow_system_base, highs_solver):
)
assert_almost_equal_numeric(
- sum(optimization.results.model['CO2(temporal)->costs(temporal)'].solution.values),
+ flow_system_base.solution['CO2(temporal)->costs(temporal)'].sum().item(),
258.63729669618675,
'costs doesnt match expected value',
)
assert_almost_equal_numeric(
- sum(optimization.results.model['Kessel(Q_th)->costs(temporal)'].solution.values),
+ flow_system_base.solution['Kessel(Q_th)->costs(temporal)'].sum().item(),
0.01,
'costs doesnt match expected value',
)
assert_almost_equal_numeric(
- sum(optimization.results.model['Kessel->costs(temporal)'].solution.values),
+ flow_system_base.solution['Kessel->costs(temporal)'].sum().item(),
-0.0,
'costs doesnt match expected value',
)
assert_almost_equal_numeric(
- sum(optimization.results.model['Gastarif(Q_Gas)->costs(temporal)'].solution.values),
+ flow_system_base.solution['Gastarif(Q_Gas)->costs(temporal)'].sum().item(),
39.09153113079115,
'costs doesnt match expected value',
)
assert_almost_equal_numeric(
- sum(optimization.results.model['Einspeisung(P_el)->costs(temporal)'].solution.values),
+ flow_system_base.solution['Einspeisung(P_el)->costs(temporal)'].sum().item(),
-14196.61245231646,
'costs doesnt match expected value',
)
assert_almost_equal_numeric(
- sum(optimization.results.model['KWK->costs(temporal)'].solution.values),
+ flow_system_base.solution['KWK->costs(temporal)'].sum().item(),
0.0,
'costs doesnt match expected value',
)
assert_almost_equal_numeric(
- optimization.results.model['Kessel(Q_th)->costs(periodic)'].solution.values,
+ flow_system_base.solution['Kessel(Q_th)->costs(periodic)'].values,
1000 + 500,
'costs doesnt match expected value',
)
assert_almost_equal_numeric(
- optimization.results.model['Speicher->costs(periodic)'].solution.values,
+ flow_system_base.solution['Speicher->costs(periodic)'].values,
800 + 1,
'costs doesnt match expected value',
)
assert_almost_equal_numeric(
- optimization.results.model['CO2(temporal)'].solution.values,
+ flow_system_base.solution['CO2(temporal)'].values,
1293.1864834809337,
'CO2 doesnt match expected value',
)
assert_almost_equal_numeric(
- optimization.results.model['CO2(periodic)'].solution.values,
+ flow_system_base.solution['CO2(periodic)'].values,
0.9999999999999994,
'CO2 doesnt match expected value',
)
assert_almost_equal_numeric(
- optimization.results.model['Kessel(Q_th)|flow_rate'].solution.values,
+ flow_system_base.solution['Kessel(Q_th)|flow_rate'].values,
[0, 0, 0, 45, 0, 0, 0, 0, 0],
'Kessel doesnt match expected value',
)
assert_almost_equal_numeric(
- optimization.results.model['KWK(Q_th)|flow_rate'].solution.values,
+ flow_system_base.solution['KWK(Q_th)|flow_rate'].values,
[
7.50000000e01,
6.97111111e01,
@@ -171,7 +145,7 @@ def test_basic_flow_system(self, flow_system_base, highs_solver):
'KWK Q_th doesnt match expected value',
)
assert_almost_equal_numeric(
- optimization.results.model['KWK(P_el)|flow_rate'].solution.values,
+ flow_system_base.solution['KWK(P_el)|flow_rate'].values,
[
6.00000000e01,
5.57688889e01,
@@ -187,139 +161,65 @@ def test_basic_flow_system(self, flow_system_base, highs_solver):
)
assert_almost_equal_numeric(
- optimization.results.model['Speicher|netto_discharge'].solution.values,
+ flow_system_base.solution['Speicher|netto_discharge'].values,
[-45.0, -69.71111111, 15.0, -10.0, 36.06697198, -55.0, 20.0, 20.0, 20.0],
'Speicher nettoFlow doesnt match expected value',
)
+ # charge_state includes extra timestep for final charge state (len = timesteps + 1)
assert_almost_equal_numeric(
- optimization.results.model['Speicher|charge_state'].solution.values,
+ flow_system_base.solution['Speicher|charge_state'].values,
[0.0, 40.5, 100.0, 77.0, 79.84, 37.38582802, 83.89496178, 57.18336484, 32.60869565, 10.0],
- 'Speicher nettoFlow doesnt match expected value',
+ 'Speicher charge_state doesnt match expected value',
)
assert_almost_equal_numeric(
- optimization.results.model['Speicher|PiecewiseEffects|costs'].solution.values,
+ flow_system_base.solution['Speicher|PiecewiseEffects|costs'].values,
800,
'Speicher|PiecewiseEffects|costs doesnt match expected value',
)
def test_piecewise_conversion(self, flow_system_piecewise_conversion, highs_solver):
- optimization = create_optimization_and_solve(
- flow_system_piecewise_conversion, highs_solver, 'test_piecewise_conversion'
- )
-
- effects = optimization.flow_system.effects
- comps = optimization.flow_system.components
+ flow_system_piecewise_conversion.optimize(highs_solver)
- # Compare expected values with actual values
+ # Compare expected values with actual values using new API
assert_almost_equal_numeric(
- effects['costs'].submodel.total.solution.item(), -10710.997365760755, 'costs doesnt match expected value'
+ flow_system_piecewise_conversion.solution['costs'].item(),
+ -10710.997365760755,
+ 'costs doesnt match expected value',
)
assert_almost_equal_numeric(
- effects['CO2'].submodel.total.solution.item(), 1278.7939026086956, 'CO2 doesnt match expected value'
+ flow_system_piecewise_conversion.solution['CO2'].item(),
+ 1278.7939026086956,
+ 'CO2 doesnt match expected value',
)
assert_almost_equal_numeric(
- comps['Kessel'].thermal_flow.submodel.flow_rate.solution.values,
+ flow_system_piecewise_conversion.solution['Kessel(Q_th)|flow_rate'].values,
[0, 0, 0, 45, 0, 0, 0, 0, 0],
'Kessel doesnt match expected value',
)
- kwk_flows = {flow.label: flow for flow in comps['KWK'].inputs + comps['KWK'].outputs}
assert_almost_equal_numeric(
- kwk_flows['Q_th'].submodel.flow_rate.solution.values,
+ flow_system_piecewise_conversion.solution['KWK(Q_th)|flow_rate'].values,
[45.0, 45.0, 64.5962087, 100.0, 61.3136, 45.0, 45.0, 12.86469565, 0.0],
'KWK Q_th doesnt match expected value',
)
assert_almost_equal_numeric(
- kwk_flows['P_el'].submodel.flow_rate.solution.values,
+ flow_system_piecewise_conversion.solution['KWK(P_el)|flow_rate'].values,
[40.0, 40.0, 47.12589407, 60.0, 45.93221818, 40.0, 40.0, 10.91784108, -0.0],
'KWK P_el doesnt match expected value',
)
assert_almost_equal_numeric(
- comps['Speicher'].submodel.netto_discharge.solution.values,
+ flow_system_piecewise_conversion.solution['Speicher|netto_discharge'].values,
[-15.0, -45.0, 25.4037913, -35.0, 48.6864, -25.0, -25.0, 7.13530435, 20.0],
'Speicher nettoFlow doesnt match expected value',
)
assert_almost_equal_numeric(
- comps['Speicher'].submodel.variables['Speicher|PiecewiseEffects|costs'].solution.values,
+ flow_system_piecewise_conversion.solution['Speicher|PiecewiseEffects|costs'].values,
454.74666666666667,
'Speicher investcosts_segmented_costs doesnt match expected value',
)
-@pytest.mark.slow
-class TestModelingTypes:
- @pytest.fixture(params=['full', 'segmented', 'aggregated'])
- def modeling_calculation(self, request, flow_system_long, highs_solver):
- """
- Fixture to run optimizations with different modeling types
- """
- # Extract flow system and data from the fixture
- flow_system = flow_system_long[0]
- thermal_load_ts = flow_system_long[1]['thermal_load_ts']
- electrical_load_ts = flow_system_long[1]['electrical_load_ts']
-
- # Create calculation based on modeling type
- modeling_type = request.param
- if modeling_type == 'full':
- calc = fx.Optimization('fullModel', flow_system)
- calc.do_modeling()
- calc.solve(highs_solver)
- elif modeling_type == 'segmented':
- calc = fx.SegmentedOptimization('segModel', flow_system, timesteps_per_segment=96, overlap_timesteps=1)
- calc.do_modeling_and_solve(highs_solver)
- elif modeling_type == 'aggregated':
- calc = fx.ClusteredOptimization(
- 'aggModel',
- flow_system,
- fx.ClusteringParameters(
- hours_per_period=6,
- nr_of_periods=4,
- fix_storage_flows=False,
- aggregate_data_and_fix_non_binary_vars=True,
- percentage_of_period_freedom=0,
- penalty_of_period_freedom=0,
- time_series_for_low_peaks=[electrical_load_ts, thermal_load_ts],
- time_series_for_high_peaks=[thermal_load_ts],
- ),
- )
- calc.do_modeling()
- calc.solve(highs_solver)
-
- return calc, modeling_type
-
- def test_modeling_types_costs(self, modeling_calculation):
- """
- Test total costs for different modeling types
- """
- calc, modeling_type = modeling_calculation
-
- expected_costs = {
- 'full': 343613,
- 'segmented': 343613, # Approximate value
- 'aggregated': 342967.0,
- }
-
- if modeling_type in ['full', 'aggregated']:
- assert_almost_equal_numeric(
- calc.results.model['costs'].solution.item(),
- expected_costs[modeling_type],
- f'costs do not match for {modeling_type} modeling type',
- )
- else:
- assert_almost_equal_numeric(
- calc.results.solution_without_overlap('costs(temporal)|per_timestep').sum(),
- expected_costs[modeling_type],
- f'costs do not match for {modeling_type} modeling type',
- )
-
- def test_segmented_io(self, modeling_calculation):
- calc, modeling_type = modeling_calculation
- if modeling_type == 'segmented':
- calc.results.to_file()
- _ = fx.results.SegmentedResults.from_file(calc.folder, calc.name)
-
-
if __name__ == '__main__':
pytest.main(['-v'])
diff --git a/tests/test_io.py b/tests/test_io.py
index 9f54799b8..9a00549d7 100644
--- a/tests/test_io.py
+++ b/tests/test_io.py
@@ -1,12 +1,14 @@
-import uuid
+"""Tests for I/O functionality.
+
+Tests for deprecated Results.to_file() and Results.from_file() API
+have been moved to tests/deprecated/test_results_io.py.
+"""
import pytest
import flixopt as fx
-from flixopt.io import ResultsPaths
from .conftest import (
- assert_almost_equal_numeric,
flow_system_base,
flow_system_long,
flow_system_segments_of_flows_2,
@@ -32,40 +34,6 @@ def flow_system(request):
return fs[0]
-@pytest.mark.slow
-def test_flow_system_file_io(flow_system, highs_solver, request):
- # Use UUID to ensure unique names across parallel test workers
- unique_id = uuid.uuid4().hex[:12]
- worker_id = getattr(request.config, 'workerinput', {}).get('workerid', 'main')
- test_id = f'{worker_id}-{unique_id}'
-
- calculation_0 = fx.Optimization(f'IO-{test_id}', flow_system=flow_system)
- calculation_0.do_modeling()
- calculation_0.solve(highs_solver)
- calculation_0.flow_system.plot_network()
-
- calculation_0.results.to_file()
- paths = ResultsPaths(calculation_0.folder, calculation_0.name)
- flow_system_1 = fx.FlowSystem.from_netcdf(paths.flow_system)
-
- calculation_1 = fx.Optimization(f'Loaded_IO-{test_id}', flow_system=flow_system_1)
- calculation_1.do_modeling()
- calculation_1.solve(highs_solver)
- calculation_1.flow_system.plot_network()
-
- assert_almost_equal_numeric(
- calculation_0.results.model.objective.value,
- calculation_1.results.model.objective.value,
- 'objective of loaded flow_system doesnt match the original',
- )
-
- assert_almost_equal_numeric(
- calculation_0.results.solution['costs'].values,
- calculation_1.results.solution['costs'].values,
- 'costs doesnt match expected value',
- )
-
-
def test_flow_system_io(flow_system):
flow_system.to_json('fs.json')
diff --git a/tests/test_io_conversion.py b/tests/test_io_conversion.py
new file mode 100644
index 000000000..33bda8c91
--- /dev/null
+++ b/tests/test_io_conversion.py
@@ -0,0 +1,773 @@
+"""Tests for the IO conversion utilities for backwards compatibility."""
+
+import pathlib
+
+import pytest
+import xarray as xr
+
+from flixopt.io import (
+ PARAMETER_RENAMES,
+ VALUE_RENAMES,
+ _rename_keys_recursive,
+ convert_old_dataset,
+ convert_old_netcdf,
+ load_dataset_from_netcdf,
+ save_dataset_to_netcdf,
+)
+
+
+class TestRenameKeysRecursive:
+ """Tests for the _rename_keys_recursive function."""
+
+ def test_simple_key_rename(self):
+ """Test basic key renaming."""
+ old = {'minimum_operation': 100}
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+ assert 'minimum_temporal' in result
+ assert 'minimum_operation' not in result
+ assert result['minimum_temporal'] == 100
+
+ def test_nested_key_rename(self):
+ """Test key renaming in nested structures."""
+ old = {
+ 'components': {
+ 'Boiler': {
+ 'on_off_parameters': {
+ 'on_hours_total_max': 50,
+ }
+ }
+ }
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+ assert 'status_parameters' in result['components']['Boiler']
+ assert 'on_off_parameters' not in result['components']['Boiler']
+ assert result['components']['Boiler']['status_parameters']['on_hours_max'] == 50
+
+ def test_class_name_rename(self):
+ """Test that __class__ values are also renamed."""
+ old = {
+ '__class__': 'OnOffParameters',
+ 'on_hours_total_max': 100,
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+ assert result['__class__'] == 'StatusParameters'
+ assert result['on_hours_max'] == 100
+
+ def test_value_rename(self):
+ """Test value renaming for specific keys."""
+ old = {'initial_charge_state': 'lastValueOfSim'}
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+ assert result['initial_charge_state'] == 'equals_final'
+
+ def test_list_handling(self):
+ """Test that lists are processed correctly."""
+ old = {
+ 'flows': [
+ {'flow_hours_total_max': 100},
+ {'flow_hours_total_min': 50},
+ ]
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+ assert result['flows'][0]['flow_hours_max'] == 100
+ assert result['flows'][1]['flow_hours_min'] == 50
+
+ def test_unchanged_keys_preserved(self):
+ """Test that keys not in rename map are preserved."""
+ old = {'label': 'MyComponent', 'size': 100}
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+ assert result['label'] == 'MyComponent'
+ assert result['size'] == 100
+
+ def test_empty_dict(self):
+ """Test handling of empty dict."""
+ result = _rename_keys_recursive({}, PARAMETER_RENAMES, VALUE_RENAMES)
+ assert result == {}
+
+ def test_empty_list(self):
+ """Test handling of empty list."""
+ result = _rename_keys_recursive([], PARAMETER_RENAMES, VALUE_RENAMES)
+ assert result == []
+
+ def test_scalar_values(self):
+ """Test that scalar values are returned unchanged."""
+ assert _rename_keys_recursive(42, PARAMETER_RENAMES, VALUE_RENAMES) == 42
+ assert _rename_keys_recursive('string', PARAMETER_RENAMES, VALUE_RENAMES) == 'string'
+ assert _rename_keys_recursive(None, PARAMETER_RENAMES, VALUE_RENAMES) is None
+
+
+class TestParameterRenames:
+ """Tests to verify all expected parameter renames are in the mapping."""
+
+ def test_effect_parameters(self):
+ """Test Effect parameter renames are defined."""
+ assert PARAMETER_RENAMES['minimum_operation'] == 'minimum_temporal'
+ assert PARAMETER_RENAMES['maximum_operation'] == 'maximum_temporal'
+ assert PARAMETER_RENAMES['minimum_invest'] == 'minimum_periodic'
+ assert PARAMETER_RENAMES['maximum_invest'] == 'maximum_periodic'
+ assert PARAMETER_RENAMES['minimum_investment'] == 'minimum_periodic'
+ assert PARAMETER_RENAMES['maximum_investment'] == 'maximum_periodic'
+ assert PARAMETER_RENAMES['minimum_operation_per_hour'] == 'minimum_per_hour'
+ assert PARAMETER_RENAMES['maximum_operation_per_hour'] == 'maximum_per_hour'
+
+ def test_invest_parameters(self):
+ """Test InvestParameters renames are defined."""
+ assert PARAMETER_RENAMES['fix_effects'] == 'effects_of_investment'
+ assert PARAMETER_RENAMES['specific_effects'] == 'effects_of_investment_per_size'
+ assert PARAMETER_RENAMES['divest_effects'] == 'effects_of_retirement'
+ assert PARAMETER_RENAMES['piecewise_effects'] == 'piecewise_effects_of_investment'
+
+ def test_flow_parameters(self):
+ """Test Flow/OnOffParameters renames are defined."""
+ assert PARAMETER_RENAMES['flow_hours_total_max'] == 'flow_hours_max'
+ assert PARAMETER_RENAMES['flow_hours_total_min'] == 'flow_hours_min'
+ assert PARAMETER_RENAMES['on_hours_total_max'] == 'on_hours_max'
+ assert PARAMETER_RENAMES['on_hours_total_min'] == 'on_hours_min'
+ assert PARAMETER_RENAMES['switch_on_total_max'] == 'switch_on_max'
+
+ def test_bus_parameters(self):
+ """Test Bus parameter renames are defined."""
+ assert PARAMETER_RENAMES['excess_penalty_per_flow_hour'] == 'imbalance_penalty_per_flow_hour'
+
+ def test_component_parameters(self):
+ """Test component parameter renames are defined."""
+ assert PARAMETER_RENAMES['source'] == 'outputs'
+ assert PARAMETER_RENAMES['sink'] == 'inputs'
+ assert PARAMETER_RENAMES['prevent_simultaneous_sink_and_source'] == 'prevent_simultaneous_flow_rates'
+
+ def test_linear_converter_parameters(self):
+ """Test linear converter parameter renames are defined."""
+ assert PARAMETER_RENAMES['Q_fu'] == 'fuel_flow'
+ assert PARAMETER_RENAMES['P_el'] == 'electrical_flow'
+ assert PARAMETER_RENAMES['Q_th'] == 'thermal_flow'
+ assert PARAMETER_RENAMES['Q_ab'] == 'heat_source_flow'
+ assert PARAMETER_RENAMES['eta'] == 'thermal_efficiency'
+ assert PARAMETER_RENAMES['eta_th'] == 'thermal_efficiency'
+ assert PARAMETER_RENAMES['eta_el'] == 'electrical_efficiency'
+ assert PARAMETER_RENAMES['COP'] == 'cop'
+
+ def test_class_renames(self):
+ """Test class name renames are defined."""
+ assert PARAMETER_RENAMES['OnOffParameters'] == 'StatusParameters'
+ assert PARAMETER_RENAMES['on_off_parameters'] == 'status_parameters'
+ assert PARAMETER_RENAMES['FullCalculation'] == 'Optimization'
+ assert PARAMETER_RENAMES['AggregatedCalculation'] == 'ClusteredOptimization'
+ assert PARAMETER_RENAMES['SegmentedCalculation'] == 'SegmentedOptimization'
+ assert PARAMETER_RENAMES['CalculationResults'] == 'Results'
+ assert PARAMETER_RENAMES['AggregationParameters'] == 'ClusteringParameters'
+
+ def test_time_series_data_parameters(self):
+ """Test TimeSeriesData parameter renames are defined."""
+ assert PARAMETER_RENAMES['agg_group'] == 'aggregation_group'
+ assert PARAMETER_RENAMES['agg_weight'] == 'aggregation_weight'
+
+
+class TestValueRenames:
+ """Tests for value renaming."""
+
+ def test_initial_charge_state_value(self):
+ """Test initial_charge_state value rename is defined."""
+ assert VALUE_RENAMES['initial_charge_state']['lastValueOfSim'] == 'equals_final'
+
+
+class TestConvertOldDataset:
+ """Tests for convert_old_dataset function."""
+
+ def test_converts_attrs(self):
+ """Test that dataset attrs are converted."""
+ ds = xr.Dataset(attrs={'minimum_operation': 100, 'maximum_invest': 500})
+ result = convert_old_dataset(ds)
+ assert 'minimum_temporal' in result.attrs
+ assert 'maximum_periodic' in result.attrs
+ assert 'minimum_operation' not in result.attrs
+ assert 'maximum_invest' not in result.attrs
+
+ def test_nested_attrs_conversion(self):
+ """Test conversion of nested attrs structures."""
+ ds = xr.Dataset(
+ attrs={
+ 'components': {
+ 'Boiler': {
+ '__class__': 'OnOffParameters',
+ 'on_hours_total_max': 100,
+ }
+ }
+ }
+ )
+ result = convert_old_dataset(ds)
+ assert result.attrs['components']['Boiler']['__class__'] == 'StatusParameters'
+ assert result.attrs['components']['Boiler']['on_hours_max'] == 100
+
+ def test_custom_renames(self):
+ """Test that custom renames can be provided."""
+ ds = xr.Dataset(attrs={'custom_old': 'value'})
+ result = convert_old_dataset(ds, key_renames={'custom_old': 'custom_new'}, value_renames={})
+ assert 'custom_new' in result.attrs
+ assert 'custom_old' not in result.attrs
+
+ def test_returns_same_object(self):
+ """Test that the function modifies and returns the same dataset object."""
+ ds = xr.Dataset(attrs={'minimum_operation': 100})
+ result = convert_old_dataset(ds)
+ # Note: attrs are modified in place, so the object should be the same
+ assert result is ds
+
+
+class TestConvertOldNetcdf:
+ """Tests for convert_old_netcdf function."""
+
+ def test_load_and_convert(self, tmp_path):
+ """Test loading and converting a netCDF file."""
+ # Create an old-style dataset and save it
+ old_ds = xr.Dataset(
+ {'var1': (['time'], [1, 2, 3])},
+ coords={'time': [0, 1, 2]},
+ attrs={
+ 'components': {
+ 'Boiler': {
+ '__class__': 'OnOffParameters',
+ 'on_hours_total_max': 100,
+ }
+ }
+ },
+ )
+ input_path = tmp_path / 'old_system.nc'
+ save_dataset_to_netcdf(old_ds, input_path)
+
+ # Convert
+ result = convert_old_netcdf(input_path)
+
+ # Verify conversion
+ assert result.attrs['components']['Boiler']['__class__'] == 'StatusParameters'
+ assert result.attrs['components']['Boiler']['on_hours_max'] == 100
+
+ def test_load_convert_and_save(self, tmp_path):
+ """Test loading, converting, and saving to new file."""
+ # Create an old-style dataset and save it
+ old_ds = xr.Dataset(
+ {'var1': (['time'], [1, 2, 3])},
+ coords={'time': [0, 1, 2]},
+ attrs={'minimum_operation': 100},
+ )
+ input_path = tmp_path / 'old_system.nc'
+ output_path = tmp_path / 'new_system.nc'
+ save_dataset_to_netcdf(old_ds, input_path)
+
+ # Convert and save
+ convert_old_netcdf(input_path, output_path)
+
+ # Load the new file and verify
+ loaded = load_dataset_from_netcdf(output_path)
+ assert 'minimum_temporal' in loaded.attrs
+ assert loaded.attrs['minimum_temporal'] == 100
+
+
+class TestFullConversionScenario:
+ """Integration tests for full conversion scenarios."""
+
+ def test_complex_flowsystem_structure(self):
+ """Test conversion of a complex FlowSystem-like structure."""
+ old_structure = {
+ '__class__': 'FlowSystem',
+ 'components': {
+ 'Boiler': {
+ '__class__': 'LinearConverter',
+ 'Q_fu': ':::Boiler|fuel',
+ 'eta': 0.9,
+ 'on_off_parameters': {
+ '__class__': 'OnOffParameters',
+ 'on_hours_total_max': 100,
+ 'switch_on_total_max': 10,
+ },
+ },
+ 'HeatPump': {
+ '__class__': 'HeatPumpWithSource',
+ 'COP': 3.5,
+ 'Q_ab': ':::HeatPump|ambient',
+ },
+ 'Battery': {
+ '__class__': 'Storage',
+ 'initial_charge_state': 'lastValueOfSim',
+ },
+ 'Grid': {
+ '__class__': 'Source',
+ 'source': [{'__class__': 'Flow', 'flow_hours_total_max': 1000}],
+ },
+ 'Demand': {
+ '__class__': 'Sink',
+ 'sink': [{'__class__': 'Flow', 'flow_hours_total_min': 500}],
+ },
+ },
+ 'effects': {
+ 'costs': {
+ '__class__': 'Effect',
+ 'minimum_operation': 0,
+ 'maximum_invest': 1000,
+ 'minimum_operation_per_hour': 0,
+ },
+ },
+ 'buses': {
+ 'heat_bus': {
+ '__class__': 'Bus',
+ 'excess_penalty_per_flow_hour': 1000,
+ },
+ },
+ }
+
+ result = _rename_keys_recursive(old_structure, PARAMETER_RENAMES, VALUE_RENAMES)
+
+ # Verify component conversions
+ boiler = result['components']['Boiler']
+ assert boiler['fuel_flow'] == ':::Boiler|fuel'
+ assert boiler['thermal_efficiency'] == 0.9
+ assert boiler['status_parameters']['__class__'] == 'StatusParameters'
+ assert boiler['status_parameters']['on_hours_max'] == 100
+ assert boiler['status_parameters']['switch_on_max'] == 10
+
+ heat_pump = result['components']['HeatPump']
+ assert heat_pump['cop'] == 3.5
+ assert heat_pump['heat_source_flow'] == ':::HeatPump|ambient'
+
+ battery = result['components']['Battery']
+ assert battery['initial_charge_state'] == 'equals_final'
+
+ grid = result['components']['Grid']
+ assert 'outputs' in grid
+ assert grid['outputs'][0]['flow_hours_max'] == 1000
+
+ demand = result['components']['Demand']
+ assert 'inputs' in demand
+ assert demand['inputs'][0]['flow_hours_min'] == 500
+
+ # Verify effect conversions
+ costs = result['effects']['costs']
+ assert costs['minimum_temporal'] == 0
+ assert costs['maximum_periodic'] == 1000
+ assert costs['minimum_per_hour'] == 0
+
+ # Verify bus conversions
+ heat_bus = result['buses']['heat_bus']
+ assert heat_bus['imbalance_penalty_per_flow_hour'] == 1000
+
+ def test_invest_parameters_conversion(self):
+ """Test conversion of InvestParameters."""
+ old_structure = {
+ '__class__': 'InvestParameters',
+ 'fix_effects': {'costs': 1000},
+ 'specific_effects': {'costs': 100},
+ 'divest_effects': {'costs': 500},
+ 'piecewise_effects': {'__class__': 'PiecewiseEffects'},
+ }
+
+ result = _rename_keys_recursive(old_structure, PARAMETER_RENAMES, VALUE_RENAMES)
+
+ assert result['effects_of_investment'] == {'costs': 1000}
+ assert result['effects_of_investment_per_size'] == {'costs': 100}
+ assert result['effects_of_retirement'] == {'costs': 500}
+ assert result['piecewise_effects_of_investment']['__class__'] == 'PiecewiseEffects'
+
+
+class TestEdgeCases:
+ """Tests for edge cases and potential issues."""
+
+ def test_effect_dict_keys_not_renamed(self):
+ """Effect dict keys are effect labels, not parameter names - should NOT be renamed."""
+ old = {
+ 'effects_per_flow_hour': {'costs': 100, 'CO2': 50},
+ 'fix_effects': {'costs': 1000}, # key should be renamed, but 'costs' value key should not
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+
+ # 'costs' and 'CO2' are effect labels, not parameter names
+ assert result['effects_per_flow_hour'] == {'costs': 100, 'CO2': 50}
+ # 'fix_effects' key should be renamed to 'effects_of_investment'
+ assert 'effects_of_investment' in result
+ # But the nested 'costs' key should remain (it's an effect label)
+ assert result['effects_of_investment'] == {'costs': 1000}
+
+ def test_deeply_nested_structure(self):
+ """Test handling of deeply nested structures (5+ levels)."""
+ old = {
+ 'level1': {
+ 'level2': {
+ 'level3': {
+ 'level4': {
+ 'level5': {
+ 'on_hours_total_max': 100,
+ }
+ }
+ }
+ }
+ }
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+ assert result['level1']['level2']['level3']['level4']['level5']['on_hours_max'] == 100
+
+ def test_mixed_old_and_new_parameters(self):
+ """Test structure with both old and new parameter names."""
+ old = {
+ 'minimum_operation': 0, # old
+ 'minimum_temporal': 10, # new (should not be double-renamed)
+ 'maximum_periodic': 1000, # new
+ 'maximum_invest': 500, # old
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+
+ # Old should be renamed
+ assert 'minimum_temporal' in result
+ assert 'maximum_periodic' in result
+
+ # Values should be correct (old one gets overwritten if both exist)
+ # This is a potential issue - if both old and new exist, new gets overwritten
+ # In practice this shouldn't happen, but let's document the behavior
+ assert result['minimum_temporal'] == 10 # new value preserved (processed second)
+ assert result['maximum_periodic'] in [500, 1000] # either could win
+
+ def test_none_values_preserved(self):
+ """Test that None values are preserved."""
+ old = {
+ 'minimum_operation': None,
+ 'some_param': None,
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+ assert result['minimum_temporal'] is None
+ assert result['some_param'] is None
+
+ def test_boolean_values_preserved(self):
+ """Test that boolean values are preserved."""
+ old = {
+ 'mandatory': True,
+ 'is_standard': False,
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+ assert result['mandatory'] is True
+ assert result['is_standard'] is False
+
+ def test_numeric_edge_cases(self):
+ """Test numeric edge cases (0, negative, floats)."""
+ old = {
+ 'minimum_operation': 0,
+ 'maximum_operation': -100, # negative (unusual but possible)
+ 'eta': 0.95,
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+ assert result['minimum_temporal'] == 0
+ assert result['maximum_temporal'] == -100
+ assert result['thermal_efficiency'] == 0.95
+
+ def test_dataarray_reference_strings_preserved(self):
+ """Test that DataArray reference strings are preserved as-is.
+
+ Note: We don't rename inside reference strings like ':::Boiler|Q_fu'
+ because those reference the actual DataArray variable names, which
+ would need separate handling if they also need renaming.
+ """
+ old = {
+ 'Q_fu': ':::Boiler|Q_fu', # key renamed, but ref string preserved
+ 'eta': ':::Boiler|eta',
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+
+ # Keys should be renamed
+ assert 'fuel_flow' in result
+ assert 'thermal_efficiency' in result
+
+ # Reference strings should be preserved (they point to DataArray names)
+ assert result['fuel_flow'] == ':::Boiler|Q_fu'
+ assert result['thermal_efficiency'] == ':::Boiler|eta'
+
+ def test_list_of_dicts(self):
+ """Test conversion of lists containing dictionaries."""
+ old = {
+ 'flows': [
+ {
+ '__class__': 'Flow',
+ 'on_off_parameters': {'__class__': 'OnOffParameters'},
+ 'flow_hours_total_max': 100,
+ },
+ {
+ '__class__': 'Flow',
+ 'flow_hours_total_min': 50,
+ },
+ ]
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+
+ assert len(result['flows']) == 2
+ assert result['flows'][0]['status_parameters']['__class__'] == 'StatusParameters'
+ assert result['flows'][0]['flow_hours_max'] == 100
+ assert result['flows'][1]['flow_hours_min'] == 50
+
+ def test_special_characters_in_labels(self):
+ """Test that special characters in component labels are preserved."""
+ old = {
+ 'components': {
+ 'CHP_Unit-1': {
+ '__class__': 'CHP',
+ 'eta_th': 0.4,
+ },
+ 'Heat Pump (Main)': {
+ '__class__': 'HeatPump',
+ 'COP': 3.5,
+ },
+ }
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+
+ # Labels should be preserved exactly
+ assert 'CHP_Unit-1' in result['components']
+ assert 'Heat Pump (Main)' in result['components']
+
+ # Parameters should still be renamed
+ assert result['components']['CHP_Unit-1']['thermal_efficiency'] == 0.4
+ assert result['components']['Heat Pump (Main)']['cop'] == 3.5
+
+ def test_value_rename_only_for_specific_keys(self):
+ """Test that value renames only apply to specific keys."""
+ old = {
+ 'initial_charge_state': 'lastValueOfSim', # should be renamed
+ 'other_param': 'lastValueOfSim', # should NOT be renamed (different key)
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+
+ assert result['initial_charge_state'] == 'equals_final'
+ assert result['other_param'] == 'lastValueOfSim' # unchanged
+
+ def test_value_rename_with_non_string_value(self):
+ """Test that value renames don't break with non-string values."""
+ old = {
+ 'initial_charge_state': 0.5, # numeric, not string
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+
+ # Should be preserved as-is (value rename only applies to strings)
+ assert result['initial_charge_state'] == 0.5
+
+
+class TestRealWorldScenarios:
+ """Tests with real-world-like data structures."""
+
+ def test_source_with_investment(self):
+ """Test Source component with investment parameters."""
+ old = {
+ '__class__': 'Source',
+ 'label': 'GasGrid',
+ 'source': [
+ {
+ '__class__': 'Flow',
+ 'label': 'gas',
+ 'bus': 'gas_bus',
+ 'flow_hours_total_max': 10000,
+ 'invest_parameters': {
+ '__class__': 'InvestParameters',
+ 'fix_effects': {'costs': 5000},
+ 'specific_effects': {'costs': 100},
+ },
+ }
+ ],
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+
+ assert 'outputs' in result
+ assert result['outputs'][0]['flow_hours_max'] == 10000
+ assert result['outputs'][0]['invest_parameters']['effects_of_investment'] == {'costs': 5000}
+ assert result['outputs'][0]['invest_parameters']['effects_of_investment_per_size'] == {'costs': 100}
+
+ def test_storage_with_all_old_parameters(self):
+ """Test Storage component with various old parameters."""
+ old = {
+ '__class__': 'Storage',
+ 'label': 'Battery',
+ 'initial_charge_state': 'lastValueOfSim',
+ 'charging': {
+ '__class__': 'Flow',
+ 'on_off_parameters': {
+ '__class__': 'OnOffParameters',
+ 'on_hours_total_max': 100,
+ 'on_hours_total_min': 10,
+ 'switch_on_total_max': 50,
+ },
+ },
+ 'discharging': {
+ '__class__': 'Flow',
+ 'flow_hours_total_max': 500,
+ },
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+
+ assert result['initial_charge_state'] == 'equals_final'
+ assert result['charging']['status_parameters']['on_hours_max'] == 100
+ assert result['charging']['status_parameters']['on_hours_min'] == 10
+ assert result['charging']['status_parameters']['switch_on_max'] == 50
+ assert result['discharging']['flow_hours_max'] == 500
+
+ def test_effect_with_all_old_parameters(self):
+ """Test Effect with all old parameter names."""
+ old = {
+ '__class__': 'Effect',
+ 'label': 'costs',
+ 'unit': '€',
+ 'minimum_operation': 0,
+ 'maximum_operation': 1000000,
+ 'minimum_invest': 0,
+ 'maximum_invest': 500000,
+ 'minimum_operation_per_hour': 0,
+ 'maximum_operation_per_hour': 10000,
+ }
+ result = _rename_keys_recursive(old, PARAMETER_RENAMES, VALUE_RENAMES)
+
+ assert result['minimum_temporal'] == 0
+ assert result['maximum_temporal'] == 1000000
+ assert result['minimum_periodic'] == 0
+ assert result['maximum_periodic'] == 500000
+ assert result['minimum_per_hour'] == 0
+ assert result['maximum_per_hour'] == 10000
+
+ # Labels should be preserved
+ assert result['label'] == 'costs'
+ assert result['unit'] == '€'
+
+
+class TestFlowSystemFromOldResults:
+ """Tests for FlowSystem.from_old_results() method."""
+
+ def test_load_old_results_from_resources(self):
+ """Test loading old results files from test resources."""
+ import pathlib
+
+ import flixopt as fx
+
+ resources_path = pathlib.Path(__file__).parent / 'ressources'
+
+ # Load old results using new method
+ fs = fx.FlowSystem.from_old_results(resources_path, 'Sim1')
+
+ # Verify FlowSystem was loaded
+ assert fs is not None
+ assert fs.name == 'Sim1'
+
+ # Verify solution was attached
+ assert fs.solution is not None
+ assert len(fs.solution.data_vars) > 0
+
+ def test_old_results_can_be_saved_new_format(self, tmp_path):
+ """Test that old results can be saved in new single-file format."""
+ import pathlib
+
+ import flixopt as fx
+
+ resources_path = pathlib.Path(__file__).parent / 'ressources'
+
+ # Load old results
+ fs = fx.FlowSystem.from_old_results(resources_path, 'Sim1')
+
+ # Save in new format
+ new_path = tmp_path / 'migrated.nc'
+ fs.to_netcdf(new_path)
+
+ # Verify the new file exists and can be loaded
+ assert new_path.exists()
+ loaded = fx.FlowSystem.from_netcdf(new_path)
+ assert loaded is not None
+ assert loaded.solution is not None
+
+
+class TestV4APIConversion:
+ """Tests for converting v4 API result files to the new format."""
+
+ V4_API_PATH = pathlib.Path(__file__).parent / 'ressources' / 'v4-api'
+
+ # All result names in the v4-api folder
+ V4_RESULT_NAMES = [
+ '00_minimal',
+ '01_simple',
+ '02_complex',
+ '04_scenarios',
+ 'io_flow_system_base',
+ 'io_flow_system_long',
+ 'io_flow_system_segments',
+ 'io_simple_flow_system',
+ 'io_simple_flow_system_scenarios',
+ ]
+
+ @pytest.mark.parametrize('result_name', V4_RESULT_NAMES)
+ def test_v4_results_can_be_loaded(self, result_name):
+ """Test that v4 API results can be loaded."""
+ import flixopt as fx
+
+ fs = fx.FlowSystem.from_old_results(self.V4_API_PATH, result_name)
+
+ # Verify FlowSystem was loaded
+ assert fs is not None
+ assert fs.name == result_name
+
+ # Verify solution was attached
+ assert fs.solution is not None
+ assert len(fs.solution.data_vars) > 0
+
+ # Verify we have components
+ assert len(fs.components) > 0
+
+ @pytest.mark.parametrize('result_name', V4_RESULT_NAMES)
+ def test_v4_results_can_be_saved_and_reloaded(self, result_name, tmp_path):
+ """Test that v4 API results can be saved in new format and reloaded."""
+ import flixopt as fx
+
+ # Load old results
+ fs = fx.FlowSystem.from_old_results(self.V4_API_PATH, result_name)
+
+ # Save in new format
+ new_path = tmp_path / f'{result_name}_migrated.nc'
+ fs.to_netcdf(new_path)
+
+ # Reload and verify
+ loaded = fx.FlowSystem.from_netcdf(new_path)
+ assert loaded is not None
+ assert loaded.solution is not None
+ assert len(loaded.solution.data_vars) == len(fs.solution.data_vars)
+ assert len(loaded.components) == len(fs.components)
+
+ @pytest.mark.parametrize('result_name', V4_RESULT_NAMES)
+ def test_v4_solution_variables_accessible(self, result_name):
+ """Test that solution variables from v4 results are accessible."""
+ import flixopt as fx
+
+ fs = fx.FlowSystem.from_old_results(self.V4_API_PATH, result_name)
+
+ # Check that we can access solution variables
+ for var_name in list(fs.solution.data_vars)[:5]: # Check first 5 variables
+ var = fs.solution[var_name]
+ assert var is not None
+ # Variables should have data
+ assert var.size > 0
+
+ @pytest.mark.parametrize('result_name', V4_RESULT_NAMES)
+ def test_v4_reoptimized_objective_matches_original(self, result_name):
+ """Test that re-solving the migrated FlowSystem gives the same objective effect."""
+ import flixopt as fx
+
+ # Load old results
+ fs = fx.FlowSystem.from_old_results(self.V4_API_PATH, result_name)
+
+ # Get the objective effect label
+ objective_effect_label = fs.effects.objective_effect.label
+
+ # Get the original effect total from the old solution (sum for multi-scenario)
+ old_effect_total = float(fs.solution[objective_effect_label].values.sum())
+ old_objective = float(fs.solution['objective'].values.sum())
+
+ # Re-solve the FlowSystem
+ fs.optimize(fx.solvers.HighsSolver(mip_gap=0))
+
+ # Get new objective effect total (sum for multi-scenario)
+ new_objective = float(fs.solution['objective'].item())
+ new_effect_total = float(fs.solution[objective_effect_label].sum().item())
+
+ # Verify objective matches (within tolerance)
+ assert new_objective == pytest.approx(old_objective, rel=1e-5, abs=1), (
+ f'Objective mismatch for {result_name}: new={new_objective}, old={old_objective}'
+ )
+
+ assert new_effect_total == pytest.approx(old_effect_total, rel=1e-5, abs=1), (
+ f'Effect {objective_effect_label} mismatch for {result_name}: '
+ f'new={new_effect_total}, old={old_effect_total}'
+ )
diff --git a/tests/test_scenarios.py b/tests/test_scenarios.py
index a5eb3d6a2..b4a1cd161 100644
--- a/tests/test_scenarios.py
+++ b/tests/test_scenarios.py
@@ -1,5 +1,3 @@
-import tempfile
-
import numpy as np
import pandas as pd
import pytest
@@ -11,7 +9,7 @@
from flixopt.elements import Bus, Flow
from flixopt.flow_system import FlowSystem
-from .conftest import create_linopy_model, create_optimization_and_solve
+from .conftest import create_linopy_model
@pytest.fixture
@@ -214,10 +212,10 @@ def flow_system_piecewise_conversion_scenarios(flow_system_complex_scenarios) ->
flow_system.add_elements(
fx.LinearConverter(
'KWK',
- inputs=[fx.Flow('Q_fu', bus='Gas')],
+ inputs=[fx.Flow('Q_fu', bus='Gas', size=200)],
outputs=[
fx.Flow('P_el', bus='Strom', size=60, relative_maximum=55, previous_flow_rate=10),
- fx.Flow('Q_th', bus='Fernwärme'),
+ fx.Flow('Q_th', bus='Fernwärme', size=100),
],
piecewise_conversion=fx.PiecewiseConversion(
{
@@ -296,47 +294,39 @@ def test_full_scenario_optimization(flow_system_piecewise_conversion_scenarios):
scenarios = flow_system_piecewise_conversion_scenarios.scenarios
weights = np.linspace(0.5, 1, len(scenarios)) / np.sum(np.linspace(0.5, 1, len(scenarios)))
flow_system_piecewise_conversion_scenarios.scenario_weights = weights
- calc = create_optimization_and_solve(
- flow_system_piecewise_conversion_scenarios,
- solver=fx.solvers.GurobiSolver(mip_gap=0.01, time_limit_seconds=60),
- name='test_full_scenario',
- )
- calc.results.to_file()
- res = fx.results.Results.from_file('results', 'test_full_scenario')
- fx.FlowSystem.from_dataset(res.flow_system_data)
- _ = create_optimization_and_solve(
- flow_system_piecewise_conversion_scenarios,
- solver=fx.solvers.GurobiSolver(mip_gap=0.01, time_limit_seconds=60),
- name='test_full_scenario_2',
- )
+ # Optimize using new API
+ flow_system_piecewise_conversion_scenarios.optimize(fx.solvers.GurobiSolver(mip_gap=0.01, time_limit_seconds=60))
+
+ # Verify solution exists and has scenario dimension
+ assert flow_system_piecewise_conversion_scenarios.solution is not None
+ assert 'scenario' in flow_system_piecewise_conversion_scenarios.solution.dims
@pytest.mark.skip(reason='This test is taking too long with highs and is too big for gurobipy free')
-def test_io_persistence(flow_system_piecewise_conversion_scenarios):
+def test_io_persistence(flow_system_piecewise_conversion_scenarios, tmp_path):
"""Test a full optimization with scenarios and verify results."""
scenarios = flow_system_piecewise_conversion_scenarios.scenarios
weights = np.linspace(0.5, 1, len(scenarios)) / np.sum(np.linspace(0.5, 1, len(scenarios)))
flow_system_piecewise_conversion_scenarios.scenario_weights = weights
- calc = create_optimization_and_solve(
- flow_system_piecewise_conversion_scenarios,
- solver=fx.solvers.HighsSolver(mip_gap=0.001, time_limit_seconds=60),
- name='test_io_persistence',
- )
- calc.results.to_file()
- res = fx.results.Results.from_file('results', 'test_io_persistence')
- flow_system_2 = fx.FlowSystem.from_dataset(res.flow_system_data)
- calc_2 = create_optimization_and_solve(
- flow_system_2,
- solver=fx.solvers.HighsSolver(mip_gap=0.001, time_limit_seconds=60),
- name='test_io_persistence_2',
- )
+ # Optimize using new API
+ flow_system_piecewise_conversion_scenarios.optimize(fx.solvers.HighsSolver(mip_gap=0.001, time_limit_seconds=60))
+ original_objective = flow_system_piecewise_conversion_scenarios.solution['objective'].item()
+
+ # Save and restore
+ filepath = tmp_path / 'flow_system_scenarios.nc4'
+ flow_system_piecewise_conversion_scenarios.to_netcdf(filepath)
+ flow_system_2 = fx.FlowSystem.from_netcdf(filepath)
- np.testing.assert_allclose(calc.results.objective, calc_2.results.objective, rtol=0.001)
+ # Re-optimize restored flow system
+ flow_system_2.optimize(fx.solvers.HighsSolver(mip_gap=0.001, time_limit_seconds=60))
+
+ np.testing.assert_allclose(original_objective, flow_system_2.solution['objective'].item(), rtol=0.001)
def test_scenarios_selection(flow_system_piecewise_conversion_scenarios):
+ """Test scenario selection/subsetting functionality."""
flow_system_full = flow_system_piecewise_conversion_scenarios
scenarios = flow_system_full.scenarios
scenario_weights = np.linspace(0.5, 1, len(scenarios)) / np.sum(np.linspace(0.5, 1, len(scenarios)))
@@ -347,22 +337,22 @@ def test_scenarios_selection(flow_system_piecewise_conversion_scenarios):
np.testing.assert_allclose(flow_system.scenario_weights.values, flow_system_full.scenario_weights[0:2])
- calc = fx.Optimization(flow_system=flow_system, name='test_scenarios_selection', normalize_weights=False)
- calc.do_modeling()
- calc.solve(fx.solvers.GurobiSolver(mip_gap=0.01, time_limit_seconds=60))
-
- calc.results.to_file()
+ # Optimize using new API with normalize_weights=False
+ flow_system.optimize(
+ fx.solvers.GurobiSolver(mip_gap=0.01, time_limit_seconds=60),
+ normalize_weights=False,
+ )
# Penalty has same structure as other effects: 'Penalty' is the total, 'Penalty(temporal)' and 'Penalty(periodic)' are components
np.testing.assert_allclose(
- calc.results.objective,
+ flow_system.solution['objective'].item(),
(
- (calc.results.solution['costs'] * flow_system.scenario_weights).sum()
- + (calc.results.solution['Penalty'] * flow_system.scenario_weights).sum()
+ (flow_system.solution['costs'] * flow_system.scenario_weights).sum()
+ + (flow_system.solution['Penalty'] * flow_system.scenario_weights).sum()
).item(),
) ## Account for rounding errors
- assert calc.results.solution.indexes['scenario'].equals(flow_system_full.scenarios[0:2])
+ assert flow_system.solution.indexes['scenario'].equals(flow_system_full.scenarios[0:2])
def test_sizes_per_scenario_default():
@@ -496,11 +486,10 @@ def test_size_equality_constraints():
fs.add_elements(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))
- calc = fx.Optimization('test', fs)
- calc.do_modeling()
+ fs.build_model()
# Check that size equality constraint exists
- constraint_names = [str(c) for c in calc.model.constraints]
+ constraint_names = [str(c) for c in fs.model.constraints]
size_constraints = [c for c in constraint_names if 'scenario_independent' in c and 'size' in c]
assert len(size_constraints) > 0, 'Size equality constraint should exist'
@@ -536,11 +525,10 @@ def test_flow_rate_equality_constraints():
fs.add_elements(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))
- calc = fx.Optimization('test', fs)
- calc.do_modeling()
+ fs.build_model()
# Check that flow_rate equality constraint exists
- constraint_names = [str(c) for c in calc.model.constraints]
+ constraint_names = [str(c) for c in fs.model.constraints]
flow_rate_constraints = [c for c in constraint_names if 'scenario_independent' in c and 'flow_rate' in c]
assert len(flow_rate_constraints) > 0, 'Flow rate equality constraint should exist'
@@ -578,10 +566,9 @@ def test_selective_scenario_independence():
fs.add_elements(bus, source, sink, fx.Effect('cost', 'Total cost', '€', is_objective=True))
- calc = fx.Optimization('test', fs)
- calc.do_modeling()
+ fs.build_model()
- constraint_names = [str(c) for c in calc.model.constraints]
+ constraint_names = [str(c) for c in fs.model.constraints]
# Solar SHOULD have size constraints (it's in the list, so equalized)
solar_size_constraints = [c for c in constraint_names if 'solar(out)|size' in c and 'scenario_independent' in c]
@@ -646,10 +633,8 @@ def test_scenario_parameters_io_persistence():
assert fs_loaded.scenario_independent_flow_rates == fs_original.scenario_independent_flow_rates
-def test_scenario_parameters_io_with_calculation():
+def test_scenario_parameters_io_with_calculation(tmp_path):
"""Test that scenario parameters persist through full calculation IO."""
- import shutil
-
timesteps = pd.date_range('2023-01-01', periods=24, freq='h')
scenarios = pd.Index(['base', 'high'], name='scenario')
@@ -680,39 +665,29 @@ def test_scenario_parameters_io_with_calculation():
fs.add_elements(bus, source, sink, fx.Effect('cost', 'Total cost', '€', is_objective=True))
- # Create temp directory for results
- temp_dir = tempfile.mkdtemp()
+ # Solve using new API
+ fs.optimize(fx.solvers.HighsSolver(mip_gap=0.01, time_limit_seconds=60))
+ original_model = fs.model
- try:
- # Solve and save
- calc = fx.Optimization('test_io', fs, folder=temp_dir)
- calc.do_modeling()
- calc.solve(fx.solvers.HighsSolver(mip_gap=0.01, time_limit_seconds=60))
- calc.results.to_file()
+ # Save and restore
+ filepath = tmp_path / 'flow_system_scenarios.nc4'
+ fs.to_netcdf(filepath)
+ fs_loaded = fx.FlowSystem.from_netcdf(filepath)
- # Load results
- results = fx.results.Results.from_file(temp_dir, 'test_io')
- fs_loaded = fx.FlowSystem.from_dataset(results.flow_system_data)
-
- # Verify parameters persisted
- assert fs_loaded.scenario_independent_sizes == fs.scenario_independent_sizes
- assert fs_loaded.scenario_independent_flow_rates == fs.scenario_independent_flow_rates
-
- # Verify constraints are recreated correctly
- calc2 = fx.Optimization('test_io_2', fs_loaded, folder=temp_dir)
- calc2.do_modeling()
+ # Verify parameters persisted
+ assert fs_loaded.scenario_independent_sizes == fs.scenario_independent_sizes
+ assert fs_loaded.scenario_independent_flow_rates == fs.scenario_independent_flow_rates
- constraint_names1 = [str(c) for c in calc.model.constraints]
- constraint_names2 = [str(c) for c in calc2.model.constraints]
+ # Verify constraints are recreated correctly when building model
+ fs_loaded.build_model()
- size_constraints1 = [c for c in constraint_names1 if 'scenario_independent' in c and 'size' in c]
- size_constraints2 = [c for c in constraint_names2 if 'scenario_independent' in c and 'size' in c]
+ constraint_names1 = [str(c) for c in original_model.constraints]
+ constraint_names2 = [str(c) for c in fs_loaded.model.constraints]
- assert len(size_constraints1) == len(size_constraints2)
+ size_constraints1 = [c for c in constraint_names1 if 'scenario_independent' in c and 'size' in c]
+ size_constraints2 = [c for c in constraint_names2 if 'scenario_independent' in c and 'size' in c]
- finally:
- # Clean up
- shutil.rmtree(temp_dir)
+ assert len(size_constraints1) == len(size_constraints2)
def test_weights_io_persistence():
diff --git a/tests/test_solution_and_plotting.py b/tests/test_solution_and_plotting.py
new file mode 100644
index 000000000..c9c64e65c
--- /dev/null
+++ b/tests/test_solution_and_plotting.py
@@ -0,0 +1,781 @@
+"""Tests for the new solution access API and plotting functionality.
+
+This module tests:
+- flow_system.solution access (xarray Dataset)
+- element.solution access (filtered view)
+- plotting module functions with realistic optimization data
+- heatmap time reshaping
+- network visualization
+"""
+
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+import pytest
+import xarray as xr
+
+import flixopt as fx
+from flixopt import plotting
+
+# ============================================================================
+# SOLUTION ACCESS TESTS
+# ============================================================================
+
+
+class TestFlowSystemSolution:
+ """Tests for flow_system.solution API."""
+
+ def test_solution_is_xarray_dataset(self, simple_flow_system, highs_solver):
+ """Verify solution is an xarray Dataset."""
+ simple_flow_system.optimize(highs_solver)
+ assert isinstance(simple_flow_system.solution, xr.Dataset)
+
+ def test_solution_has_time_dimension(self, simple_flow_system, highs_solver):
+ """Verify solution has time dimension."""
+ simple_flow_system.optimize(highs_solver)
+ assert 'time' in simple_flow_system.solution.dims
+
+ def test_solution_contains_effect_totals(self, simple_flow_system, highs_solver):
+ """Verify solution contains effect totals (costs, CO2)."""
+ simple_flow_system.optimize(highs_solver)
+ solution = simple_flow_system.solution
+
+ # Check that effects are present
+ assert 'costs' in solution
+ assert 'CO2' in solution
+
+ # Verify they are scalar values
+ assert solution['costs'].dims == ()
+ assert solution['CO2'].dims == ()
+
+ def test_solution_contains_temporal_effects(self, simple_flow_system, highs_solver):
+ """Verify solution contains temporal effect components."""
+ simple_flow_system.optimize(highs_solver)
+ solution = simple_flow_system.solution
+
+ # Check temporal components
+ assert 'costs(temporal)' in solution
+ assert 'costs(temporal)|per_timestep' in solution
+
+ def test_solution_contains_flow_rates(self, simple_flow_system, highs_solver):
+ """Verify solution contains flow rate variables."""
+ simple_flow_system.optimize(highs_solver)
+ solution = simple_flow_system.solution
+
+ # Check flow rates for known components
+ flow_rate_vars = [v for v in solution.data_vars if '|flow_rate' in v]
+ assert len(flow_rate_vars) > 0
+
+ # Verify flow rates have time dimension
+ for var in flow_rate_vars:
+ assert 'time' in solution[var].dims
+
+ def test_solution_contains_storage_variables(self, simple_flow_system, highs_solver):
+ """Verify solution contains storage-specific variables."""
+ simple_flow_system.optimize(highs_solver)
+ solution = simple_flow_system.solution
+
+ # Check storage charge state (includes extra timestep for final state)
+ assert 'Speicher|charge_state' in solution
+
+ def test_solution_item_returns_scalar(self, simple_flow_system, highs_solver):
+ """Verify .item() returns Python scalar for 0-d arrays."""
+ simple_flow_system.optimize(highs_solver)
+
+ costs = simple_flow_system.solution['costs'].item()
+ assert isinstance(costs, (int, float))
+
+ def test_solution_values_returns_numpy_array(self, simple_flow_system, highs_solver):
+ """Verify .values returns numpy array for multi-dimensional data."""
+ simple_flow_system.optimize(highs_solver)
+
+ # Find a flow rate variable
+ flow_vars = [v for v in simple_flow_system.solution.data_vars if '|flow_rate' in v]
+ flow_rate = simple_flow_system.solution[flow_vars[0]].values
+ assert isinstance(flow_rate, np.ndarray)
+
+ def test_solution_sum_over_time(self, simple_flow_system, highs_solver):
+ """Verify xarray operations work on solution data."""
+ simple_flow_system.optimize(highs_solver)
+
+ # Sum flow rate over time
+ flow_vars = [v for v in simple_flow_system.solution.data_vars if '|flow_rate' in v]
+ total_flow = simple_flow_system.solution[flow_vars[0]].sum(dim='time')
+ assert total_flow.dims == ()
+
+ def test_solution_to_dataframe(self, simple_flow_system, highs_solver):
+ """Verify solution can be converted to DataFrame."""
+ simple_flow_system.optimize(highs_solver)
+
+ df = simple_flow_system.solution.to_dataframe()
+ assert isinstance(df, pd.DataFrame)
+
+ def test_solution_none_before_optimization(self, simple_flow_system):
+ """Verify solution is None before optimization."""
+ assert simple_flow_system.solution is None
+
+
+class TestElementSolution:
+ """Tests for element.solution API (filtered view of flow_system.solution)."""
+
+ def test_element_solution_is_filtered_dataset(self, simple_flow_system, highs_solver):
+ """Verify element.solution returns filtered Dataset."""
+ simple_flow_system.optimize(highs_solver)
+
+ boiler = simple_flow_system.components['Boiler']
+ element_solution = boiler.solution
+
+ assert isinstance(element_solution, xr.Dataset)
+
+ def test_element_solution_contains_only_element_variables(self, simple_flow_system, highs_solver):
+ """Verify element.solution only contains variables for that element."""
+ simple_flow_system.optimize(highs_solver)
+
+ boiler = simple_flow_system.components['Boiler']
+ element_solution = boiler.solution
+
+ # All variables should start with 'Boiler'
+ for var in element_solution.data_vars:
+ assert 'Boiler' in var, f"Variable {var} should contain 'Boiler'"
+
+ def test_storage_element_solution(self, simple_flow_system, highs_solver):
+ """Verify storage element solution contains charge state."""
+ simple_flow_system.optimize(highs_solver)
+
+ storage = simple_flow_system.components['Speicher']
+ element_solution = storage.solution
+
+ # Should contain charge state variables
+ charge_vars = [v for v in element_solution.data_vars if 'charge_state' in v]
+ assert len(charge_vars) > 0
+
+ def test_element_solution_raises_for_unlinked_element(self):
+ """Verify accessing solution for unlinked element raises error."""
+ boiler = fx.linear_converters.Boiler(
+ 'TestBoiler',
+ thermal_efficiency=0.9,
+ thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ )
+ with pytest.raises(ValueError, match='not linked to a FlowSystem'):
+ _ = boiler.solution
+
+
+# ============================================================================
+# STATISTICS ACCESSOR TESTS
+# ============================================================================
+
+
+class TestStatisticsAccessor:
+ """Tests for flow_system.statistics accessor."""
+
+ def test_statistics_sizes_includes_all_flows(self, simple_flow_system, highs_solver):
+ """Test that statistics.sizes includes all flow and storage sizes (from InvestParameters)."""
+ simple_flow_system.optimize(highs_solver)
+
+ sizes = simple_flow_system.statistics.sizes
+
+ assert isinstance(sizes, xr.Dataset)
+ # Should have sizes for flows with InvestParameters
+ assert len(sizes.data_vars) > 0
+
+ # Check that all size labels are valid flow or storage labels
+ flow_labels = [f.label_full for f in simple_flow_system.flows.values()]
+ storage_labels = [s.label_full for s in simple_flow_system.storages.values()]
+ valid_labels = flow_labels + storage_labels
+ for label in sizes.data_vars:
+ assert label in valid_labels, f'Size label {label} should be a valid flow or storage'
+
+ def test_statistics_sizes_returns_correct_values(self, simple_flow_system, highs_solver):
+ """Test that statistics.sizes returns correct size values."""
+ simple_flow_system.optimize(highs_solver)
+
+ sizes = simple_flow_system.statistics.sizes
+
+ # Check that all values are positive (sizes should be > 0)
+ for label in sizes.data_vars:
+ value = float(sizes[label].values) if sizes[label].dims == () else float(sizes[label].max().values)
+ assert value > 0, f'Size for {label} should be positive'
+
+ def test_statistics_flow_rates(self, simple_flow_system, highs_solver):
+ """Test that statistics.flow_rates returns flow rate data."""
+ simple_flow_system.optimize(highs_solver)
+
+ flow_rates = simple_flow_system.statistics.flow_rates
+
+ assert isinstance(flow_rates, xr.Dataset)
+ assert len(flow_rates.data_vars) > 0
+ # Flow rates should have time dimension
+ assert 'time' in flow_rates.dims
+
+ def test_statistics_flow_hours(self, simple_flow_system, highs_solver):
+ """Test that statistics.flow_hours returns energy data."""
+ simple_flow_system.optimize(highs_solver)
+
+ flow_hours = simple_flow_system.statistics.flow_hours
+
+ assert isinstance(flow_hours, xr.Dataset)
+ assert len(flow_hours.data_vars) > 0
+
+
+# ============================================================================
+# PLOTTING WITH OPTIMIZED DATA TESTS
+# ============================================================================
+
+
+class TestPlottingWithOptimizedData:
+ """Tests for plotting functions using actual optimization results."""
+
+ def test_plot_flow_rates_with_plotly(self, simple_flow_system, highs_solver):
+ """Test plotting flow rates with Plotly."""
+ simple_flow_system.optimize(highs_solver)
+
+ # Extract flow rate data
+ flow_vars = [v for v in simple_flow_system.solution.data_vars if '|flow_rate' in v]
+ flow_data = simple_flow_system.solution[flow_vars[:3]] # Take first 3
+
+ fig = plotting.with_plotly(flow_data, mode='stacked_bar')
+ assert fig is not None
+ assert len(fig.data) > 0
+
+ def test_plot_flow_rates_with_matplotlib(self, simple_flow_system, highs_solver):
+ """Test plotting flow rates with Matplotlib."""
+ simple_flow_system.optimize(highs_solver)
+
+ # Extract flow rate data
+ flow_vars = [v for v in simple_flow_system.solution.data_vars if '|flow_rate' in v]
+ flow_data = simple_flow_system.solution[flow_vars[:3]]
+
+ fig, ax = plotting.with_matplotlib(flow_data, mode='stacked_bar')
+ assert fig is not None
+ assert ax is not None
+ plt.close(fig)
+
+ def test_plot_line_mode(self, simple_flow_system, highs_solver):
+ """Test line plotting mode."""
+ simple_flow_system.optimize(highs_solver)
+
+ flow_vars = [v for v in simple_flow_system.solution.data_vars if '|flow_rate' in v]
+ flow_data = simple_flow_system.solution[flow_vars[:3]]
+
+ fig = plotting.with_plotly(flow_data, mode='line')
+ assert fig is not None
+
+ fig2, ax2 = plotting.with_matplotlib(flow_data, mode='line')
+ assert fig2 is not None
+ plt.close(fig2)
+
+ def test_plot_area_mode(self, simple_flow_system, highs_solver):
+ """Test area plotting mode (Plotly only)."""
+ simple_flow_system.optimize(highs_solver)
+
+ flow_vars = [v for v in simple_flow_system.solution.data_vars if '|flow_rate' in v]
+ flow_data = simple_flow_system.solution[flow_vars[:3]]
+
+ fig = plotting.with_plotly(flow_data, mode='area')
+ assert fig is not None
+
+ def test_plot_with_custom_colors(self, simple_flow_system, highs_solver):
+ """Test plotting with custom colors."""
+ simple_flow_system.optimize(highs_solver)
+
+ flow_vars = [v for v in simple_flow_system.solution.data_vars if '|flow_rate' in v][:2]
+ flow_data = simple_flow_system.solution[flow_vars]
+
+ # Test with color list
+ fig1 = plotting.with_plotly(flow_data, mode='line', colors=['red', 'blue'])
+ assert fig1 is not None
+
+ # Test with color dict
+ color_dict = {flow_vars[0]: '#ff0000', flow_vars[1]: '#0000ff'}
+ fig2 = plotting.with_plotly(flow_data, mode='line', colors=color_dict)
+ assert fig2 is not None
+
+ # Test with colorscale name
+ fig3 = plotting.with_plotly(flow_data, mode='line', colors='turbo')
+ assert fig3 is not None
+
+ def test_plot_with_title_and_labels(self, simple_flow_system, highs_solver):
+ """Test plotting with custom title and axis labels."""
+ simple_flow_system.optimize(highs_solver)
+
+ flow_vars = [v for v in simple_flow_system.solution.data_vars if '|flow_rate' in v]
+ flow_data = simple_flow_system.solution[flow_vars[:2]]
+
+ fig = plotting.with_plotly(flow_data, mode='line', title='Energy Flows', xlabel='Time (h)', ylabel='Power (kW)')
+ assert fig.layout.title.text == 'Energy Flows'
+
+ def test_plot_scalar_effects(self, simple_flow_system, highs_solver):
+ """Test plotting scalar effect values."""
+ simple_flow_system.optimize(highs_solver)
+
+ # Create dataset with scalar values
+ effects_data = xr.Dataset(
+ {
+ 'costs': simple_flow_system.solution['costs'],
+ 'CO2': simple_flow_system.solution['CO2'],
+ }
+ )
+
+ # This should handle scalar data gracefully
+ fig, ax = plotting.with_matplotlib(effects_data, mode='stacked_bar')
+ assert fig is not None
+ # Verify plot has visual content
+ assert len(ax.patches) > 0 or len(ax.lines) > 0 or len(ax.containers) > 0, 'Plot should contain visual elements'
+ plt.close(fig)
+
+
+class TestDualPiePlots:
+ """Tests for dual pie chart functionality."""
+
+ def test_dual_pie_with_effects(self, simple_flow_system, highs_solver):
+ """Test dual pie chart with effect contributions."""
+ simple_flow_system.optimize(highs_solver)
+
+ # Get temporal costs per timestep (summed to scalar for pie)
+ temporal_vars = [v for v in simple_flow_system.solution.data_vars if '->costs(temporal)' in v]
+
+ if len(temporal_vars) >= 2:
+ # Sum over time to get total contributions
+ left_data = xr.Dataset({v: simple_flow_system.solution[v].sum() for v in temporal_vars[:2]})
+ right_data = xr.Dataset({v: simple_flow_system.solution[v].sum() for v in temporal_vars[:2]})
+
+ fig = plotting.dual_pie_with_plotly(left_data, right_data)
+ assert fig is not None
+
+ def test_dual_pie_with_matplotlib(self, simple_flow_system, highs_solver):
+ """Test dual pie chart with matplotlib backend."""
+ simple_flow_system.optimize(highs_solver)
+
+ # Simple scalar data
+ left_data = xr.Dataset({'A': xr.DataArray(30), 'B': xr.DataArray(70)})
+ right_data = xr.Dataset({'A': xr.DataArray(50), 'B': xr.DataArray(50)})
+
+ fig, axes = plotting.dual_pie_with_matplotlib(left_data, right_data)
+ assert fig is not None
+ assert len(axes) == 2
+ plt.close(fig)
+
+
+# ============================================================================
+# HEATMAP TESTS
+# ============================================================================
+
+
+class TestHeatmapReshaping:
+ """Tests for heatmap time reshaping functionality."""
+
+ @pytest.fixture
+ def long_time_data(self):
+ """Create data with longer time series for heatmap testing."""
+ time = pd.date_range('2020-01-01', periods=72, freq='h') # 3 days
+ rng = np.random.default_rng(42)
+ data = xr.DataArray(rng.random(72) * 100, coords={'time': time}, dims=['time'], name='power')
+ return data
+
+ def test_reshape_auto_mode(self, long_time_data):
+ """Test automatic time reshaping."""
+ reshaped = plotting.reshape_data_for_heatmap(long_time_data, reshape_time='auto')
+
+ # Auto mode should attempt reshaping; verify it either reshaped or returned original
+ if 'timestep' in reshaped.dims or 'timeframe' in reshaped.dims:
+ # Reshaping occurred - verify 2D structure
+ assert len(reshaped.dims) == 2, 'Reshaped data should have 2 dimensions'
+ else:
+ # Reshaping not possible for this data - verify original structure preserved
+ assert reshaped.dims == long_time_data.dims, (
+ 'Original structure should be preserved if reshaping not applied'
+ )
+
+ def test_reshape_explicit_daily_hourly(self, long_time_data):
+ """Test explicit daily-hourly reshaping."""
+ reshaped = plotting.reshape_data_for_heatmap(long_time_data, reshape_time=('D', 'h'))
+
+ # Should have timeframe (days) and timestep (hours) dimensions
+ if 'timestep' in reshaped.dims:
+ assert 'timeframe' in reshaped.dims
+ # With 72 hours (3 days), we should have 3 timeframes and up to 24 timesteps
+ assert reshaped.sizes['timeframe'] == 3
+
+ def test_reshape_none_preserves_data(self, long_time_data):
+ """Test that reshape_time=None preserves original structure."""
+ reshaped = plotting.reshape_data_for_heatmap(long_time_data, reshape_time=None)
+ assert 'time' in reshaped.dims
+ xr.testing.assert_equal(reshaped, long_time_data)
+
+ def test_heatmap_with_plotly_v2(self, long_time_data):
+ """Test heatmap plotting with Plotly."""
+ # Reshape data first (heatmap_with_plotly_v2 requires pre-reshaped data)
+ reshaped = plotting.reshape_data_for_heatmap(long_time_data, reshape_time=('D', 'h'))
+
+ fig = plotting.heatmap_with_plotly_v2(reshaped)
+ assert fig is not None
+
+ def test_heatmap_with_matplotlib(self, long_time_data):
+ """Test heatmap plotting with Matplotlib."""
+ fig, ax = plotting.heatmap_with_matplotlib(long_time_data, reshape_time=('D', 'h'))
+ assert fig is not None
+ assert ax is not None
+ plt.close(fig)
+
+
+# ============================================================================
+# NETWORK VISUALIZATION TESTS
+# ============================================================================
+
+
+class TestNetworkVisualization:
+ """Tests for network visualization functionality."""
+
+ def test_topology_plot_returns_figure(self, simple_flow_system):
+ """Test that topology.plot() returns a PlotResult with Plotly Figure."""
+ import plotly.graph_objects as go
+
+ result = simple_flow_system.topology.plot(show=False)
+ assert result is not None
+ assert hasattr(result, 'figure')
+ assert isinstance(result.figure, go.Figure)
+
+ def test_topology_plot_creates_html(self, simple_flow_system, tmp_path):
+ """Test that topology.plot() figure can be saved to HTML file."""
+ html_path = tmp_path / 'network.html'
+ result = simple_flow_system.topology.plot(show=False)
+ result.figure.write_html(str(html_path))
+ assert html_path.exists()
+
+ def test_topology_plot_contains_all_buses(self, simple_flow_system):
+ """Test that topology plot contains all buses in the Sankey diagram."""
+ result = simple_flow_system.topology.plot(show=False)
+
+ # Get node labels from the Sankey diagram
+ sankey_data = result.figure.data[0]
+ node_labels = list(sankey_data.node.label)
+
+ # Check that buses are in network
+ for bus_label in simple_flow_system.buses.keys():
+ assert bus_label in node_labels
+
+
+# ============================================================================
+# VARIABLE NAMING CONVENTION TESTS
+# ============================================================================
+
+
+class TestVariableNamingConvention:
+ """Tests verifying the new variable naming convention."""
+
+ def test_flow_rate_naming_pattern(self, simple_flow_system, highs_solver):
+ """Test Component(Flow)|flow_rate naming pattern."""
+ simple_flow_system.optimize(highs_solver)
+
+ # Check Boiler flow rate follows pattern
+ assert 'Boiler(Q_th)|flow_rate' in simple_flow_system.solution
+
+ def test_status_variable_naming(self, simple_flow_system, highs_solver):
+ """Test status variable naming pattern."""
+ simple_flow_system.optimize(highs_solver)
+
+ # Components with status should have status variables
+ status_vars = [v for v in simple_flow_system.solution.data_vars if '|status' in v]
+        # NOTE(review): the assertion below is vacuous (len(...) >= 0 is always
+        # true); status variables may legitimately be absent, so this only
+        # verifies the lookup itself does not raise — tighten if a component
+        # with guaranteed status tracking is added to the fixture
+ assert len(status_vars) >= 0 # May be 0 if no status tracking
+
+ def test_storage_naming_pattern(self, simple_flow_system, highs_solver):
+ """Test Storage|variable naming pattern."""
+ simple_flow_system.optimize(highs_solver)
+
+ # Storage charge state follows pattern
+ assert 'Speicher|charge_state' in simple_flow_system.solution
+ assert 'Speicher|netto_discharge' in simple_flow_system.solution
+
+ def test_effect_naming_patterns(self, simple_flow_system, highs_solver):
+ """Test effect naming patterns."""
+ simple_flow_system.optimize(highs_solver)
+
+ # Total effect
+ assert 'costs' in simple_flow_system.solution
+
+ # Temporal component
+ assert 'costs(temporal)' in simple_flow_system.solution
+
+ # Per timestep
+ assert 'costs(temporal)|per_timestep' in simple_flow_system.solution
+
+ def test_list_all_variables(self, simple_flow_system, highs_solver):
+ """Test that all variables can be listed."""
+ simple_flow_system.optimize(highs_solver)
+
+ variables = list(simple_flow_system.solution.data_vars)
+ assert len(variables) > 0, f'Expected variables in solution, got {len(variables)}'
+
+
+# ============================================================================
+# EDGE CASES AND ERROR HANDLING
+# ============================================================================
+
+
+class TestPlottingEdgeCases:
+ """Tests for edge cases in plotting."""
+
+ def test_empty_dataset_returns_empty_figure(self, caplog):
+ """Test that empty dataset returns an empty figure."""
+ import logging
+
+ empty_data = xr.Dataset()
+ with caplog.at_level(logging.ERROR):
+ fig = plotting.with_plotly(empty_data)
+ # Empty dataset should produce figure with no data traces
+ assert len(fig.data) == 0, 'Empty dataset should produce figure with no data traces'
+
+ def test_non_numeric_data_raises_error(self):
+ """Test that non-numeric data raises appropriate error."""
+ string_data = xr.Dataset({'var': (['time'], ['a', 'b', 'c'])}, coords={'time': [0, 1, 2]})
+ with pytest.raises(TypeError, match='non-numeric'):
+ plotting.with_plotly(string_data)
+
+ def test_single_value_plotting(self):
+ """Test plotting with single data point."""
+ single_data = xr.Dataset({'var': (['time'], [42.0])}, coords={'time': [0]})
+
+ fig = plotting.with_plotly(single_data, mode='stacked_bar')
+ assert fig is not None
+
+ def test_all_zero_data_plotting(self):
+ """Test plotting with all zero values."""
+ zero_data = xr.Dataset(
+ {'var1': (['time'], [0.0, 0.0, 0.0]), 'var2': (['time'], [0.0, 0.0, 0.0])}, coords={'time': [0, 1, 2]}
+ )
+
+ fig = plotting.with_plotly(zero_data, mode='stacked_bar')
+ assert fig is not None
+
+ def test_nan_values_handled(self):
+ """Test that NaN values are handled gracefully (no exceptions raised)."""
+ nan_data = xr.Dataset({'var': (['time'], [1.0, np.nan, 3.0, np.nan, 5.0])}, coords={'time': [0, 1, 2, 3, 4]})
+
+ # Should not raise - NaN values should be handled gracefully
+ fig = plotting.with_plotly(nan_data, mode='line')
+ assert fig is not None
+ # Verify that plot was created with some data
+ assert len(fig.data) > 0, 'Figure should have data traces even with NaN values'
+
+ def test_negative_values_in_stacked_bar(self):
+ """Test handling of negative values in stacked bar charts."""
+ mixed_data = xr.Dataset(
+ {'positive': (['time'], [1.0, 2.0, 3.0]), 'negative': (['time'], [-1.0, -2.0, -3.0])},
+ coords={'time': [0, 1, 2]},
+ )
+
+ fig = plotting.with_plotly(mixed_data, mode='stacked_bar')
+ assert fig is not None
+
+ fig2, ax2 = plotting.with_matplotlib(mixed_data, mode='stacked_bar')
+ assert fig2 is not None
+ plt.close(fig2)
+
+
+# ============================================================================
+# COLOR PROCESSING TESTS
+# ============================================================================
+
+
+class TestColorProcessing:
+ """Tests for color processing functionality."""
+
+ def test_colorscale_name(self):
+ """Test processing colorscale by name."""
+ from flixopt.color_processing import process_colors
+
+ colors = process_colors('turbo', ['A', 'B', 'C'])
+ assert isinstance(colors, dict)
+ assert 'A' in colors
+ assert 'B' in colors
+ assert 'C' in colors
+
+ def test_color_list(self):
+ """Test processing explicit color list."""
+ from flixopt.color_processing import process_colors
+
+ color_list = ['#ff0000', '#00ff00', '#0000ff']
+ colors = process_colors(color_list, ['A', 'B', 'C'])
+ assert colors['A'] == '#ff0000'
+ assert colors['B'] == '#00ff00'
+ assert colors['C'] == '#0000ff'
+
+ def test_color_dict(self):
+ """Test processing color dictionary."""
+ from flixopt.color_processing import process_colors
+
+ color_dict = {'A': 'red', 'B': 'blue'}
+ colors = process_colors(color_dict, ['A', 'B', 'C'])
+ assert colors['A'] == 'red'
+ assert colors['B'] == 'blue'
+ # C should get a default color
+ assert 'C' in colors
+
+ def test_insufficient_colors_cycles(self):
+ """Test that insufficient colors cycle properly."""
+ from flixopt.color_processing import process_colors
+
+ # Only 2 colors for 5 labels
+ colors = process_colors(['red', 'blue'], ['A', 'B', 'C', 'D', 'E'])
+ assert len(colors) == 5
+ # Should cycle
+ assert colors['A'] == 'red'
+ assert colors['B'] == 'blue'
+ assert colors['C'] == 'red' # Cycles back
+
+
+# ============================================================================
+# EXPORT FUNCTIONALITY TESTS
+# ============================================================================
+
+
+class TestExportFunctionality:
+ """Tests for figure export functionality."""
+
+ def test_export_plotly_to_html(self, simple_flow_system, highs_solver, tmp_path):
+ """Test exporting Plotly figure to HTML."""
+ simple_flow_system.optimize(highs_solver)
+
+ flow_vars = [v for v in simple_flow_system.solution.data_vars if '|flow_rate' in v][:2]
+ flow_data = simple_flow_system.solution[flow_vars]
+
+ fig = plotting.with_plotly(flow_data, mode='line')
+
+ html_path = tmp_path / 'plot.html'
+ # export_figure expects pathlib.Path and save=True to actually save
+ plotting.export_figure(fig, default_path=html_path, save=True, show=False)
+ assert html_path.exists()
+
+ def test_export_matplotlib_to_png(self, simple_flow_system, highs_solver, tmp_path):
+ """Test exporting Matplotlib figure to PNG."""
+ simple_flow_system.optimize(highs_solver)
+
+ flow_vars = [v for v in simple_flow_system.solution.data_vars if '|flow_rate' in v][:2]
+ flow_data = simple_flow_system.solution[flow_vars]
+
+ fig, ax = plotting.with_matplotlib(flow_data, mode='line')
+
+ png_path = tmp_path / 'plot.png'
+ # export_figure expects pathlib.Path and save=True to actually save
+ plotting.export_figure((fig, ax), default_path=png_path, save=True, show=False)
+ assert png_path.exists()
+ plt.close(fig)
+
+
+# ============================================================================
+# SANKEY DIAGRAM TESTS
+# ============================================================================
+
+
+class TestSankeyDiagram:
+ """Tests for Sankey diagram functionality."""
+
+ def test_sankey_flows(self, simple_flow_system, highs_solver):
+ """Test Sankey diagram with flows() method."""
+ simple_flow_system.optimize(highs_solver)
+
+ result = simple_flow_system.statistics.plot.sankey.flows(show=False)
+
+ assert result.figure is not None
+ assert result.data is not None
+ assert 'value' in result.data
+ assert 'source' in result.data.coords
+ assert 'target' in result.data.coords
+ assert len(result.data.link) > 0
+
+ def test_sankey_peak_flow(self, simple_flow_system, highs_solver):
+ """Test Sankey diagram with peak_flow() method."""
+ simple_flow_system.optimize(highs_solver)
+
+ result = simple_flow_system.statistics.plot.sankey.peak_flow(show=False)
+
+ assert result.figure is not None
+ assert result.data is not None
+ assert len(result.data.link) > 0
+
+ def test_sankey_sizes(self, simple_flow_system, highs_solver):
+ """Test Sankey diagram with sizes() method shows investment sizes."""
+ simple_flow_system.optimize(highs_solver)
+
+ result = simple_flow_system.statistics.plot.sankey.sizes(show=False)
+
+ assert result.figure is not None
+ assert result.data is not None
+ # Should have some flows with investment sizes
+ assert len(result.data.link) > 0
+
+ def test_sankey_sizes_max_size_filter(self, simple_flow_system, highs_solver):
+ """Test that max_size parameter filters large sizes."""
+ simple_flow_system.optimize(highs_solver)
+
+ # Get all sizes (no filter)
+ result_all = simple_flow_system.statistics.plot.sankey.sizes(max_size=None, show=False)
+
+ # Get filtered sizes
+ result_filtered = simple_flow_system.statistics.plot.sankey.sizes(max_size=100, show=False)
+
+ # Filtered should have fewer or equal links
+ assert len(result_filtered.data.link) <= len(result_all.data.link)
+
+ def test_sankey_effects(self, simple_flow_system, highs_solver):
+ """Test Sankey diagram with effects() method."""
+ simple_flow_system.optimize(highs_solver)
+
+ result = simple_flow_system.statistics.plot.sankey.effects(show=False)
+
+ assert result.figure is not None
+ assert result.data is not None
+ # Should have component -> effect links
+ assert len(result.data.link) > 0
+ # Effects should appear in targets with bracket notation
+ targets = list(result.data.target.values)
+ assert any('[' in str(t) for t in targets), 'Effects should appear as [effect_name] in targets'
+
+ def test_sankey_effects_includes_costs_and_co2(self, simple_flow_system, highs_solver):
+ """Test that effects() method includes both costs and CO2."""
+ simple_flow_system.optimize(highs_solver)
+
+ result = simple_flow_system.statistics.plot.sankey.effects(show=False)
+
+ targets = [str(t) for t in result.data.target.values]
+ # Should have at least costs effect
+ assert '[costs]' in targets, 'Should include costs effect'
+
+ def test_sankey_flows_with_time_select(self, simple_flow_system, highs_solver):
+ """Test Sankey flows with specific time selection."""
+ simple_flow_system.optimize(highs_solver)
+
+ # Get first timestamp from the data
+ first_time = simple_flow_system.statistics.flow_hours.time.values[0]
+ result = simple_flow_system.statistics.plot.sankey.flows(select={'time': first_time}, show=False)
+
+ assert result.figure is not None
+ assert len(result.data.link) > 0
+
+ def test_sankey_flows_with_mean_aggregate(self, simple_flow_system, highs_solver):
+ """Test Sankey flows with mean aggregation."""
+ simple_flow_system.optimize(highs_solver)
+
+ result_sum = simple_flow_system.statistics.plot.sankey.flows(aggregate='sum', show=False)
+ result_mean = simple_flow_system.statistics.plot.sankey.flows(aggregate='mean', show=False)
+
+ # Both should produce valid results
+ assert result_sum.figure is not None
+ assert result_mean.figure is not None
+ # Mean values should be smaller than sum values
+ sum_total = sum(result_sum.data.value.values)
+ mean_total = sum(result_mean.data.value.values)
+ assert mean_total < sum_total, 'Mean should produce smaller values than sum'
+
+ def test_sankey_returns_plot_result(self, simple_flow_system, highs_solver):
+ """Test that sankey returns PlotResult with figure and data."""
+ simple_flow_system.optimize(highs_solver)
+
+ result = simple_flow_system.statistics.plot.sankey.flows(show=False)
+
+ # Check PlotResult structure
+ assert hasattr(result, 'figure')
+ assert hasattr(result, 'data')
+ assert isinstance(result.data, xr.Dataset)
diff --git a/tests/test_solution_persistence.py b/tests/test_solution_persistence.py
new file mode 100644
index 000000000..f825f64a8
--- /dev/null
+++ b/tests/test_solution_persistence.py
@@ -0,0 +1,496 @@
+"""Tests for the new solution persistence API.
+
+This module tests the direct solution storage on FlowSystem and Element classes:
+- FlowSystem.solution: xr.Dataset containing all solution variables
+- Element.solution: subset of FlowSystem.solution for that element's variables
+- Element._variable_names: list of variable names for each element
+- Serialization/deserialization of solution with FlowSystem
+"""
+
+import pytest
+import xarray as xr
+
+import flixopt as fx
+
+from .conftest import (
+ assert_almost_equal_numeric,
+ flow_system_base,
+ flow_system_long,
+ flow_system_segments_of_flows_2,
+ simple_flow_system,
+ simple_flow_system_scenarios,
+)
+
+
+@pytest.fixture(
+    params=[
+        flow_system_base,
+        simple_flow_system_scenarios,
+        flow_system_segments_of_flows_2,
+        simple_flow_system,
+        flow_system_long,
+    ]
+)
+def flow_system(request):
+    fs = request.getfixturevalue(request.param.__name__)  # resolve the parametrized fixture by name
+    if isinstance(fs, fx.FlowSystem):
+        return fs
+    else:
+        return fs[0]  # some fixtures yield a tuple whose first item is the FlowSystem
+
+
+class TestSolutionOnFlowSystem:
+    """Tests for FlowSystem.solution attribute."""
+
+    def test_solution_none_before_solve(self, simple_flow_system):
+        """FlowSystem.solution should be None before optimization."""
+        assert simple_flow_system.solution is None
+
+    def test_solution_set_after_solve(self, simple_flow_system, highs_solver):
+        """FlowSystem.solution should be set after solve()."""
+        simple_flow_system.optimize(highs_solver)
+
+        assert simple_flow_system.solution is not None
+        assert isinstance(simple_flow_system.solution, xr.Dataset)
+
+    def test_solution_contains_all_variables(self, simple_flow_system, highs_solver):
+        """FlowSystem.solution should contain all model variables."""
+        simple_flow_system.optimize(highs_solver)
+
+        # A solved system must expose at least one solution variable
+        assert len(simple_flow_system.solution.data_vars) > 0
+
+        # Check that known variables are present (from the simple flow system)
+        solution_vars = set(simple_flow_system.solution.data_vars.keys())
+        # Substring match: variable names embed element labels plus quantity names
+        assert any('flow_rate' in v for v in solution_vars)
+        assert any('costs' in v for v in solution_vars)
+
+
+class TestSolutionOnElement:
+ """Tests for Element.solution property."""
+
+ def test_element_solution_raises_before_linked(self, simple_flow_system):
+ """Element.solution should raise if element not linked to FlowSystem."""
+ # Create an unlinked element
+ bus = fx.Bus('TestBus')
+ with pytest.raises(ValueError, match='not linked to a FlowSystem'):
+ _ = bus.solution
+
+ def test_element_solution_raises_before_solve(self, simple_flow_system):
+ """Element.solution should raise if no solution available."""
+ boiler = simple_flow_system.components['Boiler']
+ with pytest.raises(ValueError, match='No solution available'):
+ _ = boiler.solution
+
+ def test_element_solution_raises_before_modeling(self, simple_flow_system, highs_solver):
+ """Element.solution should work after modeling and solve."""
+ simple_flow_system.optimize(highs_solver)
+
+ # Create a new element not in the flow system - this is a special case
+ # The actual elements in the flow system should work fine
+ boiler = simple_flow_system.components['Boiler']
+ # This should work since boiler was modeled
+ solution = boiler.solution
+ assert isinstance(solution, xr.Dataset)
+
+ def test_element_solution_contains_element_variables(self, simple_flow_system, highs_solver):
+ """Element.solution should contain only that element's variables."""
+ simple_flow_system.optimize(highs_solver)
+
+ boiler = simple_flow_system.components['Boiler']
+ boiler_solution = boiler.solution
+
+ # All variables in element solution should start with element's label
+ for var_name in boiler_solution.data_vars:
+ assert var_name.startswith(boiler.label_full), f'{var_name} does not start with {boiler.label_full}'
+
+ def test_different_elements_have_different_solutions(self, simple_flow_system, highs_solver):
+ """Different elements should have different solution subsets."""
+ simple_flow_system.optimize(highs_solver)
+
+ boiler = simple_flow_system.components['Boiler']
+ chp = simple_flow_system.components['CHP_unit']
+
+ boiler_vars = set(boiler.solution.data_vars.keys())
+ chp_vars = set(chp.solution.data_vars.keys())
+
+ # They should have different variables
+ assert boiler_vars != chp_vars
+ # And they shouldn't overlap
+ assert len(boiler_vars & chp_vars) == 0
+
+
+class TestVariableNamesPopulation:
+ """Tests for Element._variable_names population after modeling."""
+
+ def test_variable_names_empty_before_modeling(self, simple_flow_system):
+ """Element._variable_names should be empty before modeling."""
+ boiler = simple_flow_system.components['Boiler']
+ assert boiler._variable_names == []
+
+ def test_variable_names_populated_after_modeling(self, simple_flow_system, highs_solver):
+ """Element._variable_names should be populated after modeling."""
+ simple_flow_system.build_model()
+
+ boiler = simple_flow_system.components['Boiler']
+ assert len(boiler._variable_names) > 0
+
+ def test_constraint_names_populated_after_modeling(self, simple_flow_system):
+ """Element._constraint_names should be populated after modeling."""
+ simple_flow_system.build_model()
+
+ boiler = simple_flow_system.components['Boiler']
+ # Boiler should have some constraints
+ assert len(boiler._constraint_names) >= 0 # Some elements might have no constraints
+
+ def test_all_elements_have_variable_names(self, simple_flow_system):
+ """All elements with submodels should have _variable_names populated."""
+ simple_flow_system.build_model()
+
+ for element in simple_flow_system.values():
+ if element.submodel is not None:
+ # Element was modeled, should have variable names
+ assert isinstance(element._variable_names, list)
+
+
+class TestSolutionPersistence:
+    """Tests for solution serialization/deserialization with FlowSystem."""
+
+    @pytest.mark.slow
+    def test_solution_persisted_in_dataset(self, flow_system, highs_solver):
+        """Solution should be included when saving FlowSystem to dataset."""
+        flow_system.optimize(highs_solver)
+
+        # Save to dataset
+        ds = flow_system.to_dataset()
+
+        # Check solution variables are in the dataset with 'solution|' prefix
+        solution_vars = [v for v in ds.data_vars if v.startswith('solution|')]
+        assert len(solution_vars) > 0, 'No solution variables in dataset'
+
+        # The flag marks the dataset as carrying an embedded solution
+        assert ds.attrs.get('has_solution', False) is True
+
+    @pytest.mark.slow
+    def test_solution_restored_from_dataset(self, flow_system, highs_solver):
+        """Solution should be restored when loading FlowSystem from dataset."""
+        flow_system.optimize(highs_solver)
+
+        # Save and restore
+        ds = flow_system.to_dataset()
+        restored_fs = fx.FlowSystem.from_dataset(ds)
+
+        # Check solution is restored
+        assert restored_fs.solution is not None
+        assert isinstance(restored_fs.solution, xr.Dataset)
+
+        # Check same number of variables
+        assert len(restored_fs.solution.data_vars) == len(flow_system.solution.data_vars)
+
+    @pytest.mark.slow
+    def test_solution_values_match_after_restore(self, flow_system, highs_solver):
+        """Solution values should match after save/restore cycle."""
+        flow_system.optimize(highs_solver)
+
+        original_solution = flow_system.solution.copy(deep=True)  # snapshot before the roundtrip
+
+        # Save and restore
+        ds = flow_system.to_dataset()
+        restored_fs = fx.FlowSystem.from_dataset(ds)
+
+        # Check values match exactly (no tolerance: serialization must be lossless)
+        for var_name in original_solution.data_vars:
+            xr.testing.assert_equal(
+                original_solution[var_name],
+                restored_fs.solution[var_name],
+            )
+
+    @pytest.mark.slow
+    def test_element_solution_works_after_restore(self, flow_system, highs_solver):
+        """Element.solution should work on restored FlowSystem."""
+        flow_system.optimize(highs_solver)
+
+        # Get an element and its solution
+        element_label = list(flow_system.components.keys())[0]  # arbitrary component
+        original_element = flow_system.components[element_label]
+        original_element_solution = original_element.solution.copy(deep=True)
+
+        # Save and restore
+        ds = flow_system.to_dataset()
+        restored_fs = fx.FlowSystem.from_dataset(ds)
+
+        # Get the same element from restored flow system
+        restored_element = restored_fs.components[element_label]
+
+        # Element.solution should work
+        restored_element_solution = restored_element.solution
+
+        # Values should match exactly
+        for var_name in original_element_solution.data_vars:
+            xr.testing.assert_equal(
+                original_element_solution[var_name],
+                restored_element_solution[var_name],
+            )
+
+    @pytest.mark.slow
+    def test_variable_names_persisted(self, flow_system, highs_solver):
+        """Element._variable_names should be persisted and restored."""
+        flow_system.optimize(highs_solver)
+
+        # Get original variable names
+        element_label = list(flow_system.components.keys())[0]  # arbitrary component
+        original_element = flow_system.components[element_label]
+        original_var_names = original_element._variable_names.copy()
+
+        # Save and restore
+        ds = flow_system.to_dataset()
+        restored_fs = fx.FlowSystem.from_dataset(ds)
+
+        # Get restored element
+        restored_element = restored_fs.components[element_label]
+
+        # Variable names should match
+        assert restored_element._variable_names == original_var_names
+
+
+class TestFlowSystemFileIO:
+    """Tests for file-based persistence of FlowSystem with solution."""
+
+    @pytest.mark.slow
+    def test_netcdf_roundtrip_with_solution(self, flow_system, highs_solver, tmp_path):
+        """FlowSystem with solution should survive netCDF roundtrip."""
+        flow_system.optimize(highs_solver)
+
+        original_solution = flow_system.solution.copy(deep=True)  # snapshot before the roundtrip
+
+        # Save to netCDF
+        filepath = tmp_path / 'flow_system_with_solution.nc4'
+        flow_system.to_netcdf(filepath)
+
+        # Load from netCDF
+        restored_fs = fx.FlowSystem.from_netcdf(filepath)
+
+        # Check solution is restored
+        assert restored_fs.solution is not None
+
+        # Check values match exactly (file roundtrip must be lossless)
+        for var_name in original_solution.data_vars:
+            xr.testing.assert_equal(
+                original_solution[var_name],
+                restored_fs.solution[var_name],
+            )
+
+    @pytest.mark.slow
+    def test_loaded_flow_system_can_be_reoptimized(self, flow_system, highs_solver, tmp_path):
+        """Loaded FlowSystem should be able to run new optimization."""
+        flow_system.optimize(highs_solver)
+        original_objective = flow_system.solution['objective'].item()  # scalar objective value
+
+        # Save and load
+        filepath = tmp_path / 'flow_system_for_reopt.nc4'
+        flow_system.to_netcdf(filepath)
+        restored_fs = fx.FlowSystem.from_netcdf(filepath)
+
+        # Run new optimization
+        restored_fs.optimize(highs_solver)
+
+        # Should get same objective value (same problem, same solver)
+        assert_almost_equal_numeric(
+            original_objective,
+            restored_fs.solution['objective'].item(),
+            'Objective mismatch after reload',
+        )
+
+
+class TestNoSolutionPersistence:
+    """Tests for FlowSystem without solution (before optimization)."""
+
+    def test_flow_system_without_solution_saves(self, simple_flow_system):
+        """FlowSystem without solution should save successfully."""
+        ds = simple_flow_system.to_dataset()
+        assert ds.attrs.get('has_solution', True) is False  # default True: attr must be explicitly False
+
+    def test_flow_system_without_solution_loads(self, simple_flow_system):
+        """FlowSystem without solution should load successfully."""
+        ds = simple_flow_system.to_dataset()
+        restored_fs = fx.FlowSystem.from_dataset(ds)
+
+        assert restored_fs.solution is None  # no solution was saved, none should appear
+
+    def test_loaded_flow_system_without_solution_can_optimize(self, simple_flow_system, highs_solver):
+        """Loaded FlowSystem (no prior solution) should optimize successfully."""
+        ds = simple_flow_system.to_dataset()
+        restored_fs = fx.FlowSystem.from_dataset(ds)
+
+        restored_fs.optimize(highs_solver)
+
+        # Should have solution now
+        assert restored_fs.solution is not None
+
+
+class TestEdgeCases:
+    """Edge cases and error handling."""
+
+    def test_empty_variable_names_handled(self, simple_flow_system, highs_solver):
+        """Elements with no variables should be handled gracefully."""
+        simple_flow_system.optimize(highs_solver)
+
+        # Buses typically have no variables of their own in some configurations
+        for bus in simple_flow_system.buses.values():
+            # Only access .solution when the bus actually owns variables
+            if bus._variable_names:
+                _ = bus.solution
+            # If no variable names, solution access would raise - that's expected
+
+    def test_solution_cleared_on_new_optimization(self, simple_flow_system, highs_solver):
+        """New optimization should update solution, not accumulate."""
+        simple_flow_system.optimize(highs_solver)
+
+        first_solution_vars = set(simple_flow_system.solution.data_vars.keys())
+
+        # Reset model, solution, and per-element caches for re-optimization
+        simple_flow_system.model = None
+        simple_flow_system.solution = None
+        for element in simple_flow_system.values():
+            element._variable_names = []
+            element._constraint_names = []
+            element.submodel = None
+
+        # Re-optimize
+        simple_flow_system.optimize(highs_solver)
+
+        second_solution_vars = set(simple_flow_system.solution.data_vars.keys())
+
+        # Should have same variables (not accumulated)
+        assert first_solution_vars == second_solution_vars
+
+
+class TestFlowSystemDirectMethods:
+    """Tests for FlowSystem.build_model(), solve(), and optimize() methods."""
+
+    def test_build_model_creates_model(self, simple_flow_system):
+        """build_model() should create and populate the model."""
+        assert simple_flow_system.model is None
+
+        result = simple_flow_system.build_model()
+
+        # Should return self for method chaining
+        assert result is simple_flow_system
+        # Model should be created
+        assert simple_flow_system.model is not None
+        # The populated model must contain at least one optimization variable
+        assert len(simple_flow_system.model.variables) > 0
+
+    def test_build_model_with_normalize_weights_false(self, simple_flow_system):
+        """build_model() should respect normalize_weights parameter."""
+        simple_flow_system.build_model(normalize_weights=False)
+
+        # Model should be created regardless of the weight setting
+        assert simple_flow_system.model is not None
+
+    def test_solve_without_build_model_raises(self, simple_flow_system, highs_solver):
+        """solve() should raise if model not built."""
+        with pytest.raises(RuntimeError, match='Model has not been built'):
+            simple_flow_system.solve(highs_solver)
+
+    def test_solve_after_build_model(self, simple_flow_system, highs_solver):
+        """solve() should work after build_model()."""
+        simple_flow_system.build_model()
+
+        result = simple_flow_system.solve(highs_solver)
+
+        # Should return self for method chaining
+        assert result is simple_flow_system
+        # Solution should be populated
+        assert simple_flow_system.solution is not None
+        assert isinstance(simple_flow_system.solution, xr.Dataset)
+
+    def test_solve_populates_element_variable_names(self, simple_flow_system, highs_solver):
+        """solve() should have element variable names available."""
+        simple_flow_system.build_model()
+        simple_flow_system.solve(highs_solver)
+
+        # Elements should have variable names populated
+        boiler = simple_flow_system.components['Boiler']
+        assert len(boiler._variable_names) > 0
+
+    def test_optimize_convenience_method(self, simple_flow_system, highs_solver):
+        """optimize() should build and solve in one step."""
+        assert simple_flow_system.model is None
+        assert simple_flow_system.solution is None
+
+        result = simple_flow_system.optimize(highs_solver)
+
+        # Should return self for method chaining
+        assert result is simple_flow_system
+        # Model should be created
+        assert simple_flow_system.model is not None
+        # Solution should be populated
+        assert simple_flow_system.solution is not None
+
+    def test_optimize_method_chaining(self, simple_flow_system, highs_solver):
+        """optimize() should support method chaining to access solution."""
+        solution = simple_flow_system.optimize(highs_solver).solution  # chained access
+
+        assert solution is not None
+        assert isinstance(solution, xr.Dataset)
+        assert len(solution.data_vars) > 0
+
+    def test_optimize_with_normalize_weights_false(self, simple_flow_system, highs_solver):
+        """optimize() should respect normalize_weights parameter."""
+        simple_flow_system.optimize(highs_solver, normalize_weights=False)
+
+        assert simple_flow_system.solution is not None
+
+    def test_model_accessible_after_build(self, simple_flow_system):
+        """Model should be inspectable after build_model()."""
+        simple_flow_system.build_model()
+
+        # User should be able to inspect model variables
+        model = simple_flow_system.model
+        assert hasattr(model, 'variables')
+        assert hasattr(model, 'constraints')
+
+        # Variables should exist
+        assert len(model.variables) > 0
+
+    def test_element_solution_after_optimize(self, simple_flow_system, highs_solver):
+        """Element.solution should work after optimize()."""
+        simple_flow_system.optimize(highs_solver)
+
+        boiler = simple_flow_system.components['Boiler']
+        boiler_solution = boiler.solution
+
+        assert isinstance(boiler_solution, xr.Dataset)
+        # All variables should belong to boiler
+        for var_name in boiler_solution.data_vars:
+            assert var_name.startswith(boiler.label_full)
+
+    def test_repeated_optimization_produces_consistent_results(self, simple_flow_system, highs_solver):
+        """Repeated optimization should produce consistent results."""
+        # First optimization
+        simple_flow_system.optimize(highs_solver)
+        first_solution = simple_flow_system.solution.copy(deep=True)
+
+        # Reset model, solution, and per-element caches for re-optimization
+        simple_flow_system.model = None
+        simple_flow_system.solution = None
+        for element in simple_flow_system.values():
+            element._variable_names = []
+            element._constraint_names = []
+            element.submodel = None
+
+        # Second optimization
+        simple_flow_system.optimize(highs_solver)
+
+        # Solutions should match
+        assert set(first_solution.data_vars.keys()) == set(simple_flow_system.solution.data_vars.keys())
+
+        # Values should be very close (same optimization problem, solver may jitter slightly)
+        for var_name in first_solution.data_vars:
+            xr.testing.assert_allclose(
+                first_solution[var_name],
+                simple_flow_system.solution[var_name],
+                rtol=1e-5,
+            )
diff --git a/tests/test_storage.py b/tests/test_storage.py
index a5d2c7a19..15170a321 100644
--- a/tests/test_storage.py
+++ b/tests/test_storage.py
@@ -451,6 +451,7 @@ def test_investment_parameters(
'effects_of_investment': 100,
'effects_of_investment_per_size': 10,
'mandatory': mandatory,
+ 'maximum_size': 100,
}
if minimum_size is not None:
invest_params['minimum_size'] = minimum_size
diff --git a/tests/test_topology_accessor.py b/tests/test_topology_accessor.py
new file mode 100644
index 000000000..09f789b2b
--- /dev/null
+++ b/tests/test_topology_accessor.py
@@ -0,0 +1,183 @@
+"""Tests for the TopologyAccessor class."""
+
+import tempfile
+from pathlib import Path
+
+import plotly.graph_objects as go
+import pytest
+
+import flixopt as fx
+
+
+@pytest.fixture
+def flow_system(simple_flow_system):
+    """Get a simple flow system for testing."""
+    if isinstance(simple_flow_system, fx.FlowSystem):
+        return simple_flow_system
+    return simple_flow_system[0]  # fixture may yield a tuple whose first item is the FlowSystem
+
+
+class TestTopologyInfos:
+    """Tests for topology.infos() method."""
+
+    def test_infos_returns_tuple(self, flow_system):
+        """Test that infos() returns a tuple of two dicts."""
+        result = flow_system.topology.infos()
+        assert isinstance(result, tuple)
+        assert len(result) == 2  # (nodes, edges)
+        nodes, edges = result
+        assert isinstance(nodes, dict)
+        assert isinstance(edges, dict)
+
+    def test_infos_nodes_have_correct_structure(self, flow_system):
+        """Test that nodes have label, class, and infos keys."""
+        nodes, _ = flow_system.topology.infos()
+        for node_data in nodes.values():
+            assert 'label' in node_data
+            assert 'class' in node_data
+            assert 'infos' in node_data
+            assert node_data['class'] in ('Bus', 'Component')  # every node is a bus or a component
+
+    def test_infos_edges_have_correct_structure(self, flow_system):
+        """Test that edges have label, start, end, and infos keys."""
+        _, edges = flow_system.topology.infos()
+        for edge_data in edges.values():
+            assert 'label' in edge_data
+            assert 'start' in edge_data
+            assert 'end' in edge_data
+            assert 'infos' in edge_data
+
+    def test_infos_contains_all_elements(self, flow_system):
+        """Test that infos contains all components, buses, and flows."""
+        nodes, edges = flow_system.topology.infos()
+
+        # Components appear as nodes keyed by label
+        for comp in flow_system.components.values():
+            assert comp.label in nodes
+
+        # Buses appear as nodes keyed by label
+        for bus in flow_system.buses.values():
+            assert bus.label in nodes
+
+        # Flows appear as edges keyed by their full label
+        for flow in flow_system.flows.values():
+            assert flow.label_full in edges
+
+
+class TestTopologyPlot:
+    """Tests for topology.plot() method (Sankey-based)."""
+
+    def test_plot_returns_plotly_figure(self, flow_system):
+        """Test that plot() returns a PlotResult with Plotly Figure."""
+        result = flow_system.topology.plot(show=False)
+        assert hasattr(result, 'figure')
+        assert isinstance(result.figure, go.Figure)
+
+    def test_plot_contains_sankey_trace(self, flow_system):
+        """Test that the figure contains a Sankey trace."""
+        result = flow_system.topology.plot(show=False)
+        assert len(result.figure.data) == 1  # exactly one trace: the Sankey
+        assert isinstance(result.figure.data[0], go.Sankey)
+
+    def test_plot_has_correct_title(self, flow_system):
+        """Test that the figure has the correct title."""
+        result = flow_system.topology.plot(show=False)
+        assert result.figure.layout.title.text == 'Flow System Topology'  # default title
+
+    def test_plot_with_custom_title(self, flow_system):
+        """Test that custom title can be passed via plotly_kwargs."""
+        result = flow_system.topology.plot(show=False, title='Custom Title')
+        assert result.figure.layout.title.text == 'Custom Title'
+
+    def test_plot_contains_all_nodes(self, flow_system):
+        """Test that the Sankey contains all buses and components as nodes."""
+        result = flow_system.topology.plot(show=False)
+        sankey = result.figure.data[0]
+        node_labels = set(sankey.node.label)
+
+        # All buses should be in nodes
+        for bus in flow_system.buses.values():
+            assert bus.label in node_labels
+
+        # All components should be in nodes
+        for comp in flow_system.components.values():
+            assert comp.label in node_labels
+
+    def test_plot_contains_all_flows_as_links(self, flow_system):
+        """Test that all flows are represented as links."""
+        result = flow_system.topology.plot(show=False)
+        sankey = result.figure.data[0]
+        link_labels = set(sankey.link.label)
+
+        # All flows should be represented as links
+        for flow in flow_system.flows.values():
+            assert flow.label_full in link_labels
+
+    def test_plot_with_colors(self, flow_system):
+        """Test that colors parameter is accepted."""
+        # Should not raise for either a named colorscale or an explicit color list
+        flow_system.topology.plot(colors='Viridis', show=False)
+        flow_system.topology.plot(colors=['red', 'blue', 'green'], show=False)
+
+
+class TestTopologyPlotLegacy:
+ """Tests for topology.plot_legacy() method (PyVis-based)."""
+
+ def test_plot_legacy_returns_network_or_none(self, flow_system):
+ """Test that plot_legacy() returns a pyvis Network or None."""
+ try:
+ import pyvis
+
+ result = flow_system.topology.plot_legacy(path=False, show=False)
+ assert result is None or isinstance(result, pyvis.network.Network)
+ except ImportError:
+ # pyvis not installed, should return None
+ result = flow_system.topology.plot_legacy(path=False, show=False)
+ assert result is None
+
+ def test_plot_legacy_creates_html_file(self, flow_system):
+ """Test that plot_legacy() creates an HTML file when path is specified."""
+ pytest.importorskip('pyvis')
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ html_path = Path(tmpdir) / 'network.html'
+ flow_system.topology.plot_legacy(path=str(html_path), show=False)
+ assert html_path.exists()
+ content = html_path.read_text()
+ assert '' in content.lower() or '