diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 54d9a211..defdcf5a 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -36,7 +36,7 @@ jobs:
permissions:
id-token: write
steps:
- - uses: actions/download-artifact@v7
+ - uses: actions/download-artifact@v8
with:
name: Packages
path: dist
diff --git a/.github/workflows/test-models.yml b/.github/workflows/test-models.yml
index d5c14d4a..ded75685 100644
--- a/.github/workflows/test-models.yml
+++ b/.github/workflows/test-models.yml
@@ -101,7 +101,7 @@ jobs:
- name: Upload artifacts
if: env.pinned == 'false'
- uses: actions/upload-artifact@v6
+ uses: actions/upload-artifact@v7
with:
name: results-pypsa-eur-${{ matrix.version }}
path: |
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 2253d2cf..6484ef3e 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -64,7 +64,7 @@ jobs:
- name: Set up windows package manager
if: matrix.os == 'windows-latest'
- uses: crazy-max/ghaction-chocolatey@v3
+ uses: crazy-max/ghaction-chocolatey@v4
with:
args: -h
@@ -74,7 +74,7 @@ jobs:
choco install glpk
- name: Download package
- uses: actions/download-artifact@v7
+ uses: actions/download-artifact@v8
with:
name: Packages
path: dist
@@ -112,7 +112,7 @@ jobs:
python-version: 3.12
- name: Download package
- uses: actions/download-artifact@v7
+ uses: actions/download-artifact@v8
with:
name: Packages
path: dist
diff --git a/.gitignore b/.gitignore
index 7b962a6b..10ac8e45 100644
--- a/.gitignore
+++ b/.gitignore
@@ -50,3 +50,4 @@ benchmark/scripts/leftovers/
# direnv
.envrc
AGENTS.md
+coverage.xml
diff --git a/CLAUDE.md b/CLAUDE.md
index 67155ae3..1f696a0b 100644
--- a/CLAUDE.md
+++ b/CLAUDE.md
@@ -110,27 +110,6 @@ When modifying the codebase, maintain consistency with these patterns and ensure
* Always create a feature branch for new features or bug fixes.
* Use the github cli (gh) to interact with the Github repository.
-### GitHub Claude Code Integration
-
-This repository includes Claude Code GitHub Actions for automated assistance:
-
-1. **Automated PR Reviews** (`claude-code-review.yml`):
- - Automatically reviews PRs only when first created (opened)
- - Subsequent reviews require manual `@claude` mention
- - Focuses on Python best practices, xarray patterns, and optimization correctness
- - Can run tests and linting as part of the review
- - **Skip initial review by**: Adding `[skip-review]` or `[WIP]` to PR title, or using draft PRs
-
-2. **Manual Claude Assistance** (`claude.yml`):
- - Trigger by mentioning `@claude` in any:
- - Issue comments
- - Pull request comments
- - Pull request reviews
- - New issue body or title
- - Claude can help with bug fixes, feature implementation, code explanations, etc.
-
-**Note**: Both workflows require the `ANTHROPIC_API_KEY` secret to be configured in the repository settings.
-
## Development Guidelines
@@ -140,3 +119,4 @@ This repository includes Claude Code GitHub Actions for automated assistance:
4. Use type hints and mypy for type checking.
5. Always write tests into the `test` directory, following the naming convention `test_*.py`.
6. Always write temporary and non git-tracked code in the `dev-scripts` directory.
+7. In test scripts use linopy assertions from the testing.py module where useful (assert_linequal, assert_varequal, etc.)
diff --git a/doc/api.rst b/doc/api.rst
index 57a61e3e..20958857 100644
--- a/doc/api.rst
+++ b/doc/api.rst
@@ -19,8 +19,9 @@ Creating a model
model.Model.add_constraints
model.Model.add_objective
model.Model.add_piecewise_constraints
- model.Model.add_disjunctive_piecewise_constraints
+ piecewise.piecewise
piecewise.breakpoints
+ piecewise.segments
model.Model.linexpr
model.Model.remove_constraints
diff --git a/doc/index.rst b/doc/index.rst
index 6801aeaf..fd7f9ed8 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -111,6 +111,7 @@ This package is published under MIT license.
creating-variables
creating-expressions
creating-constraints
+ coordinate-alignment
sos-constraints
piecewise-linear-constraints
piecewise-linear-constraints-tutorial
diff --git a/doc/piecewise-linear-constraints.rst b/doc/piecewise-linear-constraints.rst
index b4c6336d..9278248a 100644
--- a/doc/piecewise-linear-constraints.rst
+++ b/doc/piecewise-linear-constraints.rst
@@ -7,17 +7,44 @@ Piecewise linear (PWL) constraints approximate nonlinear functions as connected
linear segments, allowing you to model cost curves, efficiency curves, or
production functions within a linear programming framework.
-Linopy provides two methods:
-
-- :py:meth:`~linopy.model.Model.add_piecewise_constraints` -- for
- **continuous** piecewise linear functions (segments connected end-to-end).
-- :py:meth:`~linopy.model.Model.add_disjunctive_piecewise_constraints` -- for
- **disconnected** segments (with gaps between them).
+Use :py:func:`~linopy.piecewise.piecewise` to describe the function and
+:py:meth:`~linopy.model.Model.add_piecewise_constraints` to add it to a model.
.. contents::
:local:
:depth: 2
+Quick Start
+-----------
+
+.. code-block:: python
+
+ import linopy
+
+ m = linopy.Model()
+ x = m.add_variables(name="x", lower=0, upper=100)
+ y = m.add_variables(name="y")
+
+ # y equals a piecewise linear function of x
+ x_pts = linopy.breakpoints([0, 30, 60, 100])
+ y_pts = linopy.breakpoints([0, 36, 84, 170])
+
+ m.add_piecewise_constraints(linopy.piecewise(x, x_pts, y_pts) == y)
+
+The ``piecewise()`` call creates a lazy descriptor. Comparing it with a
+variable (``==``, ``<=``, ``>=``) produces a
+:class:`~linopy.piecewise.PiecewiseConstraintDescriptor` that
+``add_piecewise_constraints`` knows how to process.
+
+.. note::
+
+ The ``piecewise(...)`` expression can appear on either side of the
+ comparison operator. These forms are equivalent::
+
+ piecewise(x, x_pts, y_pts) == y
+ y == piecewise(x, x_pts, y_pts)
+
+
Formulations
------------
@@ -36,22 +63,18 @@ introduces interpolation variables :math:`\lambda_i` such that:
The SOS2 constraint ensures that **at most two adjacent** :math:`\lambda_i` can
be non-zero, so :math:`x` is interpolated within one segment.
-**Dict (multi-variable) case.** When multiple variables share the same lambdas,
-breakpoints carry an extra *link* dimension :math:`v \in V` and linking becomes
-:math:`x_v = \sum_i \lambda_i \, b_{v,i}` for all :math:`v`.
-
.. note::
SOS2 is a combinatorial constraint handled via branch-and-bound, similar to
- integer variables. It cannot be reformulated as a pure LP. Prefer the
- incremental method (``method="incremental"`` or ``method="auto"``) when
- breakpoints are monotonic.
+ integer variables. Prefer the incremental method
+ (``method="incremental"`` or ``method="auto"``) when breakpoints are
+ monotonic.
Incremental (Delta) Formulation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For **strictly monotonic** breakpoints :math:`b_0 < b_1 < \cdots < b_n`, the
-incremental formulation is a **pure LP** (no SOS2 or binary variables):
+incremental formulation uses fill-fraction variables:
.. math::
@@ -60,12 +83,27 @@ incremental formulation is a **pure LP** (no SOS2 or binary variables):
x = b_0 + \sum_{i=1}^{n} \delta_i \, (b_i - b_{i-1})
The filling-order constraints enforce that segment :math:`i+1` cannot be
-partially filled unless segment :math:`i` is completely filled.
+partially filled unless segment :math:`i` is completely filled. Binary
+indicator variables enforce integrality.
+
+**Limitation:** Breakpoints must be strictly monotonic. For non-monotonic
+curves, use SOS2.
-**Limitation:** Breakpoints must be strictly monotonic for every linked
-variable. In the dict case, each variable is checked independently -- e.g.
-power increasing while fuel decreases is fine, but a curve that rises then
-falls is not. For non-monotonic curves, use SOS2.
+LP (Tangent-Line) Formulation
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+For **inequality** constraints where the function is **convex** (for ``>=``)
+or **concave** (for ``<=``), a pure LP formulation adds one tangent-line
+constraint per segment — no SOS2 or binary variables needed.
+
+.. math::
+
+ y \le m_k \, x + c_k \quad \text{for each segment } k \text{ (concave case)}
+
+Domain bounds :math:`x_{\min} \le x \le x_{\max}` are added automatically.
+
+**Limitation:** Only valid for inequality constraints with the correct
+convexity; not valid for equality constraints.
Disjunctive (Disaggregated Convex Combination)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -84,228 +122,332 @@ Given :math:`K` segments, each with breakpoints :math:`b_{k,0}, \ldots, b_{k,n_k
\sum_{i} \lambda_{k,i} = y_k, \quad
x = \sum_{k} \sum_{i} \lambda_{k,i} \, b_{k,i}
+
.. _choosing-a-formulation:
Choosing a Formulation
~~~~~~~~~~~~~~~~~~~~~~
-The incremental method is the fastest to solve (pure LP), but requires strictly
-monotonic breakpoints. Pass ``method="auto"`` to use it automatically when
-applicable, falling back to SOS2 otherwise.
+Pass ``method="auto"`` (the default) and linopy will pick the best
+formulation automatically:
+
+- **Equality + monotonic x** → incremental
+- **Inequality + correct convexity** → LP
+- Otherwise → SOS2
+- Disjunctive (segments) → always SOS2 with binary selection
.. list-table::
:header-rows: 1
- :widths: 25 25 25 25
+ :widths: 25 20 20 15 20
* - Property
- SOS2
- Incremental
+ - LP
- Disjunctive
* - Segments
- Connected
- Connected
- - Disconnected (gaps allowed)
+ - Connected
+ - Disconnected
+ * - Constraint type
+ - ``==``, ``<=``, ``>=``
+ - ``==``, ``<=``, ``>=``
+ - ``<=``, ``>=`` only
+ - ``==``, ``<=``, ``>=``
* - Breakpoint order
- Any
- Strictly monotonic
+ - Strictly increasing
- Any (per segment)
+ * - Convexity requirement
+ - None
+ - None
+ - Concave (≤) or convex (≥)
+ - None
* - Variable types
- Continuous + SOS2
- - Continuous only (pure LP)
+ - Continuous + binary
+ - Continuous only
- Binary + SOS2
* - Solver support
- - Solvers with SOS2 support
+ - SOS2-capable
+ - MIP-capable
- **Any LP solver**
- - Solvers with SOS2 + MIP support
+ - SOS2 + MIP
+
Basic Usage
-----------
-Single variable
-~~~~~~~~~~~~~~~
+Equality constraint
+~~~~~~~~~~~~~~~~~~~
+
+Link ``y`` to a piecewise linear function of ``x``:
.. code-block:: python
import linopy
m = linopy.Model()
- x = m.add_variables(name="x")
+ x = m.add_variables(name="x", lower=0, upper=100)
+ y = m.add_variables(name="y")
- bp = linopy.breakpoints([0, 10, 50, 100])
- m.add_piecewise_constraints(x, bp, dim="breakpoint")
+ x_pts = linopy.breakpoints([0, 30, 60, 100])
+ y_pts = linopy.breakpoints([0, 36, 84, 170])
-Dict of variables
-~~~~~~~~~~~~~~~~~~
+ m.add_piecewise_constraints(linopy.piecewise(x, x_pts, y_pts) == y)
+
+Inequality constraints
+~~~~~~~~~~~~~~~~~~~~~~
-Link multiple variables through shared interpolation weights. For example, a
-turbine where power input determines power output (via a nonlinear efficiency
-factor):
+Use ``<=`` or ``>=`` to bound ``y`` by the piecewise function:
.. code-block:: python
- m = linopy.Model()
+ pw = linopy.piecewise(x, x_pts, y_pts)
- power_in = m.add_variables(name="power_in")
- power_out = m.add_variables(name="power_out")
+ # y must be at most the piecewise function of x (pw >= y ↔ y <= pw)
+ m.add_piecewise_constraints(pw >= y)
- bp = linopy.breakpoints(
- power_in=[0, 50, 100],
- power_out=[0, 47.5, 90],
- )
+ # y must be at least the piecewise function of x (pw <= y ↔ y >= pw)
+ m.add_piecewise_constraints(pw <= y)
- m.add_piecewise_constraints(
- {"power_in": power_in, "power_out": power_out},
- bp,
- dim="breakpoint",
- )
-
-Incremental method
-~~~~~~~~~~~~~~~~~~~
+Choosing a method
+~~~~~~~~~~~~~~~~~
.. code-block:: python
- m.add_piecewise_constraints(x, bp, dim="breakpoint", method="incremental")
+ pw = linopy.piecewise(x, x_pts, y_pts)
+
+ # Explicit SOS2
+ m.add_piecewise_constraints(pw == y, method="sos2")
+
+ # Explicit incremental (requires monotonic x_pts)
+ m.add_piecewise_constraints(pw == y, method="incremental")
-Pass ``method="auto"`` to automatically select incremental when breakpoints are
-strictly monotonic, falling back to SOS2 otherwise.
+ # Explicit LP (requires inequality + correct convexity + increasing x_pts)
+ m.add_piecewise_constraints(pw >= y, method="lp")
+
+ # Auto-select best method (default)
+ m.add_piecewise_constraints(pw == y, method="auto")
Disjunctive (disconnected segments)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Use :func:`~linopy.piecewise.segments` to define breakpoints with gaps:
+
.. code-block:: python
m = linopy.Model()
- x = m.add_variables(name="x")
+ x = m.add_variables(name="x", lower=0, upper=100)
+ y = m.add_variables(name="y")
+
+ # Two disconnected segments: [0,10] and [50,100]
+ x_seg = linopy.segments([(0, 10), (50, 100)])
+ y_seg = linopy.segments([(0, 15), (60, 130)])
+
+ m.add_piecewise_constraints(linopy.piecewise(x, x_seg, y_seg) == y)
+
+The disjunctive formulation is selected automatically when
+``x_points`` / ``y_points`` have a segment dimension (created by
+:func:`~linopy.piecewise.segments`).
- bp = linopy.breakpoints.segments([(0, 10), (50, 100)])
- m.add_disjunctive_piecewise_constraints(x, bp)
Breakpoints Factory
-------------------
-The ``linopy.breakpoints()`` factory simplifies creating breakpoint DataArrays
-with correct dimensions and coordinates.
+The :func:`~linopy.piecewise.breakpoints` factory creates DataArrays with
+the correct ``_breakpoint`` dimension. It accepts several input types
+(``BreaksLike``):
From a list
~~~~~~~~~~~
.. code-block:: python
- # 1D breakpoints (dims: [breakpoint])
+ # 1D breakpoints (dims: [_breakpoint])
bp = linopy.breakpoints([0, 50, 100])
-From keyword arguments (multi-variable)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+From a pandas Series
+~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: python
+
+ import pandas as pd
+
+ bp = linopy.breakpoints(pd.Series([0, 50, 100]))
+
+From a DataFrame (per-entity, requires ``dim``)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. code-block:: python
- # 2D breakpoints (dims: [var, breakpoint])
- bp = linopy.breakpoints(power=[0, 50, 100], fuel=[0, 60, 140])
+ # rows = entities, columns = breakpoints
+ df = pd.DataFrame(
+ {"bp0": [0, 0], "bp1": [50, 80], "bp2": [100, float("nan")]},
+ index=["gen1", "gen2"],
+ )
+ bp = linopy.breakpoints(df, dim="generator")
From a dict (per-entity, ragged lengths allowed)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. code-block:: python
- # 2D breakpoints (dims: [generator, breakpoint]), NaN-padded
+ # NaN-padded to the longest entry
bp = linopy.breakpoints(
{"gen1": [0, 50, 100], "gen2": [0, 80]},
dim="generator",
)
-Per-entity with multiple variables
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+From a DataArray (pass-through)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. code-block:: python
- # 3D breakpoints (dims: [generator, var, breakpoint])
- bp = linopy.breakpoints(
- power={"gen1": [0, 50, 100], "gen2": [0, 80]},
- fuel={"gen1": [0, 60, 140], "gen2": [0, 100]},
- dim="generator",
+ import xarray as xr
+
+ arr = xr.DataArray([0, 50, 100], dims=["_breakpoint"])
+ bp = linopy.breakpoints(arr) # returned as-is
+
+Slopes mode
+~~~~~~~~~~~
+
+Compute y-breakpoints from segment slopes and an initial y-value:
+
+.. code-block:: python
+
+ y_pts = linopy.breakpoints(
+ slopes=[1.2, 1.4, 1.7],
+ x_points=[0, 30, 60, 100],
+ y0=0,
)
+ # Equivalent to breakpoints([0, 36, 78, 146])
-Segments (for disjunctive constraints)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Segments Factory
+----------------
+
+The :func:`~linopy.piecewise.segments` factory creates DataArrays with both
+``_segment`` and ``_breakpoint`` dimensions (``SegmentsLike``):
+
+From a list of sequences
+~~~~~~~~~~~~~~~~~~~~~~~~
.. code-block:: python
- # 2D breakpoints (dims: [segment, breakpoint])
- bp = linopy.breakpoints.segments([(0, 10), (50, 100)])
+ # dims: [_segment, _breakpoint]
+ seg = linopy.segments([(0, 10), (50, 100)])
- # Per-entity segments
- bp = linopy.breakpoints.segments(
+From a dict (per-entity)
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: python
+
+ seg = linopy.segments(
{"gen1": [(0, 10), (50, 100)], "gen2": [(0, 80)]},
dim="generator",
)
+From a DataFrame
+~~~~~~~~~~~~~~~~
+
+.. code-block:: python
+
+ # rows = segments, columns = breakpoints
+ seg = linopy.segments(pd.DataFrame([[0, 10], [50, 100]]))
+
+
Auto-broadcasting
-----------------
Breakpoints are automatically broadcast to match the dimensions of the
-expression or variable. This means you don't need to manually call
-``expand_dims`` when your variables have extra dimensions (e.g. ``time``):
+expressions. You don't need ``expand_dims`` when your variables have extra
+dimensions (e.g. ``time``):
.. code-block:: python
+ import pandas as pd
+ import linopy
+
m = linopy.Model()
time = pd.Index([1, 2, 3], name="time")
- x = m.add_variables(name="x", coords=[time])
+ x = m.add_variables(name="x", lower=0, upper=100, coords=[time])
+ y = m.add_variables(name="y", coords=[time])
- # 1D breakpoints are auto-expanded to match x's time dimension
- bp = linopy.breakpoints([0, 50, 100])
- m.add_piecewise_constraints(x, bp, dim="breakpoint")
+ # 1D breakpoints auto-expand to match x's time dimension
+ x_pts = linopy.breakpoints([0, 50, 100])
+ y_pts = linopy.breakpoints([0, 70, 150])
+ m.add_piecewise_constraints(linopy.piecewise(x, x_pts, y_pts) == y)
-This also works for ``add_disjunctive_piecewise_constraints`` and dict
-expressions.
Method Signatures
-----------------
+``piecewise``
+~~~~~~~~~~~~~
+
+.. code-block:: python
+
+ linopy.piecewise(expr, x_points, y_points)
+
+- ``expr`` -- ``Variable`` or ``LinearExpression``. The "x" side expression.
+- ``x_points`` -- ``BreaksLike``. Breakpoint x-coordinates.
+- ``y_points`` -- ``BreaksLike``. Breakpoint y-coordinates.
+
+Returns a :class:`~linopy.piecewise.PiecewiseExpression` that supports
+``==``, ``<=``, ``>=`` comparison with another expression.
+
``add_piecewise_constraints``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. code-block:: python
Model.add_piecewise_constraints(
- expr,
- breakpoints,
- dim="breakpoint",
- mask=None,
+ descriptor,
+ method="auto",
name=None,
skip_nan_check=False,
- method="sos2",
)
-- ``expr`` -- ``Variable``, ``LinearExpression``, or ``dict`` of these.
-- ``breakpoints`` -- ``xr.DataArray`` with breakpoint values. Must have ``dim``
- as a dimension. For the dict case, must also have a dimension whose
- coordinates match the dict keys.
-- ``dim`` -- ``str``, default ``"breakpoint"``. Breakpoint-index dimension.
-- ``mask`` -- ``xr.DataArray``, optional. Boolean mask for valid constraints.
+- ``descriptor`` -- :class:`~linopy.piecewise.PiecewiseConstraintDescriptor`.
+ Created by comparing a ``PiecewiseExpression`` with an expression, e.g.
+ ``piecewise(x, x_pts, y_pts) == y``.
+- ``method`` -- ``"auto"`` (default), ``"sos2"``, ``"incremental"``, or ``"lp"``.
- ``name`` -- ``str``, optional. Base name for generated variables/constraints.
- ``skip_nan_check`` -- ``bool``, default ``False``.
-- ``method`` -- ``"sos2"`` (default), ``"incremental"``, or ``"auto"``.
-``add_disjunctive_piecewise_constraints``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Returns a :class:`~linopy.constraints.Constraint`, but the returned object is
+formulation-dependent: typically ``{name}_convex`` (SOS2), ``{name}_fill`` or
+``{name}_y_link`` (incremental), and ``{name}_select`` (disjunctive). For
+inequality constraints, the returned constraint is the core piecewise
+formulation constraint, not ``{name}_ineq``.
+
+``breakpoints``
+~~~~~~~~~~~~~~~~
.. code-block:: python
- Model.add_disjunctive_piecewise_constraints(
- expr,
- breakpoints,
- dim="breakpoint",
- segment_dim="segment",
- mask=None,
- name=None,
- skip_nan_check=False,
- )
+ linopy.breakpoints(values, dim=None)
+ linopy.breakpoints(slopes, x_points, y0, dim=None)
-Same as above, plus:
+- ``values`` -- ``BreaksLike`` (list, Series, DataFrame, DataArray, or dict).
+- ``slopes``, ``x_points``, ``y0`` -- for slopes mode (mutually exclusive with
+ ``values``).
+- ``dim`` -- ``str``, required when ``values`` or ``slopes`` is a DataFrame or dict.
+
+``segments``
+~~~~~~~~~~~~~
+
+.. code-block:: python
+
+ linopy.segments(values, dim=None)
+
+- ``values`` -- ``SegmentsLike`` (list of sequences, DataFrame, DataArray, or
+ dict).
+- ``dim`` -- ``str``, required when ``values`` is a dict.
-- ``segment_dim`` -- ``str``, default ``"segment"``. Dimension indexing
- segments. Use NaN in breakpoints to pad segments with fewer breakpoints.
Generated Variables and Constraints
------------------------------------
@@ -327,9 +469,18 @@ Given base name ``name``, the following objects are created:
* - ``{name}_convex``
- Constraint
- :math:`\sum_i \lambda_i = 1`.
- * - ``{name}_link``
+ * - ``{name}_x_link``
+ - Constraint
+ - :math:`x = \sum_i \lambda_i \, x_i`.
+ * - ``{name}_y_link``
+ - Constraint
+ - :math:`y = \sum_i \lambda_i \, y_i`.
+ * - ``{name}_aux``
+ - Variable
+ - Auxiliary variable :math:`z` (inequality constraints only).
+ * - ``{name}_ineq``
- Constraint
- - :math:`x = \sum_i \lambda_i \, b_i`.
+ - :math:`y \le z` or :math:`y \ge z` (inequality only).
**Incremental method:**
@@ -343,12 +494,49 @@ Given base name ``name``, the following objects are created:
* - ``{name}_delta``
- Variable
- Fill-fraction variables :math:`\delta_i \in [0, 1]`.
+ * - ``{name}_inc_binary``
+ - Variable
+ - Binary indicators for each segment.
+ * - ``{name}_inc_link``
+ - Constraint
+ - :math:`\delta_i \le y_i` (delta bounded by binary).
* - ``{name}_fill``
- Constraint
- - :math:`\delta_{i+1} \le \delta_i` (only if 3+ breakpoints).
- * - ``{name}_link``
+ - :math:`\delta_{i+1} \le \delta_i` (fill order, 3+ breakpoints).
+ * - ``{name}_inc_order``
+ - Constraint
+ - :math:`y_{i+1} \le \delta_i` (binary ordering, 3+ breakpoints).
+ * - ``{name}_x_link``
+ - Constraint
+ - :math:`x = x_0 + \sum_i \delta_i \, \Delta x_i`.
+ * - ``{name}_y_link``
- Constraint
- - :math:`x = b_0 + \sum_i \delta_i \, s_i`.
+ - :math:`y = y_0 + \sum_i \delta_i \, \Delta y_i`.
+ * - ``{name}_aux``
+ - Variable
+ - Auxiliary variable :math:`z` (inequality constraints only).
+ * - ``{name}_ineq``
+ - Constraint
+ - :math:`y \le z` or :math:`y \ge z` (inequality only).
+
+**LP method:**
+
+.. list-table::
+ :header-rows: 1
+ :widths: 30 15 55
+
+ * - Name
+ - Type
+ - Description
+ * - ``{name}_lp``
+ - Constraint
+ - Tangent-line constraints (one per segment).
+ * - ``{name}_lp_domain_lo``
+ - Constraint
+ - :math:`x \ge x_{\min}`.
+ * - ``{name}_lp_domain_hi``
+ - Constraint
+ - :math:`x \le x_{\max}`.
**Disjunctive method:**
@@ -371,14 +559,23 @@ Given base name ``name``, the following objects are created:
* - ``{name}_convex``
- Constraint
- :math:`\sum_i \lambda_{k,i} = y_k`.
- * - ``{name}_link``
+ * - ``{name}_x_link``
+ - Constraint
+ - :math:`x = \sum_k \sum_i \lambda_{k,i} \, x_{k,i}`.
+ * - ``{name}_y_link``
+ - Constraint
+ - :math:`y = \sum_k \sum_i \lambda_{k,i} \, y_{k,i}`.
+ * - ``{name}_aux``
+ - Variable
+ - Auxiliary variable :math:`z` (inequality constraints only).
+ * - ``{name}_ineq``
- Constraint
- - :math:`x = \sum_k \sum_i \lambda_{k,i} \, b_{k,i}`.
+ - :math:`y \le z` or :math:`y \ge z` (inequality only).
See Also
--------
-- :doc:`piecewise-linear-constraints-tutorial` -- Worked examples with all three formulations
+- :doc:`piecewise-linear-constraints-tutorial` -- Worked examples covering SOS2, incremental, LP, and disjunctive usage
- :doc:`sos-constraints` -- Low-level SOS1/SOS2 constraint API
- :doc:`creating-constraints` -- General constraint creation
- :doc:`user-guide` -- Overall linopy usage patterns
diff --git a/doc/release_notes.rst b/doc/release_notes.rst
index 59b4456f..b4a92e64 100644
--- a/doc/release_notes.rst
+++ b/doc/release_notes.rst
@@ -4,11 +4,29 @@ Release Notes
Upcoming Version
----------------
-* Add ``add_piecewise_constraints()`` for piecewise linear constraints with SOS2 and incremental (pure LP) formulations.
-* Add ``add_disjunctive_piecewise_constraints()`` for disconnected piecewise linear segments (e.g. forbidden operating zones).
-* Add ``linopy.breakpoints()`` factory for convenient breakpoint construction from lists, dicts, or keyword arguments. Includes ``breakpoints.segments()`` for disjunctive formulations.
+* Harmonize coordinate alignment for operations with subset/superset objects:
+ - Multiplication and division fill missing coords with 0 (variable doesn't participate)
+ - Addition and subtraction of constants fill missing coords with 0 (identity element) and pin result to LHS coords
+ - Comparison operators (``==``, ``<=``, ``>=``) fill missing RHS coords with NaN (no constraint created)
+ - Fixes crash on ``subset + var`` / ``subset + expr`` reverse addition
+ - Fixes superset DataArrays expanding result coords beyond the variable's coordinate space
+* Add ``add_piecewise_constraints()`` with SOS2, incremental, LP, and disjunctive formulations (``linopy.piecewise(x, x_pts, y_pts) == y``).
+* Add ``linopy.piecewise()`` to create piecewise linear function descriptors (``PiecewiseExpression``) from separate x/y breakpoint arrays.
+* Add ``linopy.breakpoints()`` factory for convenient breakpoint construction from lists, Series, DataFrames, DataArrays, or dicts. Supports slopes mode.
+* Add ``linopy.segments()`` factory for disjunctive (disconnected) breakpoints.
+* Add ``active`` parameter to ``piecewise()`` for gating piecewise linear functions with a binary variable (e.g. unit commitment). Supported for incremental, SOS2, and disjunctive methods.
* Add the `sphinx-copybutton` to the documentation
* Add SOS1 and SOS2 reformulations for solvers not supporting them.
+* Add semi-continuous variables for solvers that support them
+* Improve handling of CPLEX solver quality attributes to ensure metrics are extracted correctly when available.
+* Fix Xpress IIS label mapping for masked constraints and add a regression test for matching infeasible coordinates.
+* Enable quadratic problems with SCIP on Windows.
+
+
+Version 0.6.5
+-------------
+
+* Expose the knitro context to allow for more flexible use of the knitro python API.
Version 0.6.4
diff --git a/examples/coordinate-alignment.ipynb b/examples/coordinate-alignment.ipynb
new file mode 100644
index 00000000..1547bd9d
--- /dev/null
+++ b/examples/coordinate-alignment.ipynb
@@ -0,0 +1,488 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Coordinate Alignment\n",
+ "\n",
+ "Since linopy builds on xarray, coordinate alignment matters when combining variables or expressions that live on different coordinates. By default, linopy aligns operands automatically and fills missing entries with sensible defaults. This guide shows how alignment works and how to control it with the ``join`` parameter."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import xarray as xr\n",
+ "\n",
+ "import linopy"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Default Alignment Behavior\n",
+ "\n",
+ "When two operands share a dimension but have different coordinates, linopy keeps the **larger** (superset) coordinate range and fills missing positions with zeros (for addition) or zero coefficients (for multiplication)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "m = linopy.Model()\n",
+ "\n",
+ "time = pd.RangeIndex(5, name=\"time\")\n",
+ "x = m.add_variables(lower=0, coords=[time], name=\"x\")\n",
+ "\n",
+ "subset_time = pd.RangeIndex(3, name=\"time\")\n",
+ "y = m.add_variables(lower=0, coords=[subset_time], name=\"y\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Adding ``x`` (5 time steps) and ``y`` (3 time steps) gives an expression over all 5 time steps. Where ``y`` has no entry (time 3, 4), the coefficient is zero — i.e. ``y`` simply drops out of the sum at those positions."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "x + y"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The same applies when multiplying by a constant that covers only a subset of coordinates. Missing positions get a coefficient of zero:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "factor = xr.DataArray([2, 3, 4], dims=[\"time\"], coords={\"time\": [0, 1, 2]})\n",
+ "x * factor"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Adding a constant subset also fills missing coordinates with zero:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "x + factor"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Constraints with Subset RHS\n",
+ "\n",
+ "For constraints, missing right-hand-side values are filled with ``NaN``, which tells linopy to **skip** the constraint at those positions:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "rhs = xr.DataArray([10, 20, 30], dims=[\"time\"], coords={\"time\": [0, 1, 2]})\n",
+ "con = x <= rhs\n",
+ "con"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The constraint only applies at time 0, 1, 2. At time 3 and 4 the RHS is ``NaN``, so no constraint is created."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": "### Same-Shape Operands: Positional Alignment\n\nWhen two operands have the **same shape** on a shared dimension, linopy uses **positional alignment** by default — coordinate labels are ignored and the left operand's labels are kept. This is a performance optimization but can be surprising:"
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "offset_const = xr.DataArray(\n",
+ " [10, 20, 30, 40, 50], dims=[\"time\"], coords={\"time\": [5, 6, 7, 8, 9]}\n",
+ ")\n",
+ "x + offset_const"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": "Even though ``offset_const`` has coordinates ``[5, 6, 7, 8, 9]`` and ``x`` has ``[0, 1, 2, 3, 4]``, the result uses ``x``'s labels. The values are aligned by **position**, not by label. The same applies when adding two variables or expressions of identical shape:"
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "z = m.add_variables(lower=0, coords=[pd.RangeIndex(5, 10, name=\"time\")], name=\"z\")\n",
+ "x + z"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": "``x`` (time 0–4) and ``z`` (time 5–9) share no coordinate labels, yet the result has 5 entries under ``x``'s coordinates — because they have the same shape, positions are matched directly.\n\nTo force **label-based** alignment, pass an explicit ``join``:"
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "x.add(z, join=\"outer\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": "With ``join=\"outer\"``, the result spans all 10 time steps (union of 0–4 and 5–9), filling missing positions with zeros. This is the correct label-based alignment. The same-shape positional shortcut is equivalent to ``join=\"override\"`` — see below."
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## The ``join`` Parameter\n",
+ "\n",
+ "For explicit control over alignment, use the ``.add()``, ``.sub()``, ``.mul()``, and ``.div()`` methods with a ``join`` parameter. The supported values follow xarray conventions:\n",
+ "\n",
+ "- ``\"inner\"`` — intersection of coordinates\n",
+ "- ``\"outer\"`` — union of coordinates (with fill)\n",
+ "- ``\"left\"`` — keep left operand's coordinates\n",
+ "- ``\"right\"`` — keep right operand's coordinates\n",
+ "- ``\"override\"`` — positional alignment, ignore coordinate labels\n",
+ "- ``\"exact\"`` — coordinates must match exactly (raises on mismatch)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "m2 = linopy.Model()\n",
+ "\n",
+ "i_a = pd.Index([0, 1, 2], name=\"i\")\n",
+ "i_b = pd.Index([1, 2, 3], name=\"i\")\n",
+ "\n",
+ "a = m2.add_variables(coords=[i_a], name=\"a\")\n",
+ "b = m2.add_variables(coords=[i_b], name=\"b\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "**Inner join** — only shared coordinates (i=1, 2):"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "a.add(b, join=\"inner\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "**Outer join** — union of coordinates (i=0, 1, 2, 3):"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "a.add(b, join=\"outer\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "**Left join** — keep left operand's coordinates (i=0, 1, 2):"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "a.add(b, join=\"left\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "**Right join** — keep right operand's coordinates (i=1, 2, 3):"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "a.add(b, join=\"right\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": "**Override** — positional alignment, ignore coordinate labels. The result uses the left operand's coordinates. Here ``a`` has i=[0, 1, 2] and ``b`` has i=[1, 2, 3], so positions are matched as 0↔1, 1↔2, 2↔3:"
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "a.add(b, join=\"override\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Multiplication with ``join``\n",
+ "\n",
+ "The same ``join`` parameter works on ``.mul()`` and ``.div()``. When multiplying by a constant that covers a subset, ``join=\"inner\"`` restricts the result to shared coordinates only, while ``join=\"left\"`` fills missing values with zero:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "const = xr.DataArray([2, 3, 4], dims=[\"i\"], coords={\"i\": [1, 2, 3]})\n",
+ "\n",
+ "a.mul(const, join=\"inner\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "a.mul(const, join=\"left\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Alignment in Constraints\n",
+ "\n",
+ "The ``.le()``, ``.ge()``, and ``.eq()`` methods create constraints with explicit coordinate alignment. They accept the same ``join`` parameter:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "rhs = xr.DataArray([10, 20], dims=[\"i\"], coords={\"i\": [0, 1]})\n",
+ "\n",
+ "a.le(rhs, join=\"inner\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "With ``join=\"inner\"``, the constraint only exists at the intersection (i=0, 1). Compare with ``join=\"left\"``:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "a.le(rhs, join=\"left\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "With ``join=\"left\"``, the result covers all of ``a``'s coordinates (i=0, 1, 2). At i=2, where the RHS has no value, the RHS becomes ``NaN`` and the constraint is masked out.\n",
+ "\n",
+ "The same methods work on expressions:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "expr = 2 * a + 1\n",
+ "expr.eq(rhs, join=\"inner\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": "## Practical Example\n\nConsider a generation dispatch model where solar availability follows a daily profile and a minimum demand constraint only applies during peak hours."
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "m3 = linopy.Model()\n",
+ "\n",
+ "hours = pd.RangeIndex(24, name=\"hour\")\n",
+ "techs = pd.Index([\"solar\", \"wind\", \"gas\"], name=\"tech\")\n",
+ "\n",
+ "gen = m3.add_variables(lower=0, coords=[hours, techs], name=\"gen\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Capacity limits apply to all hours and techs — standard broadcasting handles this:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "capacity = xr.DataArray([100, 80, 50], dims=[\"tech\"], coords={\"tech\": techs})\n",
+ "m3.add_constraints(gen <= capacity, name=\"capacity_limit\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": "For solar, we build a full 24-hour availability profile — zero at night, sine-shaped during daylight (hours 6–18). Since this covers all hours, standard alignment works directly and solar is properly constrained to zero at night:"
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "solar_avail = np.zeros(24)\n",
+ "solar_avail[6:19] = 100 * np.sin(np.linspace(0, np.pi, 13))\n",
+ "solar_availability = xr.DataArray(solar_avail, dims=[\"hour\"], coords={\"hour\": hours})\n",
+ "\n",
+ "solar_gen = gen.sel(tech=\"solar\")\n",
+ "m3.add_constraints(solar_gen <= solar_availability, name=\"solar_avail\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": "Now suppose a minimum demand of 120 MW must be met, but only during peak hours (8–20). The demand array covers a subset of hours, so we use ``join=\"inner\"`` to restrict the constraint to just those hours:"
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "peak_hours = pd.RangeIndex(8, 21, name=\"hour\")\n",
+ "peak_demand = xr.DataArray(\n",
+ " np.full(len(peak_hours), 120.0), dims=[\"hour\"], coords={\"hour\": peak_hours}\n",
+ ")\n",
+ "\n",
+ "total_gen = gen.sum(\"tech\")\n",
+ "m3.add_constraints(total_gen.ge(peak_demand, join=\"inner\"), name=\"peak_demand\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": "The demand constraint only applies during peak hours (8–20). Outside that range, no minimum generation is required."
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Summary\n",
+ "\n",
+ "| ``join`` | Coordinates | Fill behavior |\n",
+ "|----------|------------|---------------|\n",
+ "| ``None`` (default) | Auto-detect (keeps superset) | Zeros for arithmetic, NaN for constraint RHS |\n",
+ "| ``\"inner\"`` | Intersection only | No fill needed |\n",
+ "| ``\"outer\"`` | Union | Fill missing entries with zeros (for both add and mul) |\n",
+ "| ``\"left\"`` | Left operand's | Missing right-operand entries filled with zeros |\n",
+ "| ``\"right\"`` | Right operand's | Missing left-operand entries filled with zeros |\n",
+ "| ``\"override\"`` | Left operand's (positional) | Positional alignment, ignore labels |\n",
+ "| ``\"exact\"`` | Must match exactly | Raises error if different |"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.12.3"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/examples/creating-constraints.ipynb b/examples/creating-constraints.ipynb
index b46db1bc..55251233 100644
--- a/examples/creating-constraints.ipynb
+++ b/examples/creating-constraints.ipynb
@@ -231,6 +231,12 @@
"source": [
"m.constraints[\"my-constraint\"]"
]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "r0wxi7v1m7l",
+ "source": "## Coordinate Alignment in Constraints\n\nAs an alternative to the ``<=``, ``>=``, ``==`` operators, linopy provides ``.le()``, ``.ge()``, and ``.eq()`` methods on variables and expressions. These methods accept a ``join`` parameter (``\"inner\"``, ``\"outer\"``, ``\"left\"``, ``\"right\"``) for explicit control over how coordinates are aligned when creating constraints. See the :doc:`coordinate-alignment` guide for details.",
+ "metadata": {}
}
],
"metadata": {
diff --git a/examples/creating-expressions.ipynb b/examples/creating-expressions.ipynb
index aafd8a09..1d808b07 100644
--- a/examples/creating-expressions.ipynb
+++ b/examples/creating-expressions.ipynb
@@ -193,6 +193,12 @@
"x + b"
]
},
+ {
+ "cell_type": "markdown",
+ "id": "a8xsfdqrcrn",
+ "source": ".. tip::\n\n\tFor explicit control over how coordinates are aligned during arithmetic, use the `.add()`, `.sub()`, `.mul()`, and `.div()` methods with a ``join`` parameter (``\"inner\"``, ``\"outer\"``, ``\"left\"``, ``\"right\"``). See the :doc:`coordinate-alignment` guide for details.",
+ "metadata": {}
+ },
{
"attachments": {},
"cell_type": "markdown",
diff --git a/examples/piecewise-linear-constraints.ipynb b/examples/piecewise-linear-constraints.ipynb
index dd9192b3..4646e87d 100644
--- a/examples/piecewise-linear-constraints.ipynb
+++ b/examples/piecewise-linear-constraints.ipynb
@@ -2,39 +2,24 @@
"cells": [
{
"cell_type": "markdown",
- "id": "intro",
"metadata": {},
- "source": [
- "# Piecewise Linear Constraints\n",
- "\n",
- "This notebook demonstrates linopy's three PWL formulations. Each example\n",
- "builds a separate dispatch model where a single power plant must meet\n",
- "a time-varying demand.\n",
- "\n",
- "| Example | Plant | Limitation | Formulation |\n",
- "|---------|-------|------------|-------------|\n",
- "| 1 | Gas turbine (0–100 MW) | Convex heat rate | SOS2 |\n",
- "| 2 | Coal plant (0–150 MW) | Monotonic heat rate | Incremental |\n",
- "| 3 | Diesel generator (off or 50–80 MW) | Forbidden zone | Disjunctive |"
- ]
+ "source": "# Piecewise Linear Constraints Tutorial\n\nThis notebook demonstrates linopy's piecewise linear (PWL) constraint formulations.\nEach example builds a separate dispatch model where a single power plant must meet\na time-varying demand.\n\n| Example | Plant | Limitation | Formulation |\n|---------|-------|------------|-------------|\n| 1 | Gas turbine (0–100 MW) | Convex heat rate | SOS2 |\n| 2 | Coal plant (0–150 MW) | Monotonic heat rate | Incremental |\n| 3 | Diesel generator (off or 50–80 MW) | Forbidden zone | Disjunctive |\n| 4 | Concave efficiency curve | Inequality bound | LP |\n| 5 | Gas unit with commitment | On/off + min load | Incremental + `active` |\n\n**Note:** The `piecewise(...)` expression can appear on either side of\nthe comparison operator (`==`, `<=`, `>=`). For example, both\n`linopy.piecewise(x, x_pts, y_pts) == y` and `y == linopy.piecewise(...)` work."
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "imports",
"metadata": {
- "ExecuteTime": {
- "end_time": "2026-02-09T19:21:33.511970Z",
- "start_time": "2026-02-09T19:21:33.501473Z"
- },
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:41.350637Z",
- "iopub.status.busy": "2026-02-09T19:21:41.350440Z",
- "iopub.status.idle": "2026-02-09T19:21:42.583457Z",
- "shell.execute_reply": "2026-02-09T19:21:42.583146Z"
+ "iopub.execute_input": "2026-03-06T11:51:29.167007Z",
+ "iopub.status.busy": "2026-03-06T11:51:29.166576Z",
+ "iopub.status.idle": "2026-03-06T11:51:29.185103Z",
+ "shell.execute_reply": "2026-03-06T11:51:29.184712Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:29.166974Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:27.800436Z",
+ "start_time": "2026-03-09T10:17:27.796927Z"
}
},
- "outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"import pandas as pd\n",
@@ -45,56 +30,32 @@
"time = pd.Index([1, 2, 3], name=\"time\")\n",
"\n",
"\n",
- "def plot_pwl_results(model, breakpoints, demand, color=\"C0\", fuel_rate=None):\n",
+ "def plot_pwl_results(\n",
+ " model, x_pts, y_pts, demand, x_name=\"power\", y_name=\"fuel\", color=\"C0\"\n",
+ "):\n",
" \"\"\"Plot PWL curve with operating points and dispatch vs demand.\"\"\"\n",
" sol = model.solution\n",
- " bp = breakpoints.to_pandas()\n",
" fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 3.5))\n",
"\n",
" # Left: PWL curve with operating points\n",
- " if \"var\" in breakpoints.dims:\n",
- " # Connected: power-fuel curve from var dimension\n",
+ " ax1.plot(\n",
+ " x_pts.values.flat, y_pts.values.flat, \"o-\", color=color, label=\"Breakpoints\"\n",
+ " )\n",
+ " for t in time:\n",
" ax1.plot(\n",
- " bp.loc[\"power\"], bp.loc[\"fuel\"], \"o-\", color=color, label=\"Breakpoints\"\n",
- " )\n",
- " for t in time:\n",
- " ax1.plot(\n",
- " sol[\"power\"].sel(time=t),\n",
- " sol[\"fuel\"].sel(time=t),\n",
- " \"s\",\n",
- " ms=10,\n",
- " label=f\"t={t}\",\n",
- " )\n",
- " ax1.set(xlabel=\"Power (MW)\", ylabel=\"Fuel (MWh)\", title=\"Heat rate curve\")\n",
- " else:\n",
- " # Disconnected: segments with linear cost\n",
- " for seg in bp.index:\n",
- " lo, hi = bp.loc[seg]\n",
- " pw = [lo, hi] if lo != hi else [lo]\n",
- " ax1.plot(\n",
- " pw,\n",
- " [fuel_rate * p for p in pw],\n",
- " \"o-\",\n",
- " color=color,\n",
- " label=\"Breakpoints\" if seg == 0 else None,\n",
- " )\n",
- " ax1.axvspan(\n",
- " bp.iloc[0, 1] + 0.5,\n",
- " bp.iloc[1, 0] - 0.5,\n",
- " color=\"red\",\n",
- " alpha=0.1,\n",
- " label=\"Forbidden zone\",\n",
+ " sol[x_name].sel(time=t),\n",
+ " sol[y_name].sel(time=t),\n",
+ " \"s\",\n",
+ " ms=10,\n",
+ " label=f\"t={t}\",\n",
" )\n",
- " for t in time:\n",
- " p = float(sol[\"power\"].sel(time=t))\n",
- " ax1.plot(p, fuel_rate * p, \"s\", ms=10, label=f\"t={t}\")\n",
- " ax1.set(xlabel=\"Power (MW)\", ylabel=\"Cost\", title=\"Cost curve\")\n",
+ " ax1.set(xlabel=x_name.title(), ylabel=y_name.title(), title=\"Heat rate curve\")\n",
" ax1.legend()\n",
"\n",
" # Right: dispatch vs demand\n",
" x = list(range(len(time)))\n",
- " power_vals = sol[\"power\"].values\n",
- " ax2.bar(x, power_vals, color=color, label=\"Power\")\n",
+ " power_vals = sol[x_name].values\n",
+ " ax2.bar(x, power_vals, color=color, label=x_name.title())\n",
" if \"backup\" in sol:\n",
" ax2.bar(\n",
" x,\n",
@@ -113,74 +74,78 @@
" label=\"Demand\",\n",
" )\n",
" ax2.set(\n",
- " xlabel=\"Time\", ylabel=\"MW\", title=\"Dispatch\", xticks=x, xticklabels=time.values\n",
+ " xlabel=\"Time\",\n",
+ " ylabel=\"MW\",\n",
+ " title=\"Dispatch\",\n",
+ " xticks=x,\n",
+ " xticklabels=time.values,\n",
" )\n",
" ax2.legend()\n",
" plt.tight_layout()"
- ]
+ ],
+ "outputs": [],
+ "execution_count": null
},
{
"cell_type": "markdown",
- "id": "sos2-md",
"metadata": {},
"source": [
"## 1. SOS2 formulation — Gas turbine\n",
"\n",
"The gas turbine has a **convex** heat rate: efficient at moderate load,\n",
"increasingly fuel-hungry at high output. We use the **SOS2** formulation\n",
- "to link power output and fuel consumption."
+ "to link power output and fuel consumption via separate x/y breakpoints."
]
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "sos2-setup",
"metadata": {
- "ExecuteTime": {
- "end_time": "2026-02-09T19:21:33.525641Z",
- "start_time": "2026-02-09T19:21:33.516874Z"
- },
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:42.585470Z",
- "iopub.status.busy": "2026-02-09T19:21:42.585263Z",
- "iopub.status.idle": "2026-02-09T19:21:42.639106Z",
- "shell.execute_reply": "2026-02-09T19:21:42.638745Z"
+ "iopub.execute_input": "2026-03-06T11:51:29.185693Z",
+ "iopub.status.busy": "2026-03-06T11:51:29.185601Z",
+ "iopub.status.idle": "2026-03-06T11:51:29.199760Z",
+ "shell.execute_reply": "2026-03-06T11:51:29.199416Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:29.185683Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:27.808870Z",
+ "start_time": "2026-03-09T10:17:27.806626Z"
}
},
- "outputs": [],
"source": [
- "breakpoints = linopy.breakpoints(power=[0, 30, 60, 100], fuel=[0, 36, 84, 170])\n",
- "breakpoints.to_pandas()"
- ]
+ "x_pts1 = linopy.breakpoints([0, 30, 60, 100])\n",
+ "y_pts1 = linopy.breakpoints([0, 36, 84, 170])\n",
+ "print(\"x_pts:\", x_pts1.values)\n",
+ "print(\"y_pts:\", y_pts1.values)"
+ ],
+ "outputs": [],
+ "execution_count": null
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "df198d44e962132f",
"metadata": {
- "ExecuteTime": {
- "end_time": "2026-02-09T19:21:33.584017Z",
- "start_time": "2026-02-09T19:21:33.548479Z"
- },
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:42.640305Z",
- "iopub.status.busy": "2026-02-09T19:21:42.640145Z",
- "iopub.status.idle": "2026-02-09T19:21:42.676689Z",
- "shell.execute_reply": "2026-02-09T19:21:42.676404Z"
+ "iopub.execute_input": "2026-03-06T11:51:29.200170Z",
+ "iopub.status.busy": "2026-03-06T11:51:29.200087Z",
+ "iopub.status.idle": "2026-03-06T11:51:29.266847Z",
+ "shell.execute_reply": "2026-03-06T11:51:29.266379Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:29.200161Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:27.851223Z",
+ "start_time": "2026-03-09T10:17:27.811464Z"
}
},
- "outputs": [],
"source": [
"m1 = linopy.Model()\n",
"\n",
"power = m1.add_variables(name=\"power\", lower=0, upper=100, coords=[time])\n",
"fuel = m1.add_variables(name=\"fuel\", lower=0, coords=[time])\n",
"\n",
+ "# piecewise(...) can be written on either side of the comparison\n",
"# breakpoints are auto-broadcast to match the time dimension\n",
"m1.add_piecewise_constraints(\n",
- " {\"power\": power, \"fuel\": fuel},\n",
- " breakpoints,\n",
- " dim=\"breakpoint\",\n",
+ " linopy.piecewise(power, x_pts1, y_pts1) == fuel,\n",
" name=\"pwl\",\n",
" method=\"sos2\",\n",
")\n",
@@ -188,122 +153,123 @@
"demand1 = xr.DataArray([50, 80, 30], coords=[time])\n",
"m1.add_constraints(power >= demand1, name=\"demand\")\n",
"m1.add_objective(fuel.sum())"
- ]
+ ],
+ "outputs": [],
+ "execution_count": null
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "sos2-solve",
"metadata": {
- "ExecuteTime": {
- "end_time": "2026-02-09T19:21:33.646228Z",
- "start_time": "2026-02-09T19:21:33.602890Z"
- },
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:42.678723Z",
- "iopub.status.busy": "2026-02-09T19:21:42.678455Z",
- "iopub.status.idle": "2026-02-09T19:21:42.729810Z",
- "shell.execute_reply": "2026-02-09T19:21:42.729268Z"
+ "iopub.execute_input": "2026-03-06T11:51:29.267522Z",
+ "iopub.status.busy": "2026-03-06T11:51:29.267433Z",
+ "iopub.status.idle": "2026-03-06T11:51:29.326758Z",
+ "shell.execute_reply": "2026-03-06T11:51:29.326518Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:29.267514Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:27.899254Z",
+ "start_time": "2026-03-09T10:17:27.854515Z"
}
},
- "outputs": [],
"source": [
"m1.solve()"
- ]
+ ],
+ "outputs": [],
+ "execution_count": null
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "sos2-results",
"metadata": {
- "ExecuteTime": {
- "end_time": "2026-02-09T19:21:33.671517Z",
- "start_time": "2026-02-09T19:21:33.665702Z"
- },
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:42.732333Z",
- "iopub.status.busy": "2026-02-09T19:21:42.732173Z",
- "iopub.status.idle": "2026-02-09T19:21:42.737877Z",
- "shell.execute_reply": "2026-02-09T19:21:42.737648Z"
+ "iopub.execute_input": "2026-03-06T11:51:29.327139Z",
+ "iopub.status.busy": "2026-03-06T11:51:29.327044Z",
+ "iopub.status.idle": "2026-03-06T11:51:29.339334Z",
+ "shell.execute_reply": "2026-03-06T11:51:29.338974Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:29.327130Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:27.914316Z",
+ "start_time": "2026-03-09T10:17:27.909570Z"
}
},
- "outputs": [],
"source": [
"m1.solution[[\"power\", \"fuel\"]].to_pandas()"
- ]
+ ],
+ "outputs": [],
+ "execution_count": null
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "hcqytsfoaa",
"metadata": {
- "ExecuteTime": {
- "end_time": "2026-02-09T19:21:33.802613Z",
- "start_time": "2026-02-09T19:21:33.695925Z"
- },
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:42.739144Z",
- "iopub.status.busy": "2026-02-09T19:21:42.738977Z",
- "iopub.status.idle": "2026-02-09T19:21:42.983660Z",
- "shell.execute_reply": "2026-02-09T19:21:42.982758Z"
+ "iopub.execute_input": "2026-03-06T11:51:29.339689Z",
+ "iopub.status.busy": "2026-03-06T11:51:29.339608Z",
+ "iopub.status.idle": "2026-03-06T11:51:29.489677Z",
+ "shell.execute_reply": "2026-03-06T11:51:29.489280Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:29.339680Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.025921Z",
+ "start_time": "2026-03-09T10:17:27.922945Z"
}
},
- "outputs": [],
"source": [
- "plot_pwl_results(m1, breakpoints, demand1, color=\"C0\")"
- ]
+ "plot_pwl_results(m1, x_pts1, y_pts1, demand1, color=\"C0\")"
+ ],
+ "outputs": [],
+ "execution_count": null
},
{
"cell_type": "markdown",
- "id": "incremental-md",
"metadata": {},
"source": [
"## 2. Incremental formulation — Coal plant\n",
"\n",
"The coal plant has a **monotonically increasing** heat rate. Since all\n",
"breakpoints are strictly monotonic, we can use the **incremental**\n",
- "formulation — a pure LP with no SOS2 or binary variables."
+ "formulation — which uses fill-fraction variables with binary indicators."
]
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "incremental-setup",
"metadata": {
- "ExecuteTime": {
- "end_time": "2026-02-09T19:21:33.829667Z",
- "start_time": "2026-02-09T19:21:33.825683Z"
- },
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:42.987305Z",
- "iopub.status.busy": "2026-02-09T19:21:42.986204Z",
- "iopub.status.idle": "2026-02-09T19:21:43.003874Z",
- "shell.execute_reply": "2026-02-09T19:21:42.998265Z"
+ "iopub.execute_input": "2026-03-06T11:51:29.490092Z",
+ "iopub.status.busy": "2026-03-06T11:51:29.490011Z",
+ "iopub.status.idle": "2026-03-06T11:51:29.500894Z",
+ "shell.execute_reply": "2026-03-06T11:51:29.500558Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:29.490084Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.039245Z",
+ "start_time": "2026-03-09T10:17:28.035712Z"
}
},
- "outputs": [],
"source": [
- "breakpoints = linopy.breakpoints(power=[0, 50, 100, 150], fuel=[0, 55, 130, 225])\n",
- "breakpoints.to_pandas()"
- ]
+ "x_pts2 = linopy.breakpoints([0, 50, 100, 150])\n",
+ "y_pts2 = linopy.breakpoints([0, 55, 130, 225])\n",
+ "print(\"x_pts:\", x_pts2.values)\n",
+ "print(\"y_pts:\", y_pts2.values)"
+ ],
+ "outputs": [],
+ "execution_count": null
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "8nq1zqvq9re",
"metadata": {
- "ExecuteTime": {
- "end_time": "2026-02-09T19:21:33.913679Z",
- "start_time": "2026-02-09T19:21:33.855910Z"
- },
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:43.009748Z",
- "iopub.status.busy": "2026-02-09T19:21:43.009216Z",
- "iopub.status.idle": "2026-02-09T19:21:43.067070Z",
- "shell.execute_reply": "2026-02-09T19:21:43.066402Z"
+ "iopub.execute_input": "2026-03-06T11:51:29.501317Z",
+ "iopub.status.busy": "2026-03-06T11:51:29.501216Z",
+ "iopub.status.idle": "2026-03-06T11:51:29.604024Z",
+ "shell.execute_reply": "2026-03-06T11:51:29.603543Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:29.501307Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.121499Z",
+ "start_time": "2026-03-09T10:17:28.052395Z"
}
},
- "outputs": [],
"source": [
"m2 = linopy.Model()\n",
"\n",
@@ -312,9 +278,7 @@
"\n",
"# breakpoints are auto-broadcast to match the time dimension\n",
"m2.add_piecewise_constraints(\n",
- " {\"power\": power, \"fuel\": fuel},\n",
- " breakpoints,\n",
- " dim=\"breakpoint\",\n",
+ " linopy.piecewise(power, x_pts2, y_pts2) == fuel,\n",
" name=\"pwl\",\n",
" method=\"incremental\",\n",
")\n",
@@ -322,199 +286,577 @@
"demand2 = xr.DataArray([80, 120, 50], coords=[time])\n",
"m2.add_constraints(power >= demand2, name=\"demand\")\n",
"m2.add_objective(fuel.sum())"
- ]
+ ],
+ "outputs": [],
+ "execution_count": null
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "incremental-solve",
"metadata": {
- "ExecuteTime": {
- "end_time": "2026-02-09T19:21:33.981694Z",
- "start_time": "2026-02-09T19:21:33.933519Z"
- },
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:43.070384Z",
- "iopub.status.busy": "2026-02-09T19:21:43.070023Z",
- "iopub.status.idle": "2026-02-09T19:21:43.124118Z",
- "shell.execute_reply": "2026-02-09T19:21:43.123883Z"
+ "iopub.execute_input": "2026-03-06T11:51:29.604434Z",
+ "iopub.status.busy": "2026-03-06T11:51:29.604359Z",
+ "iopub.status.idle": "2026-03-06T11:51:29.680947Z",
+ "shell.execute_reply": "2026-03-06T11:51:29.680667Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:29.604427Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.174903Z",
+ "start_time": "2026-03-09T10:17:28.124418Z"
}
},
- "outputs": [],
"source": [
"m2.solve();"
- ]
+ ],
+ "outputs": [],
+ "execution_count": null
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "incremental-results",
"metadata": {
- "ExecuteTime": {
- "end_time": "2026-02-09T19:21:33.991781Z",
- "start_time": "2026-02-09T19:21:33.986137Z"
- },
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:43.125356Z",
- "iopub.status.busy": "2026-02-09T19:21:43.125291Z",
- "iopub.status.idle": "2026-02-09T19:21:43.129072Z",
- "shell.execute_reply": "2026-02-09T19:21:43.128850Z"
+ "iopub.execute_input": "2026-03-06T11:51:29.681833Z",
+ "iopub.status.busy": "2026-03-06T11:51:29.681725Z",
+ "iopub.status.idle": "2026-03-06T11:51:29.698558Z",
+ "shell.execute_reply": "2026-03-06T11:51:29.698011Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:29.681822Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.182912Z",
+ "start_time": "2026-03-09T10:17:28.178226Z"
}
},
- "outputs": [],
"source": [
"m2.solution[[\"power\", \"fuel\"]].to_pandas()"
- ]
+ ],
+ "outputs": [],
+ "execution_count": null
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "fua98r986pl",
"metadata": {
- "ExecuteTime": {
- "end_time": "2026-02-09T19:21:34.116658Z",
- "start_time": "2026-02-09T19:21:34.021992Z"
- },
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:43.130293Z",
- "iopub.status.busy": "2026-02-09T19:21:43.130221Z",
- "iopub.status.idle": "2026-02-09T19:21:43.281657Z",
- "shell.execute_reply": "2026-02-09T19:21:43.281256Z"
+ "iopub.execute_input": "2026-03-06T11:51:29.699350Z",
+ "iopub.status.busy": "2026-03-06T11:51:29.699116Z",
+ "iopub.status.idle": "2026-03-06T11:51:29.852000Z",
+ "shell.execute_reply": "2026-03-06T11:51:29.851741Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:29.699334Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.285938Z",
+ "start_time": "2026-03-09T10:17:28.191498Z"
}
},
- "outputs": [],
"source": [
- "plot_pwl_results(m2, breakpoints, demand2, color=\"C1\")"
- ]
+ "plot_pwl_results(m2, x_pts2, y_pts2, demand2, color=\"C1\")"
+ ],
+ "outputs": [],
+ "execution_count": null
},
{
"cell_type": "markdown",
- "id": "disjunctive-md",
"metadata": {},
"source": [
"## 3. Disjunctive formulation — Diesel generator\n",
"\n",
"The diesel generator has a **forbidden operating zone**: it must either\n",
- "be off (0 MW) or run between 50–80 MW. Because of this gap, we add a\n",
- "high-cost **backup** source to cover demand when the diesel is off or at\n",
- "its maximum."
+ "be off (0 MW) or run between 50–80 MW. Because of this gap, we use\n",
+ "**disjunctive** piecewise constraints via `linopy.segments()` and add a\n",
+ "high-cost **backup** source to cover demand when the diesel is off or\n",
+ "at its maximum.\n",
+ "\n",
+ "The disjunctive formulation is selected automatically when the breakpoint\n",
+ "arrays have a segment dimension (created by `linopy.segments()`)."
]
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "disjunctive-setup",
"metadata": {
- "ExecuteTime": {
- "end_time": "2026-02-09T19:21:34.147920Z",
- "start_time": "2026-02-09T19:21:34.142740Z"
- },
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:43.283679Z",
- "iopub.status.busy": "2026-02-09T19:21:43.283490Z",
- "iopub.status.idle": "2026-02-09T19:21:43.290429Z",
- "shell.execute_reply": "2026-02-09T19:21:43.289665Z"
+ "iopub.execute_input": "2026-03-06T11:51:29.852397Z",
+ "iopub.status.busy": "2026-03-06T11:51:29.852305Z",
+ "iopub.status.idle": "2026-03-06T11:51:29.866500Z",
+ "shell.execute_reply": "2026-03-06T11:51:29.866141Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:29.852387Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.301657Z",
+ "start_time": "2026-03-09T10:17:28.294924Z"
}
},
- "outputs": [],
"source": [
- "breakpoints = linopy.breakpoints.segments([(0, 0), (50, 80)])\n",
- "breakpoints.to_pandas()"
- ]
+ "# x-breakpoints define where each segment lives on the power axis\n",
+ "# y-breakpoints define the corresponding cost values\n",
+ "x_seg = linopy.segments([(0, 0), (50, 80)])\n",
+ "y_seg = linopy.segments([(0, 0), (125, 200)])\n",
+ "print(\"x segments:\\n\", x_seg.to_pandas())\n",
+ "print(\"y segments:\\n\", y_seg.to_pandas())"
+ ],
+ "outputs": [],
+ "execution_count": null
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "reevc7ood3",
"metadata": {
- "ExecuteTime": {
- "end_time": "2026-02-09T19:21:34.234326Z",
- "start_time": "2026-02-09T19:21:34.188461Z"
- },
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:43.293229Z",
- "iopub.status.busy": "2026-02-09T19:21:43.292936Z",
- "iopub.status.idle": "2026-02-09T19:21:43.363049Z",
- "shell.execute_reply": "2026-02-09T19:21:43.362442Z"
+ "iopub.execute_input": "2026-03-06T11:51:29.866940Z",
+ "iopub.status.busy": "2026-03-06T11:51:29.866839Z",
+ "iopub.status.idle": "2026-03-06T11:51:29.955272Z",
+ "shell.execute_reply": "2026-03-06T11:51:29.954810Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:29.866931Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.381180Z",
+ "start_time": "2026-03-09T10:17:28.308026Z"
}
},
- "outputs": [],
"source": [
"m3 = linopy.Model()\n",
"\n",
"power = m3.add_variables(name=\"power\", lower=0, upper=80, coords=[time])\n",
+ "cost = m3.add_variables(name=\"cost\", lower=0, coords=[time])\n",
"backup = m3.add_variables(name=\"backup\", lower=0, coords=[time])\n",
"\n",
"# breakpoints are auto-broadcast to match the time dimension\n",
- "m3.add_disjunctive_piecewise_constraints(power, breakpoints, name=\"pwl\")\n",
+ "m3.add_piecewise_constraints(\n",
+ " linopy.piecewise(power, x_seg, y_seg) == cost,\n",
+ " name=\"pwl\",\n",
+ ")\n",
"\n",
"demand3 = xr.DataArray([10, 70, 90], coords=[time])\n",
"m3.add_constraints(power + backup >= demand3, name=\"demand\")\n",
- "m3.add_objective((2.5 * power + 10 * backup).sum())"
- ]
+ "m3.add_objective((cost + 10 * backup).sum())"
+ ],
+ "outputs": [],
+ "execution_count": null
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "disjunctive-solve",
"metadata": {
- "ExecuteTime": {
- "end_time": "2026-02-09T19:21:34.322383Z",
- "start_time": "2026-02-09T19:21:34.260066Z"
- },
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:43.366552Z",
- "iopub.status.busy": "2026-02-09T19:21:43.366148Z",
- "iopub.status.idle": "2026-02-09T19:21:43.457707Z",
- "shell.execute_reply": "2026-02-09T19:21:43.457113Z"
+ "iopub.execute_input": "2026-03-06T11:51:29.955750Z",
+ "iopub.status.busy": "2026-03-06T11:51:29.955667Z",
+ "iopub.status.idle": "2026-03-06T11:51:30.027311Z",
+ "shell.execute_reply": "2026-03-06T11:51:30.026945Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:29.955741Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.437326Z",
+ "start_time": "2026-03-09T10:17:28.384629Z"
}
},
- "outputs": [],
"source": [
"m3.solve()"
- ]
+ ],
+ "outputs": [],
+ "execution_count": null
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "disjunctive-results",
"metadata": {
- "ExecuteTime": {
- "end_time": "2026-02-09T19:21:34.333489Z",
- "start_time": "2026-02-09T19:21:34.327107Z"
- },
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:43.459934Z",
- "iopub.status.busy": "2026-02-09T19:21:43.459654Z",
- "iopub.status.idle": "2026-02-09T19:21:43.468110Z",
- "shell.execute_reply": "2026-02-09T19:21:43.465566Z"
+ "iopub.execute_input": "2026-03-06T11:51:30.028114Z",
+ "iopub.status.busy": "2026-03-06T11:51:30.027864Z",
+ "iopub.status.idle": "2026-03-06T11:51:30.043138Z",
+ "shell.execute_reply": "2026-03-06T11:51:30.042813Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:30.028095Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.449248Z",
+ "start_time": "2026-03-09T10:17:28.444065Z"
}
},
+ "source": [
+ "m3.solution[[\"power\", \"cost\", \"backup\"]].to_pandas()"
+ ],
"outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
"source": [
- "m3.solution[[\"power\", \"backup\"]].to_pandas()"
+ "## 4. LP formulation — Concave efficiency bound\n",
+ "\n",
+ "When the piecewise function is **concave** and we use a `>=` constraint\n",
+ "(i.e. `pw >= y`, meaning y is bounded above by pw), linopy can use a\n",
+ "pure **LP** formulation with tangent-line constraints — no SOS2 or\n",
+ "binary variables needed. This is the fastest to solve.\n",
+ "\n",
+ "For this formulation, the x-breakpoints must be in **strictly increasing**\n",
+ "order.\n",
+ "\n",
+ "Here we bound fuel consumption *below* a concave efficiency envelope.\n"
]
},
{
"cell_type": "code",
- "execution_count": null,
- "id": "g32vxea6jwe",
"metadata": {
+ "execution": {
+ "iopub.execute_input": "2026-03-06T11:51:30.043492Z",
+ "iopub.status.busy": "2026-03-06T11:51:30.043410Z",
+ "iopub.status.idle": "2026-03-06T11:51:30.113382Z",
+ "shell.execute_reply": "2026-03-06T11:51:30.112320Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:30.043484Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.503165Z",
+ "start_time": "2026-03-09T10:17:28.458328Z"
+ }
+ },
+ "source": [
+ "x_pts4 = linopy.breakpoints([0, 40, 80, 120])\n",
+ "# Concave curve: decreasing marginal fuel per MW\n",
+ "y_pts4 = linopy.breakpoints([0, 50, 90, 120])\n",
+ "\n",
+ "m4 = linopy.Model()\n",
+ "\n",
+ "power = m4.add_variables(name=\"power\", lower=0, upper=120, coords=[time])\n",
+ "fuel = m4.add_variables(name=\"fuel\", lower=0, coords=[time])\n",
+ "\n",
+ "# pw >= fuel means fuel <= concave_function(power) → auto-selects LP method\n",
+ "m4.add_piecewise_constraints(\n",
+ " linopy.piecewise(power, x_pts4, y_pts4) >= fuel,\n",
+ " name=\"pwl\",\n",
+ ")\n",
+ "\n",
+ "demand4 = xr.DataArray([30, 80, 100], coords=[time])\n",
+ "m4.add_constraints(power == demand4, name=\"demand\")\n",
+ "# Maximize fuel (to push against the upper bound)\n",
+ "m4.add_objective(-fuel.sum())"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2026-03-06T11:51:30.113818Z",
+ "iopub.status.busy": "2026-03-06T11:51:30.113727Z",
+ "iopub.status.idle": "2026-03-06T11:51:30.171329Z",
+ "shell.execute_reply": "2026-03-06T11:51:30.170942Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:30.113810Z"
+ },
"ExecuteTime": {
- "end_time": "2026-02-09T19:21:34.545650Z",
- "start_time": "2026-02-09T19:21:34.425456Z"
+ "end_time": "2026-03-09T10:17:28.554560Z",
+ "start_time": "2026-03-09T10:17:28.520243Z"
+ }
+ },
+ "source": [
+ "m4.solve()"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2026-03-06T11:51:30.172009Z",
+ "iopub.status.busy": "2026-03-06T11:51:30.171791Z",
+ "iopub.status.idle": "2026-03-06T11:51:30.191956Z",
+ "shell.execute_reply": "2026-03-06T11:51:30.191556Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:30.171993Z"
},
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.563539Z",
+ "start_time": "2026-03-09T10:17:28.559654Z"
+ }
+ },
+ "source": [
+ "m4.solution[[\"power\", \"fuel\"]].to_pandas()"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
"execution": {
- "iopub.execute_input": "2026-02-09T19:21:43.475302Z",
- "iopub.status.busy": "2026-02-09T19:21:43.475060Z",
- "iopub.status.idle": "2026-02-09T19:21:43.697893Z",
- "shell.execute_reply": "2026-02-09T19:21:43.697398Z"
+ "iopub.execute_input": "2026-03-06T11:51:30.192604Z",
+ "iopub.status.busy": "2026-03-06T11:51:30.192376Z",
+ "iopub.status.idle": "2026-03-06T11:51:30.345074Z",
+ "shell.execute_reply": "2026-03-06T11:51:30.344642Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:30.192590Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.665419Z",
+ "start_time": "2026-03-09T10:17:28.575163Z"
}
},
+ "source": [
+ "plot_pwl_results(m4, x_pts4, y_pts4, demand4, color=\"C4\")"
+ ],
"outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
"source": [
- "plot_pwl_results(m3, breakpoints, demand3, color=\"C2\", fuel_rate=2.5)"
+ "## 5. Slopes mode — Building breakpoints from slopes\n",
+ "\n",
+ "Sometimes you know the **slope** of each segment rather than the y-values\n",
+ "at each breakpoint. The `breakpoints()` factory can compute y-values from\n",
+ "slopes, x-coordinates, and an initial y-value."
]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2026-03-06T11:51:30.345523Z",
+ "iopub.status.busy": "2026-03-06T11:51:30.345404Z",
+ "iopub.status.idle": "2026-03-06T11:51:30.357312Z",
+ "shell.execute_reply": "2026-03-06T11:51:30.356954Z",
+ "shell.execute_reply.started": "2026-03-06T11:51:30.345513Z"
+ },
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.673673Z",
+ "start_time": "2026-03-09T10:17:28.668792Z"
+ }
+ },
+ "source": [
+ "# Marginal costs: $1.1/MW for 0-50, $1.5/MW for 50-100, $1.9/MW for 100-150\n",
+ "x_pts5 = linopy.breakpoints([0, 50, 100, 150])\n",
+ "y_pts5 = linopy.breakpoints(slopes=[1.1, 1.5, 1.9], x_points=[0, 50, 100, 150], y0=0)\n",
+ "print(\"y breakpoints from slopes:\", y_pts5.values)"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "source": "## 6. Active parameter — Unit commitment with piecewise efficiency\n\nIn unit commitment problems, a binary variable $u_t$ controls whether a\nunit is **on** or **off**. When off, both power output and fuel consumption\nmust be zero. When on, the unit operates within its piecewise-linear\nefficiency curve between $P_{min}$ and $P_{max}$.\n\nThe `active` parameter on `piecewise()` handles this by gating the\ninternal PWL formulation with the commitment binary:\n\n- **Incremental:** delta bounds tighten from $\\delta_i \\leq 1$ to\n $\\delta_i \\leq u$, and base terms are multiplied by $u$\n- **SOS2:** convexity constraint becomes $\\sum \\lambda_i = u$\n- **Disjunctive:** segment selection becomes $\\sum z_k = u$\n\nThis is the only gating behavior expressible with pure linear constraints.\nSelectively *relaxing* the PWL (letting x, y float freely when off) would\nrequire big-M or indicator constraints.",
+ "metadata": {}
+ },
+ {
+ "cell_type": "code",
+ "source": "# Unit parameters: operates between 30-100 MW when on\np_min, p_max = 30, 100\nfuel_min, fuel_max = 40, 170\nstartup_cost = 50\n\nx_pts6 = linopy.breakpoints([p_min, 60, p_max])\ny_pts6 = linopy.breakpoints([fuel_min, 90, fuel_max])\nprint(\"Power breakpoints:\", x_pts6.values)\nprint(\"Fuel breakpoints: \", y_pts6.values)",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.685034Z",
+ "start_time": "2026-03-09T10:17:28.681601Z"
+ }
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Power breakpoints: [ 30. 60. 100.]\n",
+ "Fuel breakpoints: [ 40. 90. 170.]\n"
+ ]
+ }
+ ],
+ "execution_count": null
+ },
+ {
+ "cell_type": "code",
+ "source": "m6 = linopy.Model()\n\npower = m6.add_variables(name=\"power\", lower=0, upper=p_max, coords=[time])\nfuel = m6.add_variables(name=\"fuel\", lower=0, coords=[time])\ncommit = m6.add_variables(name=\"commit\", binary=True, coords=[time])\n\n# The active parameter gates the PWL with the commitment binary:\n# - commit=1: power in [30, 100], fuel = f(power)\n# - commit=0: power = 0, fuel = 0\nm6.add_piecewise_constraints(\n linopy.piecewise(power, x_pts6, y_pts6, active=commit) == fuel,\n name=\"pwl\",\n method=\"incremental\",\n)\n\n# Demand: low at t=1 (cheaper to stay off), high at t=2,3\ndemand6 = xr.DataArray([15, 70, 50], coords=[time])\nbackup = m6.add_variables(name=\"backup\", lower=0, coords=[time])\nm6.add_constraints(power + backup >= demand6, name=\"demand\")\n\n# Objective: fuel + startup cost + backup at $5/MW (cheap enough that\n# staying off at low demand beats committing at minimum load)\nm6.add_objective((fuel + startup_cost * commit + 5 * backup).sum())",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.787328Z",
+ "start_time": "2026-03-09T10:17:28.697214Z"
+ }
+ },
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "code",
+ "source": "m6.solve()",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:28.878112Z",
+ "start_time": "2026-03-09T10:17:28.791383Z"
+ }
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Set parameter Username\n",
+ "Academic license - for non-commercial use only - expires 2026-12-18\n",
+ "Read LP format model from file /private/var/folders/7j/18_93__x4wl2px44pq3f570m0000gn/T/linopy-problem-fm9ucuy2.lp\n",
+ "Reading time = 0.00 seconds\n",
+ "obj: 27 rows, 24 columns, 66 nonzeros\n",
+ "Gurobi Optimizer version 13.0.1 build v13.0.1rc0 (mac64[arm] - Darwin 25.2.0 25C56)\n",
+ "\n",
+ "CPU model: Apple M3\n",
+ "Thread count: 8 physical cores, 8 logical processors, using up to 8 threads\n",
+ "\n",
+ "Optimize a model with 27 rows, 24 columns and 66 nonzeros (Min)\n",
+ "Model fingerprint: 0x4b0d5f70\n",
+ "Model has 9 linear objective coefficients\n",
+ "Variable types: 15 continuous, 9 integer (9 binary)\n",
+ "Coefficient statistics:\n",
+ " Matrix range [1e+00, 8e+01]\n",
+ " Objective range [1e+00, 5e+01]\n",
+ " Bounds range [1e+00, 1e+02]\n",
+ " RHS range [2e+01, 7e+01]\n",
+ "\n",
+ "Found heuristic solution: objective 675.0000000\n",
+ "Presolve removed 24 rows and 19 columns\n",
+ "Presolve time: 0.00s\n",
+ "Presolved: 3 rows, 5 columns, 10 nonzeros\n",
+ "Found heuristic solution: objective 485.0000000\n",
+ "Variable types: 3 continuous, 2 integer (2 binary)\n",
+ "\n",
+ "Root relaxation: objective 3.516667e+02, 3 iterations, 0.00 seconds (0.00 work units)\n",
+ "\n",
+ " Nodes | Current Node | Objective Bounds | Work\n",
+ " Expl Unexpl | Obj Depth IntInf | Incumbent BestBd Gap | It/Node Time\n",
+ "\n",
+ " 0 0 351.66667 0 1 485.00000 351.66667 27.5% - 0s\n",
+ "* 0 0 0 358.3333333 358.33333 0.00% - 0s\n",
+ "\n",
+ "Explored 1 nodes (5 simplex iterations) in 0.01 seconds (0.00 work units)\n",
+ "Thread count was 8 (of 8 available processors)\n",
+ "\n",
+ "Solution count 3: 358.333 485 675 \n",
+ "\n",
+ "Optimal solution found (tolerance 1.00e-04)\n",
+ "Best objective 3.583333333333e+02, best bound 3.583333333333e+02, gap 0.0000%\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Dual values of MILP couldn't be parsed\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "('ok', 'optimal')"
+ ]
+ },
+ "execution_count": 47,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": null
+ },
+ {
+ "cell_type": "code",
+ "source": "m6.solution[[\"commit\", \"power\", \"fuel\", \"backup\"]].to_pandas()",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:29.079925Z",
+ "start_time": "2026-03-09T10:17:29.069821Z"
+ }
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ " commit power fuel backup\n",
+ "time \n",
+ "1 0.0 0.0 0.000000 15.0\n",
+ "2 1.0 70.0 110.000000 0.0\n",
+ "3 1.0 50.0 73.333333 0.0"
+ ],
+ "text/html": [
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " commit | \n",
+ " power | \n",
+ " fuel | \n",
+ " backup | \n",
+ "
\n",
+ " \n",
+ " | time | \n",
+ " | \n",
+ " | \n",
+ " | \n",
+ " | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " | 1 | \n",
+ " 0.0 | \n",
+ " 0.0 | \n",
+ " 0.000000 | \n",
+ " 15.0 | \n",
+ "
\n",
+ " \n",
+ " | 2 | \n",
+ " 1.0 | \n",
+ " 70.0 | \n",
+ " 110.000000 | \n",
+ " 0.0 | \n",
+ "
\n",
+ " \n",
+ " | 3 | \n",
+ " 1.0 | \n",
+ " 50.0 | \n",
+ " 73.333333 | \n",
+ " 0.0 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ]
+ },
+ "execution_count": 48,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "execution_count": null
+ },
+ {
+ "cell_type": "code",
+ "source": "plot_pwl_results(m6, x_pts6, y_pts6, demand6, color=\"C2\")",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2026-03-09T10:17:29.226034Z",
+ "start_time": "2026-03-09T10:17:29.097467Z"
+ }
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ],
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAA90AAAFUCAYAAAA57l+/AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/TGe4hAAAACXBIWXMAAA9hAAAPYQGoP6dpAABq3ElEQVR4nO3dB3hU1fbw4ZVeKKETeu9SpDeRJoiCIlxRBKliowiIUqQLBlABQYpYKCqCKKCgYqFKbyId6b1Jh0DqfM/afjP/mZBAEjKZZOb33mducs6cmZycieyz9l57bS+LxWIRAAAAAACQ4rxT/i0BAAAAAABBNwAAAAAATsRINwAAAAAATkLQDQAAAACAkxB0AwAAAADgJATdAAAAAAA4CUE3AAAAAABOQtANAAAAAICTEHQDAAAAAOAkBN0AAABAGjJ8+HDx8vKS9E5/hx49erj6NACXI+gGnGTWrFmmsdm6dWu8z9evX18eeughp17/n3/+2TTcqWnq1KnmdwcAAI73BNZHYGCg5M2bV5o2bSqTJk2SGzdupMlL5Yr7CMAdEXQDbkwbyxEjRqTqzyToBgAgfiNHjpQvv/xSpk2bJj179jT7evfuLeXLl5edO3fajhs8eLDcvn3bI+8jAHfk6+oTAJB2WSwWuXPnjgQFBYm709/T399fvL3piwQAOEezZs2katWqtu2BAwfKihUrpHnz5vLUU0/Jvn37TJvr6+trHgDcA3eXQBrz1VdfSZUqVUyjmy1bNnn++efl5MmTDsf8+eef8uyzz0rBggUlICBAChQoIH369HHoFe/UqZNMmTLFfG+f0nYvhQsXNg3/r7/+am4K9Bw++eQT89zMmTOlYcOGkitXLvMzy5Yta3rq475+z549snr1atvP0zR6q6tXr5oefT1ffY/ixYvL2LFjJTY2NlHX5pdffpFHH31UMmXKJJkzZ5Zq1arJ3LlzHX6+/t5x6TnYn8eqVavMuc2bN8+MJuTLl0+Cg4Nl+/btZv/s2bPveg+9Jvrc0qVLbftOnz4tXbp0kdy5c5vfp1y5cvLFF18k6ncBAEBp2zpkyBA5fvy4uQdIaE7377//LnXr1pUsWbJIxowZpVSpUjJo0KC72rb58+eb/aGhoZIhQwYTzDvjPkLb7o8++siM0mu6fM6cOeXxxx+Pd1rd4sWLzZQ6a1u5bNkyPnx4FLrQACe7du2a/Pvvv3ftj4qKumvf6NGjTcPbpk0beemll+TixYsyefJkqVevnvz111+moVULFiyQ8PBwee211yR79uyyefNmc9ypU6fMc+qVV16RM2fOmEZaU9kS68CBA9K2bVvz+m7duplGXWmArQ2lNt7a+75kyRJ5/fXXTaPbvXt3c8zEiRNNupzeDLzzzjtmnwakSs9XA2YNVPW9taFfv3696eU/e/asee395sNpgKvnoK/Ra6HXRBvuF154QZLj3XffNaPb/fr1k4iICNORULRoUfn222+lY8eODsfqTUzWrFnN/Dt1/vx5qVmzpq1IjN5saKdA165d5fr166ZzAQCAxHjxxRdNoPzbb7+Ztjcu7dDWTvEKFSqYFHUNXg8dOiTr1q2L915C26b+/fvLhQsXTPvauHFj2bFjhy1zLSXuI7S907ZZR+/1niU6OtoE8xs3bnQYzV+7dq0sXLjQ3DNop7nOYW/durWcOHHC/GzAI1gAOMXMmTMt+p/YvR7lypWzHX/s2DGLj4+PZfTo0Q7vs2vXLouvr6/D/vDw8Lt+XlhYmMXLy8ty/Phx277u3bubn5NYhQoVMscvW7bsrufi+5lNmza1FC1a1GGf/k6PPvroXce+++67lgwZMlj++ecfh/0DBgwwv/eJEycSPK+rV69aMmXKZKlRo4bl9u3bDs/FxsY6nH/Hjh3ver2ej/05rVy50vyeeu5xf6+BAwda/Pz8LJcvX7bti4iIsGTJksXSpUsX276uXbta8uTJY/n3338dXv/8889bQkJC
4r1eAADPvifYsmVLgsdo2/Hwww+b74cNG+bQfk+YMMFsX7x4McHXW9u2fPnyWa5fv27b/+2335r9H330UYrdR6xYscLs79Wr113P2bfLeoy/v7/l0KFDtn1///232T958uQEfxfA3ZBeDjiZpmZpL3Hch/ZW29NeYB011lFuHRm3PjQ9rESJErJy5UrbsfZzrG/dumWOq127tpmDraO/D6JIkSK20Vx79j/TOnqvI9dHjhwx2/ejPeePPPKIGS22//209z0mJkbWrFmT4Gv1emll1wEDBpgUNnsPsqSKjmbHna/+3HPPmSwE/TysdORBU+P1OaXX+fvvv5cWLVqY7+1/H712ej00VR0AgMTSLLGEqphbM91++OGH+07J6tChgxlRtvrf//4nefLkMUXRUuo+QttAbX+HDRt213Nx22Vt54sVK2bb1vsfnSKm9w+ApyC9HHCy6tWrO6RZWVmDT6uDBw+axk4D7Pj4+fnZvteUrKFDh8qPP/4oV65ccTguMQHw/YLu+GgKmzauGzZsMClpcX9mSEjIPd9Xfz+tzKpp2PHRFLiEHD582HxN6SXW4vtdK1asKKVLlzbp5Jo6p/T7HDlymHl3StP+NQifMWOGeST19wEAIK6bN2+auinx0U7fzz77zKRxawd0o0aNpFWrViagjlsANO59hAbBWkPl2LFjKXYfoe2yLnmmtWfuR6eTxXcPFPfnAu6MoBtII7TnWhtGnRfs4+MTbw+40lHhxx57TC5fvmzma2mAqIVSdK60Fj1JbFGyhMRXqVwbV23g9WeNHz/eFFzRudDaaz5hwoRE/Uw9Rs/77bffjvf5kiVLyoNKaNRbr1l81zShqux6c6Nz4rRTREcL9KZE57lbK8laf9/27dvfNffbKm4mAwAACdG51BrsanAcH22vNCNMs95++uknU89EO4S1M1izseJr4xLi7PuIuBI6t/+yzwHPQNANpBGaeqUNkI6+3isA3bVrl/zzzz+mwramkNmnYMf1IKnX9rRomhYa0+DTvsfaPuX9fj9Tfz/txdc0s6SypqXt3r07wRsSa8+5jkDHpRVhtUBaYmnQreuSavqcFoLTwmhaRd5KR+s1GNcbl+T8PgAA2LMWKotvepeVjmhrB7g+tAP8vffeM0VLtS22b4s0s8ye3lto0TVrZ3BK3Edou6yremjgnpjRbsDTMacbSCM0TUx7gzXYi9v7q9uXLl1y6DG2P0a/12U74tKeaxVfIJoU8f1M7ZHXZcTi+5nx/Tydq66p6dpIx6XHa9XThDRp0sQEuWFhYWY9bXv256Q3AVo1NTIy0rZPl/iKu1TK/ZQpU8YsgaKjCPrQuXBaQd7+emjlVQ3KtSMgLk0/BwAgMXSdbl1NQzvd27VrF+8xGtzGValSJfNVO8XtzZkzx2Fu+HfffWdWCdEq49Y27EHvI7QN1NfoPUtcjGADd2OkG0gjNGAcNWqUWQ5L5121bNnSBJpHjx6VRYsWycsvv2yWttI0MD1Wv9dUMC1GosFffHOjdL1v1atXL9N7rg2t/YhtYmnQq+nkWjhMlxDREetPP/3UzD3Thjzuz9TlxfR30VFpPUbT39566y0zUq5Lnmj6mh6nxVu0x11vCPR31nnT8dHfUdPYdS6brs2tS4TpqPbff/9t5pdb19XW5/W9dJ1QDfI1LV7XPLUv4JKU0W6d76aF23Rud9w5c2PGjDGjCzVq1DDLu+hyY3pTpAXU/vjjj3hvkAAAnk2nkO3fv990NOvSkxpw6whzoUKFTBsZt1iolS4TpunlTz75pDlW64ZMnTpV8ufPb9butqcjz7qvc+fO5mfokmHaHluXIkuJ+4gGDRqYZc50+S8dWdd2V9PSdckwfU6X0gRgx9Xl0wFPXR5El7CyXzLM6vvvv7fUrVvXLK+lj9KlS5slOw4cOGA7Zu/evZbGjRtbMmbMaMmRI4elW7dutiU49OdaRUdHW3r27GnJmTOnWQbkfv/J65JbTz75ZLzP/fjj
j5YKFSpYAgMDLYULF7aMHTvW8sUXX5j3PHr0qO24c+fOmffQJb70Ofulum7cuGGW5CpevLhZQkTPvXbt2pYPPvjAEhkZeZ8r+t856PFBQUGWzJkzW6pXr2755ptvHI758MMPzXIpAQEBljp16li2bt2a4JJhCxYsSPBnHTx40La029q1a+M95vz58+azKVCggFlmLDQ01NKoUSPLjBkz7vu7AAA8dxlRbQO1zXjsscfMUl72S3zFt2TY8uXLLU8//bQlb9685rX6tW3btg7LcFrbNm0Xta3NlSuXaS+1TbZfBiyl7iP0uffff9/cp+g56THNmjWzbNu2zXaMHq/tZFwJLfEJuCsv/T/7IBwAAABA+rJq1SozyqxLdGpVcwBpB3O6AQAAAABwEoJuAAAAAACchKAbAAAAAAAnIegGAABJVrhwYbOGb9xH9+7dzfO6vJ9+nz17dsmYMaNZYkgrKQNwjvr165vlupjPDaQ9FFIDAABJpuvRx8TE2LZ1zfrHHnvMLKWnN/+vvfaa/PTTTzJr1iwJCQkxSwjp0nvr1q3jagMAPApBNwAAeGC9e/eWpUuXmjV7r1+/Ljlz5pS5c+faRt10beIyZcrIhg0bpGbNmlxxAIDH8HX1CaQFsbGxcubMGcmUKZNJjQMAIC3RlNEbN25I3rx5zWhxWhMZGSlfffWV9O3b17Sj27Ztk6ioKGncuLHtmNKlS0vBggXvGXRHRESYh337fPnyZZOiTvsMAEiv7TNBt4gJuAsUKJCanw8AAEl28uRJyZ8/f5q7cosXL5arV69Kp06dzPa5c+fE399fsmTJ4nBc7ty5zXMJCQsLkxEjRjj9fAEASM322aVB95o1a+T99983PeJnz56VRYsWScuWLW3PJ9SrPW7cOHnrrbdshVyOHz9+V6M9YMCARJ+HjnBbL1bmzJmT+dsAAOAcmq6tncPW9iqt+fzzz6VZs2amp/9BDBw40IyWW127ds2MjtM+IyVotoXeb+r9ZWhoaKJfd+H2BT6ANCxXUK5EH6udfjoymSdPHjPlBUit9tmlQfetW7ekYsWK0qVLF2nVqtVdz+s/jPZ++eUX6dq1q6mAam/kyJHSrVs323ZSb0qswb0G3ATdAIC0Ki2mWGvH9x9//CELFy607dOARlPOdfTbfrRbq5ffK9gJCAgwj7hon5ESrKmf2jl06tSpRL+u/OzyfABp2K6OuxJ9rI5Enj592vwtcM+P1GyfXRp0a6+4PhISt2H+4YcfpEGDBlK0aFGH/RpkJ6XHEgAApIyZM2dKrly55Mknn7Ttq1Klivj5+cny5cttHeUHDhyQEydOSK1atbj0AACPkvaqsSRAe8d16REd6Y5rzJgxpsjKww8/bNLVo6Oj7/leWqRFUwHsHwAAIGm00JkG3R07dhRf3//rx9clwrS91lRxXUJMp5F17tzZBNxULgcAeJp0U0ht9uzZZkQ7bhp6r169pHLlypItWzZZv369mQ+maenjx49P8L0o1AIAwIPTtHIdvdZpYnFNmDDBpHDqSLd2djdt2lSmTp3KZQcAeJx0E3R/8cUX0q5dOwkMDHTYb19wpUKFCqZa6iuvvGIC6/jmhcVXqMU6Af5+vfk6Pw2eTdMlfXx8XH0aAJAmNGnSxBQlio+211OmTDEPZ4uJiTFLlCHtov0E4MnSRdD9559/mrlg8+fPv++xNWrUMOnlx44dk1KlSiWpUEtCNNg+evSoCbwBLQqkNQTSYkEjAGlDTGyMbL+wXS6GX5ScwTmlcq7K4uNNh11K04BfqxFrwTakfbSfADxVugi6dSkSLcqilc7vZ8eOHSadTYu6pFSDrunqOrqpo+H3WvQc7k3/FsLDw+XChf+WDtHlJgAgrj+O/yFjNo+R8+HnbftyB+eWAdUHSONCjblgKcgacGubHxwcTGdoGkX7CcDTuTTovnnzphw6dMi2raPJGjTr/Gxdl9Oa+r1gwQL58MMP73r9hg0bZNOmTaaiuc731u0+
ffpI+/btJWvWrClyjjpqroGWLi+hDTo8W1BQkPmqgbfe5JFqDiBuwN13VV+xiGPK9YXwC2b/+PrjCbxTMKXcGnBrMVWkbbSfADyZS4PurVu3moDZyjrPWqugzpo1y3w/b94800Patm3bu16vKeL6/PDhw02RliJFipig236+dko06krnigPK2vmi8wcJugHY2ovYGDPCHTfgVrrPS7xk7Oax0qBAA1LNU4B1Djcd4ukH7ScAT+XSoLt+/foJFmCxevnll80jPlq1fOPGjZIamL8L/hYA3IvO4bZPKY8v8D4Xfs4cVy20GheT9tnjcC8FwFMxQRkAgBSgRdNS8jgAAOAeCLqR4jTdv1KlSqnSY7548WKn/xwAuJ870Xfkt2O/JepCaTVzwB116tRJWrZs6erTAIA0h6A7Fef6bTm3RX4+8rP5qtvObvg0KLU+tMjM448/Ljt37hR3oVXlmzVrlujjtU6ALlcCAClp36V98tzS52T5yeX3PE7ndIcGh5rlw+DZ7NtoXb86d+7c8thjj8kXX3zB8qQA4IYIulOpmm3T75tKl1+7SP8/+5uvuq37nUmDbA1M9bF8+XLx9fWV5s2b37coTXqha2UnZb11AEhJ2nn6+a7P5YWfX5Aj145IjqAc8kqFV0xwrf+zZ93uX70/RdTg0EYfO3ZMfvnlF1NY9o033jDttK6cAgBwHwTdqbR8TNziOtblY5wZeGtAqoGpPjTde8CAAXLy5Em5ePGiaeS1h33+/Pny6KOPSmBgoHz99dfmdZ999pmUKVPG7CtdurRMnTrV4X379+8vJUuWNFVIixYtKkOGDLlnwH748GFzXI8ePUzhPOuIs6aGlyhRwvycpk2bmnOzN23aNClWrJipHF+qVCn58ssvE0wvt/4+CxcuNDcuem66rrsuI6dWrVolnTt3lmvXrtlGFzQNXunvZz0PHW343//+l0KfAAB3debmGen6W1eZuH2iRMdGS6OCjWThUwulx8M9zLJguYJzORyv63SzXBjia6Pz5ctnCsMOGjRIfvjhBxOAW1dw0SXRXnrpJcmZM6dkzpxZGjZsKH///fdd07l0hFyXWs2YMaO8/vrrZuWVcePGmffXJdVGjx7t8LPHjx8v5cuXlwwZMkiBAgXMa3QZVytrO/3rr7+a+wF9X2sngZX+DF0tRo/TbLq33377vsVxAcBTubR6eXqkDcrt6NuJHgUJ2xyW4PIxSpeXqRFaI1EjH0G+Qcmu/KmN6VdffSXFixc3jeOtW7fMfg3EdQ30hx9+2BZ4Dx06VD7++GOz76+//pJu3bqZhlmXclO6Jro2yLp2+a5du8zzuk8b3Lg0nV0D6q5du8qoUaNs+3Xtc70JmDNnjgmqtcF//vnnZd26deb5RYsWmR7/iRMnSuPGjWXp0qUmaM6fP7/DMnNxvfPOO/LBBx+YIFq/16XmdC342rVrm/fS3+3AgQPmWL2J0GXrevXqZQJ6Peby5cvy559/JusaA/CMNmDpkaXy3qb35GbUTQn2DZYB1QdIy+Itbf8+Ny7U2CwLplXKtWiazuHWlPLE/DsPz6ZBtXYYaweyBtvPPvusWd9aA/GQkBD55JNPpFGjRvLPP/9ItmzZbB3b+vyyZcvM99pxfOTIEdM5vnr1alm/fr106dLFtKU1atQwr/H29pZJkyaZpVb1WG2DtQ2372TXdlrbU20f9fj27dtLv379bB30eu+g9wIa8GtgrtvaduvvAABwRNCdRBpw15j7X6OVEnQEvPa82ok6dtMLmyTY7781ohNDA1UNLJUG2Xny5DH7tPG06t27t7Rq1cq2PWzYMNNwWvdpg7x3717T0FuD7sGDB9uOL1y4sGmEdb30uEG3NvSaJqfB75tvvunwnI6Ma2BvvQGYPXu2abQ3b94s1atXNw29znnTGwGlvem6PJzuv1fQrefy5JNPmu9HjBgh5cqVM0G3jtjrDYveFGvPv9WJEydMh4Kep3YcFCpUyHQ2AEBc1yKu
yaiNo2TZsWVmu2LOihJWN0wKZC5w17EaYLMsWOqrWrWqnDt3LtV/rrYr2ombErS90g7rtWvXmjbxwoULtqlU2gZqhtd3331nW041NjbWBL7ahpUtW9a0kdq5/PPPP5v2XjPFxo4dKytXrrS1udr227fj2in+6quvOgTd2k5Pnz7dZJwpzVYbOXKk7XntyB44cKDtfkGP1ZFxAMDdCLrdmDa8mqKtrly5YhpTLTymjbj9DYqVBubaS66j0jp6baVzyzRgtdKUdO0h12N1BF2f17Q3exrMalEYHc22b9ytdH55tWrVHG4yNEVt3759JujWr3HXZ69Tp4589NFH9/ydK1SoYPteOxmU3rDo+8dHz1EDbU1/19Q5fTzzzDMmPR0ArDad3STvrH3HdJT6ePnIqxVflZfKvyS+3jSjaYkG3KdPn5b0nk2hHcSaRq5trGan2bt9+7Zpf+2DZg24rXSalI+Pj0MHu+7TttDqjz/+kLCwMNm/f79cv37dtON37twxo9vW9k+/WgNua5tqfQ+dqqWp5tYg3tqu6z0FKeYAcDfuFpJIU7x1xDkxtp3fJq8v/2+k9l6mNpoqVXJXSdTPTgodwdV0ciudq63B86effmrS1qzHWFnnc+nz9g2p0gZc6Rzpdu3amVFkTRvX99NRbh0dt6fzzzT9/JtvvjFpbXGDcmfRKrBW1lRPHQVIiN6obN++3cz5/u2330z6uc6R27JlC5XOAUhETIRM2j5J5uydY65GocyFzOh2+ZzluTppkH0mU3r9udrprFlm2iZroKvtU1z2K3HYt3vKWhE97j5rW6g1UDS767XXXjMd45qmrqPq2uEeGRlpC7rjew8CagBIHoLuJNJGJ7Ep3rXz1jbFc7RoWnzzurWarT6vx6XGXD89d+351l7y+GhPuAbKOr9LA+v4aMq4jgxryrjV8ePH7zpO56BpKvsTTzxhgnMNaO174rVXXVPxdFRbaSqcFozRFHOlX3V+tzWlXem2ps4ll84d18IvcWnvvM5104em1+vNzIoVKxzS7gF4nn+u/CMD/hwgB68cNNttSraRN6u+maRpPkhdKZXi7Sra9mitlD59+pgaJjpyr22UjmanlG3btpkAXDvLraPh3377bZLeQzvctUNg06ZNUq9ePVu7ru+tReEAAI4Iup1IA2ktsKNVyjXAtg+8U2P5mIiICNvcNk0v1znU2nPeokWLBF+jI9haWEwbVE211vfQmxh9vc6r1gJlmjquo9uaHv7TTz+Zwinx0VF0fV5T2vWhRV6sc8y1B71nz54mTV1vKHSuWM2aNW1B+FtvvSVt2rQx86s1GF6yZIkpLKMpccmlNy36++vyaVqoRnvz9QZHOxn0piFr1qxmDpzejOgcOACeKdYSK1/u/VI+2v6RRMVGSbbAbDKi9gipX6C+q08NbsTaRmtn8Pnz500bqSnfOgrdoUMHExDXqlVLWrZsaSqRa2G0M2fOmHZVp0HZTw9LCs2A0/nakydPNvcD2qGt87GTSoudjhkzxtwX6BQurYiunecAgLuxZJiTaRVbVy0fow249kTrQ9PFNWV6wYIFUr9+wjeOmnauaegzZ840y4nocmJanVRT3dRTTz1leuA1SNZlSnTkW5cMS4gG2VpVVVPStMCZtWq6Bry69NgLL7xg5mrrcTpX3EpvMnT+thaN0WJoWshNz+le534/Wp1cC8U899xzJv1db2J0VFuDea22qqPreuOhKfH6MwF4nnO3zsnLv70sH2z9wATcj+Z/VL5/6nsCbjitjdYOYe3k1kJn2hGty4bplC7NTtOOYO0U1tU7NOjWVT40u0wz05JLO501QNbiag899JCpRq7BflJpgdQXX3zRZKRp54Bms2lnAADgbl4WJuiYIiI6squFQeLOPdbCIkePHjVBpy6plVy6fBjLx/xHg3gtrpZee8RT6m8CQNqiVclHbhgpNyJvmBoa/ar2k2dLPpvspRpTq51yZ6nRPiP1uPoz05R9LbSn
a6OfOnUq0a8rP5saDmnZro67nP43ADxo+0x6eSph+RgASJs0yA7bFCZLjiwx2w9lf0jCHgmTwiEpN48WAAB4LoJuAIDH0lUmBv05SM7cOiPeXt7SrXw3eaXiK+Ln7Vi5GQAAILmY041U16lTp3SbWg7APUTFRMnEbROl87LOJuDOnzG/zH58tvR4uAcBNwAASFGMdAMAPMqRq0fMUmD7Lu8z288Uf8asJJHBL4OrTw0AALghgm4AgEfQuqHf7P9Gxm8bLxExEZIlIIsMqzXMqatIAAAAEHQDANzexfCLMmT9EFl3ep3ZrpO3jrxb513JGZzT1acGAADcHEE3AMCtLT++XIZvGC5XI65KgE+A9K3SV9qWbpsmlgIDAADuj6DbCU5fvS1XbkUm+XVZM/hLvixBzjglAPA4t6JuydjNY2XRoUVmu3S20jLmkTFSLEsxV5+aW9C1bvv37y+//PKLhIeHS/HixWXmzJlStWpVWzr/sGHD5NNPPzXFM+vUqSPTpk2TEiVKuPrUAQBIVQTdTgi4G36wSiKiY5P82gBfb1nRrz6BNwA8oB0XdsjAPwfKqZunxEu8pMtDXaR7pe7i58NSYCnhypUrJohu0KCBCbpz5swpBw8elKxZs9qOGTdunEyaNElmz54tRYoUkSFDhkjTpk1l7969EhgYmCLnAQBAekDQncJ0hDs5AbfS1+nrGe0GgOSJio2ST/7+RD7d9anEWmIlT4Y88l7d96Rq6H+jr0gZY8eOlQIFCpiRbSsNrK10lHvixIkyePBgefrpp82+OXPmSO7cuWXx4sXy/PPP81EAADyGS4PuNWvWyPvvvy/btm2Ts2fPyqJFi6Rly5YO6zlrD7k97SVftmyZbfvy5cvSs2dPWbJkiXh7e0vr1q3lo48+kowZM4onq1+/vlSqVMnc9CTHnj17ZOjQoeazOX78uEyYMEF69+6d4ucJACnl2LVjZnR796XdZrt50eYyqMYgyeSfiYucwn788UfTHj/77LOyevVqyZcvn7z++uvSrVs38/zRo0fl3Llz0rjx/1WGDwkJkRo1asiGDRucGnSXn11eUtOujruS/Br7+xs/Pz8pWLCgdOjQQQYNGiS+voyHAIC78XblD79165ZUrFhRpkyZkuAxjz/+uAnIrY9vvvnG4fl27dqZAPH333+XpUuXmkD+5ZdfToWzd286P69o0aIyZswYCQ0NdfXpAECCdFR1wT8LpM3SNibg1iD7/XrvS9gjYQTcTnLkyBHb/Oxff/1VXnvtNenVq5ctkNSAW+nItj3dtj4Xn4iICLl+/brDw11Z7280Lf/NN9+U4cOHm4EIV4uMTHpNGgBAGg66mzVrJqNGjZJnnnkmwWMCAgJM0Gd92M8X27dvnxn1/uyzz0zved26dWXy5Mkyb948OXPmjHgq7UHXkQcd8dfqvPo4duxYkt6jWrVqpvHX0Qj9DAAgLbp0+5L0WtFLRm4YKbejb0uN0Bqy8KmF8niRx119am4tNjZWKleuLO+99548/PDDprNbR7mnT5/+QO8bFhZmRsStD01hd1fW+5tChQqZTgvNCtAMAp0vr6Peer8THBxs7pU0MLd2MOn8+e+++872PprVlidPHtv22rVrzXtr57nSInYvvfSSeV3mzJmlYcOG8vfff9uO12Bf30PvpXSKAPPtAcDNgu7EWLVqleTKlUtKlSplGqVLly7ZntMUtSxZstgqpSpttDTNfNOmTR7bk67Bdq1atcwNkDVDQG9cNOX+Xo9XX33V1acOAIm2+uRqafVjK1l1apX4efvJW1XfkhlNZkhoBrJznE2DvLJlyzrsK1OmjJw4ccJ8b82QOn/+vMMxun2v7KmBAwfKtWvXbI+TJ0+KpwgKCjKjzNpxvnXrVhOA632OBtpPPPGEREVFmU70evXqmXsjpQG6DkDcvn1b9u/fb/Zpp7t2nGvArnQKwIULF0zBO50ypp0ljRo1MtPzrA4dOiTff/+9LFy4UHbs2OGi
KwAA7ss3radetWrVyvS8Hj582Mx10h5fbYR8fHxMipoG5PZ0LlS2bNnumb6mPekjRowQd6WjA/7+/qbBtb+5uV9Dqj3gAJDWhUeFywdbPzAp5apE1hISVjdMSmUr5epT8xhaufzAgQMO+/755x8zaqu03db2Z/ny5WYUVWkHt3aIawd6QnSE1tOyqzSo1uukafp6j6OF5tatWye1a9c2z3/99dem41z3awCtNVs++eQT85xOqdNMA73WGoiXLl3afH300Udto96bN282Qbf1un7wwQfmvXS03DodT4N9LXSno+EAAA8Luu0LrZQvX14qVKggxYoVMw2K9tIml/ak9+3b17atNwLunMJmpWuoAkB6tvvf3TLgzwFy/Ppxs92hbAfpVbmXBPh4VqDman369DFBoaaXt2nTxgR2M2bMMA+lI7JafFOnkOm8b+uSYXnz5nUomOrJtA6NZpnpCLam67/wwgtmoEH365Q5q+zZs5tsPx3RVhpQv/HGG3Lx4kUzqq1BuDXo7tq1q6xfv17efvttc6ymkd+8edO8hz0dGdfBDCvtLCHgBgAPDbrj0sJeOXLkMGlQGnRrI6O9t/aio6NNytS90tc8sSdd3a+ie/v27R94Ph4AOEN0bLR8vutzmf73dIm2REuu4Fwyuu5oqZmnJhfcBTR9WVcc0U7skSNHmqBaV8vQ4qZWGvhpwVQdTdV5xVp3ReuwMGf4P7rGuRaj08w07YzQTD1NKb8fHYTQjD4NuPUxevRoc8+jy7ht2bLFBPHWUXINuHUqgDUd3Z5Oz7PKkCFDivxdAADcIOg+deqUmdNtLRii85a1Idc5SlWqVDH7VqxYYXqM7XuJPZE24jExMQ77SC8HkB6dvHFSBv05SHZc/G+KTNPCTWVIzSESEhDi6lPzaM2bNzePhOhotwbk+sDdNNCNm4Gm8+J18EDT8K2Bs973aCq/dQ69XtdHHnlEfvjhB7N6i3Zm6HQyrVejaeda58YaROv8bZ1upwF94cKF+RgAwBODbu2B1VFrK13XUwND7cHVh8671nW3tQdX06C011wbKF0b1No46bxva8VU7d3t0aOHSUvXXmNPpo2rNtpatVxHuPV6JiW9XOd37d271/b96dOnzWej70WaOoDUmuu6+NBiGbN5jIRHh0tGv4xm3W1df1sDD8DdaCr+008/be5rNIDOlCmTDBgwwKyDrvutNKVclxnTANuaxaYF1nT+91tvveVQXFYHKDSlf9y4cVKyZEmzustPP/1kVo6xL0QLAHDT6uVanVMLgOhD6Txr/X7o0KGmUNrOnTvlqaeeMo2EzlPS0ew///zTITVcGxgtHKLp5lrdU3t8rXPKPFm/fv3MNdSecZ2nZa0om1jaKFs/G61+roVX9HtddgQAnO3qnavSd1VfGbp+qAm4q+SuIt8/9b20KNaCgBtubebMmeZ+R7MINGDWzqeff/5Z/Pz8bMfovG7NZtPg20q/j7tPO6f0tRqQd+7c2dxP6cDE8ePH71pDHQDgPF4W/dfcw2khNa34rcuTxK3gfefOHTMCn9i1K3efvibNJ69N9rks7VlXHspHymRaltS/CQBJs+70OhmybohcvH1RfL19pUelHtKpXCfx8fbx2Et5r3bKnaVk+wzXc/Vnlj9/fpO5p5kDOmUxscrPLu/U88KD2dVxl9P/BoAHbZ/T1ZxuAID7uhN9RyZsmyBz988120VDikrYI2FSNrvjetAAAADpCUF3CsuawV8CfL0lIjo2ya/V1+nrAcDT7Lu0zywFduTaEbPdtnRb6VulrwT6MoIJAADSN4LuFJYvS5Cs6FdfrtyKTPJrNeDW1wOAp4iJjZFZe2bJxzs+NsuC5QjKIe/WeVfq5qvr6lMDAABIEQTdTqCBM8EzANzbmZtnZNDaQbLt/Daz3ahgIxlWa5hkDczKpQMAAG6DoBsAkKq0fudPR3+S0RtHy82omxLsGywDqg+QlsVbUpkcAAC4HYJuAECquRZxTUZt
HCXLji0z2xVzVpSwumFSIHMBPgUAAOCWCLoBAKli09lN8s7ad+R8+Hnx8fKRVyu+Ki+Vf8ksCwYAAOCuuNNxhqsnRcIvJf11wdlFsjDaA8C9RMZEyqTtk2T23tlmu1DmQmZ0u3xO1r4FAADuj6DbGQH3x1VEoiOS8WkEiPTYRuANwG38c+UfsxTYwSsHzfazJZ+VflX7SbBfsKtPDQAAIFV4p86P8SA6wp2cgFvp65IzQg4AaUysJVbm7JkjbZe2NQF3tsBsMrnhZBlaaygBN5AKChcuLBMnTuRaA0AawEi3m6pfv75UqlQp2Q3up59+KnPmzJHdu3eb7SpVqsh7770n1atXT+EzBeBuzt06J4PXDTZzuNWj+R+V4bWHmzW4AWe7OPnjVL3IOXv2SPJrOnXqJLNn/zfdQmXLlk2qVasm48aNkwoVKqTwGQIAXI2RbsRr1apV0rZtW1m5cqVs2LBBChQoIE2aNJHTp09zxQAkSKuSt/6xtQm4A30CZUjNIWaEm4AbcPT444/L2bNnzWP58uXi6+srzZs35zIBgBsi6HZD2oO+evVq+eijj8yat/o4duxYkt7j66+/ltdff92MlpcuXVo+++wziY2NNTcGABDXjcgbMujPQfLW6rfkeuR1KZe9nHzb4ltpU6oNa28D8QgICJDQ0FDz0LZ2wIABcvLkSbl48aJ5vn///lKyZEkJDg6WokWLypAhQyQqKsrhPZYsWWJGyAMDAyVHjhzyzDPPJHittR3PkiWLace1Y13vDa5evWp7fseOHQ73C7NmzTLHL168WEqUKGF+RtOmTc05AgCShqDbDWmwXatWLenWrZutF11HqjNmzHjPx6uvvprge4aHh5vGXlPgAMDetvPb5H8//k+WHFki3l7e8kqFV+TLJ76UIiFFuFBAIty8eVO++uorKV68uGTPnt3sy5Qpkwl89+7da9p1nfY1YcIE22t++uknE2Q/8cQT8tdff5lgOqEpYJq2rkH9b7/9Jo0aNUr0Z6Jt/+jRo810s3Xr1pkg/fnnn+czBYAkYk63GwoJCRF/f3/TO6496Pa92PeSOXPmBJ/THve8efNK48aNU/RcAaRfUTFRMmXHFPli9xdiEYvky5hPxjwyRirlquTqUwPSvKVLl5oOb3Xr1i3JkyeP2eft/d94yODBgx2KovXr10/mzZsnb7/9ttmnwbAGwCNGjLAdV7FixXjb7y+//NJkwJUrVy5J56id7R9//LHUqFHDbOs89DJlysjmzZup8QIASUDQ7UG0Bz05xowZYxp6TUfT9DIAOHL1iFkKbN/lfeZitCzeUgZUHyAZ/DJwcYBEaNCggUybNs18f+XKFZk6dao0a9bMBLSFChWS+fPny6RJk+Tw4cNmJDw6Otqhc1w70jWj7V4+/PBDE9Bv3brVpKgnlc4z1/R1K51upinn+/btI+gGgCQgvdyDJCe9/IMPPjBBt6akUVEVgMVikW/2fyNtlrYxAXdIQIhMqD9B3q3zLgE3kAQZMmQwneH60MBW51xrgKxp5FrAtF27diZ1XEe/NX38nXfekcjISNvrg4KC7vszHnnkEYmJiZFvv/3WYb91NF3/e7aKO18cAJByGOl2U5perg2tvaSml+scME1f+/XXX6Vq1apOOU8A6cfF8IsyZP0QWXd6ndmuk7eOjKwzUnIF53L1qQHpnhYx02D49u3bsn79ejParYG21fHjxx2O145wncfduXPnBN9T53j36NHDVErXUWtNUVc5c+Y0X7XmS9asWRO8R9DRdR0lt84VP3DggJnXrSnmAIDEI+h2Uzr/a9OmTaYKqY5iawG0pKSXjx07VoYOHSpz584173Xu3Dmz3zoqDsCzLD++XIZvGC5XI65KgE+A9KnSR14o/QKVyYFkioiIsLWtml6uc6c1jbxFixZy/fp1OXHihJnapaPgWjRt0aJFDq8fNmyYKYpWrFgxM7dbA+Sff/7ZzOG2V7t2bbNfU9c18O7du7e5H9AC
q8OHDzed6//8849JRY/Lz89PevbsadLc9bUawNesWZPUcgBIItLL3ZT2Zvv4+EjZsmVNj7Y23kmh88w0je1///ufKe5ifWi6OQDPcSvqlgxbP0x6r+ptAu7S2UrL/ObzpV2ZdgTcwANYtmyZrW3VQmVbtmyRBQsWSP369eWpp56SPn36mCBXlxPTkW9dMsyeHqfH//jjj+aYhg0bmvng8albt64J3LU42+TJk00w/c0338j+/fvNiLl2tI8aNequ12lBVg3iX3jhBalTp47pdNe55gCApGGk203p2p46Jyy5krquNwD3s+PCDhn450A5dfOUeImXdH6os/So1EP8fPxcfWpAgnL27JHmr44uBaaPe9EpXvqwp6PU9lq1amUeiWnH69WrZ0bSrTSI3rlzp8Mx9nO8E/MzAACJQ9ANAHAQFRslM3bOMI9YS6zkyZBH3qv7nlQNpbYDAABAUhF0p7Tg7CK+ASLREUl/rb5OXw8ALnL8+nEzur3r311mu3nR5jKoxiDJ5J+JzwQAACC9zeles2aNKRiSN29eMzdw8eLFDktX6Dyi8uXLm2U19JgOHTrImTNnHN5Di3zpa+0fusSVy2QpINJjm8jLq5P+0Nfp6wEglWla6YJ/FsizS541AbcG2ePqjZOwR8IIuBEvLcIVt/3VdZyt7ty5I927d5fs2bObucCtW7eW8+fPczXTiU6dOplK5QCAdD7SretRVqxYUbp06XLXfKHw8HDZvn27KRyix2hlzzfeeMMUF9HlK+yNHDlSunXrZtvOlMnFIzIaOBM8A0gnLt2+JMPXD5dVp1aZ7RqhNWRU3VESmiHU1aeGNK5cuXLyxx9/2La1wrWVFgLT4l1a7CskJMQUBdO2ft26/5acAwDAU7g06NblK/QRH22gf//9d4d9upyGrhWplbgLFizoEGSHhnJzCABJtebUGhmybohcvnNZ/Lz95I3Kb8iLZV8Uby8Wt8D9aZAdX/t77do1+fzzz82yk1pVW82cOdOs77xx40az7BQAAJ4iXc3p1kZc09eyZMnisF/Tyd99910TiOuyFtq7bt/bHt/amPqw0vUwAcCThEeFy4dbP5Rv//nWbBfPUlzGPDJGSmUr5epTQzpy8OBBM/0rMDBQatWqJWFhYaYt3rZtm5km1rhxY9uxmnquz+nKGgkF3clpn2NjY1Pot4Gz8VkhrTh79qzkz5/f1acBF9NO47gZ1OLpQbfODdM53m3btpXMmTPb9vfq1UsqV64s2bJlM+tYDhw40PyHNH78+ATfS28KRowYkUpnDgBpy55/98iAPwfIsev/LSnUoWwH6VW5lwT4BLj61JCO6NrSuuxVqVKlTLur7eojjzwiu3fvlnPnzom/v/9dneS5c+c2z6VE+6zv7+3tbWq95MyZ02xrxzzSZs2IyMhIuXjxovnM9LMCXME6BVU7gE6fPs2HgFSTLoJu7S1v06aN+Ud72rRpDs/17dvX9n2FChXMP+SvvPKKabgDAuK/gdTA3P512pNeoEDKFTA7e/OsXIm4kuTXZQ3IKnky5kmx8wAAe9Gx0fLF7i9k2o5pEm2JllzBuWR03dFSMw+pvkg6++lh2v5qEF6oUCH59ttvJSgoKFmXNCntswZvRYoUMQF/3CKrSJuCg4NNtoN+doAraGas1ou6ceNGkl53PpwikGlZ7uDcyXpdak5P9k0vAffx48dlxYoVDqPc8dFGPzo6Wo4dO2Z63+OjwXhCAXlKBNzNFzeXyJjIJL/W38dflrZcSuANIMWdvHFSBv05SHZc3GG2mxZuKkNqDpGQgBCuNlKEjmqXLFlSDh06JI899pgZ2dTq1/aj3Vq9/F43OUltn7WjXYM4bfdjYmIe+HeA8/j4+Jipf2QjwJX+97//mUdSlZ9d3inng5Sxq+N/y5ymZb7pIeDWOWMrV640y47cz44dO0wPaq5cucQVdIQ7OQG30tfp6xntBpBSNEPoh8M/SNimMAmPDpeMfhnNutu6/jY3
v0hJN2/elMOHD8uLL74oVapUET8/P1m+fLlZKkwdOHDAFELVud8pSf+O9WfpAwCAtMjX1Q209ohbHT161ATNOj87T548pidKlw1bunSp6cG2zgPT57V3W4uxbNq0SRo0aGDmaOi2FlFr3769ZM2aVTxZ/fr1pVKlSjJx4sRkvX7hwoXy3nvvmc9HOz9KlCghb775prmZApA+XL1zVUZsGCF/nPhvSafKuSrLe4+8J/ky5nP1qcEN9OvXT1q0aGFSyjW9e9iwYWY0U2uv6AokXbt2Nani2mZrllrPnj1NwE3lcgCAp3Fp0K3V4jRgtrLO4+rYsaMMHz5cfvzxR7OtwaM9HfXWoFJT0ObNm2eO1WqnOrdLg277+WBIHr1Jeuedd0y1We3g0I6Pzp07mwyCpk2bclmBNG796fUyeN1guXj7ovh6+0r3St2lc7nO4uPt4+pTg5s4deqUCbAvXbpkCpnVrVvXLAem36sJEyaYzDMd6dY2WtuOqVOnuvq0AQDwrKBbA2dNfUzIvZ5TWrVcG3g46tSpk6xevdo8PvroI1sWQeHChZP02dh74403ZPbs2bJ27VqCbiANuxN9RyZunyhf7/vabBcJKWKWAiubvayrTw1uRju970WXEZsyZYp5AADgySgf6YY00NYUvm7dupmqrvrQ6q8ZM2a85+PVV19NsPND5+XpfLx69eql+u8DIHH2X94vzy993hZwty3dVuY3n0/ADQAA4EJpupAakkfn0mlKuC7NYV8lVufL30vcyvDXrl2TfPnymbRAnaenaYFakRZA2hITGyOz986WyX9NNsuC5QjKISNrj5RH8j/i6lMDAADweATdHqR48eJJOl6L02mgrgXvdKRb58oXLVr0rtRzAK5z5uYZeWftO7L1/Faz3bBAQxlee7hkDfTsYpIAAABpBUG3B9EU8nvRqu/Tp0+3bWsBHGugrsXs9u3bJ2FhYQTdQBqx9MhSGb1xtNyMuinBvsEyoPoAaVm8JUuBAQAApCEE3W5K08t1mTV7SU0vjys2NtakmgNwrWsR10yw/cuxX8x2xZwVJaxumBTIXICPBgAAII0h6HZTWqlc1zA/duyYGeHWJcCSkl6uI9pVq1aVYsWKmUD7559/li+//FKmTZvm1PMGcG+bz26WQWsHyfnw8+Lj5SOvVnxVXir/klkWDAAAAGkPd2luql+/fma987Jly8rt27eTvGTYrVu35PXXXzfrsAYFBZn1ur/66it57rnnnHregNu7elIk/FKSXxYZmFkmHV4oc/bOEYtYpGCmgmYpsPI5yzvlNAEAAJAyCLrdVMmSJWXDhg3Jfv2oUaPMA0AKB9wfVxGJTsY0DS9v+TV/qFh8feV/Jf8nb1V9S4L9gvl4AAAA0jiCbgBILTrCnZyAW+s0WGKlkE9Geafh+1K/ACsIAAAApBcE3Sksa0BW8ffxl8iYyCS/Vl+nrweA+Lz/6PuSlYAbAAAgXSHoTmF5MuaRpS2XypWIK0l+rQbc+noAiP/fiCxcGAAAgHSGoNsJNHAmeAYAAAAAeHMJEsdisXCpwN8CAAAAgCQh6L4PHx8f8zUyMulztOGewsPDzVc/Pz9XnwoAAACANI708vtdIF9fCQ4OlosXL5ogy9ubfgpPznbQgPvChQuSJUsWW4cMAAAAACSEoPs+vLy8JE+ePHL06FE5fvz4/Q6HB9CAOzQ01NWnAQAAACAdIOhOBH9/fylRogQp5jDZDoxwAwAAAEgsgu5E0rTywMDARF9YAIhr87nNUp3LAgAA4FEIugHAycKjwmXslrGyb/c8+ZarDQAA4FEIugHAif6++LcM/HOgnLxxUspypQEAADwOpbgBwAmiYqNk6o6p0vGXjibgzpMhjwytNYxrDQAA4GEY6QaAFHb8+nEzur3r311m+8miT8qgGoMkc/g1Ed8AkeiIpL+pvi44O58VAABAOkPQDQApuJb79we/l3Fbxsnt6NuSyT+TDKk5RJoVafbfAf6ZRXps
Ewm/lPQ314A7SwE+KwAAgHTGpenla9askRYtWkjevHnNetiLFy++6wZ26NChZp3soKAgady4sRw8eNDhmMuXL0u7du0kc+bMZv3krl27ys2bN1P5NwHg6S7dviS9VvaSERtGmIC7emh1WfjUwv8LuK00cM5bKekPAm4AAIB0yaVB961bt6RixYoyZcqUeJ8fN26cTJo0SaZPny6bNm2SDBkySNOmTeXOnTu2YzTg3rNnj/z++++ydOlSE8i//PLLqfhbAPB0a06tkVY/tpJVJ1eJn7ef9KvaTz5t8qmEZgh19akBAADAk4PuZs2ayahRo+SZZ5656zkd5Z44caIMHjxYnn76aalQoYLMmTNHzpw5YxsR37dvnyxbtkw+++wzqVGjhtStW1cmT54s8+bNM8cBgDPpiPaojaOk+/LucvnOZSmepbh88+Q30rFcR/H2ok4lPMeYMWNMxlrv3r1t+7SDvHv37pI9e3bJmDGjtG7dWs6fP+/S8wQAwBXS7F3h0aNH5dy5cyal3CokJMQE1xs2bDDb+lVTyqtWrWo7Ro/39vY2I+MJiYiIkOvXrzs8ACAp9vy7R9osaSPzD8w32y+WfVHmNZ8npbKV4kLCo2zZskU++eQT0zlur0+fPrJkyRJZsGCBrF692nSGt2rVymXnCQCAq6TZoFsDbpU7d26H/bptfU6/5sqVy+F5X19fyZYtm+2Y+ISFhZkA3vooUIDiRAASJyY2RmbsnCHtf24vx64fk1zBuWTGYzPk7WpvS4BPAJcRHkVrqOg0r08//VSyZs1q23/t2jX5/PPPZfz48dKwYUOpUqWKzJw5U9avXy8bN2506TkDAJDa0mzQ7UwDBw40NwTWx8mTJ119SgDSgVM3TknnXzvL5L8mS7QlWpoUamKKpdXKW8vVpwa4hKaPP/nkkw5ZaWrbtm0SFRXlsL906dJSsGBBW7ZafMhEAwC4ozS7ZFho6H8FiHT+l1Yvt9LtSpUq2Y65cOGCw+uio6NNRXPr6+MTEBBgHgCQGFpj4sfDP0rY5jC5FXVLMvhlkHdqvCPNizY381gBT6T1U7Zv327Sy+PSbDN/f38zBSyhbLWEMtFGjBjhlPMFAMBV0uxId5EiRUzgvHz5cts+nXutc7Vr1fpvVEm/Xr161fSoW61YsUJiY2PN3G8AeFBX71yVN1e/KYPXDTYBd+VcleX7p76XFsVaEHDDY2mG2BtvvCFff/21BAYGptj7kokGAHBHvq6eC3bo0CGH4mk7duwwc7I1BU2roGp18xIlSpggfMiQIWZN75YtW5rjy5QpI48//rh069bNLCumqWw9evSQ559/3hwHAA9i/en1Jti+ePui+Hr5SveHu0vncp3Fx9uHCwuPpp3dmmlWuXJl276YmBizbOfHH38sv/76q0RGRpqOcfvRbs1WIxMNAOBpXBp0b926VRo0aGDb7tu3r/nasWNHmTVrlrz99ttmLW9dd1sbbl0STJcIs+9V1152DbQbNWpkqpbrkiS6tjcAJNed6DsycftE+Xrf12a7SEgRGfPIGCmbvSwXFRAxbe6uXbscrkXnzp3NvO3+/fubAqV+fn4mW03bZXXgwAE5ceKELVsNAABP4dKgu379+mauZEJ0ruTIkSPNIyE6Kj537lwnnSEAT7P/8n4ZsGaAHL522Gw/X+p56Vu1rwT5Brn61IA0I1OmTPLQQw857MuQIYNZk9u6v2vXrqYzXdvpzJkzS8+ePU3AXbNmTRedNQAArpFmC6kBQGovBTZn7xyZ9NckiY6NlhxBOWRk7ZHySP5H+CCAZJgwYYItA02rkjdt2lSmTp3KtQQAeByCbgAe7+zNszJo7SDZen6ruRYNCzSUYbWHSbbAbB5/bYDEWrVqlcO2TgWbMmWKeQAA4MkIugF41Gj29gvb5WL4RckZnNNUIl92bJmM3jhabkTdMCnkA6oPkGeKP0NlcgAAAKSIRAfdulxXYuncLQBIS/44/oeM2TxGzoeft+0L
9AmUOzF3zPcVclaQMXXHSIHMBVx4loDz6AohuhIIAABIo0G3Lvmhhc3uRYui6TG6bAgApKWAu++qvmIRx8KN1oC7aeGmpjq5rzfJP3BfxYoVk0KFCplVQ6yP/Pnzu/q0AABwe4m+w1y5cqVzzwQAnJRSriPccQNue39f+Fu85N6dikB6t2LFCjPvWh/ffPONWUe7aNGi0rBhQ1sQnjt3blefJgAAnht0P/roo849EwBwAp3DbZ9SHp9z4efMcdVCq/EZwG3pMp36UHfu3JH169fbgvDZs2dLVFSUWWd7z549rj5VAADcindyX/jnn39K+/btpXbt2nL69Gmz78svv5S1a9em5PkBwAPZ82/iAggtrgZ4Cq0sriPcgwcPlhEjRkivXr0kY8aMsn//flefGgAAbidZQff3339v1tsMCgqS7du3m/U31bVr1+S9995L6XMEgCS7EXlDxm0ZJxO2TUjU8VrNHHB3mlK+Zs0aE2hrOrnWa3n11VflypUr8vHHH5tiawAAIGUlq2rQqFGjZPr06dKhQweZN2+ebX+dOnXMcwDgKrGWWPnh0A8ycftEuXznstkX4BMgETH/dQ7GpXO5cwfnNsuHAe5MR7Y3bdpkKpjrlLFXXnlF5s6dK3ny5HH1qQEA4NaSFXQfOHBA6tWrd9f+kJAQuXr1akqcFwAk2d8X/5Yxm8bI7ku7zXbhzIXNutu3o2+b6uXKvqCatXha/+r9xcfbhysOt6bTwjTA1uBb53Zr4J09e3ZXnxYAAG4vWenloaGhcujQobv263xurYQKAKlJ52O/s/Ydaf9zexNwZ/DLIP2q9pOFTy2UOvnqSONCjWV8/fGSKziXw+t0hFv36/OAu9NO8RkzZkhwcLCMHTtW8ubNK+XLl5cePXrId999JxcvUtcAAIA0M9LdrVs3eeONN+SLL74w63KfOXNGNmzYIP369ZMhQ4ak/FkCQDyiYqLkq31fyfS/p0t4dLjZ17J4S3mj8huSIyiHw7EaWDco0MBUKdcgXedwa0o5I9zwFBkyZJDHH3/cPNSNGzdMZ7kuCTpu3Dhp166dlChRQnbv/i9TBAAAuDDoHjBggMTGxkqjRo0kPDzcpJoHBASYoLtnz54pdGoAkLA/T/1pCqUdu37MbFfIUcGkkpfPWT7B12iAzbJgwP8F4dmyZTOPrFmziq+vr+zbt4/LAwBAWgi6dXT7nXfekbfeesukmd+8eVPKli1rlhsBAGc6fv24CbbXnFpjtrMHZpc+VfpIi2ItxNsr2asgAm5PO8u3bt1q1uXW0e1169bJrVu3JF++fKaS+ZQpU8xXAACQBoJuK39/fxNsA4Cz3Yq6JTN2zpA5e+dIdGy0+Hr7Svsy7eWVCq9IRn86/ID70eXBNMjWuiwaXE+YMMEUVCtWrBgXDwCAtBZ0a2Oto90JWbFixYOcEwDYWCwWWXpkqVlv++Lt/wo9aXG0/tX6S5GQIlwpIJHef/99036XLFmSawYAQFoPuitVquSwHRUVJTt27DDFVzp27JhS5wbAw+25tEfCNoWZpcBUgUwFTLBdL3+9e3b8AbibrtGtj/vRIqkAAMDFQbempMVn+PDhZn43ADyIS7cvyeS/JsvCgwvNutpBvkHycoWXpUPZDuLv48/FBZJh1qxZUqhQIXn44YdNBgkAAEgHc7rjat++vVSvXl0++OCDlHxbAB4iKjZK5u+fL1N3TJUbUTfMvuZFm0vvyr0ld4bcrj49IF177bXX5JtvvpGjR49K586dTZutlcsBAIBzpWipX12rOzAwMCXfEoCH2HBmgzz747MydstYE3CXyVZG5jSbI2GPhBFwAylAq5OfPXtW3n77bVmyZIkUKFBA2rRpI7/++isj3wAApLWR7latWjlsa5qaNuS6FMmQIUNS6twApHFnb56VKxFXkvy6rAFZJU/GPOb7UzdOyQdbP5DlJ5bbnutVuZc8U/wZs642gJQTEBAgbdu2NY/jx4+blPPXX39doqOj
Zc+ePSz9CQCAq4PuI0eOSOHChSUkJMRhv7e3t5QqVUpGjhwpTZo0SelzBJBGA+7mi5tLZExkkl+r87IXNF8gPx/9WWbunimRsZHi4+UjbUu3lVcrviohAY7/xgBIedp2a0FC7TiPiYnhEgMAkBaC7hIlSpgR7ZkzZ5rt5557TiZNmiS5cztvrqUG+dobH5f2zGuqnK4xunr1aofnXnnlFZk+fbrTzgmAmBHu5ATcSl/X+dfOcvnOZbNdI08NGVBtgBTPWpxLCzhRRESELFy40FQoX7t2rTRv3lw+/vhjefzxx00QDgAAXBx0x612+ssvv8itW7fEmbZs2eLQA6/Lkj322GPy7LPP2vZ169bNjLJbBQcHO/WcADw4DbjzZcwn/ar2k0YFG7EEGOBk2lk9b948M5e7S5cupqhajhw5uO4AAKTl6uWpseRIzpw5HbbHjBkjxYoVk0cffdQhyA4NDXX6uQBIOW1KtpG3qr0lgb4UXwRSg2aAFSxYUIoWLWoyxOJmiVnpSDgAAHBR0K1zv/QRd19qiYyMlK+++kr69u3r8HO//vprs18D7xYtWphibvca7db0On1YXb9+3ennDsBR65KtCbiBVNShQwcySgAASA/p5Z06dTLVT9WdO3fk1VdflQwZMqRKL/nixYvl6tWr5hysXnjhBSlUqJDkzZtXdu7cKf3795cDBw7c8xzCwsJkxIgRTjlHAADSIq1UnpKmTZtmHseOHTPb5cqVk6FDh0qzZs1s9whvvvmmSWnXju6mTZvK1KlTnVoHBgCAdB90d+zY0WG7ffv2kpo+//xz05hrgG318ssv274vX7685MmTRxo1aiSHDx82aejxGThwoBkttx/p1jluAAAgcfLnz2+mfGmRVe2Unz17tjz99NPy119/mQC8T58+8tNPP8mCBQvMqic9evQwS46uW7eOSwwA8ChJCrqtVctdQSuY//HHH/cdRa9Ro4b5eujQoQSDbh2pt47WAwCApNPpXPZGjx5tRr43btxoAnLtKJ87d640bNjQdg9RpkwZ83zNmjW55AAAj5Fu1gfRxjpXrlzy5JNP3vO4HTt2mK864g0AAJxPVxnRNHJd0aRWrVqybds2iYqKksaNG9uOKV26tCnktmHDBj4SAIBHeaDq5aklNjbWBN2a3u7r+3+nrCnk2ov+xBNPSPbs2c2cbk1nq1evnlSoUMGl5wwAgLvbtWuXCbJ1/nbGjBll0aJFUrZsWdMB7u/vL1myZHE4Xudznzt3LsH3o9ApAMAdpYugW9PKT5w4YdYVtacNuj43ceJE07uu87Jbt24tgwcPdtm5Ap7geuR1mbU7ZYsyAUh/SpUqZQLsa9euyXfffWc6xxNaiiwxKHQKAHBH6SLobtKkSbxrgmuQ/SCNO4CkiYmNkcWHFstH2z+SKxFXuHyAh9PO7+LFi5vvq1SpIlu2bJGPPvpInnvuObPMp644Yj/aff78ebO8Z0IodAoAcEfpIugG4Ho7LuyQsM1hsvfSXrOdL2M+OX3ztKtPC0Aamw6mKeIagPv5+cny5ctNBprS5Tw1a03T0RNCoVMAgDsi6AZwTxfCL8iEbRNk6ZGlZjujX0Z5vdLrUjFnRWn3czuuHuChdFRal/HU4mg3btwwNVZWrVolv/76q1kirGvXrmZ5zmzZsknmzJmlZ8+eJuCmcjkAwNMQdAOIV2RMpHy590v5ZOcncjv6tniJl7Qq0Up6PtxTsgdll7M3z4q/j785Lqn0dVkDsnLlgXTswoUL0qFDBzl79qwJsrWAqQbcjz32mHl+woQJ4u3tbUa6dfS7adOmMnXqVFefNgAAqY6gG8Bd1pxaI2M3j5UTN06YbR3VHlh9oJTLUc52TJ6MeWRpy6XJmtutAbe+HkD6petw30tgYKBMmTLFPAAA8GQE3QBsjl47KuO2jJO1p9ea7RxBOaRvlb7yZNEnxdvL+64rpYEzwTMAAACQMIJuAHIz8qbM2DlDvtz3pUTHRouvt690
KNtBXq7wsmTwy8AVAgAAAJKJoBvwYLGWWFlyeIlM3D5R/r39r9lXL389ebva21IocyFXnx4AAACQ7hF0Ax5q97+7JWxTmOz8d6fZ1iBbg20NugEAAACkDIJuwMPoiPak7ZNk0aFFZjvYN1herfiqtC/TXvx8/Fx9egAAAIBbIegGPERUbJTM3TdXpv89XW5G3TT7nir2lPSu3FtyBud09ekBAAAAbomgG/AA60+vlzFbxpjq5Kps9rJmCbBKuSq5+tQAAAAAt0bQDbixkzdOyvtb3peVJ1ea7WyB2czI9tPFn453CTAAAAAAKYugG3BD4VHh8tmuz2T2ntkSGRspvl6+0rZMWzN3O7N/ZlefHgAAAOAxCLoBN2KxWOSXo7/Ih9s+lAvhF8y+WnlqSf/q/aVYlmKuPj0AAADA4xB0A25i36V9MmbzGNl+YbvZzpcxn1kCrEGBBuLl5eXq0wMAAAA8EkE3kM5duXNFJv81Wb775zuxiEWCfIPkpfIvScdyHSXAJ8DVpwcAAAB4NIJuIJ2Kjo2Wbw98Kx/v+FhuRN4w+5oVaSZ9q/SV0Ayhrj49AAAAAATdQPq0+exmCdscJoeuHjLbpbKWkgHVB0jV0KquPjUAAAAAdhjpBtKRMzfPyAdbP5Dfj/9utkMCQqTXw72kdYnW4uPt4+rTAwAAABAHQTeQDtyJviMzd8+Uz3d/LhExEWaN7TYl20iPh3uYwBsAAABA2kTQDaTxJcB0VPvDrR/KmVtnzL5qodWkf7X+UipbKVefHgAAAID7IOgG0qiDVw6aJcA2n9tstrU4Wr+q/aRJoSYsAQYAAACkEwTdQBpzLeKaTN0xVeYfmC8xlhiz7FeXh7pI54c6m+XAAAAAAKQf3pKGDR8+3Izo2T9Kly5te/7OnTvSvXt3yZ49u2TMmFFat24t58+fd+k5A8kVExsjC/5ZIM0XNZe5++eagPuxQo/JDy1/kNcrvU7ADQAAAKRDaX6ku1y5cvLHH3/Ytn19/++U+/TpIz/99JMsWLBAQkJCpEePHtKqVStZt26di84WSJ7t57ebVPJ9l/eZ7eJZikv/6v2lZp6aXFIAAAAgHUvzQbcG2aGhoXftv3btmnz++ecyd+5cadiwodk3c+ZMKVOmjGzcuFFq1iRYQdp3/tZ5Gb9tvPx89Geznck/k3Sv1F3alGojft5+rj49AAAAAO4edB88eFDy5s0rgYGBUqtWLQkLC5OCBQvKtm3bJCoqSho3bmw7VlPP9bkNGzbcM+iOiIgwD6vr1687/fcAHP4GYyJkzp458umuT+V29G3xEi9pXbK19Hy4p2QLzMbFAgAAANxEmg66a9SoIbNmzZJSpUrJ2bNnZcSIEfLII4/I7t275dy5c+Lv7y9ZsmRxeE3u3LnNc/eigbu+F+CKJcBWnVwl47aMk1M3T5l9lXJWkoE1BkrZ7GX5QAAAAAA3k6aD7mbNmtm+r1ChggnCCxUqJN9++60EBSW/ivPAgQOlb9++DiPdBQoUeODzBe7lyLUjMm7zOFl35r+aA7mCcknfqn3liSJPsAQYAAAA4KbSdNAdl45qlyxZUg4dOiSPPfaYREZGytWrVx1Gu7V6eXxzwO0FBASYB5AabkTekOl/T5e5++ZKtCXazNXuWK6jdCvfTYL9gvkQAAAAADeWppcMi+vmzZty+PBhyZMnj1SpUkX8/Pxk+fLltucPHDggJ06cMHO/AVeLtcTKooOLzBJgc/bOMQF3/fz1ZfHTi+WNym8QcANI13SqVrVq1SRTpkySK1cuadmypWmH7bG0JwAAaTzo7tevn6xevVqOHTsm69evl2eeeUZ8fHykbdu2Zomwrl27mjTxlStXmsJqnTt3NgE3lcvhajsv7pR2P7WToeuHyuU7l6Vw5sIyrfE0mdxoshTMXNDVpwcAD0zb5+7du5sVQ37//XdT3LRJkyZy69Yth6U9lyxZYpb21OPPnDljlvYEAMCTpOn08lOnTpkA+9KlS5Iz
Z06pW7euadz1ezVhwgTx9vaW1q1bm2rkTZs2lalTp7r6tOHB/r39r0zYNkF+PPyj2c7gl0Feq/iavFD6BfHzYQkwAO5j2bJlDtta+FRHvLUTvF69eiztCQBAegi6582bd8/ndRmxKVOmmAfgSlExUfL1vq9l+s7pcivqv1Gep4s9Lb2r9JYcQTn4cAC4vWvXrpmv2bL9t+xhcpb2ZElPAIA7StNBN5Ae/HnqT7ME2LHrx8x2+RzlZUD1AVIhZwVXnxoApIrY2Fjp3bu31KlTRx566CGzLzlLe7KkJwDAHRF0A8l04voJE2yvPrXabGcPzG5Gtp8q9pR4e6XpcgkAkKJ0bvfu3btl7dq1D/Q+LOkJAHBHBN1AAmJiY2T7he1yMfyi5AzOKZVzVRYfbx8JjwqXGTtnmIrkUbFR4uvlK+3KtJNXKr4imfwzcT0BeJQePXrI0qVLZc2aNZI/f37bfl2+M6lLe7KkJwDAHRF0A/H44/gfMmbzGDkfft62L3dwbmlcqLH8fux3uXD7gtlXJ28debv621I0pCjXEYBHsVgs0rNnT1m0aJGsWrVKihQp4vC8/dKeWvBUsbQnAMATEXQD8QTcfVf1FYtYHPZrAK7F0lSBTAXk7Wpvy6P5HxUvLy+uIQCPTCmfO3eu/PDDD2atbus8bV3SMygoyGFpTy2uljlzZhOks7QnAMDTEHQDcVLKdYQ7bsBtL6NfRvm+xfcS5BfEtQPgsaZNm2a+1q9f32H/zJkzpVOnTuZ7lvYEAICgG3Cgc7jtU8rjczPqpuy+tFuqhVbj6gHw6PTy+2FpTwAARCixDNjRomkpeRwAAAAAz0bQDdjRKuUpeRwAAAAAz0bQDdjRZcG0SrmXxF8cTfeHBoea4wAAAADgfgi6ATu6DveA6gPM93EDb+t2/+r9zXEAAAAAcD8E3UAcuhb3+PrjJVdwLof9OgKu+/V5AAAAAEgMlgwD4qGBdYMCDUw1cy2apnO4NaWcEW4AAAAASUHQDSRAA2yWBQMAAADwIEgvBwAAAADASQi6AQAAAABwEoJuAAAAAACchDndAADA7VWtWlXOnTvn6tOAC509e5brD8AlCLoBAIDb04D79OnTrj4NpAGZMmVy9SkA8DAE3QAAwO2FhoYm63WxN2+l+LkgZXhnzJCsgPvdd9/lIwCQqgi6AQCA29u6dWuyXndx8scpfi5IGTl79uBSAkgXKKQGAAAAAICTEHQDAAAAAOCJQXdYWJhUq1bNzL/JlSuXtGzZUg4cOOBwTP369cXLy8vh8eqrr7rsnAEAAAAASBdB9+rVq6V79+6yceNG+f333yUqKkqaNGkit245FjXp1q2bWQbC+hg3bpzLzhkAAAAAgHRRSG3ZsmUO27NmzTIj3tu2bZN69erZ9gcHBye7KikAAAAAAB450h3XtWvXzNds2bI57P/6668lR44c8tBDD8nAgQMlPDz8nu8TEREh169fd3gAAAAAAOBRI932YmNjpXfv3lKnTh0TXFu98MILUqhQIcmbN6/s3LlT+vfvb+Z9L1y48J5zxUeMGJFKZw4AAAAA8FTpJujWud27d++WtWvXOux/+eWXbd+XL19e8uTJI40aNZLDhw9LsWLF4n0vHQ3v27evbVtHugsUKODEswcAAAAAeKJ0EXT36NFDli5dKmvWrJH8+fPf89gaNWqYr4cOHUow6A4ICDAPAAAAAAA8Nui2WCzSs2dPWbRokaxatUqKFCly39fs2LHDfNURbwAAAAAAXMk3raeUz507V3744QezVve5c+fM/pCQEAkKCjIp5Pr8E088IdmzZzdzuvv06WMqm1eoUMHVpw8AAAAA8HBpunr5tGnTTMXy+vXrm5Fr62P+/PnmeX9/f/njjz/M2t2lS5eWN998U1q3bi1Llixx9akDAODWdMpXixYtTCFTLy8vWbx48V3ZakOHDjXttnaUN27cWA4ePOiy8wUAwFXS9Ei3Ntj3osXPVq9enWrnAwAA/nPr
1i2pWLGidOnSRVq1anXXZRk3bpxMmjRJZs+ebaaHDRkyRJo2bSp79+6VwMBALiMAwGOk6aAbAACkTc2aNTOPhDrNJ06cKIMHD5ann37a7JszZ47kzp3bjIg///zzqXy2AAC4TppOLwcAAOnP0aNHTR0WTSm30nosusLIhg0bEnxdRESEWcbT/gEAQHpH0A0AAFKUtfCpjmzb023rc/EJCwszwbn1odPIAABI7wi6AQBAmjBw4EBTQNX6OHnypKtPCQCAB0bQDQAAUlRoaKj5ev78eYf9um19Lj4BAQGSOXNmhwcAAOkdQTcAAEhRWq1cg+vly5fb9un87E2bNkmtWrW42gAAj0L1cgAAkGQ3b96UQ4cOORRP27Fjh2TLlk0KFiwovXv3llGjRkmJEiVsS4bpmt4tW7bkagMAPApBNwAASLKtW7dKgwYNbNt9+/Y1Xzt27CizZs2St99+26zl/fLLL8vVq1elbt26smzZMtboBgB4HIJuAACQZPXr1zfrcSfEy8tLRo4caR4AAHgy5nQDAAAAAOAkBN0AAAAAADgJQTcAAAAAAE5C0A0AAAAAgJMQdAMAAAAA4CQE3QAAAAAAOAlBNwAAAAAATkLQDQAAAAAAQTcAAAAAAOkLI90AAAAAADiJr7Pe2N2dvnpbrtyKTPLrsmbwl3xZgpxyTgAAAACAtIWgO5kBd8MPVklEdGySXxvg6y0r+tUn8AYAAAAAD0B6eTLoCHdyAm6lr0vOCDkAAAAAIP0h6AYAAAAAwEncJuieMmWKFC5cWAIDA6VGjRqyefNmV58SAAAAAMDDuUXQPX/+fOnbt68MGzZMtm/fLhUrVpSmTZvKhQsXXH1qAAAAAAAP5hZB9/jx46Vbt27SuXNnKVu2rEyfPl2Cg4Pliy++cPWpAQAAAAA8WLoPuiMjI2Xbtm3SuHFj2z5vb2+zvWHDhnhfExERIdevX3d4AAAAAACQ0tJ90P3vv/9KTEyM5M6d22G/bp87dy7e14SFhUlISIjtUaBAgVQ6WwAAAACAJ0n3QXdyDBw4UK5du2Z7nDx50tWnBAAAAABwQ76SzuXIkUN8fHzk/PnzDvt1OzQ0NN7XBAQEmAcAAAAAAM6U7ke6/f39pUqVKrJ8+XLbvtjYWLNdq1Ytl54bAAAAAMCzpfuRbqXLhXXs2FGqVq0q1atXl4kTJ8qtW7dMNXMAAAAAAFzFLYLu5557Ti5evChDhw41xdMqVaoky5Ytu6u4GgAAAAAAqcktgm7Vo0cP8wAAAAAAIK1I93O6XSFrBn8J8E3epdPX6esBAPAEU6ZMkcKFC0tgYKDUqFFDNm/e7OpTAgAgVbnNSHdqypclSFb0qy9XbkUm+bUacOvrAQBwd/Pnzzd1V6ZPn24Cbq250rRpUzlw4IDkypXL1acHAECqIOhOJg2cCZ4BAEjY+PHjpVu3brbCphp8//TTT/LFF1/IgAEDuHQAAI9AejkAAEhxkZGRsm3bNmncuPH/3XR4e5vtDRs2cMUBAB6DkW4RsVgs5mJcv37d1Z8HAAB3sbZP1vYqPfj3338lJibmrpVEdHv//v3xviYiIsI8rK5du+by9vnG7dsu+9m4t4BU+ruIuR3DR5GGpca/D/wNpG3XXdhGJLZ9JujWBvXGDXMxChQokBqfDQAAyW6vQkJC3PbqhYWFyYgRI+7aT/uMePV/mwsDCXnNff9NRPr5G7hf+0zQLSJ58+aVkydPSqZMmcTLy+uBezv05kDfL3PmzA/0Xp6E68Y1428t7eK/T9dfN+1B1wZd26v0IkeOHOLj4yPnz5932K/boaGh8b5m4MCBpvCaVWxsrFy+fFmyZ8/+wO0z+G8Z/A2Av4GUltj2maD7/88xy58/f4p+AHqDRdDNdUsN/K1x3VILf2uuvW7pbYTb399fqlSpIsuXL5eWLVvagmjd7tGjR7yvCQgIMA97WbJkSZXz9ST8twz+BsDfQMpJTPtM0A0AAJxCR607duwoVatWlerV
q5slw27dumWrZg4AgCcg6AYAAE7x3HPPycWLF2Xo0KFy7tw5qVSpkixbtuyu4moAALgzgu4Upmlxw4YNuys9Dlw3/tbSBv4b5Zrxt5a6NJU8oXRypC7+/QN/A+BvwDW8LOlp/REAAAAAANIRb1efAAAAAAAA7oqgGwAAAAAAJyHoBgAAAADASQi6U9iUKVOkcOHCEhgYKDVq1JDNmzen9I9It8LCwqRatWqSKVMmyZUrl1m39cCBAw7H3LlzR7p37y7Zs2eXjBkzSuvWreX8+fMuO+e0ZsyYMeLl5SW9e/e27eOaxe/06dPSvn1787cUFBQk5cuXl61bt9qe13IWWlE5T5485vnGjRvLwYMHxVPFxMTIkCFDpEiRIuZ6FCtWTN59911znay4ZiJr1qyRFi1aSN68ec1/i4sXL3a4jom5RpcvX5Z27dqZNVJ1DequXbvKzZs3U+2zhue5398t3F9i7sHg3qZNmyYVKlSwrc9dq1Yt+eWXX1x9Wh6DoDsFzZ8/36xJqtXLt2/fLhUrVpSmTZvKhQsXUvLHpFurV682AfXGjRvl999/l6ioKGnSpIlZs9WqT58+smTJElmwYIE5/syZM9KqVSuXnndasWXLFvnkk0/MP5j2uGZ3u3LlitSpU0f8/PxMg7J371758MMPJWvWrLZjxo0bJ5MmTZLp06fLpk2bJEOGDOa/V+3E8ERjx441DfLHH38s+/btM9t6jSZPnmw7hmsm5t8r/bddO1jjk5hrpAH3nj17zL+DS5cuNQHRyy+/nCqfMzzT/f5u4f4Scw8G95Y/f34zeLNt2zYzCNGwYUN5+umnTXuEVKDVy5EyqlevbunevbttOyYmxpI3b15LWFgYlzgeFy5c0CE0y+rVq8321atXLX5+fpYFCxbYjtm3b585ZsOGDR59DW/cuGEpUaKE5ffff7c8+uijljfeeMPs55rFr3///pa6desmeD1jY2MtoaGhlvfff9+2T69lQECA5ZtvvrF4oieffNLSpUsXh32tWrWytGvXznzPNbub/tu0aNEi23ZirtHevXvN67Zs2WI75pdffrF4eXlZTp8+7YRPFrj33y08U9x7MHimrFmzWj777DNXn4ZHYKQ7hURGRpqeI00ltPL29jbbGzZsSKkf41auXbtmvmbLls181eunPa/217B06dJSsGBBj7+G2jv95JNPOlwbrlnCfvzxR6latao8++yzJo3u4Ycflk8//dT2/NGjR+XcuXMO1zMkJMRMCfHU/15r164ty5cvl3/++cds//3337J27Vpp1qyZ2eaa3V9irpF+1ZRy/fu00uO1vdCRcQBwxT0YPG9K2bx580ymg6aZw/l8U+FneIR///3X/AHnzp3bYb9u79+/32XnlVbFxsaaecmaAvzQQw+ZfXqz6u/vb25I415Dfc5T6T+KOl1B08vj4prF78iRIyZVWqd7DBo0yFy7Xr16mb+vjh072v6e4vvv1VP/1gYMGCDXr183HV0+Pj7m37PRo0ebVGjFNbu/xFwj/aodQfZ8fX3Nja+n/u0BcP09GDzDrl27TJCtU560dtKiRYukbNmyrj4tj0DQDZeN3O7evduMpCFhJ0+elDfeeMPMv9LifEj8DYWOJL733ntmW0e69e9N59lq0I27ffvtt/L111/L3LlzpVy5crJjxw5zU6aFl7hmAOA+uAfzXKVKlTLtu2Y6fPfdd6Z91/n+BN7OR3p5CsmRI4cZHYpbaVu3Q0NDU+rHuIUePXqY4kErV640RR2s9Dppmv7Vq1cdjvfka6gp91qIr3LlymY0TB/6j6MWatLvdQSNa3Y3rRwdtwEpU6aMnDhxwnxv/Xviv9f/89Zbb5nR7ueff95Uen/xxRdNkT6teMs1S5zE/F3p17jFNaOjo01Fc0/9dw6A6+/B4Bk046948eJSpUoV075rgcWPPvrI1aflEQi6U/CPWP+AdU6k/WibbjNX4j9av0X/sddUlhUrVpiliezp9dNq0/bX
UJez0EDJU69ho0aNTCqQ9kpaHzqCqym/1u+5ZnfTlLm4S6HoXOVChQqZ7/VvTwMc+781Ta3WObWe+rcWHh5u5hXb045E/XdMcc3uLzHXSL9qx6J2qFnpv4d6nXXuNwC44h4MnknbnoiICFefhkcgvTwF6fxRTdPQQKh69eoyceJEU6Cgc+fOKflj0nU6k6au/vDDD2adSOv8RS00pOvZ6lddr1avo85v1DUEe/bsaW5Sa9asKZ5Ir1Pc+Va6BJGuPW3dzzW7m47QamEwTS9v06aNbN68WWbMmGEeyrrW+ahRo6REiRLm5kPXqNZUal271BPpGr46h1sLF2p6+V9//SXjx4+XLl26mOe5Zv/R9bQPHTrkUDxNO8D03yy9dvf7u9KMi8cff1y6detmpjto8Ui9EdYMAz0OcMXfLdzf/e7B4P4GDhxoiqPqf/M3btwwfw+rVq2SX3/91dWn5hlcXT7d3UyePNlSsGBBi7+/v1lCbOPGja4+pTRD/9zie8ycOdN2zO3bty2vv/66WcIgODjY8swzz1jOnj3r0vNOa+yXDFNcs/gtWbLE8tBDD5nlmkqXLm2ZMWOGw/O6vNOQIUMsuXPnNsc0atTIcuDAASd/emnX9evXzd+V/vsVGBhoKVq0qOWdd96xRERE2I7hmlksK1eujPffsY4dOyb6Gl26dMnStm1bS8aMGS2ZM2e2dO7c2SwLCLjq7xbuLzH3YHBvuixooUKFTIySM2dO0z799ttvrj4tj+Gl/+fqwB8AAAAAAHfEnG4AAAAAAJyEoBsAAAAAACch6AYAAAAAwEkIugEAAAAAcBKCbgAAAAAAnISgGwAAAAAAJyHoBgAAAADASQi6AQAAAABwEoJuAAAAwE106tRJWrZs6erTAGCHoBuArZH28vIyD39/fylevLiMHDlSoqOjuUIAAKQB1nY6ocfw4cPlo48+klmzZrn6VAHY8bXfAODZHn/8cZk5c6ZERETIzz//LN27dxc/Pz8ZOHCgS88rMjLSdAQAAODJzp49a/t+/vz5MnToUDlw4IBtX8aMGc0DQNrCSDcAm4CAAAkNDZVChQrJa6+9Jo0bN5Yff/xRrly5Ih06dJCsWbNKcHCwNGvWTA4ePGheY7FYJGfOnPLdd9/Z3qdSpUqSJ08e2/batWvNe4eHh5vtq1evyksvvWRelzlzZmnYsKH8/ffftuO1p17f47PPPpMiRYpIYGAgnxIAwONpG219hISEmNFt+30acMdNL69fv7707NlTevfubdrx3Llzy6effiq3bt2Szp07S6ZMmUx22y+//OJwfXfv3m3ae31Pfc2LL74o//77r8d/BkByEHQDSFBQUJAZZdYGfOvWrSYA37Bhgwm0n3jiCYmKijINfr169WTVqlXmNRqg79u3T27fvi379+83+1avXi3VqlUzAbt69tln5cKFC6aB37Ztm1SuXFkaNWokly9ftv3sQ4cOyffffy8LFy6UHTt28CkBAJBMs2fPlhw5csjmzZtNAK4d69oW165dW7Zv3y5NmjQxQbV957h2iD/88MOm/V+2bJmcP39e2rRpw2cAJANBN4C7aFD9xx9/yK+//ioFCxY0wbaOOj/yyCNSsWJF+frrr+X06dOyePFiWy+6Nehes2aNaaTt9+nXRx991DbqrY3+ggULpGrVqlKiRAn54IMPJEuWLA6j5Rrsz5kzx7xXhQoV+JQAAEgmbbsHDx5s2lydMqYZZBqEd+vWzezTNPVLly7Jzp07zfEff/yxaX/fe+89KV26tPn+iy++kJUrV8o///zD5wAkEUE3AJulS5eaNDJtjDWl7LnnnjOj3L6+vlKjRg3bcdmzZ5dSpUqZEW2lAfXevXvl4sWLZlRbA25r0K2j4evXrzfbStPIb968ad7DOvdMH0ePHpXDhw/bfoamuGv6OQAAeDD2ndc+Pj6mDS5fvrxtn6aPK81Cs7bVGmDbt9MafCv7thpA4lBIDYBNgwYNZNq0aaZoWd68
eU2wraPc96MNd7Zs2UzArY/Ro0ebuWVjx46VLVu2mMBbU9iUBtw639s6Cm5PR7utMmTIwCcDAEAK0KKo9nRqmP0+3VaxsbG2trpFixamHY/LvmYLgMQh6AbgEOhqMRV7ZcqUMcuGbdq0yRY4awqaVkstW7asrbHW1PMffvhB9uzZI3Xr1jXzt7UK+ieffGLSyK1BtM7fPnfunAnoCxcuzNUHACCN0bZa66poO63tNYAHQ3o5gHvSuV5PP/20mfel87E15ax9+/aSL18+s99K08e/+eYbU3Vc09C8vb1NgTWd/22dz620InqtWrVMZdXffvtNjh07ZtLP33nnHVOsBQAAuJYuGarFTdu2bWsy1jSlXOu8aLXzmJgYPh4giQi6AdyXrt1dpUoVad68uQmYtdCaruNtn5qmgbU2xNa520q/j7tPR8X1tRqQa+NdsmRJef755+X48eO2OWUAAMB1dIrZunXrTBuulc11GpkuOabTwLRTHUDSeFn07hkAAAAAAKQ4uqoAAAAAAHASgm4AAAAAAJyEoBsAAAAAACch6AYAAAAAwEkIugEAAAAAcBKCbgAAAAAAnISgGwAAAAAAJyHoBgAAAADASQi6AQAAAABwEoJuAAAAAACchKAbAAAAAAAnIegGAAAAAECc4/8BG6hf5E6PdMwAAAAASUVORK5CYII="
+ },
+ "metadata": {},
+ "output_type": "display_data",
+ "jetTransient": {
+ "display_id": null
+ }
+ }
+ ],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "source": "At **t=1**, demand (15 MW) is below the minimum load (30 MW). The solver\nkeeps the unit off (`commit=0`), so `power=0` and `fuel=0` — the `active`\nparameter enforces this. Demand is met by the backup source.\n\nAt **t=2** and **t=3**, the unit commits and operates on the PWL curve.",
+ "metadata": {}
}
],
"metadata": {
@@ -533,9 +875,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.11.11"
+ "version": "3.12.3"
}
},
"nbformat": 4,
- "nbformat_minor": 5
+ "nbformat_minor": 4
}
diff --git a/linopy/__init__.py b/linopy/__init__.py
index 415950eb..b1dc33b9 100644
--- a/linopy/__init__.py
+++ b/linopy/__init__.py
@@ -20,7 +20,7 @@
from linopy.io import read_netcdf
from linopy.model import Model, Variable, Variables, available_solvers
from linopy.objective import Objective
-from linopy.piecewise import breakpoints
+from linopy.piecewise import breakpoints, piecewise, segments, slopes_to_points
from linopy.remote import RemoteHandler
try:
@@ -44,6 +44,9 @@
"Variables",
"available_solvers",
"breakpoints",
+ "piecewise",
+ "segments",
+ "slopes_to_points",
"align",
"merge",
"options",
diff --git a/linopy/common.py b/linopy/common.py
index 0823deac..09f67355 100644
--- a/linopy/common.py
+++ b/linopy/common.py
@@ -161,26 +161,6 @@ def pandas_to_dataarray(
axis.name or get_from_iterable(dims, i) or f"dim_{i}"
for i, axis in enumerate(arr.axes)
]
- if coords is not None:
- pandas_coords = dict(zip(dims, arr.axes))
- if isinstance(coords, Sequence):
- coords = dict(zip(dims, coords))
- shared_dims = set(pandas_coords.keys()) & set(coords.keys())
- non_aligned = []
- for dim in shared_dims:
- coord = coords[dim]
- if not isinstance(coord, pd.Index):
- coord = pd.Index(coord)
- if not pandas_coords[dim].equals(coord):
- non_aligned.append(dim)
- if any(non_aligned):
- warn(
- f"coords for dimension(s) {non_aligned} is not aligned with the pandas object. "
- "Previously, the indexes of the pandas were ignored and overwritten in "
- "these cases. Now, the pandas object's coordinates are taken considered"
- " for alignment."
- )
-
return DataArray(arr, coords=None, dims=dims, **kwargs)
@@ -213,18 +193,19 @@ def numpy_to_dataarray(
if arr.ndim == 0:
return DataArray(arr.item(), coords=coords, dims=dims, **kwargs)
- ndim = max(arr.ndim, 0 if coords is None else len(coords))
if isinstance(dims, Iterable | Sequence):
dims = list(dims)
elif dims is not None:
dims = [dims]
if dims is not None and len(dims):
- # fill up dims with default names to match the number of dimensions
- dims = [get_from_iterable(dims, i) or f"dim_{i}" for i in range(ndim)]
+ dims = [get_from_iterable(dims, i) or f"dim_{i}" for i in range(arr.ndim)]
- if isinstance(coords, list) and dims is not None and len(dims):
- coords = dict(zip(dims, coords))
+ if dims is not None and len(dims) and coords is not None:
+ if isinstance(coords, list):
+ coords = dict(zip(dims, coords[: arr.ndim]))
+ elif is_dict_like(coords):
+ coords = {k: v for k, v in coords.items() if k in dims}
return DataArray(arr, coords=coords, dims=dims, **kwargs)
diff --git a/linopy/constants.py b/linopy/constants.py
index c2467b83..00bbd705 100644
--- a/linopy/constants.py
+++ b/linopy/constants.py
@@ -38,14 +38,22 @@
PWL_LAMBDA_SUFFIX = "_lambda"
PWL_CONVEX_SUFFIX = "_convex"
-PWL_LINK_SUFFIX = "_link"
+PWL_X_LINK_SUFFIX = "_x_link"
+PWL_Y_LINK_SUFFIX = "_y_link"
PWL_DELTA_SUFFIX = "_delta"
PWL_FILL_SUFFIX = "_fill"
PWL_BINARY_SUFFIX = "_binary"
PWL_SELECT_SUFFIX = "_select"
-DEFAULT_BREAKPOINT_DIM = "breakpoint"
-DEFAULT_SEGMENT_DIM = "segment"
-DEFAULT_LINK_DIM = "var"
+PWL_AUX_SUFFIX = "_aux"
+PWL_LP_SUFFIX = "_lp"
+PWL_LP_DOMAIN_SUFFIX = "_lp_domain"
+PWL_INC_BINARY_SUFFIX = "_inc_binary"
+PWL_INC_LINK_SUFFIX = "_inc_link"
+PWL_INC_ORDER_SUFFIX = "_inc_order"
+PWL_ACTIVE_BOUND_SUFFIX = "_active_bound"
+BREAKPOINT_DIM = "_breakpoint"
+SEGMENT_DIM = "_segment"
+LP_SEG_DIM = f"{BREAKPOINT_DIM}_seg"
GROUPED_TERM_DIM = "_grouped_term"
GROUP_DIM = "_group"
FACTOR_DIM = "_factor"
diff --git a/linopy/constraints.py b/linopy/constraints.py
index d3ebef19..3190d19b 100644
--- a/linopy/constraints.py
+++ b/linopy/constraints.py
@@ -1091,6 +1091,34 @@ def flat(self) -> pd.DataFrame:
df["key"] = df.labels.map(map_labels)
return df
+ def to_polars(self) -> pl.DataFrame:
+ """
+ Convert all constraints to a single polars DataFrame.
+
+ The resulting dataframe is a long format with columns
+ `labels`, `coeffs`, `vars`, `rhs`, `sign`, `key`.
+ """
+ dfs = [self[k].to_polars() for k in self]
+ if not dfs:
+ return pl.DataFrame(
+ {
+ "labels": pl.Series([], dtype=pl.Int64),
+ "coeffs": pl.Series([], dtype=pl.Float64),
+ "vars": pl.Series([], dtype=pl.Int64),
+ "sign": pl.Series([], dtype=pl.String),
+ "rhs": pl.Series([], dtype=pl.Float64),
+ "key": pl.Series([], dtype=pl.Int64),
+ }
+ )
+
+ df = pl.concat(dfs, how="vertical_relaxed")
+ labels = (
+ df.select("labels")
+ .unique(maintain_order=True)
+ .with_row_index(name="key", offset=0)
+ )
+ return df.join(labels, on="labels", how="left")
+
def to_matrix(self, filter_missings: bool = True) -> scipy.sparse.csc_matrix:
"""
Construct a constraint matrix in sparse format.
@@ -1098,25 +1126,88 @@ def to_matrix(self, filter_missings: bool = True) -> scipy.sparse.csc_matrix:
Missing values, i.e. -1 in labels and vars, are ignored filtered
out.
"""
- # TODO: rename "filter_missings" to "~labels_as_coordinates"
- cons = self.flat
-
if not len(self):
raise ValueError("No constraints available to convert to matrix.")
- if filter_missings:
- vars = self.model.variables.flat
- shape = (cons.key.max() + 1, vars.key.max() + 1)
- cons["vars"] = cons.vars.map(vars.set_index("labels").key)
- return scipy.sparse.csc_matrix(
- (cons.coeffs, (cons.key, cons.vars)), shape=shape
- )
+ def _build_dense_key_map(
+ arrays: list[np.ndarray], total_size: int
+ ) -> tuple[np.ndarray, int]:
+ mapping = np.full(total_size, -1, dtype=np.int64)
+ next_key = 0
+ for labels in arrays:
+ labels = labels[labels != -1]
+ if labels.size:
+ # Keep first-seen order while de-duplicating labels.
+ first_idx = np.unique(labels, return_index=True)[1]
+ labels = labels[np.sort(first_idx)]
+ n = labels.size
+ if n:
+ mapping[labels] = np.arange(next_key, next_key + n)
+ next_key += n
+ return mapping, next_key
+
+ # Build sparse triplets directly from NumPy arrays to avoid dataframe overhead.
+ row_parts: list[np.ndarray] = []
+ col_parts: list[np.ndarray] = []
+ data_parts: list[np.ndarray] = []
+
+ for _, constraint in self.items():
+ labels = constraint.labels.values.reshape(-1)
+ vars_arr = constraint.vars.values
+ coeffs_arr = constraint.coeffs.values
+
+ term_axis = constraint.vars.get_axis_num(constraint.term_dim)
+ if term_axis != vars_arr.ndim - 1:
+ vars_arr = np.moveaxis(vars_arr, term_axis, -1)
+ coeffs_arr = np.moveaxis(coeffs_arr, term_axis, -1)
+
+ nterm = vars_arr.shape[-1]
+ row = np.repeat(labels, nterm)
+ col = vars_arr.reshape(-1)
+ data = coeffs_arr.reshape(-1)
+
+ mask = (row != -1) & (col != -1) & (data != 0)
+ if mask.any():
+ row_parts.append(row[mask])
+ col_parts.append(col[mask])
+ data_parts.append(data[mask])
+
+ if row_parts:
+ row = np.concatenate(row_parts)
+ col = np.concatenate(col_parts)
+ data = np.concatenate(data_parts)
else:
- shape = self.model.shape
- return scipy.sparse.csc_matrix(
- (cons.coeffs, (cons.labels, cons.vars)), shape=shape
+ row = np.array([], dtype=np.int64)
+ col = np.array([], dtype=np.int64)
+ data = np.array([], dtype=float)
+
+ if filter_missings:
+ # Keep the filtered matrix row space aligned with ``self.flat``:
+ # only constraints that still have at least one active coefficient
+ # get a key when ``filter_missings=True``.
+ cons_map, next_con_key = _build_dense_key_map([row], self.model._cCounter)
+ vars_map, next_var_key = _build_dense_key_map(
+ [
+ var.labels.values.reshape(-1)
+ for _, var in self.model.variables.items()
+ ],
+ self.model._xCounter,
)
+ shape = (next_con_key, next_var_key)
+
+ row = cons_map[row]
+ col = vars_map[col]
+ keep = (row != -1) & (col != -1)
+ if not keep.all():
+ row = row[keep]
+ col = col[keep]
+ data = data[keep]
+ return scipy.sparse.csc_matrix((data, (row, col)), shape=shape)
+
+ shape = self.model.shape
+ return scipy.sparse.csc_matrix((data, (row, col)), shape=shape)
+
def reset_dual(self) -> None:
"""
Reset the stored solution of variables.
diff --git a/linopy/expressions.py b/linopy/expressions.py
index 649989f7..d2ae9022 100644
--- a/linopy/expressions.py
+++ b/linopy/expressions.py
@@ -9,6 +9,7 @@
import functools
import logging
+import operator
from abc import ABC, abstractmethod
from collections.abc import Callable, Hashable, Iterator, Mapping, Sequence
from dataclasses import dataclass, field
@@ -91,23 +92,33 @@
if TYPE_CHECKING:
from linopy.constraints import AnonymousScalarConstraint, Constraint
from linopy.model import Model
+ from linopy.piecewise import PiecewiseConstraintDescriptor, PiecewiseExpression
from linopy.variables import ScalarVariable, Variable
-SUPPORTED_CONSTANT_TYPES = (
- np.number,
- int,
- float,
- DataArray,
- pd.Series,
- pd.DataFrame,
- np.ndarray,
- pl.Series,
-)
-
FILL_VALUE = {"vars": -1, "coeffs": np.nan, "const": np.nan}
+def _to_piecewise_constraint_descriptor(
+ lhs: Any, rhs: Any, operator: str
+) -> PiecewiseConstraintDescriptor | None:
+ """Build a piecewise descriptor for reversed RHS syntax if applicable."""
+ from linopy.piecewise import PiecewiseExpression
+
+ if not isinstance(rhs, PiecewiseExpression):
+ return None
+
+ if operator == "<=":
+ return rhs.__ge__(lhs)
+ if operator == ">=":
+ return rhs.__le__(lhs)
+ if operator == "==":
+ return rhs.__eq__(lhs)
+
+ msg = f"Unsupported operator '{operator}' for piecewise dispatch."
+ raise ValueError(msg)
+
+
def exprwrap(
method: Callable, *default_args: Any, **new_default_kwargs: Any
) -> Callable:
@@ -533,44 +544,165 @@ def _multiply_by_linear_expression(
res = res + self.reset_const() * other.const
return res
+ def _align_constant(
+ self: GenericExpression,
+ other: DataArray,
+ fill_value: float = 0,
+ join: str | None = None,
+ ) -> tuple[DataArray, DataArray, bool]:
+ """
+ Align a constant DataArray with self.const.
+
+ Parameters
+ ----------
+ other : DataArray
+ The constant to align.
+ fill_value : float, default: 0
+ Fill value for missing coordinates.
+ join : str, optional
+ Alignment method. If None, uses size-aware default behavior.
+
+ Returns
+ -------
+ self_const : DataArray
+ The expression's const, potentially reindexed.
+ aligned : DataArray
+ The aligned constant.
+ needs_data_reindex : bool
+ Whether the expression's data needs reindexing.
+ """
+ if join is None:
+ if other.sizes == self.const.sizes:
+ return self.const, other.assign_coords(coords=self.coords), False
+ return (
+ self.const,
+ other.reindex_like(self.const, fill_value=fill_value),
+ False,
+ )
+ elif join == "override":
+ return self.const, other.assign_coords(coords=self.coords), False
+ else:
+ self_const, aligned = xr.align(
+ self.const,
+ other,
+ join=join,
+ fill_value=fill_value, # type: ignore[call-overload]
+ )
+ return self_const, aligned, True
+
+ def _add_constant(
+ self: GenericExpression, other: ConstantLike, join: str | None = None
+ ) -> GenericExpression:
+ # NaN values in self.const or other are filled with 0 (additive identity)
+ # so that missing data does not silently propagate through arithmetic.
+ if np.isscalar(other) and join is None:
+ return self.assign(const=self.const.fillna(0) + other)
+ da = as_dataarray(other, coords=self.coords, dims=self.coord_dims)
+ self_const, da, needs_data_reindex = self._align_constant(
+ da, fill_value=0, join=join
+ )
+ da = da.fillna(0)
+ self_const = self_const.fillna(0)
+ if needs_data_reindex:
+ return self.__class__(
+ self.data.reindex_like(self_const, fill_value=self._fill_value).assign(
+ const=self_const + da
+ ),
+ self.model,
+ )
+ return self.assign(const=self_const + da)
+
+ def _apply_constant_op(
+ self: GenericExpression,
+ other: ConstantLike,
+ op: Callable[[DataArray, DataArray], DataArray],
+ fill_value: float,
+ join: str | None = None,
+ ) -> GenericExpression:
+ """
+ Apply a constant operation (mul, div, etc.) to this expression with a scalar or array.
+
+ NaN values are filled with neutral elements before the operation:
+ - factor (other) is filled with fill_value (0 for mul, 1 for div)
+ - coeffs and const are filled with 0 (additive identity)
+ """
+ factor = as_dataarray(other, coords=self.coords, dims=self.coord_dims)
+ self_const, factor, needs_data_reindex = self._align_constant(
+ factor, fill_value=fill_value, join=join
+ )
+ factor = factor.fillna(fill_value)
+ self_const = self_const.fillna(0)
+ if needs_data_reindex:
+ data = self.data.reindex_like(self_const, fill_value=self._fill_value)
+ coeffs = data.coeffs.fillna(0)
+ return self.__class__(
+ assign_multiindex_safe(
+ data, coeffs=op(coeffs, factor), const=op(self_const, factor)
+ ),
+ self.model,
+ )
+ coeffs = self.coeffs.fillna(0)
+ return self.assign(coeffs=op(coeffs, factor), const=op(self_const, factor))
+
def _multiply_by_constant(
- self: GenericExpression, other: ConstantLike
+ self: GenericExpression, other: ConstantLike, join: str | None = None
) -> GenericExpression:
- multiplier = as_dataarray(other, coords=self.coords, dims=self.coord_dims)
- coeffs = self.coeffs * multiplier
- assert all(coeffs.sizes[d] == s for d, s in self.coeffs.sizes.items())
- const = self.const * multiplier
- return self.assign(coeffs=coeffs, const=const)
+ return self._apply_constant_op(other, operator.mul, fill_value=0, join=join)
+
+ def _divide_by_constant(
+ self: GenericExpression, other: ConstantLike, join: str | None = None
+ ) -> GenericExpression:
+ return self._apply_constant_op(other, operator.truediv, fill_value=1, join=join)
def __div__(self: GenericExpression, other: SideLike) -> GenericExpression:
try:
- if isinstance(
- other,
- variables.Variable
- | variables.ScalarVariable
- | LinearExpression
- | ScalarLinearExpression
- | QuadraticExpression,
- ):
+ if isinstance(other, SUPPORTED_EXPRESSION_TYPES):
raise TypeError(
"unsupported operand type(s) for /: "
f"{type(self)} and {type(other)}"
"Non-linear expressions are not yet supported."
)
- return self._multiply_by_constant(other=1 / other)
+ return self._divide_by_constant(other)
except TypeError:
return NotImplemented
def __truediv__(self: GenericExpression, other: SideLike) -> GenericExpression:
return self.__div__(other)
- def __le__(self, rhs: SideLike) -> Constraint:
+ @overload
+ def __le__(self, rhs: PiecewiseExpression) -> PiecewiseConstraintDescriptor: ...
+
+ @overload
+ def __le__(self, rhs: SideLike) -> Constraint: ...
+
+ def __le__(self, rhs: SideLike) -> Constraint | PiecewiseConstraintDescriptor:
+ descriptor = _to_piecewise_constraint_descriptor(self, rhs, "<=")
+ if descriptor is not None:
+ return descriptor
return self.to_constraint(LESS_EQUAL, rhs)
- def __ge__(self, rhs: SideLike) -> Constraint:
+ @overload
+ def __ge__(self, rhs: PiecewiseExpression) -> PiecewiseConstraintDescriptor: ...
+
+ @overload
+ def __ge__(self, rhs: SideLike) -> Constraint: ...
+
+ def __ge__(self, rhs: SideLike) -> Constraint | PiecewiseConstraintDescriptor:
+ descriptor = _to_piecewise_constraint_descriptor(self, rhs, ">=")
+ if descriptor is not None:
+ return descriptor
return self.to_constraint(GREATER_EQUAL, rhs)
- def __eq__(self, rhs: SideLike) -> Constraint: # type: ignore
+ @overload # type: ignore[override]
+ def __eq__(self, rhs: PiecewiseExpression) -> PiecewiseConstraintDescriptor: ...
+
+ @overload
+ def __eq__(self, rhs: SideLike) -> Constraint: ...
+
+ def __eq__(self, rhs: SideLike) -> Constraint | PiecewiseConstraintDescriptor:
+ descriptor = _to_piecewise_constraint_descriptor(self, rhs, "==")
+ if descriptor is not None:
+ return descriptor
return self.to_constraint(EQUAL, rhs)
def __gt__(self, other: Any) -> NotImplementedType:
@@ -584,36 +716,160 @@ def __lt__(self, other: Any) -> NotImplementedType:
)
def add(
- self: GenericExpression, other: SideLike
+ self: GenericExpression,
+ other: SideLike,
+ join: str | None = None,
) -> GenericExpression | QuadraticExpression:
"""
Add an expression to others.
- """
- return self.__add__(other)
+
+ Parameters
+ ----------
+ other : expression-like
+ The expression to add.
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left",
+ "right", "exact", "override". When None (default), uses the
+ current default behavior.
+ """
+ if join is None:
+ return self.__add__(other)
+ if isinstance(other, SUPPORTED_CONSTANT_TYPES):
+ return self._add_constant(other, join=join)
+ other = as_expression(other, model=self.model, dims=self.coord_dims)
+ if isinstance(other, LinearExpression) and isinstance(
+ self, QuadraticExpression
+ ):
+ other = other.to_quadexpr()
+ return merge([self, other], cls=self.__class__, join=join) # type: ignore[list-item]
def sub(
- self: GenericExpression, other: SideLike
+ self: GenericExpression,
+ other: SideLike,
+ join: str | None = None,
) -> GenericExpression | QuadraticExpression:
"""
Subtract others from expression.
+
+ Parameters
+ ----------
+ other : expression-like
+ The expression to subtract.
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left",
+ "right", "exact", "override". When None (default), uses the
+ current default behavior.
"""
- return self.__sub__(other)
+ return self.add(-other, join=join)
def mul(
- self: GenericExpression, other: SideLike
+ self: GenericExpression,
+ other: SideLike,
+ join: str | None = None,
) -> GenericExpression | QuadraticExpression:
"""
Multiply the expr by a factor.
- """
- return self.__mul__(other)
+
+ Parameters
+ ----------
+ other : expression-like
+ The factor to multiply by.
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left",
+ "right", "exact", "override". When None (default), uses the
+ current default behavior.
+ """
+ if join is None:
+ return self.__mul__(other)
+ if isinstance(other, SUPPORTED_EXPRESSION_TYPES):
+ raise TypeError(
+ "join parameter is not supported for expression-expression multiplication"
+ )
+ return self._multiply_by_constant(other, join=join)
def div(
- self: GenericExpression, other: VariableLike | ConstantLike
+ self: GenericExpression,
+ other: VariableLike | ConstantLike,
+ join: str | None = None,
) -> GenericExpression | QuadraticExpression:
"""
Divide the expr by a factor.
+
+ Parameters
+ ----------
+ other : constant-like
+ The divisor.
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left",
+ "right", "exact", "override". When None (default), uses the
+ current default behavior.
+ """
+ if join is None:
+ return self.__div__(other)
+ if isinstance(other, SUPPORTED_EXPRESSION_TYPES):
+ raise TypeError(
+ "unsupported operand type(s) for /: "
+ f"{type(self)} and {type(other)}. "
+ "Non-linear expressions are not yet supported."
+ )
+ return self._divide_by_constant(other, join=join)
+
+ def le(
+ self: GenericExpression,
+ rhs: SideLike,
+ join: str | None = None,
+ ) -> Constraint:
"""
- return self.__div__(other)
+ Less than or equal constraint.
+
+ Parameters
+ ----------
+ rhs : expression-like
+ Right-hand side of the constraint.
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left",
+ "right", "exact", "override". When None (default), uses the
+ current default behavior.
+ """
+ return self.to_constraint(LESS_EQUAL, rhs, join=join)
+
+ def ge(
+ self: GenericExpression,
+ rhs: SideLike,
+ join: str | None = None,
+ ) -> Constraint:
+ """
+ Greater than or equal constraint.
+
+ Parameters
+ ----------
+ rhs : expression-like
+ Right-hand side of the constraint.
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left",
+ "right", "exact", "override". When None (default), uses the
+ current default behavior.
+ """
+ return self.to_constraint(GREATER_EQUAL, rhs, join=join)
+
+ def eq(
+ self: GenericExpression,
+ rhs: SideLike,
+ join: str | None = None,
+ ) -> Constraint:
+ """
+ Equality constraint.
+
+ Parameters
+ ----------
+ rhs : expression-like
+ Right-hand side of the constraint.
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left",
+ "right", "exact", "override". When None (default), uses the
+ current default behavior.
+ """
+ return self.to_constraint(EQUAL, rhs, join=join)
def pow(self, other: int) -> QuadraticExpression:
"""
@@ -854,7 +1110,9 @@ def cumsum(
dim_dict = {dim_name: self.data.sizes[dim_name] for dim_name in dim}
return self.rolling(dim=dim_dict).sum(keep_attrs=keep_attrs, skipna=skipna)
- def to_constraint(self, sign: SignLike, rhs: SideLike) -> Constraint:
+ def to_constraint(
+ self, sign: SignLike, rhs: SideLike, join: str | None = None
+ ) -> Constraint:
"""
Convert a linear expression to a constraint.
@@ -863,7 +1121,14 @@ def to_constraint(self, sign: SignLike, rhs: SideLike) -> Constraint:
sign : str, array-like
Sign(s) of the constraints.
rhs : constant, Variable, LinearExpression
- Right-hand side of the constraint.
+ Right-hand side of the constraint. If a DataArray, it is
+ reindexed to match expression coordinates (fill_value=np.nan).
+ Extra dimensions in the RHS not present in the expression
+ raise a ValueError. NaN entries in the RHS mean "no constraint".
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left",
+ "right", "exact", "override". When None (default), uses the
+ current default behavior.
Returns
-------
@@ -876,9 +1141,36 @@ def to_constraint(self, sign: SignLike, rhs: SideLike) -> Constraint:
f"Both sides of the constraint are constant. At least one side must contain variables. {self} {rhs}"
)
- all_to_lhs = (self - rhs).data
+ if isinstance(rhs, SUPPORTED_CONSTANT_TYPES):
+ rhs = as_dataarray(rhs, coords=self.coords, dims=self.coord_dims)
+
+ extra_dims = set(rhs.dims) - set(self.coord_dims)
+ if extra_dims:
+ logger.warning(
+ f"Constant RHS contains dimensions {extra_dims} not present "
+ f"in the expression, which might lead to inefficiencies. "
+ f"Consider collapsing the dimensions by taking min/max."
+ )
+ rhs = rhs.reindex_like(self.const, fill_value=np.nan)
+
+ # Remember where RHS is NaN (meaning "no constraint") before the
+ # subtraction, which may fill NaN with 0 as part of normal
+ # expression arithmetic.
+ if isinstance(rhs, DataArray):
+ rhs_nan_mask = rhs.isnull()
+ else:
+ rhs_nan_mask = None
+
+ all_to_lhs = self.sub(rhs, join=join).data
+ computed_rhs = -all_to_lhs.const
+
+ # Restore NaN at positions where the original constant RHS had no
+ # value so that downstream code still treats them as unconstrained.
+ if rhs_nan_mask is not None and rhs_nan_mask.any():
+ computed_rhs = xr.where(rhs_nan_mask, np.nan, computed_rhs)
+
data = assign_multiindex_safe(
- all_to_lhs[["coeffs", "vars"]], sign=sign, rhs=-all_to_lhs.const
+ all_to_lhs[["coeffs", "vars"]], sign=sign, rhs=computed_rhs
)
return constraints.Constraint(data, model=self.model)
@@ -1312,11 +1604,11 @@ def __add__(
return other.__add__(self)
try:
- if np.isscalar(other):
- return self.assign(const=self.const + other)
-
- other = as_expression(other, model=self.model, dims=self.coord_dims)
- return merge([self, other], cls=self.__class__)
+ if isinstance(other, SUPPORTED_CONSTANT_TYPES):
+ return self._add_constant(other)
+ else:
+ other = as_expression(other, model=self.model, dims=self.coord_dims)
+ return merge([self, other], cls=self.__class__)
except TypeError:
return NotImplemented
@@ -1824,13 +2116,7 @@ def __mul__(self, other: SideLike) -> QuadraticExpression:
"""
Multiply the expr by a factor.
"""
- if isinstance(
- other,
- BaseExpression
- | ScalarLinearExpression
- | variables.Variable
- | variables.ScalarVariable,
- ):
+ if isinstance(other, SUPPORTED_EXPRESSION_TYPES):
raise TypeError(
"unsupported operand type(s) for *: "
f"{type(self)} and {type(other)}. "
@@ -1852,15 +2138,15 @@ def __add__(self, other: SideLike) -> QuadraticExpression:
dimension names of self will be filled in other
"""
try:
- if np.isscalar(other):
- return self.assign(const=self.const + other)
-
- other = as_expression(other, model=self.model, dims=self.coord_dims)
+ if isinstance(other, SUPPORTED_CONSTANT_TYPES):
+ return self._add_constant(other)
+ else:
+ other = as_expression(other, model=self.model, dims=self.coord_dims)
- if isinstance(other, LinearExpression):
- other = other.to_quadexpr()
+ if isinstance(other, LinearExpression):
+ other = other.to_quadexpr()
- return merge([self, other], cls=self.__class__)
+ return merge([self, other], cls=self.__class__)
except TypeError:
return NotImplemented
@@ -1878,13 +2164,7 @@ def __sub__(self, other: SideLike) -> QuadraticExpression:
dimension names of self will be filled in other
"""
try:
- if np.isscalar(other):
- return self.assign(const=self.const - other)
-
- other = as_expression(other, model=self.model, dims=self.coord_dims)
- if type(other) is LinearExpression:
- other = other.to_quadexpr()
- return merge([self, -other], cls=self.__class__)
+ return self.__add__(-other)
except TypeError:
return NotImplemented
@@ -1906,13 +2186,7 @@ def __matmul__(
"""
Matrix multiplication with other, similar to xarray dot.
"""
- if isinstance(
- other,
- BaseExpression
- | ScalarLinearExpression
- | variables.Variable
- | variables.ScalarVariable,
- ):
+ if isinstance(other, SUPPORTED_EXPRESSION_TYPES):
raise TypeError(
"Higher order non-linear expressions are not yet supported."
)
@@ -1933,7 +2207,9 @@ def solution(self) -> DataArray:
sol = (self.coeffs * vals.prod(FACTOR_DIM)).sum(TERM_DIM) + self.const
return sol.rename("solution")
- def to_constraint(self, sign: SignLike, rhs: SideLike) -> NotImplementedType:
+ def to_constraint(
+ self, sign: SignLike, rhs: SideLike, join: str | None = None
+ ) -> NotImplementedType:
raise NotImplementedError(
"Quadratic expressions cannot be used in constraints."
)
@@ -2065,6 +2341,7 @@ def merge(
],
dim: str = TERM_DIM,
cls: type[GenericExpression] = None, # type: ignore
+ join: str | None = None,
**kwargs: Any,
) -> GenericExpression:
"""
@@ -2084,6 +2361,10 @@ def merge(
Dimension along which the expressions should be concatenated.
cls : type
Explicitly set the type of the resulting expression (So that the type checker will know the return type)
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left", "right",
+ "exact", "override". When None (default), auto-detects based on
+ expression shapes.
**kwargs
Additional keyword arguments passed to xarray.concat. Defaults to
{coords: "minimal", compat: "override"} or, in the special case described
@@ -2118,7 +2399,9 @@ def merge(
model = exprs[0].model
- if cls in linopy_types and dim in HELPER_DIMS:
+ if join is not None:
+ override = join == "override"
+ elif cls in linopy_types and dim in HELPER_DIMS:
coord_dims = [
{k: v for k, v in e.sizes.items() if k not in HELPER_DIMS} for e in exprs
]
@@ -2139,7 +2422,9 @@ def merge(
elif cls == variables.Variable:
kwargs["fill_value"] = variables.FILL_VALUE
- if override:
+ if join is not None:
+ kwargs["join"] = join
+ elif override:
kwargs["join"] = "override"
else:
kwargs.setdefault("join", "outer")
@@ -2279,6 +2564,10 @@ def __truediv__(self, other: float | int) -> ScalarLinearExpression:
return self.__div__(other)
def __le__(self, other: int | float) -> AnonymousScalarConstraint:
+ descriptor = _to_piecewise_constraint_descriptor(self, other, "<=")
+ if descriptor is not None:
+ return descriptor # type: ignore[return-value]
+
if not isinstance(other, int | float | np.number):
raise TypeError(
f"unsupported operand type(s) for <=: {type(self)} and {type(other)}"
@@ -2287,6 +2576,10 @@ def __le__(self, other: int | float) -> AnonymousScalarConstraint:
return constraints.AnonymousScalarConstraint(self, LESS_EQUAL, other)
def __ge__(self, other: int | float) -> AnonymousScalarConstraint:
+ descriptor = _to_piecewise_constraint_descriptor(self, other, ">=")
+ if descriptor is not None:
+ return descriptor # type: ignore[return-value]
+
if not isinstance(other, int | float | np.number):
raise TypeError(
f"unsupported operand type(s) for >=: {type(self)} and {type(other)}"
@@ -2294,7 +2587,13 @@ def __ge__(self, other: int | float) -> AnonymousScalarConstraint:
return constraints.AnonymousScalarConstraint(self, GREATER_EQUAL, other)
- def __eq__(self, other: int | float) -> AnonymousScalarConstraint: # type: ignore
+ def __eq__( # type: ignore[override]
+ self, other: int | float
+ ) -> AnonymousScalarConstraint:
+ descriptor = _to_piecewise_constraint_descriptor(self, other, "==")
+ if descriptor is not None:
+ return descriptor # type: ignore[return-value]
+
if not isinstance(other, int | float | np.number):
raise TypeError(
f"unsupported operand type(s) for ==: {type(self)} and {type(other)}"
@@ -2317,3 +2616,23 @@ def to_linexpr(self) -> LinearExpression:
vars = xr.DataArray(list(self.vars), dims=TERM_DIM)
ds = xr.Dataset({"coeffs": coeffs, "vars": vars})
return LinearExpression(ds, self.model)
+
+
+SUPPORTED_CONSTANT_TYPES = (
+ np.number,
+ np.bool_,
+ int,
+ float,
+ DataArray,
+ pd.Series,
+ pd.DataFrame,
+ np.ndarray,
+ pl.Series,
+)
+
+SUPPORTED_EXPRESSION_TYPES = (
+ BaseExpression,
+ ScalarLinearExpression,
+ variables.Variable,
+ variables.ScalarVariable,
+)
diff --git a/linopy/io.py b/linopy/io.py
index 54090e87..54e55767 100644
--- a/linopy/io.py
+++ b/linopy/io.py
@@ -5,6 +5,7 @@
from __future__ import annotations
+import contextlib
import logging
import shutil
import time
@@ -234,7 +235,11 @@ def bounds_to_file(
"""
Write out variables of a model to a lp file.
"""
- names = list(m.variables.continuous) + list(m.variables.integers)
+ names = (
+ list(m.variables.continuous)
+ + list(m.variables.integers)
+ + list(m.variables.semi_continuous)
+ )
if not len(list(names)):
return
@@ -304,6 +309,44 @@ def binaries_to_file(
_format_and_write(df, columns, f)
+def semi_continuous_to_file(
+ m: Model,
+ f: BufferedWriter,
+ progress: bool = False,
+ slice_size: int = 2_000_000,
+ explicit_coordinate_names: bool = False,
+) -> None:
+ """
+ Write out semi-continuous variables of a model to a lp file.
+ """
+ names = m.variables.semi_continuous
+ if not len(list(names)):
+ return
+
+ print_variable, _ = get_printers(
+ m, explicit_coordinate_names=explicit_coordinate_names
+ )
+
+ f.write(b"\n\nsemi-continuous\n\n")
+ if progress:
+ names = tqdm(
+ list(names),
+ desc="Writing semi-continuous variables.",
+ colour=TQDM_COLOR,
+ )
+
+ for name in names:
+ var = m.variables[name]
+ for var_slice in var.iterate_slices(slice_size):
+ df = var_slice.to_polars()
+
+ columns = [
+ *print_variable(pl.col("labels")),
+ ]
+
+ _format_and_write(df, columns, f)
+
+
def integers_to_file(
m: Model,
f: BufferedWriter,
@@ -509,6 +552,13 @@ def to_lp_file(
slice_size=slice_size,
explicit_coordinate_names=explicit_coordinate_names,
)
+ semi_continuous_to_file(
+ m,
+ f=f,
+ progress=progress,
+ slice_size=slice_size,
+ explicit_coordinate_names=explicit_coordinate_names,
+ )
sos_to_file(
m,
f=f,
@@ -594,6 +644,12 @@ def to_mosek(
if m.variables.sos:
raise NotImplementedError("SOS constraints are not supported by MOSEK.")
+ if m.variables.semi_continuous:
+ raise NotImplementedError(
+ "Semi-continuous variables are not supported by MOSEK. "
+ "Use a solver that supports them (gurobi, cplex, highs)."
+ )
+
import mosek
print_variable, print_constraint = get_printers_scalar(
@@ -720,7 +776,11 @@ def to_gurobipy(
names = np.vectorize(print_variable)(M.vlabels).astype(object)
kwargs = {}
- if len(m.binaries.labels) + len(m.integers.labels):
+ if (
+ len(m.binaries.labels)
+ + len(m.integers.labels)
+ + len(list(m.variables.semi_continuous))
+ ):
kwargs["vtype"] = M.vtypes
x = model.addMVar(M.vlabels.shape, M.lb, M.ub, name=list(names), **kwargs)
@@ -793,11 +853,17 @@ def to_highspy(m: Model, explicit_coordinate_names: bool = False) -> Highs:
M = m.matrices
h = highspy.Highs()
h.addVars(len(M.vlabels), M.lb, M.ub)
- if len(m.binaries) + len(m.integers):
+ if len(m.binaries) + len(m.integers) + len(list(m.variables.semi_continuous)):
vtypes = M.vtypes
- labels = np.arange(len(vtypes))[(vtypes == "B") | (vtypes == "I")]
- n = len(labels)
- h.changeColsIntegrality(n, labels, ones_like(labels))
+ # Map linopy vtypes to HiGHS integrality values:
+ # 0 = continuous, 1 = integer, 2 = semi-continuous
+ integrality_map = {"C": 0, "B": 1, "I": 1, "S": 2}
+ int_mask = (vtypes == "B") | (vtypes == "I") | (vtypes == "S")
+ labels = np.arange(len(vtypes))[int_mask]
+ integrality = np.array(
+ [integrality_map[v] for v in vtypes[int_mask]], dtype=np.int32
+ )
+ h.changeColsIntegrality(len(labels), labels, integrality)
if len(m.binaries):
labels = np.arange(len(vtypes))[vtypes == "B"]
n = len(labels)
@@ -836,6 +902,271 @@ def to_highspy(m: Model, explicit_coordinate_names: bool = False) -> Highs:
return h
+def to_xpress(
+ m: Model, explicit_coordinate_names: bool = False, progress: bool | None = None
+) -> Any:
+ """
+ Export the model to xpress using native array-loading APIs.
+
+ The model is transferred through loadLP/loadQP/loadMIQP, matching the
+ underlying Xpress Optimizer C API data layout.
+ """
+ import xpress
+
+ if progress is None:
+ progress = m._xCounter > 10_000
+
+ def _name_array(labels: np.ndarray, formatter: Callable[[Any], str]) -> np.ndarray:
+ flat_labels = labels.ravel()
+ return np.fromiter(
+ (formatter(label) for label in flat_labels),
+ dtype=object,
+ count=flat_labels.size,
+ )
+
+ M = m.matrices
+ t_start = time.perf_counter()
+ t_stage = t_start
+ problem = xpress.problem()
+ vlabels = M.vlabels
+ clabels = M.clabels
+ A = M.A
+ Q = M.Q
+ int32_max = np.iinfo(np.int32).max
+ use_int32_indices = (
+ len(vlabels) <= int32_max
+ and len(clabels) <= int32_max
+ and (A is None or A.nnz <= int32_max)
+ and (Q is None or Q.nnz <= int32_max)
+ )
+ index_dtype = np.int32 if use_int32_indices else np.int64
+
+ def _emit_progress_message(message: str) -> None:
+ if not progress:
+ return
+ logger.info(message)
+
+ _emit_progress_message(
+ " Xpress direct IO: building model "
+ f"(nvars={len(vlabels)}, ncons={len(clabels)}, "
+ f"annz={int(A.nnz) if A is not None else 0}, "
+ f"qnnz={int(Q.nnz) if Q is not None else 0}, "
+ f"explicit_names={explicit_coordinate_names})"
+ )
+
+ def call_xpress(new_api: str, old_api: str, **kwargs: Any) -> None:
+ try:
+ getattr(problem, new_api)(**kwargs)
+ except AttributeError:
+ getattr(problem, old_api)(**kwargs)
+
+ def add_names(namespace: int, names: list[str]) -> None:
+ if not names:
+ return
+ try:
+ problem.addNames(namespace, names, 0, len(names) - 1)
+ except AttributeError:
+ problem.addnames(namespace, names, 0, len(names) - 1)
+
+ def _log_stage(stage: str, detail: str = "") -> None:
+ if not progress:
+ return
+ nonlocal t_stage
+ now = time.perf_counter()
+ msg = (
+ f" Xpress direct IO: {stage} ({now - t_stage:.3f}s stage, "
+ f"{now - t_start:.3f}s total)"
+ )
+ if detail:
+ msg = f"{msg} {detail}"
+ _emit_progress_message(msg)
+ t_stage = now
+
+ if A is not None and A.nnz:
+ if A.format != "csc":
+ A = A.tocsc()
+ start = A.indptr.astype(index_dtype, copy=False)
+ rowind = A.indices.astype(index_dtype, copy=False)
+ rowcoef = A.data.astype(float, copy=False)
+ else:
+ start = None
+ rowind = None
+ rowcoef = None
+
+ _log_stage("prepared linear constraint matrix")
+
+ lb = np.asarray(M.lb, dtype=float)
+ ub = np.asarray(M.ub, dtype=float)
+
+ lb_inf = np.isneginf(lb)
+ if lb_inf.any():
+ lb = lb.copy()
+ lb[lb_inf] = -xpress.infinity
+
+ ub_inf = np.isposinf(ub)
+ if ub_inf.any():
+ ub = ub.copy()
+ ub[ub_inf] = xpress.infinity
+
+ _log_stage("prepared variable bounds")
+
+ if len(clabels):
+ sense = M.sense
+ rowtype = np.full(sense.shape, "E", dtype="U1")
+ rowtype[sense == "<"] = "L"
+ rowtype[sense == ">"] = "G"
+ rhs = np.asarray(M.b, dtype=float)
+ else:
+ rowtype = None
+ rhs = None
+
+ _log_stage("prepared row senses and rhs")
+
+ objqcol1: np.ndarray | None
+ objqcol2: np.ndarray | None
+ objqcoef: np.ndarray | None
+ if Q is not None and Q.nnz:
+ if Q.format == "coo": # codespell:ignore coo
+ mask = Q.row <= Q.col
+ objqcol1 = Q.row[mask].astype(index_dtype, copy=False)
+ objqcol2 = Q.col[mask].astype(index_dtype, copy=False)
+ objqcoef = Q.data[mask].astype(float, copy=False)
+ else:
+ Qt = triu(Q, format="coo") # codespell:ignore coo
+ objqcol1 = Qt.row.astype(index_dtype, copy=False)
+ objqcol2 = Qt.col.astype(index_dtype, copy=False)
+ objqcoef = Qt.data.astype(float, copy=False)
+ else:
+ objqcol1 = None
+ objqcol2 = None
+ objqcoef = None
+
+ _log_stage("prepared quadratic objective terms")
+
+ vtypes = M.vtypes
+ integer_mask = (vtypes == "B") | (vtypes == "I")
+ is_mip = bool(np.any(integer_mask))
+ objcoef = np.asarray(M.c, dtype=float)
+
+ if is_mip:
+ entind = np.flatnonzero(integer_mask).astype(index_dtype, copy=False)
+ coltype = vtypes[entind]
+ call_xpress(
+ "loadMIQP",
+ "loadmiqp",
+ probname="",
+ rowtype=rowtype,
+ rhs=rhs,
+ rng=None,
+ objcoef=objcoef,
+ start=start,
+ collen=None,
+ rowind=rowind,
+ rowcoef=rowcoef,
+ lb=lb,
+ ub=ub,
+ objqcol1=objqcol1,
+ objqcol2=objqcol2,
+ objqcoef=objqcoef,
+ coltype=coltype,
+ entind=entind,
+ limit=None,
+ settype=None,
+ setstart=None,
+ setind=None,
+ refval=None,
+ )
+ elif objqcoef is not None:
+ call_xpress(
+ "loadQP",
+ "loadqp",
+ probname="",
+ rowtype=rowtype,
+ rhs=rhs,
+ rng=None,
+ objcoef=objcoef,
+ start=start,
+ collen=None,
+ rowind=rowind,
+ rowcoef=rowcoef,
+ lb=lb,
+ ub=ub,
+ objqcol1=objqcol1,
+ objqcol2=objqcol2,
+ objqcoef=objqcoef,
+ )
+ else:
+ call_xpress(
+ "loadLP",
+ "loadlp",
+ probname="",
+ rowtype=rowtype,
+ rhs=rhs,
+ rng=None,
+ objcoef=objcoef,
+ start=start,
+ collen=None,
+ rowind=rowind,
+ rowcoef=rowcoef,
+ lb=lb,
+ ub=ub,
+ )
+
+ _log_stage("loaded matrix data into Xpress")
+
+ if m.objective.sense == "max":
+ changed_sense = False
+ with contextlib.suppress(AttributeError):
+ problem.chgObjSense(xpress.ObjSense.MAXIMIZE)
+ changed_sense = True
+ if not changed_sense:
+ with contextlib.suppress(AttributeError):
+ problem.chgobjsense(xpress.maximize)
+
+ _log_stage("set objective sense")
+
+ if explicit_coordinate_names:
+ print_variable, print_constraint = get_printers_scalar(
+ m, explicit_coordinate_names=explicit_coordinate_names
+ )
+ row_namespace = getattr(getattr(xpress, "Namespaces", None), "ROW", 1)
+ col_namespace = getattr(getattr(xpress, "Namespaces", None), "COLUMN", 2)
+
+ add_names(col_namespace, _name_array(vlabels, print_variable).tolist())
+ add_names(row_namespace, _name_array(clabels, print_constraint).tolist())
+
+ _log_stage("attached variable/constraint names")
+
+ if m.variables.sos:
+ for var_name in m.variables.sos:
+ var = m.variables.sos[var_name]
+ sos_type: int = var.attrs[SOS_TYPE_ATTR] # type: ignore[assignment]
+ sos_dim: str = var.attrs[SOS_DIM_ATTR] # type: ignore[assignment]
+
+ def add_sos(s: xr.DataArray, sos_type: int, sos_dim: str) -> None:
+ s = s.squeeze()
+ labels = s.values.astype(index_dtype, copy=False).flatten()
+ mask = labels != -1
+ if not mask.any():
+ return
+ indices = labels[mask].tolist()
+ weights = s.coords[sos_dim].values[mask].tolist()
+ problem.addSOS(indices, weights, type=sos_type)
+
+ others = [dim for dim in var.labels.dims if dim != sos_dim]
+ if not others:
+ add_sos(var.labels, sos_type, sos_dim)
+ else:
+ stacked = var.labels.stack(_sos_group=others)
+ for _, s in stacked.groupby("_sos_group"):
+ add_sos(s.unstack("_sos_group"), sos_type, sos_dim)
+ _log_stage("attached SOS constraints")
+
+ _log_stage("finished direct model build")
+
+ return problem
+
+
def to_cupdlpx(m: Model, explicit_coordinate_names: bool = False) -> cupdlpxModel:
"""
Export the model to cupdlpx.
@@ -856,6 +1187,12 @@ def to_cupdlpx(m: Model, explicit_coordinate_names: bool = False) -> cupdlpxMode
-------
model : cupdlpx.Model
"""
+ if m.variables.semi_continuous:
+ raise NotImplementedError(
+ "Semi-continuous variables are not supported by cuPDLPx. "
+ "Use a solver that supports them (gurobi, cplex, highs)."
+ )
+
import cupdlpx
if explicit_coordinate_names:
diff --git a/linopy/matrices.py b/linopy/matrices.py
index a55bb0bd..cddc9897 100644
--- a/linopy/matrices.py
+++ b/linopy/matrices.py
@@ -18,6 +18,7 @@
from scipy.sparse._csc import csc_matrix
from linopy import expressions
+from linopy.constants import FACTOR_DIM
if TYPE_CHECKING:
from linopy.model import Model
@@ -51,11 +52,136 @@ def __init__(self, model: Model) -> None:
def clean_cached_properties(self) -> None:
"""Clear the cache for all cached properties of an object"""
- for cached_prop in ["flat_vars", "flat_cons", "sol", "dual"]:
+ for cached_prop in [
+ "flat_vars",
+ "flat_cons",
+ "sol",
+ "dual",
+ "_variable_data",
+ "_constraint_data",
+ ]:
# check existence of cached_prop without creating it
if cached_prop in self.__dict__:
delattr(self, cached_prop)
+ @cached_property
+ def _variable_data(self) -> tuple[ndarray, ndarray, ndarray, ndarray, ndarray]:
+ """Dense-by-key variable vectors and label->key map."""
+ m = self._parent
+
+ label_to_key = np.full(m._xCounter, -1, dtype=np.int64)
+ labels_parts: list[np.ndarray] = []
+ lb_parts: list[np.ndarray] = []
+ ub_parts: list[np.ndarray] = []
+ vtype_parts: list[np.ndarray] = []
+ next_key = 0
+
+ for _, variable in m.variables.items():
+ labels = variable.labels.values.reshape(-1)
+ mask = labels != -1
+ labels = labels[mask]
+ n = labels.size
+ if not n:
+ continue
+
+ label_to_key[labels] = np.arange(next_key, next_key + n)
+ next_key += n
+
+ lb = np.broadcast_to(variable.lower.values, variable.labels.shape).reshape(
+ -1
+ )
+ ub = np.broadcast_to(variable.upper.values, variable.labels.shape).reshape(
+ -1
+ )
+
+ labels_parts.append(labels)
+ lb_parts.append(lb[mask])
+ ub_parts.append(ub[mask])
+
+ if variable.attrs["binary"]:
+ vtype = "B"
+ elif variable.attrs["integer"]:
+ vtype = "I"
+ else:
+ vtype = "C"
+            vtype_parts.append(np.full(n, vtype, dtype="<U1"))
+
+        if labels_parts:
+            return (
+                np.concatenate(labels_parts),
+                np.concatenate(lb_parts),
+                np.concatenate(ub_parts),
+                np.concatenate(vtype_parts),
+                label_to_key,
+            )
+        return (
+            np.array([], dtype=np.int64),
+            np.array([], dtype=float),
+            np.array([], dtype=float),
+            np.array([], dtype="<U1"),
+            label_to_key,
+        )
+
+    @cached_property
+    def _constraint_data(self) -> tuple[ndarray, ndarray, ndarray, ndarray]:
+ """Dense-by-key constraint vectors and label->key map."""
+ m = self._parent
+
+ label_to_key = np.full(m._cCounter, -1, dtype=np.int64)
+ labels_parts: list[np.ndarray] = []
+ rhs_parts: list[np.ndarray] = []
+ sense_parts: list[np.ndarray] = []
+ next_key = 0
+
+ for _, constraint in m.constraints.items():
+ labels = constraint.labels.values.reshape(-1)
+ vars_arr = constraint.vars.values
+ coeffs_arr = constraint.coeffs.values
+
+ term_axis = constraint.vars.get_axis_num(constraint.term_dim)
+ if term_axis != vars_arr.ndim - 1:
+ vars_arr = np.moveaxis(vars_arr, term_axis, -1)
+ coeffs_arr = np.moveaxis(coeffs_arr, term_axis, -1)
+
+ active = ((vars_arr != -1) & (coeffs_arr != 0)).any(axis=-1).reshape(-1)
+ mask = (labels != -1) & active
+ labels = labels[mask]
+ n = labels.size
+ if not n:
+ continue
+
+ label_to_key[labels] = np.arange(next_key, next_key + n)
+ next_key += n
+
+ rhs = np.broadcast_to(
+ constraint.rhs.values, constraint.labels.shape
+ ).reshape(-1)
+ sign = np.broadcast_to(
+ constraint.sign.values, constraint.labels.shape
+ ).reshape(-1)
+ sign = sign[mask]
+            sense = np.full(sign.shape, "=", dtype="<U1")
+            sense[sign == "<="] = "<"
+            sense[sign == ">="] = ">"
+
+ labels_parts.append(labels)
+ rhs_parts.append(rhs[mask])
+ sense_parts.append(sense)
+
+ if labels_parts:
+ return (
+ np.concatenate(labels_parts),
+ np.concatenate(sense_parts),
+ np.concatenate(rhs_parts),
+ label_to_key,
+ )
+ return (
+ np.array([], dtype=np.int64),
+            np.array([], dtype="<U1"),
+            np.array([], dtype=float),
+            label_to_key,
+        )
+
    @cached_property
    def flat_vars(self) -> pd.DataFrame:
m = self._parent
@@ -69,8 +195,8 @@ def flat_cons(self) -> pd.DataFrame:
@property
def vlabels(self) -> ndarray:
"""Vector of labels of all non-missing variables."""
- df: pd.DataFrame = self.flat_vars
- return create_vector(df.key, df.labels, -1)
+ labels, *_ = self._variable_data
+ return labels
@property
def vtypes(self) -> ndarray:
@@ -83,6 +209,8 @@ def vtypes(self) -> ndarray:
val = "B"
elif name in m.integers:
val = "I"
+ elif name in m.semi_continuous:
+ val = "S"
else:
val = "C"
specs.append(pd.Series(val, index=m.variables[name].flat.labels))
@@ -94,8 +222,8 @@ def vtypes(self) -> ndarray:
@property
def lb(self) -> ndarray:
"""Vector of lower bounds of all non-missing variables."""
- df: pd.DataFrame = self.flat_vars
- return create_vector(df.key, df.lower)
+ _, lb, _, _, _ = self._variable_data
+ return lb
@cached_property
def sol(self) -> ndarray:
@@ -124,16 +252,14 @@ def dual(self) -> ndarray:
@property
def ub(self) -> ndarray:
"""Vector of upper bounds of all non-missing variables."""
- df: pd.DataFrame = self.flat_vars
- return create_vector(df.key, df.upper)
+ _, _, ub, _, _ = self._variable_data
+ return ub
@property
def clabels(self) -> ndarray:
"""Vector of labels of all non-missing constraints."""
- df: pd.DataFrame = self.flat_cons
- if df.empty:
- return np.array([], dtype=int)
- return create_vector(df.key, df.labels, fill_value=-1)
+ labels, _, _, _ = self._constraint_data
+ return labels
@property
def A(self) -> csc_matrix | None:
@@ -141,33 +267,57 @@ def A(self) -> csc_matrix | None:
m = self._parent
if not len(m.constraints):
return None
- A: csc_matrix = m.constraints.to_matrix(filter_missings=False)
- return A[self.clabels][:, self.vlabels]
+ return m.constraints.to_matrix(filter_missings=True)
@property
def sense(self) -> ndarray:
"""Vector of senses of all non-missing constraints."""
- df: pd.DataFrame = self.flat_cons
- return create_vector(df.key, df.sign.astype(np.dtype("<U1")))
+ _, sense, _, _ = self._constraint_data
+ return sense
@property
def b(self) -> ndarray:
"""Vector of right-hand-sides of all non-missing constraints."""
- df: pd.DataFrame = self.flat_cons
- return create_vector(df.key, df.rhs)
+ _, _, rhs, _ = self._constraint_data
+ return rhs
@property
def c(self) -> ndarray:
"""Vector of objective coefficients of all non-missing variables."""
m = self._parent
- ds = m.objective.flat
- if isinstance(m.objective.expression, expressions.QuadraticExpression):
- ds = ds[(ds.vars1 == -1) | (ds.vars2 == -1)]
- ds["vars"] = ds.vars1.where(ds.vars1 != -1, ds.vars2)
-
- vars: pd.Series = ds.vars.map(self.flat_vars.set_index("labels").key)
- shape: int = self.flat_vars.key.max() + 1
- return create_vector(vars, ds.coeffs, fill_value=0.0, shape=shape)
+ _, _, _, _, label_to_key = self._variable_data
+ nvars = len(self.vlabels)
+ if nvars == 0:
+ return np.array([], dtype=float)
+
+ expr = m.objective.expression
+
+ if isinstance(expr, expressions.QuadraticExpression):
+ vars_arr = expr.data.vars.values
+ coeffs = expr.data.coeffs.values.reshape(-1)
+ factor_axis = expr.data.vars.get_axis_num(FACTOR_DIM)
+
+ vars1 = np.take(vars_arr, 0, axis=factor_axis).reshape(-1)
+ vars2 = np.take(vars_arr, 1, axis=factor_axis).reshape(-1)
+ mask = ((vars1 == -1) ^ (vars2 == -1)) & (coeffs != 0)
+ lin_vars = np.where(vars1 == -1, vars2, vars1)
+ labels = lin_vars[mask]
+ coeffs = coeffs[mask]
+ else:
+ vars_arr = expr.vars.values.reshape(-1)
+ coeffs = expr.coeffs.values.reshape(-1)
+ mask = (vars_arr != -1) & (coeffs != 0)
+ labels = vars_arr[mask]
+ coeffs = coeffs[mask]
+
+ keys = label_to_key[labels]
+ valid = keys != -1
+ if not np.any(valid):
+ return np.zeros(nvars, dtype=float)
+ return np.bincount(keys[valid], weights=coeffs[valid], minlength=nvars).astype(
+ float,
+ copy=False,
+ )
@property
def Q(self) -> csc_matrix | None:
diff --git a/linopy/model.py b/linopy/model.py
index 049093de..9a4f7975 100644
--- a/linopy/model.py
+++ b/linopy/model.py
@@ -60,11 +60,11 @@
to_highspy,
to_mosek,
to_netcdf,
+ to_xpress,
)
from linopy.matrices import MatrixAccessor
from linopy.objective import Objective
from linopy.piecewise import (
- add_disjunctive_piecewise_constraints,
add_piecewise_constraints,
)
from linopy.remote import RemoteHandler
@@ -241,12 +241,20 @@ def objective(self) -> Objective:
@objective.setter
def objective(
self, obj: Objective | LinearExpression | QuadraticExpression
- ) -> Objective:
+ ) -> None:
+ """
+ Set the objective function.
+
+ Parameters
+ ----------
+ obj : Objective, LinearExpression, or QuadraticExpression
+ The objective to assign to the model. If not an Objective instance,
+ it will be wrapped in an Objective.
+ """
if not isinstance(obj, Objective):
obj = Objective(obj, self)
self._objective = obj
- return self._objective
@property
def sense(self) -> str:
@@ -257,6 +265,9 @@ def sense(self) -> str:
@sense.setter
def sense(self, value: str) -> None:
+ """
+ Set the sense of the objective function.
+ """
self.objective.sense = value
@property
@@ -271,6 +282,9 @@ def parameters(self) -> Dataset:
@parameters.setter
def parameters(self, value: Dataset | Mapping) -> None:
+ """
+ Set the parameters of the model.
+ """
self._parameters = Dataset(value)
@property
@@ -296,6 +310,9 @@ def status(self) -> str:
@status.setter
def status(self, value: str) -> None:
+ """
+ Set the status of the model.
+ """
self._status = ModelStatus[value].value
@property
@@ -307,11 +324,13 @@ def termination_condition(self) -> str:
@termination_condition.setter
def termination_condition(self, value: str) -> None:
- # TODO: remove if-clause, only kept for backward compatibility
- if value:
- self._termination_condition = TerminationCondition[value].value
- else:
+ """
+ Set the termination condition of the model.
+ """
+ if value == "":
self._termination_condition = value
+ else:
+ self._termination_condition = TerminationCondition[value].value
@property
def chunk(self) -> T_Chunks:
@@ -322,6 +341,9 @@ def chunk(self) -> T_Chunks:
@chunk.setter
def chunk(self, value: T_Chunks) -> None:
+ """
+ Set the chunk sizes of the model.
+ """
self._chunk = value
@property
@@ -339,6 +361,9 @@ def force_dim_names(self) -> bool:
@force_dim_names.setter
def force_dim_names(self, value: bool) -> None:
+ """
+ Set whether to force custom dimension names for variables and constraints.
+ """
self._force_dim_names = bool(value)
@property
@@ -351,6 +376,9 @@ def auto_mask(self) -> bool:
@auto_mask.setter
def auto_mask(self, value: bool) -> None:
+ """
+ Set whether to automatically mask variables and constraints with NaN values.
+ """
self._auto_mask = bool(value)
@property
@@ -362,6 +390,9 @@ def solver_dir(self) -> Path:
@solver_dir.setter
def solver_dir(self, value: str | Path) -> None:
+ """
+ Set the solver directory of the model.
+ """
if not isinstance(value, str | Path):
raise TypeError("'solver_dir' must path-like.")
self._solver_dir = Path(value)
@@ -470,6 +501,7 @@ def add_variables(
mask: DataArray | ndarray | Series | None = None,
binary: bool = False,
integer: bool = False,
+ semi_continuous: bool = False,
**kwargs: Any,
) -> Variable:
"""
@@ -508,6 +540,11 @@ def add_variables(
integer : bool
Whether the new variable is a integer variable which are used for
Mixed-Integer problems.
+ semi_continuous : bool
+ Whether the new variable is a semi-continuous variable. A
+ semi-continuous variable can take the value 0 or any value
+ between its lower and upper bounds. Requires a positive lower
+ bound.
**kwargs :
Additional keyword arguments are passed to the DataArray creation.
@@ -550,8 +587,10 @@ def add_variables(
if name in self.variables:
raise ValueError(f"Variable '{name}' already assigned to model")
- if binary and integer:
- raise ValueError("Variable cannot be both binary and integer.")
+ if sum([binary, integer, semi_continuous]) > 1:
+ raise ValueError(
+ "Variable can only be one of binary, integer, or semi-continuous."
+ )
if binary:
if (lower != -inf) or (upper != inf):
@@ -559,6 +598,12 @@ def add_variables(
else:
lower, upper = 0, 1
+ if semi_continuous:
+ if not np.isscalar(lower) or float(lower) <= 0: # type: ignore[arg-type]
+ raise ValueError(
+ "Semi-continuous variables require a positive scalar lower bound."
+ )
+
data = Dataset(
{
"lower": as_dataarray(lower, coords, **kwargs),
@@ -596,7 +641,11 @@ def add_variables(
data.labels.values = np.where(mask.values, data.labels.values, -1)
data = data.assign_attrs(
- label_range=(start, end), name=name, binary=binary, integer=integer
+ label_range=(start, end),
+ name=name,
+ binary=binary,
+ integer=integer,
+ semi_continuous=semi_continuous,
)
if self.chunk:
@@ -665,7 +714,6 @@ def add_sos_constraints(
variable.attrs.update(attrs_update)
add_piecewise_constraints = add_piecewise_constraints
- add_disjunctive_piecewise_constraints = add_disjunctive_piecewise_constraints
def add_constraints(
self,
@@ -783,6 +831,16 @@ def add_constraints(
# TODO: add a warning here, routines should be safe against this
data = data.drop_vars(drop_dims)
+ rhs_nan = data.rhs.isnull()
+ if rhs_nan.any():
+ data = assign_multiindex_safe(data, rhs=data.rhs.fillna(0))
+ rhs_mask = ~rhs_nan
+ mask = (
+ rhs_mask
+ if mask is None
+ else (as_dataarray(mask).astype(bool) & rhs_mask)
+ )
+
data["labels"] = -1
(data,) = xr.broadcast(data, exclude=[TERM_DIM])
@@ -979,6 +1037,13 @@ def integers(self) -> Variables:
"""
return self.variables.integers
+ @property
+ def semi_continuous(self) -> Variables:
+ """
+ Get all semi-continuous variables.
+ """
+ return self.variables.semi_continuous
+
@property
def is_linear(self) -> bool:
return self.objective.is_linear
@@ -989,9 +1054,11 @@ def is_quadratic(self) -> bool:
@property
def type(self) -> str:
- if (len(self.binaries) or len(self.integers)) and len(self.continuous):
+ if (
+ len(self.binaries) or len(self.integers) or len(self.semi_continuous)
+ ) and len(self.continuous):
variable_type = "MI"
- elif len(self.binaries) or len(self.integers):
+ elif len(self.binaries) or len(self.integers) or len(self.semi_continuous):
variable_type = "I"
else:
variable_type = ""
@@ -1430,6 +1497,15 @@ def solve(
"Use reformulate_sos=True or 'auto', or a solver that supports SOS (gurobi, cplex)."
)
+ if self.variables.semi_continuous:
+ if not solver_supports(
+ solver_name, SolverFeature.SEMI_CONTINUOUS_VARIABLES
+ ):
+ raise ValueError(
+ f"Solver {solver_name} does not support semi-continuous variables. "
+ "Use a solver that supports them (gurobi, cplex, highs)."
+ )
+
try:
solver_class = getattr(solvers, f"{solvers.SolverName(solver_name).name}")
# initialize the solver as object of solver subclass
@@ -1638,7 +1714,14 @@ def _compute_infeasibilities_gurobi(self, solver_model: Any) -> list[int]:
return labels
def _compute_infeasibilities_xpress(self, solver_model: Any) -> list[int]:
- """Compute infeasibilities for Xpress solver."""
+ """
+ Compute infeasibilities for Xpress solver.
+
+ This function correctly maps solver constraint positions to linopy
+ constraint labels, handling masked constraints where some labels may
+ be skipped (e.g., labels [0, 2, 4] with gaps instead of sequential
+ [0, 1, 2]).
+ """
# Compute all IIS
try: # Try new API first
solver_model.IISAll()
@@ -1652,20 +1735,21 @@ def _compute_infeasibilities_xpress(self, solver_model: Any) -> list[int]:
labels = set()
- # Create constraint mapping for efficient lookups
- constraint_to_index = {
- constraint: idx
- for idx, constraint in enumerate(solver_model.getConstraint())
- }
+ clabels = self.matrices.clabels
+ constraint_position_map = {}
+ for position, constraint_obj in enumerate(solver_model.getConstraint()):
+ if 0 <= position < len(clabels):
+ constraint_label = clabels[position]
+ if constraint_label >= 0:
+ constraint_position_map[constraint_obj] = constraint_label
# Retrieve each IIS
for iis_num in range(1, num_iis + 1):
iis_constraints = self._extract_iis_constraints(solver_model, iis_num)
- # Convert constraint objects to indices
for constraint_obj in iis_constraints:
- if constraint_obj in constraint_to_index:
- labels.add(constraint_to_index[constraint_obj])
+ if constraint_obj in constraint_position_map:
+ labels.add(constraint_position_map[constraint_obj])
# Note: Silently skip constraints not found in mapping
# This can happen if the model structure changed after solving
@@ -1801,6 +1885,8 @@ def reset_solution(self) -> None:
to_highspy = to_highspy
+ to_xpress = to_xpress
+
to_cupdlpx = to_cupdlpx
to_block_files = to_block_files
diff --git a/linopy/monkey_patch_xarray.py b/linopy/monkey_patch_xarray.py
index dc60608c..1e526c92 100644
--- a/linopy/monkey_patch_xarray.py
+++ b/linopy/monkey_patch_xarray.py
@@ -1,37 +1,45 @@
from __future__ import annotations
from collections.abc import Callable
-from functools import partialmethod, update_wrapper
-from types import NotImplementedType
+from functools import update_wrapper
from typing import Any
from xarray import DataArray
from linopy import expressions, variables
-
-def monkey_patch(cls: type[DataArray], pass_unpatched_method: bool = False) -> Callable:
- def deco(func: Callable) -> Callable:
- func_name = func.__name__
- wrapped = getattr(cls, func_name)
- update_wrapper(func, wrapped)
- if pass_unpatched_method:
- func = partialmethod(func, unpatched_method=wrapped) # type: ignore
- setattr(cls, func_name, func)
- return func
-
- return deco
-
-
-@monkey_patch(DataArray, pass_unpatched_method=True)
-def __mul__(
- da: DataArray, other: Any, unpatched_method: Callable
-) -> DataArray | NotImplementedType:
- if isinstance(
- other,
- variables.Variable
- | expressions.LinearExpression
- | expressions.QuadraticExpression,
- ):
- return NotImplemented
- return unpatched_method(da, other)
+_LINOPY_TYPES = (
+ variables.Variable,
+ variables.ScalarVariable,
+ expressions.LinearExpression,
+ expressions.ScalarLinearExpression,
+ expressions.QuadraticExpression,
+)
+
+
+def _make_patched_op(op_name: str) -> None:
+ """Patch a DataArray operator to return NotImplemented for linopy types, enabling reflected operators."""
+ original = getattr(DataArray, op_name)
+
+ def patched(
+ da: DataArray, other: Any, unpatched_method: Callable = original
+ ) -> Any:
+ if isinstance(other, _LINOPY_TYPES):
+ return NotImplemented
+ return unpatched_method(da, other)
+
+ update_wrapper(patched, original)
+ setattr(DataArray, op_name, patched)
+
+
+for _op in (
+ "__mul__",
+ "__add__",
+ "__sub__",
+ "__truediv__",
+ "__le__",
+ "__ge__",
+ "__eq__",
+):
+ _make_patched_op(_op)
+del _op
diff --git a/linopy/piecewise.py b/linopy/piecewise.py
index 5128d1e5..78f7be65 100644
--- a/linopy/piecewise.py
+++ b/linopy/piecewise.py
@@ -1,14 +1,16 @@
"""
Piecewise linear constraint formulations.
-Provides SOS2, incremental, and disjunctive piecewise linear constraint
-methods for use with linopy.Model.
+Provides SOS2, incremental, pure LP, and disjunctive piecewise linear
+constraint methods for use with linopy.Model.
"""
from __future__ import annotations
-from collections.abc import Mapping, Sequence
-from typing import TYPE_CHECKING, Literal
+from collections.abc import Sequence
+from dataclasses import dataclass
+from numbers import Real
+from typing import TYPE_CHECKING, Literal, TypeAlias
import numpy as np
import pandas as pd
@@ -16,17 +18,25 @@
from xarray import DataArray
from linopy.constants import (
- DEFAULT_BREAKPOINT_DIM,
- DEFAULT_LINK_DIM,
- DEFAULT_SEGMENT_DIM,
+ BREAKPOINT_DIM,
HELPER_DIMS,
+ LP_SEG_DIM,
+ PWL_ACTIVE_BOUND_SUFFIX,
+ PWL_AUX_SUFFIX,
PWL_BINARY_SUFFIX,
PWL_CONVEX_SUFFIX,
PWL_DELTA_SUFFIX,
PWL_FILL_SUFFIX,
+ PWL_INC_BINARY_SUFFIX,
+ PWL_INC_LINK_SUFFIX,
+ PWL_INC_ORDER_SUFFIX,
PWL_LAMBDA_SUFFIX,
- PWL_LINK_SUFFIX,
+ PWL_LP_DOMAIN_SUFFIX,
+ PWL_LP_SUFFIX,
PWL_SELECT_SUFFIX,
+ PWL_X_LINK_SUFFIX,
+ PWL_Y_LINK_SUFFIX,
+ SEGMENT_DIM,
)
if TYPE_CHECKING:
@@ -35,15 +45,38 @@
from linopy.model import Model
from linopy.types import LinExprLike
+# Accepted input types for breakpoint-like data
+BreaksLike: TypeAlias = (
+ Sequence[float] | DataArray | pd.Series | pd.DataFrame | dict[str, Sequence[float]]
+)
+
+# Accepted input types for segment-like data (2D: segments × breakpoints)
+SegmentsLike: TypeAlias = (
+ Sequence[Sequence[float]]
+ | DataArray
+ | pd.DataFrame
+ | dict[str, Sequence[Sequence[float]]]
+)
+
+
+# ---------------------------------------------------------------------------
+# DataArray construction helpers
+# ---------------------------------------------------------------------------
-def _list_to_array(values: list[float], bp_dim: str) -> DataArray:
+
+def _sequence_to_array(values: Sequence[float]) -> DataArray:
arr = np.asarray(values, dtype=float)
if arr.ndim != 1:
- raise ValueError(f"Expected a 1D list of numeric values, got shape {arr.shape}")
- return DataArray(arr, dims=[bp_dim], coords={bp_dim: np.arange(len(arr))})
+ raise ValueError(
+ f"Expected a 1D sequence of numeric values, got shape {arr.shape}"
+ )
+ return DataArray(
+ arr, dims=[BREAKPOINT_DIM], coords={BREAKPOINT_DIM: np.arange(len(arr))}
+ )
-def _dict_to_array(d: dict[str, list[float]], dim: str, bp_dim: str) -> DataArray:
+def _dict_to_array(d: dict[str, Sequence[float]], dim: str) -> DataArray:
+ """Convert a dict of ragged sequences to a NaN-padded 2D DataArray."""
max_len = max(len(v) for v in d.values())
keys = list(d.keys())
data = np.full((len(keys), max_len), np.nan)
@@ -52,323 +85,478 @@ def _dict_to_array(d: dict[str, list[float]], dim: str, bp_dim: str) -> DataArra
data[i, : len(vals)] = vals
return DataArray(
data,
- dims=[dim, bp_dim],
- coords={dim: keys, bp_dim: np.arange(max_len)},
+ dims=[dim, BREAKPOINT_DIM],
+ coords={dim: keys, BREAKPOINT_DIM: np.arange(max_len)},
)
-def _segments_list_to_array(
- values: list[Sequence[float]], bp_dim: str, seg_dim: str
-) -> DataArray:
+def _dataframe_to_array(df: pd.DataFrame, dim: str) -> DataArray:
+ # rows = entities (index), columns = breakpoints
+ data = np.asarray(df.values, dtype=float)
+ return DataArray(
+ data,
+ dims=[dim, BREAKPOINT_DIM],
+ coords={dim: list(df.index), BREAKPOINT_DIM: np.arange(df.shape[1])},
+ )
+
+
+def _coerce_breaks(values: BreaksLike, dim: str | None = None) -> DataArray:
+ """Convert any BreaksLike input to a DataArray with BREAKPOINT_DIM."""
+ if isinstance(values, DataArray):
+ if BREAKPOINT_DIM not in values.dims:
+ raise ValueError(
+ f"DataArray must have a '{BREAKPOINT_DIM}' dimension, "
+ f"got dims {list(values.dims)}"
+ )
+ return values
+ if isinstance(values, pd.DataFrame):
+ if dim is None:
+ raise ValueError("'dim' is required when input is a DataFrame")
+ return _dataframe_to_array(values, dim)
+ if isinstance(values, pd.Series):
+ return _sequence_to_array(values)
+ if isinstance(values, dict):
+ if dim is None:
+ raise ValueError("'dim' is required when input is a dict")
+ return _dict_to_array(values, dim)
+ # Sequence (list, tuple, etc.)
+ return _sequence_to_array(values)
+
+
+def _segments_list_to_array(values: Sequence[Sequence[float]]) -> DataArray:
max_len = max(len(seg) for seg in values)
data = np.full((len(values), max_len), np.nan)
for i, seg in enumerate(values):
data[i, : len(seg)] = seg
return DataArray(
data,
- dims=[seg_dim, bp_dim],
- coords={seg_dim: np.arange(len(values)), bp_dim: np.arange(max_len)},
+ dims=[SEGMENT_DIM, BREAKPOINT_DIM],
+ coords={
+ SEGMENT_DIM: np.arange(len(values)),
+ BREAKPOINT_DIM: np.arange(max_len),
+ },
)
def _dict_segments_to_array(
- d: dict[str, list[Sequence[float]]], dim: str, bp_dim: str, seg_dim: str
+ d: dict[str, Sequence[Sequence[float]]], dim: str
) -> DataArray:
parts = []
for key, seg_list in d.items():
- arr = _segments_list_to_array(seg_list, bp_dim, seg_dim)
+ arr = _segments_list_to_array(seg_list)
parts.append(arr.expand_dims({dim: [key]}))
combined = xr.concat(parts, dim=dim)
max_bp = max(max(len(seg) for seg in sl) for sl in d.values())
max_seg = max(len(sl) for sl in d.values())
- if combined.sizes[bp_dim] < max_bp or combined.sizes[seg_dim] < max_seg:
+ if combined.sizes[BREAKPOINT_DIM] < max_bp or combined.sizes[SEGMENT_DIM] < max_seg:
combined = combined.reindex(
- {bp_dim: np.arange(max_bp), seg_dim: np.arange(max_seg)},
+ {BREAKPOINT_DIM: np.arange(max_bp), SEGMENT_DIM: np.arange(max_seg)},
fill_value=np.nan,
)
return combined
-def _get_entity_keys(
- kwargs: Mapping[str, object],
-) -> list[str]:
- first_dict = next(v for v in kwargs.values() if isinstance(v, dict))
- return list(first_dict.keys())
+# ---------------------------------------------------------------------------
+# Public factory functions
+# ---------------------------------------------------------------------------
-def _validate_factory_args(
- values: list | dict | None,
- kwargs: dict,
-) -> None:
- if values is not None and kwargs:
- raise ValueError("Cannot pass both positional 'values' and keyword arguments")
- if values is None and not kwargs:
- raise ValueError("Must pass either positional 'values' or keyword arguments")
+def slopes_to_points(
+ x_points: list[float], slopes: list[float], y0: float
+) -> list[float]:
+ """
+ Convert segment slopes + initial y-value to y-coordinates at each breakpoint.
+ Parameters
+ ----------
+ x_points : list[float]
+ Breakpoint x-coordinates (length n).
+ slopes : list[float]
+ Slope of each segment (length n-1).
+ y0 : float
+ y-value at the first breakpoint.
-def _resolve_kwargs(
- kwargs: dict[str, list[float] | dict[str, list[float]] | DataArray],
- dim: str | None,
- bp_dim: str,
- link_dim: str,
+ Returns
+ -------
+ list[float]
+ y-coordinates at each breakpoint (length n).
+
+ Raises
+ ------
+ ValueError
+ If ``len(slopes) != len(x_points) - 1``.
+ """
+ if len(slopes) != len(x_points) - 1:
+ raise ValueError(
+ f"len(slopes) must be len(x_points) - 1, "
+ f"got {len(slopes)} slopes and {len(x_points)} x_points"
+ )
+ y_points: list[float] = [y0]
+ for i, s in enumerate(slopes):
+ y_points.append(y_points[-1] + s * (x_points[i + 1] - x_points[i]))
+ return y_points
+
+
+def breakpoints(
+ values: BreaksLike | None = None,
+ *,
+ slopes: BreaksLike | None = None,
+ x_points: BreaksLike | None = None,
+ y0: float | dict[str, float] | pd.Series | DataArray | None = None,
+ dim: str | None = None,
) -> DataArray:
- has_dict = any(isinstance(v, dict) for v in kwargs.values())
- if has_dict and dim is None:
- raise ValueError("'dim' is required when any kwarg value is a dict")
-
- arrays: dict[str, DataArray] = {}
- for name, val in kwargs.items():
- if isinstance(val, DataArray):
- arrays[name] = val
- elif isinstance(val, dict):
- assert dim is not None
- arrays[name] = _dict_to_array(val, dim, bp_dim)
- elif isinstance(val, list):
- base = _list_to_array(val, bp_dim)
- if has_dict:
- base = base.expand_dims({dim: _get_entity_keys(kwargs)})
- arrays[name] = base
- else:
+ """
+ Create a breakpoint DataArray for piecewise linear constraints.
+
+ Two modes (mutually exclusive):
+
+ **Points mode**: ``breakpoints(values, ...)``
+
+ **Slopes mode**: ``breakpoints(slopes=..., x_points=..., y0=...)``
+
+ Parameters
+ ----------
+ values : BreaksLike, optional
+ Breakpoint values. Accepted types: ``Sequence[float]``,
+ ``pd.Series``, ``pd.DataFrame``, or ``xr.DataArray``.
+ A 1D input (list, Series) creates 1D breakpoints.
+ A 2D input (DataFrame, multi-dim DataArray) creates per-entity
+ breakpoints (``dim`` is required for DataFrame).
+ slopes : BreaksLike, optional
+ Segment slopes. Mutually exclusive with ``values``.
+ x_points : BreaksLike, optional
+ Breakpoint x-coordinates. Required with ``slopes``.
+ y0 : float, dict, pd.Series, or DataArray, optional
+ Initial y-value. Required with ``slopes``. A scalar broadcasts to
+ all entities. A dict/Series/DataArray provides per-entity values.
+ dim : str, optional
+ Entity dimension name. Required when ``values`` or ``slopes`` is a
+ ``pd.DataFrame`` or ``dict``.
+
+ Returns
+ -------
+ DataArray
+ """
+ # Validate mutual exclusivity
+ if values is not None and slopes is not None:
+ raise ValueError("'values' and 'slopes' are mutually exclusive")
+ if values is not None and (x_points is not None or y0 is not None):
+ raise ValueError("'x_points' and 'y0' are forbidden when 'values' is given")
+ if slopes is not None:
+ if x_points is None or y0 is None:
+ raise ValueError("'slopes' requires both 'x_points' and 'y0'")
+
+ # Slopes mode: convert to points, then fall through to coerce
+ if slopes is not None:
+ if x_points is None or y0 is None:
+ raise ValueError("'slopes' requires both 'x_points' and 'y0'")
+ slopes_arr = _coerce_breaks(slopes, dim)
+ xp_arr = _coerce_breaks(x_points, dim)
+
+ # 1D case: single set of breakpoints
+ if slopes_arr.ndim == 1:
+ if not isinstance(y0, Real):
+ raise TypeError("When 'slopes' is 1D, 'y0' must be a scalar float")
+ pts = slopes_to_points(
+ list(xp_arr.values), list(slopes_arr.values), float(y0)
+ )
+ return _sequence_to_array(pts)
+
+ # Multi-dim case: per-entity slopes
+ # Identify the entity dimension (not BREAKPOINT_DIM)
+ entity_dims = [d for d in slopes_arr.dims if d != BREAKPOINT_DIM]
+ if len(entity_dims) != 1:
raise ValueError(
- f"kwarg '{name}' must be a list, dict, or DataArray, got {type(val)}"
+ f"Expected exactly one entity dimension in slopes, got {entity_dims}"
+ )
+ entity_dim = str(entity_dims[0])
+ entity_keys = slopes_arr.coords[entity_dim].values
+
+ # Resolve y0 per entity
+ if isinstance(y0, Real):
+ y0_map: dict[str, float] = {str(k): float(y0) for k in entity_keys}
+ elif isinstance(y0, dict):
+ y0_map = {str(k): float(y0[k]) for k in entity_keys}
+ elif isinstance(y0, pd.Series):
+ y0_map = {str(k): float(y0[k]) for k in entity_keys}
+ elif isinstance(y0, DataArray):
+ y0_map = {
+ str(k): float(y0.sel({entity_dim: k}).item()) for k in entity_keys
+ }
+ else:
+ raise TypeError(
+ f"'y0' must be a float, Series, DataArray, or dict, got {type(y0)}"
)
- parts = [arr.expand_dims({link_dim: [name]}) for name, arr in arrays.items()]
- return xr.concat(parts, dim=link_dim)
+ # Compute points per entity
+ computed: dict[str, Sequence[float]] = {}
+ for key in entity_keys:
+ sk = str(key)
+ sl = list(slopes_arr.sel({entity_dim: key}).values)
+ # Remove trailing NaN from slopes
+ sl = [v for v in sl if not np.isnan(v)]
+ if entity_dim in xp_arr.dims:
+ xp = list(xp_arr.sel({entity_dim: key}).values)
+ xp = [v for v in xp if not np.isnan(v)]
+ else:
+ xp = [v for v in xp_arr.values if not np.isnan(v)]
+ computed[sk] = slopes_to_points(xp, sl, y0_map[sk])
+
+ return _dict_to_array(computed, entity_dim)
+ # Points mode
+ if values is None:
+ raise ValueError("Must pass either 'values' or 'slopes'")
-def _resolve_segment_kwargs(
- kwargs: dict[
- str, list[Sequence[float]] | dict[str, list[Sequence[float]]] | DataArray
- ],
- dim: str | None,
- bp_dim: str,
- seg_dim: str,
- link_dim: str,
-) -> DataArray:
- has_dict = any(isinstance(v, dict) for v in kwargs.values())
- if has_dict and dim is None:
- raise ValueError("'dim' is required when any kwarg value is a dict")
-
- arrays: dict[str, DataArray] = {}
- for name, val in kwargs.items():
- if isinstance(val, DataArray):
- arrays[name] = val
- elif isinstance(val, dict):
- assert dim is not None
- arrays[name] = _dict_segments_to_array(val, dim, bp_dim, seg_dim)
- elif isinstance(val, list):
- base = _segments_list_to_array(val, bp_dim, seg_dim)
- if has_dict:
- base = base.expand_dims({dim: _get_entity_keys(kwargs)})
- arrays[name] = base
- else:
+ return _coerce_breaks(values, dim)
+
+
+def _coerce_segments(values: SegmentsLike, dim: str | None = None) -> DataArray:
+ """Convert any SegmentsLike input to a DataArray with SEGMENT_DIM and BREAKPOINT_DIM."""
+ if isinstance(values, DataArray):
+ if SEGMENT_DIM not in values.dims or BREAKPOINT_DIM not in values.dims:
raise ValueError(
- f"kwarg '{name}' must be a list, dict, or DataArray, got {type(val)}"
+ f"DataArray must have both '{SEGMENT_DIM}' and '{BREAKPOINT_DIM}' "
+ f"dimensions, got dims {list(values.dims)}"
)
-
- parts = [arr.expand_dims({link_dim: [name]}) for name, arr in arrays.items()]
- combined = xr.concat(parts, dim=link_dim)
- max_bp = max(a.sizes.get(bp_dim, 0) for a in arrays.values())
- max_seg = max(a.sizes.get(seg_dim, 0) for a in arrays.values())
- if (
- combined.sizes.get(bp_dim, 0) < max_bp
- or combined.sizes.get(seg_dim, 0) < max_seg
- ):
- combined = combined.reindex(
- {bp_dim: np.arange(max_bp), seg_dim: np.arange(max_seg)},
- fill_value=np.nan,
+ return values
+ if isinstance(values, pd.DataFrame):
+ data = np.asarray(values.values, dtype=float)
+ return DataArray(
+ data,
+ dims=[SEGMENT_DIM, BREAKPOINT_DIM],
+ coords={
+ SEGMENT_DIM: np.arange(data.shape[0]),
+ BREAKPOINT_DIM: np.arange(data.shape[1]),
+ },
)
- return combined
+ if isinstance(values, dict):
+ if dim is None:
+ raise ValueError("'dim' is required when 'values' is a dict")
+ return _dict_segments_to_array(values, dim)
+ # Sequence[Sequence[float]]
+ return _segments_list_to_array(list(values))
+
+
+def segments(
+ values: SegmentsLike,
+ *,
+ dim: str | None = None,
+) -> DataArray:
+ """
+ Create a segmented breakpoint DataArray for disjunctive piecewise constraints.
+ Parameters
+ ----------
+ values : SegmentsLike
+ Segment breakpoints. Accepted types: ``Sequence[Sequence[float]]``,
+ ``pd.DataFrame`` (rows=segments, columns=breakpoints),
+ ``xr.DataArray`` (must have ``SEGMENT_DIM`` and ``BREAKPOINT_DIM``),
+ or ``dict[str, Sequence[Sequence[float]]]`` (requires ``dim``).
+ dim : str, optional
+ Entity dimension name. Required when ``values`` is a dict.
-class _BreakpointFactory:
+ Returns
+ -------
+ DataArray
"""
- Factory for creating breakpoint DataArrays for piecewise linear constraints.
+ return _coerce_segments(values, dim)
+
- Use ``linopy.breakpoints(...)`` for continuous breakpoints and
- ``linopy.breakpoints.segments(...)`` for disjunctive (disconnected) segments.
+# ---------------------------------------------------------------------------
+# Piecewise expression and descriptor types
+# ---------------------------------------------------------------------------
+
+
+class PiecewiseExpression:
"""
+ Lazy descriptor representing a piecewise linear function of an expression.
- def __call__(
- self,
- values: list[float] | dict[str, list[float]] | None = None,
- *,
- dim: str | None = None,
- bp_dim: str = DEFAULT_BREAKPOINT_DIM,
- link_dim: str = DEFAULT_LINK_DIM,
- **kwargs: list[float] | dict[str, list[float]] | DataArray,
- ) -> DataArray:
- """
- Create a breakpoint DataArray for piecewise linear constraints.
-
- Parameters
- ----------
- values : list or dict, optional
- Breakpoint values. A list creates 1D breakpoints. A dict creates
- per-entity breakpoints (requires ``dim``). Cannot be used with kwargs.
- dim : str, optional
- Entity dimension name. Required when ``values`` is a dict.
- bp_dim : str, default "breakpoint"
- Name for the breakpoint dimension.
- link_dim : str, default "var"
- Name for the link dimension when using kwargs.
- **kwargs : list, dict, or DataArray
- Per-variable breakpoints. Each kwarg becomes a coordinate on the
- link dimension.
-
- Returns
- -------
- DataArray
- Breakpoint array with appropriate dimensions and coordinates.
- """
- _validate_factory_args(values, kwargs)
-
- if values is not None:
- if isinstance(values, list):
- return _list_to_array(values, bp_dim)
- if isinstance(values, dict):
- if dim is None:
- raise ValueError("'dim' is required when 'values' is a dict")
- return _dict_to_array(values, dim, bp_dim)
- raise TypeError(f"'values' must be a list or dict, got {type(values)}")
-
- return _resolve_kwargs(kwargs, dim, bp_dim, link_dim)
-
- def segments(
- self,
- values: list[Sequence[float]] | dict[str, list[Sequence[float]]] | None = None,
- *,
- dim: str | None = None,
- bp_dim: str = DEFAULT_BREAKPOINT_DIM,
- seg_dim: str = DEFAULT_SEGMENT_DIM,
- link_dim: str = DEFAULT_LINK_DIM,
- **kwargs: list[Sequence[float]] | dict[str, list[Sequence[float]]] | DataArray,
- ) -> DataArray:
- """
- Create a segmented breakpoint DataArray for disjunctive piecewise constraints.
-
- Parameters
- ----------
- values : list or dict, optional
- Segment breakpoints. A list of lists creates 2D breakpoints
- ``[segment, breakpoint]``. A dict creates per-entity segments
- (requires ``dim``). Cannot be used with kwargs.
- dim : str, optional
- Entity dimension name. Required when ``values`` is a dict.
- bp_dim : str, default "breakpoint"
- Name for the breakpoint dimension.
- seg_dim : str, default "segment"
- Name for the segment dimension.
- link_dim : str, default "var"
- Name for the link dimension when using kwargs.
- **kwargs : list, dict, or DataArray
- Per-variable segment breakpoints.
-
- Returns
- -------
- DataArray
- Breakpoint array with segment and breakpoint dimensions.
- """
- _validate_factory_args(values, kwargs)
-
- if values is not None:
- if isinstance(values, list):
- return _segments_list_to_array(values, bp_dim, seg_dim)
- if isinstance(values, dict):
- if dim is None:
- raise ValueError("'dim' is required when 'values' is a dict")
- return _dict_segments_to_array(values, dim, bp_dim, seg_dim)
- raise TypeError(f"'values' must be a list or dict, got {type(values)}")
-
- return _resolve_segment_kwargs(kwargs, dim, bp_dim, seg_dim, link_dim)
-
-
-breakpoints = _BreakpointFactory()
-
-
-def _auto_broadcast_breakpoints(
- bp: DataArray,
- expr: LinExprLike | dict[str, LinExprLike],
- dim: str,
- link_dim: str | None = None,
- exclude_dims: set[str] | None = None,
-) -> DataArray:
- _, target_dims = _validate_piecewise_expr(expr)
+ Created by :func:`piecewise`. Supports comparison operators so that
+ ``piecewise(x, ...) >= y`` produces a
+ :class:`PiecewiseConstraintDescriptor`.
+ """
- skip = {dim} | set(HELPER_DIMS)
- if link_dim is not None:
- skip.add(link_dim)
- if exclude_dims is not None:
- skip.update(exclude_dims)
+ __slots__ = ("active", "disjunctive", "expr", "x_points", "y_points")
- target_dims -= skip
- missing = target_dims - {str(d) for d in bp.dims}
+ def __init__(
+ self,
+ expr: LinExprLike,
+ x_points: DataArray,
+ y_points: DataArray,
+ disjunctive: bool,
+ active: LinExprLike | None = None,
+ ) -> None:
+ self.expr = expr
+ self.x_points = x_points
+ self.y_points = y_points
+ self.disjunctive = disjunctive
+ self.active = active
+
+ # y <= pw → Python tries y.__le__(pw) → NotImplemented → pw.__ge__(y)
+ def __ge__(self, other: LinExprLike) -> PiecewiseConstraintDescriptor:
+ return PiecewiseConstraintDescriptor(lhs=other, sign="<=", piecewise_func=self)
+
+ # y >= pw → Python tries y.__ge__(pw) → NotImplemented → pw.__le__(y)
+ def __le__(self, other: LinExprLike) -> PiecewiseConstraintDescriptor:
+ return PiecewiseConstraintDescriptor(lhs=other, sign=">=", piecewise_func=self)
+
+ # y == pw → Python tries y.__eq__(pw) → NotImplemented → pw.__eq__(y)
+ def __eq__(self, other: object) -> PiecewiseConstraintDescriptor: # type: ignore[override]
+ from linopy.expressions import LinearExpression
+ from linopy.variables import Variable
+
+ if not isinstance(other, Variable | LinearExpression):
+ return NotImplemented
+ return PiecewiseConstraintDescriptor(lhs=other, sign="==", piecewise_func=self)
+
+
+@dataclass
+class PiecewiseConstraintDescriptor:
+ """Holds all information needed to add a piecewise constraint to a model."""
+
+ lhs: LinExprLike
+ sign: str # "<=", ">=", "=="
+ piecewise_func: PiecewiseExpression
+
+
+def _detect_disjunctive(x_points: DataArray, y_points: DataArray) -> bool:
+ """
+ Detect whether point arrays represent a disjunctive formulation.
- if not missing:
- return bp
+ Both ``x_points`` and ``y_points`` **must** use the well-known dimension
+ names ``BREAKPOINT_DIM`` and, for disjunctive formulations,
+ ``SEGMENT_DIM``. Use the :func:`breakpoints` / :func:`segments` factory
+ helpers to build arrays with the correct dimension names.
+ """
+ x_has_bp = BREAKPOINT_DIM in x_points.dims
+ y_has_bp = BREAKPOINT_DIM in y_points.dims
+ if not x_has_bp and not y_has_bp:
+ raise ValueError(
+ "x_points and y_points must have a breakpoint dimension. "
+ f"Got x_points dims {list(x_points.dims)} and y_points dims "
+ f"{list(y_points.dims)}. Use the breakpoints() or segments() "
+ f"factory to create correctly-dimensioned arrays."
+ )
+ if not x_has_bp:
+ raise ValueError(
+ "x_points is missing the breakpoint dimension, "
+ f"got dims {list(x_points.dims)}. "
+ "Use the breakpoints() or segments() factory."
+ )
+ if not y_has_bp:
+ raise ValueError(
+ "y_points is missing the breakpoint dimension, "
+ f"got dims {list(y_points.dims)}. "
+ "Use the breakpoints() or segments() factory."
+ )
- expand_map: dict[str, list] = {}
- all_exprs = expr.values() if isinstance(expr, dict) else [expr]
- for d in missing:
- for e in all_exprs:
- if d in e.coords:
- expand_map[str(d)] = list(e.coords[d].values)
- break
+ x_has_seg = SEGMENT_DIM in x_points.dims
+ y_has_seg = SEGMENT_DIM in y_points.dims
+ if x_has_seg != y_has_seg:
+ raise ValueError(
+ "If one of x_points/y_points has a segment dimension, "
+ f"both must. x_points dims: {list(x_points.dims)}, "
+ f"y_points dims: {list(y_points.dims)}."
+ )
- if expand_map:
- bp = bp.expand_dims(expand_map)
+ return x_has_seg
- return bp
+def piecewise(
+ expr: LinExprLike,
+ x_points: BreaksLike,
+ y_points: BreaksLike,
+ active: LinExprLike | None = None,
+) -> PiecewiseExpression:
+ """
+ Create a piecewise linear function descriptor.
-def _extra_coords(breakpoints: DataArray, *exclude_dims: str | None) -> list[pd.Index]:
- excluded = {d for d in exclude_dims if d is not None}
- return [
- pd.Index(breakpoints.coords[d].values, name=d)
- for d in breakpoints.dims
- if d not in excluded
- ]
+ Parameters
+ ----------
+ expr : Variable or LinearExpression
+ The "x" side expression.
+ x_points : BreaksLike
+ Breakpoint x-coordinates.
+ y_points : BreaksLike
+ Breakpoint y-coordinates.
+ active : Variable or LinearExpression, optional
+ Binary variable that scales the piecewise function. When
+ ``active=0``, all auxiliary variables are forced to zero, which
+ in turn forces the reconstructed x and y to zero. When
+ ``active=1``, the normal piecewise domain ``[x₀, xₙ]`` is
+ active. This is the only behavior the linear formulation
+ supports — selectively *relaxing* the constraint (letting x and
+ y float freely when off) would require big-M or indicator
+ constraints.
+
+    Returns
+ -------
+ PiecewiseExpression
+ """
+ if not isinstance(x_points, DataArray):
+ x_points = _coerce_breaks(x_points)
+ if not isinstance(y_points, DataArray):
+ y_points = _coerce_breaks(y_points)
-def _validate_breakpoints(breakpoints: DataArray, dim: str) -> None:
- if dim not in breakpoints.dims:
+ disjunctive = _detect_disjunctive(x_points, y_points)
+
+ # Validate compatible shapes along breakpoint dimension
+ if x_points.sizes[BREAKPOINT_DIM] != y_points.sizes[BREAKPOINT_DIM]:
raise ValueError(
- f"breakpoints must have dimension '{dim}', "
- f"but only has dimensions {list(breakpoints.dims)}"
+ f"x_points and y_points must have same size along '{BREAKPOINT_DIM}', "
+ f"got {x_points.sizes[BREAKPOINT_DIM]} and "
+ f"{y_points.sizes[BREAKPOINT_DIM]}"
)
+ # Validate compatible shapes along segment dimension
+ if disjunctive:
+ if x_points.sizes[SEGMENT_DIM] != y_points.sizes[SEGMENT_DIM]:
+ raise ValueError(
+ f"x_points and y_points must have same size along '{SEGMENT_DIM}'"
+ )
+
+ return PiecewiseExpression(expr, x_points, y_points, disjunctive, active)
+
+
+# ---------------------------------------------------------------------------
+# Internal validation and utility functions
+# ---------------------------------------------------------------------------
-def _validate_numeric_breakpoint_coords(breakpoints: DataArray, dim: str) -> None:
- if not pd.api.types.is_numeric_dtype(breakpoints.coords[dim]):
+
+def _validate_numeric_breakpoint_coords(bp: DataArray) -> None:
+ if not pd.api.types.is_numeric_dtype(bp.coords[BREAKPOINT_DIM]):
raise ValueError(
- f"Breakpoint dimension '{dim}' must have numeric coordinates "
- f"for SOS2 weights, but got {breakpoints.coords[dim].dtype}"
+ f"Breakpoint dimension '{BREAKPOINT_DIM}' must have numeric coordinates "
+ f"for SOS2 weights, but got {bp.coords[BREAKPOINT_DIM].dtype}"
)
-def _check_strict_monotonicity(breakpoints: DataArray, dim: str) -> bool:
- """
- Check if breakpoints are strictly monotonic along dim.
-
- Each slice along non-dim dimensions is checked independently,
- allowing different slices to have opposite directions (e.g., one
- increasing and another decreasing). NaN values are ignored.
- """
- diffs = breakpoints.diff(dim)
+def _check_strict_monotonicity(bp: DataArray) -> bool:
+ """Check if breakpoints are strictly monotonic along BREAKPOINT_DIM (ignoring NaN)."""
+ diffs = bp.diff(BREAKPOINT_DIM)
pos = (diffs > 0) | diffs.isnull()
neg = (diffs < 0) | diffs.isnull()
- all_pos_per_slice = pos.all(dim)
- all_neg_per_slice = neg.all(dim)
- has_non_nan = (~diffs.isnull()).any(dim)
+ all_pos_per_slice = pos.all(BREAKPOINT_DIM)
+ all_neg_per_slice = neg.all(BREAKPOINT_DIM)
+ has_non_nan = (~diffs.isnull()).any(BREAKPOINT_DIM)
monotonic = (all_pos_per_slice | all_neg_per_slice) & has_non_nan
return bool(monotonic.all())
-def _has_trailing_nan_only(breakpoints: DataArray, dim: str) -> bool:
- """Check that NaN values in breakpoints only appear as trailing entries along dim."""
- valid = ~breakpoints.isnull()
- cummin = np.minimum.accumulate(valid.values, axis=valid.dims.index(dim))
+def _check_strict_increasing(bp: DataArray) -> bool:
+ """Check if breakpoints are strictly increasing along BREAKPOINT_DIM."""
+ diffs = bp.diff(BREAKPOINT_DIM)
+ pos = (diffs > 0) | diffs.isnull()
+ has_non_nan = (~diffs.isnull()).any(BREAKPOINT_DIM)
+ increasing = pos.all(BREAKPOINT_DIM) & has_non_nan
+ return bool(increasing.all())
+
+
+def _has_trailing_nan_only(bp: DataArray) -> bool:
+ """Check that NaN values only appear as trailing entries along BREAKPOINT_DIM."""
+ valid = ~bp.isnull()
+ cummin = np.minimum.accumulate(valid.values, axis=valid.dims.index(BREAKPOINT_DIM))
cummin_da = DataArray(cummin, coords=valid.coords, dims=valid.dims)
return not bool((valid & ~cummin_da).any())
@@ -381,521 +569,654 @@ def _to_linexpr(expr: LinExprLike) -> LinearExpression:
return expr.to_linexpr()
-def _validate_piecewise_expr(
- expr: LinExprLike | dict[str, LinExprLike],
-) -> tuple[bool, set[str]]:
- from linopy.expressions import LinearExpression
- from linopy.variables import Variable
+def _extra_coords(points: DataArray, *exclude_dims: str | None) -> list[pd.Index]:
+ excluded = {d for d in exclude_dims if d is not None}
+ return [
+ pd.Index(points.coords[d].values, name=d)
+ for d in points.dims
+ if d not in excluded
+ ]
- _types = (Variable, LinearExpression)
- if isinstance(expr, _types):
- return True, {str(d) for d in expr.coord_dims}
+def _broadcast_points(
+ points: DataArray,
+ *exprs: LinExprLike,
+ disjunctive: bool = False,
+) -> DataArray:
+ """Broadcast points to cover all dimensions from exprs."""
+ skip: set[str] = {BREAKPOINT_DIM} | set(HELPER_DIMS)
+ if disjunctive:
+ skip.add(SEGMENT_DIM)
- if isinstance(expr, dict):
- dims: set[str] = set()
- for key, val in expr.items():
- if not isinstance(val, _types):
- raise TypeError(
- f"dict value for key '{key}' must be a Variable or "
- f"LinearExpression, got {type(val)}"
- )
- dims.update(str(d) for d in val.coord_dims)
- return False, dims
+ target_dims: set[str] = set()
+ for e in exprs:
+ le = _to_linexpr(e)
+ target_dims.update(str(d) for d in le.coord_dims)
- raise TypeError(
- f"'expr' must be a Variable, LinearExpression, or dict of these, "
- f"got {type(expr)}"
- )
+ missing = target_dims - skip - {str(d) for d in points.dims}
+ if not missing:
+ return points
+ expand_map: dict[str, list] = {}
+ for d in missing:
+ for e in exprs:
+ le = _to_linexpr(e)
+ if d in le.coords:
+ expand_map[str(d)] = list(le.coords[d].values)
+ break
-def _compute_mask(
- mask: DataArray | None,
- breakpoints: DataArray,
+ if expand_map:
+ points = points.expand_dims(expand_map)
+ return points
+
+
+def _compute_combined_mask(
+ x_points: DataArray,
+ y_points: DataArray,
skip_nan_check: bool,
) -> DataArray | None:
- if mask is not None:
- return mask
if skip_nan_check:
+ if bool(x_points.isnull().any()) or bool(y_points.isnull().any()):
+ raise ValueError(
+ "skip_nan_check=True but breakpoints contain NaN. "
+ "Either remove NaN values or set skip_nan_check=False."
+ )
return None
- return ~breakpoints.isnull()
-
-
-def _resolve_link_dim(
- breakpoints: DataArray,
- expr_keys: set[str],
- exclude_dims: set[str],
-) -> str:
- for d in breakpoints.dims:
- if d in exclude_dims:
- continue
- coord_set = {str(c) for c in breakpoints.coords[d].values}
- if coord_set == expr_keys:
- return str(d)
- raise ValueError(
- "Could not auto-detect linking dimension from breakpoints. "
- "Ensure breakpoints have a dimension whose coordinates match "
- f"the expression dict keys. "
- f"Breakpoint dimensions: {list(breakpoints.dims)}, "
- f"expression keys: {list(expr_keys)}"
- )
+ return ~(x_points.isnull() | y_points.isnull())
-def _build_stacked_expr(
- model: Model,
- expr_dict: dict[str, LinExprLike],
- breakpoints: DataArray,
- link_dim: str,
-) -> LinearExpression:
- from linopy.expressions import LinearExpression
+def _detect_convexity(
+ x_points: DataArray,
+ y_points: DataArray,
+) -> Literal["convex", "concave", "linear", "mixed"]:
+ """
+ Detect convexity of the piecewise function.
+
+ Requires strictly increasing x breakpoints and computes slopes and
+ second differences in the given order.
+ """
+ if not _check_strict_increasing(x_points):
+ raise ValueError(
+ "Convexity detection requires strictly increasing x_points. "
+ "Pass breakpoints in increasing x-order or use method='sos2'."
+ )
- link_coords = list(breakpoints.coords[link_dim].values)
+ dx = x_points.diff(BREAKPOINT_DIM)
+ dy = y_points.diff(BREAKPOINT_DIM)
- expr_data_list = []
- for k in link_coords:
- e = expr_dict[str(k)]
- linexpr = _to_linexpr(e)
- expr_data_list.append(linexpr.data.expand_dims({link_dim: [k]}))
+ valid = ~(dx.isnull() | dy.isnull() | (dx == 0))
+ slopes = dy / dx
- stacked_data = xr.concat(expr_data_list, dim=link_dim)
- return LinearExpression(stacked_data, model)
+ if slopes.sizes[BREAKPOINT_DIM] < 2:
+ return "linear"
+ slope_diffs = slopes.diff(BREAKPOINT_DIM)
-def _resolve_expr(
+ valid_diffs = valid.isel({BREAKPOINT_DIM: slice(None, -1)})
+ valid_diffs_hi = valid.isel({BREAKPOINT_DIM: slice(1, None)})
+ valid_diffs_combined = valid_diffs.values & valid_diffs_hi.values
+
+ sd_values = slope_diffs.values
+ if valid_diffs_combined.size == 0 or not valid_diffs_combined.any():
+ return "linear"
+
+ valid_sd = sd_values[valid_diffs_combined]
+ all_nonneg = bool(np.all(valid_sd >= -1e-10))
+ all_nonpos = bool(np.all(valid_sd <= 1e-10))
+
+ if all_nonneg and all_nonpos:
+ return "linear"
+ if all_nonneg:
+ return "convex"
+ if all_nonpos:
+ return "concave"
+ return "mixed"
+
+
+# ---------------------------------------------------------------------------
+# Internal formulation functions
+# ---------------------------------------------------------------------------
+
+
+def _add_pwl_lp(
model: Model,
- expr: LinExprLike | dict[str, LinExprLike],
- breakpoints: DataArray,
- dim: str,
- mask: DataArray | None,
- skip_nan_check: bool,
- exclude_dims: set[str] | None = None,
-) -> tuple[LinearExpression, str | None, DataArray | None, DataArray | None]:
- is_single, _ = _validate_piecewise_expr(expr)
-
- computed_mask = _compute_mask(mask, breakpoints, skip_nan_check)
-
- if is_single:
- target_expr = _to_linexpr(expr) # type: ignore[arg-type]
- return target_expr, None, computed_mask, computed_mask
-
- expr_dict: dict[str, LinExprLike] = expr # type: ignore[assignment]
- expr_keys = set(expr_dict.keys())
- all_exclude = {dim} | (exclude_dims or set())
- resolved_link_dim = _resolve_link_dim(breakpoints, expr_keys, all_exclude)
- lambda_mask = None
- if computed_mask is not None:
- if resolved_link_dim not in computed_mask.dims:
- computed_mask = computed_mask.broadcast_like(breakpoints)
- lambda_mask = computed_mask.any(dim=resolved_link_dim)
- target_expr = _build_stacked_expr(model, expr_dict, breakpoints, resolved_link_dim)
- return target_expr, resolved_link_dim, computed_mask, lambda_mask
-
-
-def _add_pwl_sos2(
+ name: str,
+ x_expr: LinearExpression,
+ y_expr: LinearExpression,
+ sign: str,
+ x_points: DataArray,
+ y_points: DataArray,
+) -> Constraint:
+    """Add pure LP secant-line constraints through consecutive breakpoint pairs."""
+ dx = x_points.diff(BREAKPOINT_DIM)
+ dy = y_points.diff(BREAKPOINT_DIM)
+ slopes = dy / dx
+
+ slopes = slopes.rename({BREAKPOINT_DIM: LP_SEG_DIM})
+ n_seg = slopes.sizes[LP_SEG_DIM]
+ slopes[LP_SEG_DIM] = np.arange(n_seg)
+
+ x_base = x_points.isel({BREAKPOINT_DIM: slice(None, -1)})
+ y_base = y_points.isel({BREAKPOINT_DIM: slice(None, -1)})
+ x_base = x_base.rename({BREAKPOINT_DIM: LP_SEG_DIM})
+ y_base = y_base.rename({BREAKPOINT_DIM: LP_SEG_DIM})
+ x_base[LP_SEG_DIM] = np.arange(n_seg)
+ y_base[LP_SEG_DIM] = np.arange(n_seg)
+
+ rhs = y_base - slopes * x_base
+ lhs = y_expr - slopes * x_expr
+
+ if sign == "<=":
+ con = model.add_constraints(lhs <= rhs, name=f"{name}{PWL_LP_SUFFIX}")
+ else:
+ con = model.add_constraints(lhs >= rhs, name=f"{name}{PWL_LP_SUFFIX}")
+
+ # Domain bound constraints to keep x within [x_min, x_max]
+ x_lo = x_points.min(dim=BREAKPOINT_DIM)
+ x_hi = x_points.max(dim=BREAKPOINT_DIM)
+ model.add_constraints(x_expr >= x_lo, name=f"{name}{PWL_LP_DOMAIN_SUFFIX}_lo")
+ model.add_constraints(x_expr <= x_hi, name=f"{name}{PWL_LP_DOMAIN_SUFFIX}_hi")
+
+ return con
+
+
+def _add_pwl_sos2_core(
model: Model,
name: str,
- breakpoints: DataArray,
- dim: str,
+ x_expr: LinearExpression,
target_expr: LinearExpression,
- lambda_coords: list[pd.Index],
+ x_points: DataArray,
+ y_points: DataArray,
lambda_mask: DataArray | None,
+ active: LinearExpression | None = None,
) -> Constraint:
+ """
+ Core SOS2 formulation linking x_expr and target_expr via breakpoints.
+
+ Creates lambda variables, SOS2 constraint, convexity constraint,
+ and linking constraints for both x and target.
+
+ When ``active`` is provided, the convexity constraint becomes
+ ``sum(lambda) == active`` instead of ``== 1``, forcing all lambda
+ (and thus x, y) to zero when ``active=0``.
+ """
+ extra = _extra_coords(x_points, BREAKPOINT_DIM)
+ lambda_coords = extra + [
+ pd.Index(x_points.coords[BREAKPOINT_DIM].values, name=BREAKPOINT_DIM)
+ ]
+
lambda_name = f"{name}{PWL_LAMBDA_SUFFIX}"
convex_name = f"{name}{PWL_CONVEX_SUFFIX}"
- link_name = f"{name}{PWL_LINK_SUFFIX}"
+ x_link_name = f"{name}{PWL_X_LINK_SUFFIX}"
+ y_link_name = f"{name}{PWL_Y_LINK_SUFFIX}"
lambda_var = model.add_variables(
lower=0, upper=1, coords=lambda_coords, name=lambda_name, mask=lambda_mask
)
- model.add_sos_constraints(lambda_var, sos_type=2, sos_dim=dim)
+ model.add_sos_constraints(lambda_var, sos_type=2, sos_dim=BREAKPOINT_DIM)
- convex_con = model.add_constraints(lambda_var.sum(dim=dim) == 1, name=convex_name)
+ # Convexity constraint: sum(lambda) == 1 or sum(lambda) == active
+ rhs = active if active is not None else 1
+ convex_con = model.add_constraints(
+ lambda_var.sum(dim=BREAKPOINT_DIM) == rhs, name=convex_name
+ )
+
+ x_weighted = (lambda_var * x_points).sum(dim=BREAKPOINT_DIM)
+ model.add_constraints(x_expr == x_weighted, name=x_link_name)
- weighted_sum = (lambda_var * breakpoints).sum(dim=dim)
- model.add_constraints(target_expr == weighted_sum, name=link_name)
+ y_weighted = (lambda_var * y_points).sum(dim=BREAKPOINT_DIM)
+ model.add_constraints(target_expr == y_weighted, name=y_link_name)
return convex_con
-def _add_pwl_incremental(
+def _add_pwl_incremental_core(
model: Model,
name: str,
- breakpoints: DataArray,
- dim: str,
+ x_expr: LinearExpression,
target_expr: LinearExpression,
- extra_coords: list[pd.Index],
- breakpoint_mask: DataArray | None,
- link_dim: str | None,
+ x_points: DataArray,
+ y_points: DataArray,
+ bp_mask: DataArray | None,
+ active: LinearExpression | None = None,
) -> Constraint:
+ """
+ Core incremental formulation linking x_expr and target_expr.
+
+ Creates delta variables, fill-order constraints, and x/target link constraints.
+
+ When ``active`` is provided, delta bounds are tightened to
+ ``δ_i ≤ active`` and base terms become ``x₀ * active``,
+ ``y₀ * active``, forcing x and y to zero when ``active=0``.
+ """
delta_name = f"{name}{PWL_DELTA_SUFFIX}"
fill_name = f"{name}{PWL_FILL_SUFFIX}"
- link_name = f"{name}{PWL_LINK_SUFFIX}"
-
- n_segments = breakpoints.sizes[dim] - 1
- seg_dim = f"{dim}_seg"
- seg_index = pd.Index(range(n_segments), name=seg_dim)
- delta_coords = extra_coords + [seg_index]
-
- steps = breakpoints.diff(dim).rename({dim: seg_dim})
- steps[seg_dim] = seg_index
-
- if breakpoint_mask is not None:
- bp_mask = breakpoint_mask
- if link_dim is not None:
- bp_mask = bp_mask.all(dim=link_dim)
- mask_lo = bp_mask.isel({dim: slice(None, -1)}).rename({dim: seg_dim})
- mask_hi = bp_mask.isel({dim: slice(1, None)}).rename({dim: seg_dim})
- mask_lo[seg_dim] = seg_index
- mask_hi[seg_dim] = seg_index
+ x_link_name = f"{name}{PWL_X_LINK_SUFFIX}"
+ y_link_name = f"{name}{PWL_Y_LINK_SUFFIX}"
+
+ n_segments = x_points.sizes[BREAKPOINT_DIM] - 1
+ seg_index = pd.Index(range(n_segments), name=LP_SEG_DIM)
+ extra = _extra_coords(x_points, BREAKPOINT_DIM)
+ delta_coords = extra + [seg_index]
+
+ x_steps = x_points.diff(BREAKPOINT_DIM).rename({BREAKPOINT_DIM: LP_SEG_DIM})
+ x_steps[LP_SEG_DIM] = seg_index
+ y_steps = y_points.diff(BREAKPOINT_DIM).rename({BREAKPOINT_DIM: LP_SEG_DIM})
+ y_steps[LP_SEG_DIM] = seg_index
+
+ if bp_mask is not None:
+ mask_lo = bp_mask.isel({BREAKPOINT_DIM: slice(None, -1)}).rename(
+ {BREAKPOINT_DIM: LP_SEG_DIM}
+ )
+ mask_hi = bp_mask.isel({BREAKPOINT_DIM: slice(1, None)}).rename(
+ {BREAKPOINT_DIM: LP_SEG_DIM}
+ )
+ mask_lo[LP_SEG_DIM] = seg_index
+ mask_hi[LP_SEG_DIM] = seg_index
delta_mask: DataArray | None = mask_lo & mask_hi
else:
delta_mask = None
+    # The static upper bound stays 1 regardless of ``active``; when ``active``
+    # is provided, delta is tightened below via the explicit δ_i ≤ active constraint.
+    delta_upper = 1
delta_var = model.add_variables(
- lower=0, upper=1, coords=delta_coords, name=delta_name, mask=delta_mask
+ lower=0,
+ upper=delta_upper,
+ coords=delta_coords,
+ name=delta_name,
+ mask=delta_mask,
)
+ if active is not None:
+ # Tighten delta bounds: δ_i ≤ active
+ active_bound_name = f"{name}{PWL_ACTIVE_BOUND_SUFFIX}"
+ model.add_constraints(delta_var <= active, name=active_bound_name)
+
+ # Binary indicator variables: y_i for each segment
+ inc_binary_name = f"{name}{PWL_INC_BINARY_SUFFIX}"
+ inc_link_name = f"{name}{PWL_INC_LINK_SUFFIX}"
+ inc_order_name = f"{name}{PWL_INC_ORDER_SUFFIX}"
+
+ binary_var = model.add_variables(
+ binary=True, coords=delta_coords, name=inc_binary_name, mask=delta_mask
+ )
+
+ # Link constraints: δ_i ≤ y_i for all segments
+ model.add_constraints(delta_var <= binary_var, name=inc_link_name)
+
+ # Order constraints: y_{i+1} ≤ δ_i for i = 0..n-2
fill_con: Constraint | None = None
if n_segments >= 2:
- delta_lo = delta_var.isel({seg_dim: slice(None, -1)}, drop=True)
- delta_hi = delta_var.isel({seg_dim: slice(1, None)}, drop=True)
+ delta_lo = delta_var.isel({LP_SEG_DIM: slice(None, -1)}, drop=True)
+ delta_hi = delta_var.isel({LP_SEG_DIM: slice(1, None)}, drop=True)
+ # Keep existing fill constraint as LP relaxation tightener
fill_con = model.add_constraints(delta_hi <= delta_lo, name=fill_name)
- bp0 = breakpoints.isel({dim: 0})
- weighted_sum = (delta_var * steps).sum(dim=seg_dim) + bp0
- link_con = model.add_constraints(target_expr == weighted_sum, name=link_name)
+ binary_hi = binary_var.isel({LP_SEG_DIM: slice(1, None)}, drop=True)
+ model.add_constraints(binary_hi <= delta_lo, name=inc_order_name)
+
+ x0 = x_points.isel({BREAKPOINT_DIM: 0})
+ y0 = y_points.isel({BREAKPOINT_DIM: 0})
+
+ # When active is provided, multiply base terms by active
+ x_base: DataArray | LinearExpression = x0
+ y_base: DataArray | LinearExpression = y0
+ if active is not None:
+ x_base = x0 * active
+ y_base = y0 * active
+
+ x_weighted = (delta_var * x_steps).sum(dim=LP_SEG_DIM) + x_base
+ model.add_constraints(x_expr == x_weighted, name=x_link_name)
- return fill_con if fill_con is not None else link_con
+ y_weighted = (delta_var * y_steps).sum(dim=LP_SEG_DIM) + y_base
+ model.add_constraints(target_expr == y_weighted, name=y_link_name)
+ return fill_con if fill_con is not None else model.constraints[y_link_name]
-def _add_dpwl_sos2(
+
+def _add_dpwl_sos2_core(
model: Model,
name: str,
- breakpoints: DataArray,
- dim: str,
- segment_dim: str,
+ x_expr: LinearExpression,
target_expr: LinearExpression,
- lambda_coords: list[pd.Index],
+ x_points: DataArray,
+ y_points: DataArray,
lambda_mask: DataArray | None,
- binary_coords: list[pd.Index],
- binary_mask: DataArray | None,
+ active: LinearExpression | None = None,
) -> Constraint:
+ """
+ Core disjunctive SOS2 formulation with separate x/y points.
+
+ When ``active`` is provided, the segment selection becomes
+ ``sum(z_k) == active`` instead of ``== 1``, forcing all segment
+ binaries, lambdas, and thus x and y to zero when ``active=0``.
+ """
binary_name = f"{name}{PWL_BINARY_SUFFIX}"
select_name = f"{name}{PWL_SELECT_SUFFIX}"
lambda_name = f"{name}{PWL_LAMBDA_SUFFIX}"
convex_name = f"{name}{PWL_CONVEX_SUFFIX}"
- link_name = f"{name}{PWL_LINK_SUFFIX}"
+ x_link_name = f"{name}{PWL_X_LINK_SUFFIX}"
+ y_link_name = f"{name}{PWL_Y_LINK_SUFFIX}"
+
+ extra = _extra_coords(x_points, BREAKPOINT_DIM, SEGMENT_DIM)
+ lambda_coords = extra + [
+ pd.Index(x_points.coords[SEGMENT_DIM].values, name=SEGMENT_DIM),
+ pd.Index(x_points.coords[BREAKPOINT_DIM].values, name=BREAKPOINT_DIM),
+ ]
+ binary_coords = extra + [
+ pd.Index(x_points.coords[SEGMENT_DIM].values, name=SEGMENT_DIM),
+ ]
+
+ binary_mask = (
+ lambda_mask.any(dim=BREAKPOINT_DIM) if lambda_mask is not None else None
+ )
binary_var = model.add_variables(
binary=True, coords=binary_coords, name=binary_name, mask=binary_mask
)
+ # Segment selection: sum(z_k) == 1 or sum(z_k) == active
+ rhs = active if active is not None else 1
select_con = model.add_constraints(
- binary_var.sum(dim=segment_dim) == 1, name=select_name
+ binary_var.sum(dim=SEGMENT_DIM) == rhs, name=select_name
)
lambda_var = model.add_variables(
lower=0, upper=1, coords=lambda_coords, name=lambda_name, mask=lambda_mask
)
- model.add_sos_constraints(lambda_var, sos_type=2, sos_dim=dim)
+ model.add_sos_constraints(lambda_var, sos_type=2, sos_dim=BREAKPOINT_DIM)
+
+ model.add_constraints(
+ lambda_var.sum(dim=BREAKPOINT_DIM) == binary_var, name=convex_name
+ )
- model.add_constraints(lambda_var.sum(dim=dim) == binary_var, name=convex_name)
+ x_weighted = (lambda_var * x_points).sum(dim=[SEGMENT_DIM, BREAKPOINT_DIM])
+ model.add_constraints(x_expr == x_weighted, name=x_link_name)
- weighted_sum = (lambda_var * breakpoints).sum(dim=[segment_dim, dim])
- model.add_constraints(target_expr == weighted_sum, name=link_name)
+ y_weighted = (lambda_var * y_points).sum(dim=[SEGMENT_DIM, BREAKPOINT_DIM])
+ model.add_constraints(target_expr == y_weighted, name=y_link_name)
return select_con
+# ---------------------------------------------------------------------------
+# Main entry point
+# ---------------------------------------------------------------------------
+
+
def add_piecewise_constraints(
model: Model,
- expr: LinExprLike | dict[str, LinExprLike],
- breakpoints: DataArray,
- dim: str = DEFAULT_BREAKPOINT_DIM,
- mask: DataArray | None = None,
+ descriptor: PiecewiseConstraintDescriptor | Constraint,
+ method: Literal["sos2", "incremental", "auto", "lp"] = "auto",
name: str | None = None,
skip_nan_check: bool = False,
- method: Literal["sos2", "incremental", "auto"] = "sos2",
) -> Constraint:
"""
- Add a piecewise linear constraint using SOS2 or incremental formulation.
+ Add a piecewise linear constraint from a :class:`PiecewiseConstraintDescriptor`.
- This method creates a piecewise linear constraint that links one or more
- variables/expressions together via a set of breakpoints. It supports two
- formulations:
+ Typically called as::
- - **SOS2** (default): Uses SOS2 (Special Ordered Set of type 2) with lambda
- (interpolation) variables. Works for any breakpoints.
- - **Incremental**: Uses delta variables with filling-order constraints.
- Pure LP formulation (no SOS2 or binary variables), but requires strictly
- monotonic breakpoints.
+ m.add_piecewise_constraints(piecewise(x, x_points, y_points) >= y)
Parameters
----------
model : Model
- The linopy model to add the constraint to.
- expr : Variable, LinearExpression, or dict of these
- The variable(s) or expression(s) to be linked by the piecewise constraint.
- - If a single Variable/LinearExpression is passed, the breakpoints
- directly specify the piecewise points for that expression.
- - If a dict is passed, the keys must match coordinates of a dimension
- of the breakpoints, allowing multiple expressions to be linked.
- breakpoints : xr.DataArray
- The breakpoint values defining the piecewise linear function.
- Must have `dim` as one of its dimensions. If `expr` is a dict,
- must also have a dimension with coordinates matching the dict keys.
- dim : str, default "breakpoint"
- The dimension in breakpoints that represents the breakpoint index.
- This dimension's coordinates must be numeric (used as SOS2 weights
- for the SOS2 method).
- mask : xr.DataArray, optional
- Boolean mask indicating which piecewise constraints are valid.
- If None, auto-detected from NaN values in breakpoints (unless
- skip_nan_check is True).
+ The linopy model.
+ descriptor : PiecewiseConstraintDescriptor
+ Created by comparing a variable/expression with a :class:`PiecewiseExpression`.
+ method : {"auto", "sos2", "incremental", "lp"}, default "auto"
+ Formulation method.
name : str, optional
- Base name for the generated variables and constraints.
- If None, auto-generates names like "pwl0", "pwl1", etc.
+ Base name for generated variables/constraints.
skip_nan_check : bool, default False
- If True, skip automatic NaN detection in breakpoints. Use this
- when you know breakpoints contain no NaN values for better performance.
- method : Literal["sos2", "incremental", "auto"], default "sos2"
- Formulation method. One of:
- - ``"sos2"``: SOS2 formulation with lambda variables (default).
- - ``"incremental"``: Incremental (delta) formulation. Requires strictly
- monotonic breakpoints. Pure LP, no SOS2 or binary variables.
- - ``"auto"``: Automatically selects ``"incremental"`` if breakpoints are
- strictly monotonic, otherwise falls back to ``"sos2"``.
+ If True, skip NaN detection.
Returns
-------
Constraint
- For SOS2: the convexity constraint (sum of lambda = 1).
- For incremental: the filling-order constraint (or the link
- constraint if only 2 breakpoints).
-
- Raises
- ------
- ValueError
- If expr is not a Variable, LinearExpression, or dict of these.
- If breakpoints doesn't have the required dim dimension.
- If the linking dimension cannot be auto-detected when expr is a dict.
- If dim coordinates are not numeric (SOS2 method only).
- If breakpoints are not strictly monotonic (incremental method).
- If method is not one of 'sos2', 'incremental', 'auto'.
-
- Examples
- --------
- Single variable piecewise constraint:
-
- >>> from linopy import Model
- >>> import xarray as xr
- >>> m = Model()
- >>> x = m.add_variables(name="x")
- >>> breakpoints = xr.DataArray([0, 10, 50, 100], dims=["bp"])
- >>> _ = m.add_piecewise_constraints(x, breakpoints, dim="bp")
-
- Notes
- -----
- **SOS2 formulation:**
-
- 1. Lambda variables λ_i with bounds [0, 1] are created for each breakpoint
- 2. SOS2 constraint ensures at most two adjacent λ_i can be non-zero
- 3. Convexity constraint: Σ λ_i = 1
- 4. Linking constraints: expr = Σ λ_i × breakpoint_i (for each expression)
-
- **Incremental formulation** (for strictly monotonic breakpoints bp₀ < bp₁ < ... < bpₙ):
-
- 1. Delta variables δᵢ ∈ [0, 1] for i = 1, ..., n (one per segment)
- 2. Filling-order constraints: δᵢ₊₁ ≤ δᵢ for i = 1, ..., n-1
- 3. Linking constraint: expr = bp₀ + Σᵢ δᵢ × (bpᵢ - bpᵢ₋₁)
"""
- if method not in ("sos2", "incremental", "auto"):
+ if not isinstance(descriptor, PiecewiseConstraintDescriptor):
+ raise TypeError(
+ f"Expected PiecewiseConstraintDescriptor, got {type(descriptor)}. "
+ f"Use: m.add_piecewise_constraints(piecewise(x, x_points, y_points) >= y)"
+ )
+
+ if method not in ("sos2", "incremental", "auto", "lp"):
raise ValueError(
- f"method must be 'sos2', 'incremental', or 'auto', got '{method}'"
+ f"method must be 'sos2', 'incremental', 'auto', or 'lp', got '{method}'"
)
- _validate_breakpoints(breakpoints, dim)
- breakpoints = _auto_broadcast_breakpoints(breakpoints, expr, dim)
+ pw = descriptor.piecewise_func
+ sign = descriptor.sign
+ y_lhs = descriptor.lhs
+ x_expr_raw = pw.expr
+ x_points = pw.x_points
+ y_points = pw.y_points
+ disjunctive = pw.disjunctive
+ active = pw.active
- if method in ("incremental", "auto"):
- is_monotonic = _check_strict_monotonicity(breakpoints, dim)
- trailing_nan_only = _has_trailing_nan_only(breakpoints, dim)
- if method == "auto":
- if is_monotonic and trailing_nan_only:
- method = "incremental"
- else:
- method = "sos2"
- elif not is_monotonic:
- raise ValueError(
- "Incremental method requires strictly monotonic breakpoints "
- "along the breakpoint dimension."
- )
- if method == "incremental" and not trailing_nan_only:
- raise ValueError(
- "Incremental method does not support non-trailing NaN breakpoints. "
- "NaN values must only appear at the end of the breakpoint sequence. "
- "Use method='sos2' for breakpoints with gaps."
- )
+ # Broadcast points to match expression dimensions
+ x_points = _broadcast_points(x_points, x_expr_raw, y_lhs, disjunctive=disjunctive)
+ y_points = _broadcast_points(y_points, x_expr_raw, y_lhs, disjunctive=disjunctive)
- if method == "sos2":
- _validate_numeric_breakpoint_coords(breakpoints, dim)
+ # Compute mask
+ mask = _compute_combined_mask(x_points, y_points, skip_nan_check)
+ # Name
if name is None:
name = f"pwl{model._pwlCounter}"
model._pwlCounter += 1
- target_expr, resolved_link_dim, computed_mask, lambda_mask = _resolve_expr(
- model, expr, breakpoints, dim, mask, skip_nan_check
- )
+ # Convert to LinearExpressions
+ x_expr = _to_linexpr(x_expr_raw)
+ y_expr = _to_linexpr(y_lhs)
- extra_coords = _extra_coords(breakpoints, dim, resolved_link_dim)
- lambda_coords = extra_coords + [pd.Index(breakpoints.coords[dim].values, name=dim)]
+ # Convert active to LinearExpression if provided
+ active_expr = _to_linexpr(active) if active is not None else None
- if method == "sos2":
- return _add_pwl_sos2(
- model, name, breakpoints, dim, target_expr, lambda_coords, lambda_mask
+ # Validate: active is not supported with LP method
+ if active_expr is not None and method == "lp":
+ raise ValueError(
+ "The 'active' parameter is not supported with method='lp'. "
+ "Use method='incremental' or method='sos2'."
+ )
+
+ if disjunctive:
+ return _add_disjunctive(
+ model,
+ name,
+ x_expr,
+ y_expr,
+ sign,
+ x_points,
+ y_points,
+ mask,
+ method,
+ active_expr,
)
else:
- return _add_pwl_incremental(
+ return _add_continuous(
model,
name,
- breakpoints,
- dim,
- target_expr,
- extra_coords,
- computed_mask,
- resolved_link_dim,
+ x_expr,
+ y_expr,
+ sign,
+ x_points,
+ y_points,
+ mask,
+ method,
+ skip_nan_check,
+ active_expr,
)
-def add_disjunctive_piecewise_constraints(
+def _add_continuous(
model: Model,
- expr: LinExprLike | dict[str, LinExprLike],
- breakpoints: DataArray,
- dim: str = DEFAULT_BREAKPOINT_DIM,
- segment_dim: str = DEFAULT_SEGMENT_DIM,
- mask: DataArray | None = None,
- name: str | None = None,
- skip_nan_check: bool = False,
+ name: str,
+ x_expr: LinearExpression,
+ y_expr: LinearExpression,
+ sign: str,
+ x_points: DataArray,
+ y_points: DataArray,
+ mask: DataArray | None,
+ method: str,
+ skip_nan_check: bool,
+ active: LinearExpression | None = None,
) -> Constraint:
- """
- Add a disjunctive piecewise linear constraint for disconnected segments.
+ """Handle continuous (non-disjunctive) piecewise constraints."""
+ convexity: Literal["convex", "concave", "linear", "mixed"] | None = None
+
+ # Determine actual method
+ if method == "auto":
+ if sign == "==":
+ if _check_strict_monotonicity(x_points) and _has_trailing_nan_only(
+ x_points
+ ):
+ method = "incremental"
+ else:
+ method = "sos2"
+ else:
+ if not _check_strict_increasing(x_points):
+ raise ValueError(
+ "Automatic method selection for piecewise inequalities requires "
+ "strictly increasing x_points. Pass breakpoints in increasing "
+ "x-order or use method='sos2'."
+ )
+ convexity = _detect_convexity(x_points, y_points)
+ if convexity == "linear":
+ method = "lp"
+ elif (sign == "<=" and convexity == "concave") or (
+ sign == ">=" and convexity == "convex"
+ ):
+ method = "lp"
+ else:
+ method = "sos2"
+ elif method == "lp":
+ if sign == "==":
+ raise ValueError("Pure LP method is not supported for equality constraints")
+ convexity = _detect_convexity(x_points, y_points)
+ if convexity != "linear":
+ if sign == "<=" and convexity != "concave":
+ raise ValueError(
+ f"Pure LP method for '<=' requires concave or linear function, "
+ f"got {convexity}"
+ )
+ if sign == ">=" and convexity != "convex":
+ raise ValueError(
+ f"Pure LP method for '>=' requires convex or linear function, "
+ f"got {convexity}"
+ )
+ elif method == "incremental":
+ if not _check_strict_monotonicity(x_points):
+ raise ValueError("Incremental method requires strictly monotonic x_points")
+ if not _has_trailing_nan_only(x_points):
+ raise ValueError(
+ "Incremental method does not support non-trailing NaN breakpoints. "
+ "NaN values must only appear at the end of the breakpoint sequence."
+ )
- Unlike ``add_piecewise_constraints``, which models continuous piecewise
- linear functions (all segments connected end-to-end), this method handles
- **disconnected segments** (with gaps between them). The variable must lie
- on exactly one segment, selected by binary indicator variables.
+ if method == "sos2":
+ _validate_numeric_breakpoint_coords(x_points)
+ if not _has_trailing_nan_only(x_points):
+ raise ValueError(
+ "SOS2 method does not support non-trailing NaN breakpoints. "
+ "NaN values must only appear at the end of the breakpoint sequence."
+ )
- Uses the disaggregated convex combination formulation (no big-M needed,
- tight LP relaxation):
+ # LP formulation
+ if method == "lp":
+ if active is not None:
+ raise ValueError(
+ "The 'active' parameter is not supported with method='lp'. "
+ "Use method='incremental' or method='sos2'."
+ )
+ return _add_pwl_lp(model, name, x_expr, y_expr, sign, x_points, y_points)
+
+ # SOS2 or incremental formulation
+ if sign == "==":
+ # Direct linking: y = f(x)
+ if method == "sos2":
+ return _add_pwl_sos2_core(
+ model, name, x_expr, y_expr, x_points, y_points, mask, active
+ )
+ else: # incremental
+ return _add_pwl_incremental_core(
+ model, name, x_expr, y_expr, x_points, y_points, mask, active
+ )
+ else:
+ # Inequality: create aux variable z, enforce z = f(x), then y <= z or y >= z
+ aux_name = f"{name}{PWL_AUX_SUFFIX}"
+ aux_coords = _extra_coords(x_points, BREAKPOINT_DIM)
+ z = model.add_variables(coords=aux_coords, name=aux_name)
+ z_expr = _to_linexpr(z)
+
+ if method == "sos2":
+ result = _add_pwl_sos2_core(
+ model, name, x_expr, z_expr, x_points, y_points, mask, active
+ )
+ else: # incremental
+ result = _add_pwl_incremental_core(
+ model, name, x_expr, z_expr, x_points, y_points, mask, active
+ )
- 1. Binary ``y_k ∈ {0,1}`` per segment, ``Σ y_k = 1``
- 2. Lambda ``λ_{k,i} ∈ [0,1]`` per breakpoint in each segment
- 3. Convexity: ``Σ_i λ_{k,i} = y_k``
- 4. SOS2 within each segment (along breakpoint dim)
- 5. Linking: ``expr = Σ_k Σ_i λ_{k,i} × bp_{k,i}``
+ # Add inequality
+ ineq_name = f"{name}_ineq"
+ if sign == "<=":
+ model.add_constraints(y_expr <= z_expr, name=ineq_name)
+ else:
+ model.add_constraints(y_expr >= z_expr, name=ineq_name)
- Parameters
- ----------
- model : Model
- The linopy model to add the constraint to.
- expr : Variable, LinearExpression, or dict of these
- The variable(s) or expression(s) to be linked by the piecewise
- constraint.
- breakpoints : xr.DataArray
- Breakpoint values with at least ``dim`` and ``segment_dim``
- dimensions. Each slice along ``segment_dim`` defines one segment.
- Use NaN to pad segments with fewer breakpoints.
- dim : str, default "breakpoint"
- Dimension for breakpoint indices within each segment.
- Must have numeric coordinates.
- segment_dim : str, default "segment"
- Dimension indexing the segments.
- mask : xr.DataArray, optional
- Boolean mask. If None, auto-detected from NaN values.
- name : str, optional
- Base name for generated variables/constraints. Auto-generated
- if None using the shared ``_pwlCounter``.
- skip_nan_check : bool, default False
- If True, skip NaN detection in breakpoints.
+ return result
- Returns
- -------
- Constraint
- The selection constraint (``Σ y_k = 1``).
- Raises
- ------
- ValueError
- If ``dim`` or ``segment_dim`` not in breakpoints dimensions.
- If ``dim == segment_dim``.
- If ``dim`` coordinates are not numeric.
- If ``expr`` is not a Variable, LinearExpression, or dict.
-
- Examples
- --------
- Two disconnected segments [0,10] and [50,100]:
-
- >>> from linopy import Model
- >>> import xarray as xr
- >>> m = Model()
- >>> x = m.add_variables(name="x")
- >>> breakpoints = xr.DataArray(
- ... [[0, 10], [50, 100]],
- ... dims=["segment", "breakpoint"],
- ... coords={"segment": [0, 1], "breakpoint": [0, 1]},
- ... )
- >>> _ = m.add_disjunctive_piecewise_constraints(x, breakpoints)
- """
- _validate_breakpoints(breakpoints, dim)
- if segment_dim not in breakpoints.dims:
+def _add_disjunctive(
+ model: Model,
+ name: str,
+ x_expr: LinearExpression,
+ y_expr: LinearExpression,
+ sign: str,
+ x_points: DataArray,
+ y_points: DataArray,
+ mask: DataArray | None,
+ method: str,
+ active: LinearExpression | None = None,
+) -> Constraint:
+ """Handle disjunctive piecewise constraints."""
+ if method == "lp":
+ raise ValueError("Pure LP method is not supported for disjunctive constraints")
+ if method == "incremental":
raise ValueError(
- f"breakpoints must have dimension '{segment_dim}', "
- f"but only has dimensions {list(breakpoints.dims)}"
+ "Incremental method is not supported for disjunctive constraints"
)
- if dim == segment_dim:
- raise ValueError(f"dim and segment_dim must be different, both are '{dim}'")
- _validate_numeric_breakpoint_coords(breakpoints, dim)
- breakpoints = _auto_broadcast_breakpoints(
- breakpoints, expr, dim, exclude_dims={segment_dim}
- )
- if name is None:
- name = f"pwl{model._pwlCounter}"
- model._pwlCounter += 1
+ _validate_numeric_breakpoint_coords(x_points)
+ if not _has_trailing_nan_only(x_points):
+ raise ValueError(
+ "Disjunctive SOS2 does not support non-trailing NaN breakpoints. "
+ "NaN values must only appear at the end of the breakpoint sequence."
+ )
- target_expr, resolved_link_dim, computed_mask, lambda_mask = _resolve_expr(
- model,
- expr,
- breakpoints,
- dim,
- mask,
- skip_nan_check,
- exclude_dims={segment_dim},
- )
+ if sign == "==":
+ return _add_dpwl_sos2_core(
+ model, name, x_expr, y_expr, x_points, y_points, mask, active
+ )
+ else:
+ # Create aux variable z, disjunctive SOS2 for z = f(x), then y <= z or y >= z
+ aux_name = f"{name}{PWL_AUX_SUFFIX}"
+ aux_coords = _extra_coords(x_points, BREAKPOINT_DIM, SEGMENT_DIM)
+ z = model.add_variables(coords=aux_coords, name=aux_name)
+ z_expr = _to_linexpr(z)
+
+ result = _add_dpwl_sos2_core(
+ model, name, x_expr, z_expr, x_points, y_points, mask, active
+ )
- extra_coords = _extra_coords(breakpoints, dim, segment_dim, resolved_link_dim)
- lambda_coords = extra_coords + [
- pd.Index(breakpoints.coords[segment_dim].values, name=segment_dim),
- pd.Index(breakpoints.coords[dim].values, name=dim),
- ]
- binary_coords = extra_coords + [
- pd.Index(breakpoints.coords[segment_dim].values, name=segment_dim),
- ]
+ ineq_name = f"{name}_ineq"
+ if sign == "<=":
+ model.add_constraints(y_expr <= z_expr, name=ineq_name)
+ else:
+ model.add_constraints(y_expr >= z_expr, name=ineq_name)
- binary_mask = lambda_mask.any(dim=dim) if lambda_mask is not None else None
-
- return _add_dpwl_sos2(
- model,
- name,
- breakpoints,
- dim,
- segment_dim,
- target_expr,
- lambda_coords,
- lambda_mask,
- binary_coords,
- binary_mask,
- )
+ return result
diff --git a/linopy/solver_capabilities.py b/linopy/solver_capabilities.py
index f0507317..46af60ba 100644
--- a/linopy/solver_capabilities.py
+++ b/linopy/solver_capabilities.py
@@ -7,7 +7,6 @@
from __future__ import annotations
-import platform
from dataclasses import dataclass
from enum import Enum, auto
from importlib.metadata import PackageNotFoundError
@@ -50,6 +49,9 @@ class SolverFeature(Enum):
# Special constraint types
SOS_CONSTRAINTS = auto() # Special Ordered Sets (SOS1/SOS2) constraints
+ # Special variable types
+ SEMI_CONTINUOUS_VARIABLES = auto() # Semi-continuous variable support
+
# Solver-specific
SOLVER_ATTRIBUTE_ACCESS = auto() # Direct access to solver variable attributes
@@ -86,6 +88,7 @@ def supports(self, feature: SolverFeature) -> bool:
SolverFeature.SOLUTION_FILE_NOT_NEEDED,
SolverFeature.IIS_COMPUTATION,
SolverFeature.SOS_CONSTRAINTS,
+ SolverFeature.SEMI_CONTINUOUS_VARIABLES,
SolverFeature.SOLVER_ATTRIBUTE_ACCESS,
}
),
@@ -101,6 +104,7 @@ def supports(self, feature: SolverFeature) -> bool:
SolverFeature.LP_FILE_NAMES,
SolverFeature.READ_MODEL_FROM_FILE,
SolverFeature.SOLUTION_FILE_NOT_NEEDED,
+ SolverFeature.SEMI_CONTINUOUS_VARIABLES,
}
),
),
@@ -134,6 +138,7 @@ def supports(self, feature: SolverFeature) -> bool:
SolverFeature.LP_FILE_NAMES,
SolverFeature.READ_MODEL_FROM_FILE,
SolverFeature.SOS_CONSTRAINTS,
+ SolverFeature.SEMI_CONTINUOUS_VARIABLES,
}
),
),
@@ -144,6 +149,7 @@ def supports(self, feature: SolverFeature) -> bool:
{
SolverFeature.INTEGER_VARIABLES,
SolverFeature.QUADRATIC_OBJECTIVE,
+ SolverFeature.DIRECT_API,
SolverFeature.LP_FILE_NAMES,
SolverFeature.READ_MODEL_FROM_FILE,
SolverFeature.SOLUTION_FILE_NOT_NEEDED,
@@ -154,6 +160,7 @@ def supports(self, feature: SolverFeature) -> bool:
else {
SolverFeature.INTEGER_VARIABLES,
SolverFeature.QUADRATIC_OBJECTIVE,
+ SolverFeature.DIRECT_API,
SolverFeature.LP_FILE_NAMES,
SolverFeature.READ_MODEL_FROM_FILE,
SolverFeature.SOLUTION_FILE_NOT_NEEDED,
@@ -179,21 +186,12 @@ def supports(self, feature: SolverFeature) -> bool:
display_name="SCIP",
features=frozenset(
{
- SolverFeature.INTEGER_VARIABLES,
- SolverFeature.LP_FILE_NAMES,
- SolverFeature.READ_MODEL_FROM_FILE,
- SolverFeature.SOLUTION_FILE_NOT_NEEDED,
- }
- if platform.system() == "Windows"
- else {
SolverFeature.INTEGER_VARIABLES,
SolverFeature.QUADRATIC_OBJECTIVE,
SolverFeature.LP_FILE_NAMES,
SolverFeature.READ_MODEL_FROM_FILE,
SolverFeature.SOLUTION_FILE_NOT_NEEDED,
}
- # SCIP has a bug with quadratic models on Windows, see:
- # https://github.com/PyPSA/linopy/actions/runs/7615240686/job/20739454099?pr=78
),
),
"mosek": SolverInfo(
diff --git a/linopy/solvers.py b/linopy/solvers.py
index 16c07932..2181beb6 100644
--- a/linopy/solvers.py
+++ b/linopy/solvers.py
@@ -14,6 +14,7 @@
import subprocess as sub
import sys
import threading
+import time
import warnings
from abc import ABC, abstractmethod
from collections import namedtuple
@@ -1405,14 +1406,16 @@ def get_solver_solution() -> Solution:
m.solution.get_values(), m.variables.get_names(), dtype=float
)
- if is_lp:
+ try:
dual = pd.Series(
m.solution.get_dual_values(),
m.linear_constraints.get_names(),
dtype=float,
)
- else:
- logger.warning("Dual values of MILP couldn't be parsed")
+ except Exception:
+ logger.warning(
+ "Dual values not available (e.g. barrier solution without crossover)"
+ )
dual = pd.Series(dtype=float)
return Solution(solution, dual, objective)
@@ -1604,8 +1607,39 @@ def solve_problem_from_model(
env: None = None,
explicit_coordinate_names: bool = False,
) -> Result:
- msg = "Direct API not implemented for Xpress"
- raise NotImplementedError(msg)
+ variable_names = (
+ np.asarray(model.matrices.vlabels)
+ if not explicit_coordinate_names
+ else None
+ )
+ constraint_names = (
+ np.asarray(model.matrices.clabels)
+ if not explicit_coordinate_names
+ else None
+ )
+
+ build_start = time.perf_counter()
+ logger.info(" Start building Xpress direct model")
+ m = model.to_xpress(
+ explicit_coordinate_names=explicit_coordinate_names,
+ progress=None,
+ )
+ logger.info(
+ " Finished building Xpress direct model in %.3fs",
+ time.perf_counter() - build_start,
+ )
+
+ return self._solve(
+ m=m,
+ solution_fn=solution_fn,
+ log_fn=log_fn,
+ warmstart_fn=warmstart_fn,
+ basis_fn=basis_fn,
+ io_api="direct",
+ sense=model.sense,
+ variable_names=variable_names,
+ constraint_names=constraint_names,
+ )
def solve_problem_from_file(
self,
@@ -1643,6 +1677,45 @@ def solve_problem_from_file(
-------
Result
"""
+ m = xpress.problem()
+
+ try: # Try new API first
+ m.readProb(path_to_string(problem_fn))
+ except AttributeError: # Fallback to old API
+ m.read(path_to_string(problem_fn))
+
+ return self._solve(
+ m=m,
+ solution_fn=solution_fn,
+ log_fn=log_fn,
+ warmstart_fn=warmstart_fn,
+ basis_fn=basis_fn,
+ io_api=read_io_api_from_problem_file(problem_fn),
+ sense=read_sense_from_problem_file(problem_fn),
+ )
+
+ def _solve(
+ self,
+ m: Any,
+ solution_fn: Path | None,
+ log_fn: Path | None,
+ warmstart_fn: Path | None,
+ basis_fn: Path | None,
+ io_api: str | None,
+ sense: str | None,
+ variable_names: np.ndarray | None = None,
+ constraint_names: np.ndarray | None = None,
+ ) -> Result:
+ def _get_names(
+ namespace: int, count: int, cached: np.ndarray | None = None
+ ) -> np.ndarray | list[str]:
+ if cached is not None:
+ return cached
+ try: # Try new API first
+ return m.getNameList(namespace, 0, count - 1)
+ except AttributeError: # Fallback to old API
+ return m.getnamelist(namespace, 0, count - 1)
+
CONDITION_MAP = {
xpress.SolStatus.NOTFOUND: "unknown",
xpress.SolStatus.OPTIMAL: "optimal",
@@ -1651,16 +1724,6 @@ def solve_problem_from_file(
xpress.SolStatus.UNBOUNDED: "unbounded",
}
- io_api = read_io_api_from_problem_file(problem_fn)
- sense = read_sense_from_problem_file(problem_fn)
-
- m = xpress.problem()
-
- try: # Try new API first
- m.readProb(path_to_string(problem_fn))
- except AttributeError: # Fallback to old API
- m.read(path_to_string(problem_fn))
-
# Set solver options - new API uses setControl per option, old API accepts dict
if self.solver_options is not None:
m.setControl(self.solver_options)
@@ -1677,7 +1740,12 @@ def solve_problem_from_file(
except AttributeError: # Fallback to old API
m.readbasis(path_to_string(warmstart_fn))
+ optimize_start = time.perf_counter()
+ logger.info(" Start Xpress optimize()")
m.optimize()
+ logger.info(
+ " Finished Xpress optimize() in %.3fs", time.perf_counter() - optimize_start
+ )
# if the solver is stopped (timelimit for example), postsolve the problem
if m.attributes.solvestatus == xpress.enums.SolveStatus.STOPPED:
@@ -1712,10 +1780,9 @@ def solve_problem_from_file(
def get_solver_solution() -> Solution:
objective = m.attributes.objval
- try: # Try new API first
- var = m.getNameList(xpress_Namespaces.COLUMN, 0, m.attributes.cols - 1)
- except AttributeError: # Fallback to old API
- var = m.getnamelist(xpress_Namespaces.COLUMN, 0, m.attributes.cols - 1)
+ var = _get_names(
+ xpress_Namespaces.COLUMN, m.attributes.cols, cached=variable_names
+ )
sol = pd.Series(m.getSolution(), index=var, dtype=float)
try:
@@ -1724,14 +1791,11 @@ def get_solver_solution() -> Solution:
except AttributeError: # Fallback to old API
_dual = m.getDual()
- try: # Try new API first
- constraints = m.getNameList(
- xpress_Namespaces.ROW, 0, m.attributes.rows - 1
- )
- except AttributeError: # Fallback to old API
- constraints = m.getnamelist(
- xpress_Namespaces.ROW, 0, m.attributes.rows - 1
- )
+ constraints = _get_names(
+ xpress_Namespaces.ROW,
+ m.attributes.rows,
+ cached=constraint_names,
+ )
dual = pd.Series(_dual, index=constraints, dtype=float)
except (xpress.SolverError, xpress.ModelError, SystemError):
logger.warning("Dual values of MILP couldn't be parsed")
@@ -1745,7 +1809,7 @@ def get_solver_solution() -> Solution:
return Result(status, solution, m)
-KnitroResult = namedtuple("KnitroResult", "reported_runtime")
+KnitroResult = namedtuple("KnitroResult", "knitro_context reported_runtime")
class Knitro(Solver[None]):
@@ -1808,7 +1872,13 @@ def _extract_values(
if n == 0:
return pd.Series(dtype=float)
- values = get_values_fn(kc, n - 1)
+ try:
+ # Compatible with KNITRO >= 15
+ values = get_values_fn(kc)
+ except TypeError:
+ # Fallback for older wrappers requiring explicit indices
+ values = get_values_fn(kc, list(range(n)))
+
names = list(get_names_fn(kc))
return pd.Series(values, index=names, dtype=float)
@@ -1931,12 +2001,14 @@ def get_solver_solution() -> Solution:
knitro.KN_write_mps_file(kc, path_to_string(solution_fn))
return Result(
- status, solution, KnitroResult(reported_runtime=reported_runtime)
+ status,
+ solution,
+ KnitroResult(knitro_context=kc, reported_runtime=reported_runtime),
)
finally:
- with contextlib.suppress(Exception):
- knitro.KN_free(kc)
+ # Intentionally keep the Knitro context alive; do not free `kc` here.
+ pass
mosek_bas_re = re.compile(r" (XL|XU)\s+([^ \t]+)\s+([^ \t]+)| (LL|UL|BS)\s+([^ \t]+)")
diff --git a/linopy/types.py b/linopy/types.py
index 0e3662bf..7238c552 100644
--- a/linopy/types.py
+++ b/linopy/types.py
@@ -17,6 +17,7 @@
QuadraticExpression,
ScalarLinearExpression,
)
+ from linopy.piecewise import PiecewiseConstraintDescriptor
from linopy.variables import ScalarVariable, Variable
# Type aliases using Union for Python 3.9 compatibility
@@ -46,7 +47,9 @@
"LinearExpression",
"QuadraticExpression",
]
-ConstraintLike = Union["Constraint", "AnonymousScalarConstraint"]
+ConstraintLike = Union[
+ "Constraint", "AnonymousScalarConstraint", "PiecewiseConstraintDescriptor"
+]
LinExprLike = Union["Variable", "LinearExpression"]
MaskLike = Union[numpy.ndarray, DataArray, Series, DataFrame] # noqa: UP007
SideLike = Union[ConstantLike, VariableLike, ExpressionLike] # noqa: UP007
diff --git a/linopy/variables.py b/linopy/variables.py
index beaeb4e6..4332a037 100644
--- a/linopy/variables.py
+++ b/linopy/variables.py
@@ -73,6 +73,7 @@
ScalarLinearExpression,
)
from linopy.model import Model
+ from linopy.piecewise import PiecewiseConstraintDescriptor, PiecewiseExpression
logger = logging.getLogger(__name__)
@@ -291,9 +292,15 @@ def at(self) -> AtIndexer:
@property
def loc(self) -> LocIndexer:
+ """
+ Indexing the variable using coordinates.
+ """
return LocIndexer(self)
def to_pandas(self) -> pd.Series:
+ """
+ Convert the variable labels to a pandas Series.
+ """
return self.labels.to_pandas()
def to_linexpr(
@@ -315,6 +322,8 @@ def to_linexpr(
Linear expression with the variables and coefficients.
"""
coefficient = as_dataarray(coefficient, coords=self.coords, dims=self.dims)
+ coefficient = coefficient.reindex_like(self.labels, fill_value=0)
+ coefficient = coefficient.fillna(0)
ds = Dataset({"coeffs": coefficient, "vars": self.labels}).expand_dims(
TERM_DIM, -1
)
@@ -443,7 +452,7 @@ def __matmul__(
return self.to_linexpr() @ other
def __div__(
- self, other: float | int | LinearExpression | Variable
+ self, other: ConstantLike | LinearExpression | Variable
) -> LinearExpression:
"""
Divide variables with a coefficient.
@@ -454,10 +463,10 @@ def __div__(
f"{type(self)} and {type(other)}. "
"Non-linear expressions are not yet supported."
)
- return self.to_linexpr(1 / other)
+ return self.to_linexpr()._divide_by_constant(other)
def __truediv__(
- self, coefficient: float | int | LinearExpression | Variable
+ self, coefficient: ConstantLike | LinearExpression | Variable
) -> LinearExpression:
"""
True divide variables with a coefficient.
@@ -522,13 +531,31 @@ def __rsub__(self, other: ConstantLike) -> LinearExpression:
except TypeError:
return NotImplemented
- def __le__(self, other: SideLike) -> Constraint:
+ @overload
+ def __le__(self, other: PiecewiseExpression) -> PiecewiseConstraintDescriptor: ...
+
+ @overload
+ def __le__(self, other: SideLike) -> Constraint: ...
+
+ def __le__(self, other: SideLike) -> Constraint | PiecewiseConstraintDescriptor:
return self.to_linexpr().__le__(other)
- def __ge__(self, other: SideLike) -> Constraint:
+ @overload
+ def __ge__(self, other: PiecewiseExpression) -> PiecewiseConstraintDescriptor: ...
+
+ @overload
+ def __ge__(self, other: SideLike) -> Constraint: ...
+
+ def __ge__(self, other: SideLike) -> Constraint | PiecewiseConstraintDescriptor:
return self.to_linexpr().__ge__(other)
- def __eq__(self, other: SideLike) -> Constraint: # type: ignore
+ @overload # type: ignore[override]
+ def __eq__(self, other: PiecewiseExpression) -> PiecewiseConstraintDescriptor: ...
+
+ @overload
+ def __eq__(self, other: SideLike) -> Constraint: ...
+
+ def __eq__(self, other: SideLike) -> Constraint | PiecewiseConstraintDescriptor:
return self.to_linexpr().__eq__(other)
def __gt__(self, other: Any) -> NotImplementedType:
@@ -544,29 +571,118 @@ def __lt__(self, other: Any) -> NotImplementedType:
def __contains__(self, value: str) -> bool:
return self.data.__contains__(value)
- def add(self, other: Variable) -> LinearExpression:
+ def add(
+ self, other: SideLike, join: str | None = None
+ ) -> LinearExpression | QuadraticExpression:
"""
Add variables to linear expressions or other variables.
+
+ Parameters
+ ----------
+ other : expression-like
+ The expression to add.
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left",
+ "right", "exact", "override". When None (default), uses the
+ current default behavior.
"""
- return self.__add__(other)
+ return self.to_linexpr().add(other, join=join)
- def sub(self, other: Variable) -> LinearExpression:
+ def sub(
+ self, other: SideLike, join: str | None = None
+ ) -> LinearExpression | QuadraticExpression:
"""
Subtract linear expressions or other variables from the variables.
+
+ Parameters
+ ----------
+ other : expression-like
+ The expression to subtract.
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left",
+ "right", "exact", "override". When None (default), uses the
+ current default behavior.
"""
- return self.__sub__(other)
+ return self.to_linexpr().sub(other, join=join)
- def mul(self, other: int) -> LinearExpression:
+ def mul(
+ self, other: ConstantLike, join: str | None = None
+ ) -> LinearExpression | QuadraticExpression:
"""
Multiply variables with a coefficient.
+
+ Parameters
+ ----------
+ other : constant-like
+ The coefficient to multiply by.
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left",
+ "right", "exact", "override". When None (default), uses the
+ current default behavior.
"""
- return self.__mul__(other)
+ return self.to_linexpr().mul(other, join=join)
- def div(self, other: int) -> LinearExpression:
+ def div(
+ self, other: ConstantLike, join: str | None = None
+ ) -> LinearExpression | QuadraticExpression:
"""
Divide variables with a coefficient.
+
+ Parameters
+ ----------
+ other : constant-like
+ The divisor.
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left",
+ "right", "exact", "override". When None (default), uses the
+ current default behavior.
+ """
+ return self.to_linexpr().div(other, join=join)
+
+ def le(self, rhs: SideLike, join: str | None = None) -> Constraint:
+ """
+ Less than or equal constraint.
+
+ Parameters
+ ----------
+ rhs : expression-like
+ Right-hand side of the constraint.
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left",
+ "right", "exact", "override". When None (default), uses the
+ current default behavior.
+ """
+ return self.to_linexpr().le(rhs, join=join)
+
+ def ge(self, rhs: SideLike, join: str | None = None) -> Constraint:
+ """
+ Greater than or equal constraint.
+
+ Parameters
+ ----------
+ rhs : expression-like
+ Right-hand side of the constraint.
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left",
+ "right", "exact", "override". When None (default), uses the
+ current default behavior.
+ """
+ return self.to_linexpr().ge(rhs, join=join)
+
+ def eq(self, rhs: SideLike, join: str | None = None) -> Constraint:
"""
- return self.__div__(other)
+ Equality constraint.
+
+ Parameters
+ ----------
+ rhs : expression-like
+ Right-hand side of the constraint.
+ join : str, optional
+ How to align coordinates. One of "outer", "inner", "left",
+ "right", "exact", "override". When None (default), uses the
+ current default behavior.
+ """
+ return self.to_linexpr().eq(rhs, join=join)
def pow(self, other: int) -> QuadraticExpression:
"""
@@ -734,10 +850,16 @@ def type(self) -> str:
@property
def coord_dims(self) -> tuple[Hashable, ...]:
+ """
+ Get the coordinate dimensions of the variable.
+ """
return tuple(k for k in self.dims if k not in HELPER_DIMS)
@property
def coord_sizes(self) -> dict[Hashable, int]:
+ """
+ Get the coordinate sizes of the variable.
+ """
return {k: v for k, v in self.sizes.items() if k not in HELPER_DIMS}
@property
@@ -1111,6 +1233,19 @@ def sanitize(self) -> Variable:
return self
def equals(self, other: Variable) -> bool:
+ """
+ Check if this Variable is equal to another.
+
+ Parameters
+ ----------
+ other : Variable
+ The Variable to compare with.
+
+ Returns
+ -------
+ bool
+ True if the variables have equal labels, False otherwise.
+ """
return self.labels.equals(other.labels)
# Wrapped function which would convert variable to dataarray
@@ -1251,6 +1386,8 @@ def __repr__(self) -> str:
sos_dim := ds.attrs.get(SOS_DIM_ATTR)
):
coords += f" - sos{sos_type} on {sos_dim}"
+ if ds.attrs.get("semi_continuous", False):
+ coords += " - semi-continuous"
r += f" * {name}{coords}\n"
if not len(list(self)):
r += "\n"
@@ -1390,7 +1527,23 @@ def continuous(self) -> Variables:
{
name: self.data[name]
for name in self
- if not self[name].attrs["integer"] and not self[name].attrs["binary"]
+ if not self[name].attrs["integer"]
+ and not self[name].attrs["binary"]
+ and not self[name].attrs.get("semi_continuous", False)
+ },
+ self.model,
+ )
+
+ @property
+ def semi_continuous(self) -> Variables:
+ """
+ Get all semi-continuous variables.
+ """
+ return self.__class__(
+ {
+ name: self.data[name]
+ for name in self
+ if self[name].attrs.get("semi_continuous", False)
},
self.model,
)
@@ -1655,7 +1808,7 @@ def __le__(self, other: int | float) -> AnonymousScalarConstraint:
def __ge__(self, other: int) -> AnonymousScalarConstraint:
return self.to_scalar_linexpr(1).__ge__(other)
- def __eq__(self, other: int | float) -> AnonymousScalarConstraint: # type: ignore
+ def __eq__(self, other: int | float) -> AnonymousScalarConstraint: # type: ignore[override]
return self.to_scalar_linexpr(1).__eq__(other)
def __gt__(self, other: Any) -> None:
diff --git a/pyproject.toml b/pyproject.toml
index aaac2cf1..14a53a22 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -159,6 +159,7 @@ ignore = [
'D101', # Missing docstring in public class
'D102', # Missing docstring in public method
'D103', # Missing docstring in public function
+ 'D106', # Missing docstring in public nested class
'D107', # Missing docstring in __init__
'D202', # No blank lines allowed after function docstring
'D203', # 1 blank line required before class docstring
diff --git a/test/conftest.py b/test/conftest.py
index 3197689b..ee20cdc2 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -1,9 +1,16 @@
"""Pytest configuration and fixtures."""
+from __future__ import annotations
+
import os
+from typing import TYPE_CHECKING
+import pandas as pd
import pytest
+if TYPE_CHECKING:
+ from linopy import Model, Variable
+
def pytest_addoption(parser: pytest.Parser) -> None:
"""Add custom command line options."""
@@ -48,3 +55,43 @@ def pytest_collection_modifyitems(
if solver_supports(solver, SolverFeature.GPU_ACCELERATION):
item.add_marker(skip_gpu)
item.add_marker(pytest.mark.gpu)
+
+
+@pytest.fixture
+def m() -> Model:
+ from linopy import Model
+
+ m = Model()
+ m.add_variables(pd.Series([0, 0]), 1, name="x")
+ m.add_variables(4, pd.Series([8, 10]), name="y")
+ m.add_variables(0, pd.DataFrame([[1, 2], [3, 4], [5, 6]]).T, name="z")
+ m.add_variables(coords=[pd.RangeIndex(20, name="dim_2")], name="v")
+ idx = pd.MultiIndex.from_product([[1, 2], ["a", "b"]], names=("level1", "level2"))
+ idx.name = "dim_3"
+ m.add_variables(coords=[idx], name="u")
+ return m
+
+
+@pytest.fixture
+def x(m: Model) -> Variable:
+ return m.variables["x"]
+
+
+@pytest.fixture
+def y(m: Model) -> Variable:
+ return m.variables["y"]
+
+
+@pytest.fixture
+def z(m: Model) -> Variable:
+ return m.variables["z"]
+
+
+@pytest.fixture
+def v(m: Model) -> Variable:
+ return m.variables["v"]
+
+
+@pytest.fixture
+def u(m: Model) -> Variable:
+ return m.variables["u"]
diff --git a/test/test_common.py b/test/test_common.py
index c3500155..f1190024 100644
--- a/test/test_common.py
+++ b/test/test_common.py
@@ -10,7 +10,6 @@
import polars as pl
import pytest
import xarray as xr
-from test_linear_expression import m, u, x # noqa: F401
from xarray import DataArray
from xarray.testing.assertions import assert_equal
@@ -96,17 +95,6 @@ def test_as_dataarray_with_series_dims_superset() -> None:
assert list(da.coords[target_dim].values) == target_index
-def test_as_dataarray_with_series_override_coords() -> None:
- target_dim = "dim_0"
- target_index = ["a", "b", "c"]
- s = pd.Series([1, 2, 3], index=target_index)
- with pytest.warns(UserWarning):
- da = as_dataarray(s, coords=[[1, 2, 3]])
- assert isinstance(da, DataArray)
- assert da.dims == (target_dim,)
- assert list(da.coords[target_dim].values) == target_index
-
-
def test_as_dataarray_with_series_aligned_coords() -> None:
"""This should not give out a warning even though coords are given."""
target_dim = "dim_0"
@@ -214,19 +202,6 @@ def test_as_dataarray_dataframe_dims_superset() -> None:
assert list(da.coords[target_dims[1]].values) == target_columns
-def test_as_dataarray_dataframe_override_coords() -> None:
- target_dims = ("dim_0", "dim_1")
- target_index = ["a", "b"]
- target_columns = ["A", "B"]
- df = pd.DataFrame([[1, 2], [3, 4]], index=target_index, columns=target_columns)
- with pytest.warns(UserWarning):
- da = as_dataarray(df, coords=[[1, 2], [2, 3]])
- assert isinstance(da, DataArray)
- assert da.dims == target_dims
- assert list(da.coords[target_dims[0]].values) == target_index
- assert list(da.coords[target_dims[1]].values) == target_columns
-
-
def test_as_dataarray_dataframe_aligned_coords() -> None:
"""This should not give out a warning even though coords are given."""
target_dims = ("dim_0", "dim_1")
@@ -370,8 +345,10 @@ def test_as_dataarray_with_ndarray_coords_dict_set_dims_not_aligned() -> None:
target_dims = ("dim_0", "dim_1")
target_coords = {"dim_0": ["a", "b"], "dim_2": ["A", "B"]}
arr = np.array([[1, 2], [3, 4]])
- with pytest.raises(ValueError):
- as_dataarray(arr, coords=target_coords, dims=target_dims)
+ da = as_dataarray(arr, coords=target_coords, dims=target_dims)
+ assert da.dims == target_dims
+ assert list(da.coords["dim_0"].values) == ["a", "b"]
+ assert "dim_2" not in da.coords
def test_as_dataarray_with_number() -> None:
diff --git a/test/test_compatible_arithmetrics.py b/test/test_compatible_arithmetrics.py
index 1d1618ba..edab1ae1 100644
--- a/test/test_compatible_arithmetrics.py
+++ b/test/test_compatible_arithmetrics.py
@@ -98,13 +98,13 @@ def test_arithmetric_operations_variable(m: Model) -> None:
assert_linequal(x + data, x + other_datatype)
assert_linequal(x - data, x - other_datatype)
assert_linequal(x * data, x * other_datatype)
- assert_linequal(x / data, x / other_datatype) # type: ignore
- assert_linequal(data * x, other_datatype * x) # type: ignore
+ assert_linequal(x / data, x / other_datatype)
+ assert_linequal(data * x, other_datatype * x) # type: ignore[arg-type]
assert x.__add__(object()) is NotImplemented
assert x.__sub__(object()) is NotImplemented
assert x.__mul__(object()) is NotImplemented
- assert x.__truediv__(object()) is NotImplemented # type: ignore
- assert x.__pow__(object()) is NotImplemented # type: ignore
+ assert x.__truediv__(object()) is NotImplemented
+ assert x.__pow__(object()) is NotImplemented # type: ignore[operator]
with pytest.raises(ValueError):
x.__pow__(3)
diff --git a/test/test_constraints.py b/test/test_constraints.py
index 01aebb69..9a467c8c 100644
--- a/test/test_constraints.py
+++ b/test/test_constraints.py
@@ -5,6 +5,8 @@
@author: fabulous
"""
+from typing import Any
+
import dask
import dask.array.core
import numpy as np
@@ -12,7 +14,7 @@
import pytest
import xarray as xr
-from linopy import EQUAL, GREATER_EQUAL, LESS_EQUAL, Model
+from linopy import EQUAL, GREATER_EQUAL, LESS_EQUAL, Model, Variable, available_solvers
from linopy.testing import assert_conequal
# Test model functions
@@ -139,6 +141,82 @@ def test_constraint_assignment_with_reindex() -> None:
assert (con.coords["dim_0"].values == shuffled_coords).all()
+@pytest.mark.parametrize(
+ "rhs_factory",
+ [
+ pytest.param(lambda m, v: v, id="numpy"),
+ pytest.param(lambda m, v: xr.DataArray(v, dims=["dim_0"]), id="dataarray"),
+ pytest.param(lambda m, v: pd.Series(v, index=v), id="series"),
+ pytest.param(
+ lambda m, v: m.add_variables(coords=[v]),
+ id="variable",
+ ),
+ pytest.param(
+ lambda m, v: 2 * m.add_variables(coords=[v]) + 1,
+ id="linexpr",
+ ),
+ ],
+)
+def test_constraint_rhs_lower_dim(rhs_factory: Any) -> None:
+ m = Model()
+ naxis = np.arange(10, dtype=float)
+ maxis = np.arange(10).astype(str)
+ x = m.add_variables(coords=[naxis, maxis])
+ y = m.add_variables(coords=[naxis, maxis])
+
+ c = m.add_constraints(x - y >= rhs_factory(m, naxis))
+ assert c.shape == (10, 10)
+
+
+@pytest.mark.parametrize(
+ "rhs_factory",
+ [
+ pytest.param(lambda m: np.ones((5, 3)), id="numpy"),
+ pytest.param(lambda m: pd.DataFrame(np.ones((5, 3))), id="dataframe"),
+ ],
+)
+def test_constraint_rhs_higher_dim_constant_warns(
+ rhs_factory: Any, caplog: Any
+) -> None:
+ m = Model()
+ x = m.add_variables(coords=[range(5)], name="x")
+
+ with caplog.at_level("WARNING", logger="linopy.expressions"):
+ m.add_constraints(x >= rhs_factory(m))
+ assert "dimensions" in caplog.text
+
+
+def test_constraint_rhs_higher_dim_dataarray_reindexes() -> None:
+ """DataArray RHS with extra dims reindexes to expression coords (no raise)."""
+ m = Model()
+ x = m.add_variables(coords=[range(5)], name="x")
+ rhs = xr.DataArray(np.ones((5, 3)), dims=["dim_0", "extra"])
+
+ c = m.add_constraints(x >= rhs)
+ assert c.shape == (5, 3)
+
+
+@pytest.mark.parametrize(
+ "rhs_factory",
+ [
+ pytest.param(
+ lambda m: m.add_variables(coords=[range(5), range(3)]),
+ id="variable",
+ ),
+ pytest.param(
+ lambda m: 2 * m.add_variables(coords=[range(5), range(3)]) + 1,
+ id="linexpr",
+ ),
+ ],
+)
+def test_constraint_rhs_higher_dim_expression(rhs_factory: Any) -> None:
+ m = Model()
+ x = m.add_variables(coords=[range(5)], name="x")
+
+ c = m.add_constraints(x >= rhs_factory(m))
+ assert c.shape == (5, 3)
+
+
def test_wrong_constraint_assignment_repeated() -> None:
# repeated variable assignment is forbidden
m: Model = Model()
@@ -266,3 +344,105 @@ def test_sanitize_infinities() -> None:
m.add_constraints(x >= np.inf, name="con_wrong_inf")
with pytest.raises(ValueError):
m.add_constraints(y <= -np.inf, name="con_wrong_neg_inf")
+
+
+class TestConstraintCoordinateAlignment:
+ @pytest.fixture(params=["xarray", "pandas_series"], ids=["da", "series"])
+ def subset(self, request: Any) -> xr.DataArray | pd.Series:
+ if request.param == "xarray":
+ return xr.DataArray([10.0, 30.0], dims=["dim_2"], coords={"dim_2": [1, 3]})
+ return pd.Series([10.0, 30.0], index=pd.Index([1, 3], name="dim_2"))
+
+ @pytest.fixture(params=["xarray", "pandas_series"], ids=["da", "series"])
+ def superset(self, request: Any) -> xr.DataArray | pd.Series:
+ if request.param == "xarray":
+ return xr.DataArray(
+ np.arange(25, dtype=float),
+ dims=["dim_2"],
+ coords={"dim_2": range(25)},
+ )
+ return pd.Series(
+ np.arange(25, dtype=float), index=pd.Index(range(25), name="dim_2")
+ )
+
+ def test_var_le_subset(self, v: Variable, subset: xr.DataArray) -> None:
+ con = v <= subset
+ assert con.sizes["dim_2"] == v.sizes["dim_2"]
+ assert con.rhs.sel(dim_2=1).item() == 10.0
+ assert con.rhs.sel(dim_2=3).item() == 30.0
+ assert np.isnan(con.rhs.sel(dim_2=0).item())
+
+ @pytest.mark.parametrize("sign", [LESS_EQUAL, GREATER_EQUAL, EQUAL])
+ def test_var_comparison_subset(
+ self, v: Variable, subset: xr.DataArray, sign: str
+ ) -> None:
+ if sign == LESS_EQUAL:
+ con = v <= subset
+ elif sign == GREATER_EQUAL:
+ con = v >= subset
+ else:
+ con = v == subset
+ assert con.sizes["dim_2"] == v.sizes["dim_2"]
+ assert con.rhs.sel(dim_2=1).item() == 10.0
+ assert np.isnan(con.rhs.sel(dim_2=0).item())
+
+ def test_expr_le_subset(self, v: Variable, subset: xr.DataArray) -> None:
+ expr = v + 5
+ con = expr <= subset
+ assert con.sizes["dim_2"] == v.sizes["dim_2"]
+ assert con.rhs.sel(dim_2=1).item() == pytest.approx(5.0)
+ assert con.rhs.sel(dim_2=3).item() == pytest.approx(25.0)
+ assert np.isnan(con.rhs.sel(dim_2=0).item())
+
+ @pytest.mark.parametrize("sign", [LESS_EQUAL, GREATER_EQUAL, EQUAL])
+ def test_subset_comparison_var(
+ self, v: Variable, subset: xr.DataArray, sign: str
+ ) -> None:
+ if sign == LESS_EQUAL:
+ con = subset <= v
+ elif sign == GREATER_EQUAL:
+ con = subset >= v
+ else:
+ con = subset == v
+ assert con.sizes["dim_2"] == v.sizes["dim_2"]
+ assert np.isnan(con.rhs.sel(dim_2=0).item())
+ assert con.rhs.sel(dim_2=1).item() == pytest.approx(10.0)
+
+ @pytest.mark.parametrize("sign", [LESS_EQUAL, GREATER_EQUAL])
+ def test_superset_comparison_var(
+ self, v: Variable, superset: xr.DataArray, sign: str
+ ) -> None:
+ if sign == LESS_EQUAL:
+ con = superset <= v
+ else:
+ con = superset >= v
+ assert con.sizes["dim_2"] == v.sizes["dim_2"]
+ assert not np.isnan(con.lhs.coeffs.values).any()
+ assert not np.isnan(con.rhs.values).any()
+
+ def test_constraint_rhs_extra_dims_broadcasts(self, v: Variable) -> None:
+ rhs = xr.DataArray(
+ [[1.0, 2.0]],
+ dims=["extra", "dim_2"],
+ coords={"dim_2": [0, 1]},
+ )
+ c = v <= rhs
+ assert "extra" in c.dims
+
+ def test_subset_constraint_solve_integration(self) -> None:
+ if not available_solvers:
+ pytest.skip("No solver available")
+ solver = "highs" if "highs" in available_solvers else available_solvers[0]
+ m = Model()
+ coords = pd.RangeIndex(5, name="i")
+ x = m.add_variables(lower=0, upper=100, coords=[coords], name="x")
+ subset_ub = xr.DataArray([10.0, 20.0], dims=["i"], coords={"i": [1, 3]})
+ m.add_constraints(x <= subset_ub, name="subset_ub")
+ m.add_objective(x.sum(), sense="max")
+ m.solve(solver_name=solver)
+ sol = m.solution["x"]
+ assert sol.sel(i=1).item() == pytest.approx(10.0)
+ assert sol.sel(i=3).item() == pytest.approx(20.0)
+ assert sol.sel(i=0).item() == pytest.approx(100.0)
+ assert sol.sel(i=2).item() == pytest.approx(100.0)
+ assert sol.sel(i=4).item() == pytest.approx(100.0)
diff --git a/test/test_infeasibility.py b/test/test_infeasibility.py
index 01994789..74a63d6b 100644
--- a/test/test_infeasibility.py
+++ b/test/test_infeasibility.py
@@ -3,6 +3,8 @@
Test infeasibility detection for different solvers.
"""
+from typing import cast
+
import pandas as pd
import pytest
@@ -242,3 +244,58 @@ def test_deprecated_method(
# Check that it contains constraint labels
assert len(subset) > 0
+
+ @pytest.mark.parametrize("solver", ["gurobi", "xpress"])
+ def test_masked_constraint_infeasibility(
+ self, solver: str, capsys: pytest.CaptureFixture[str]
+ ) -> None:
+ """
+ Test infeasibility detection with masked constraints.
+
+ This test verifies that the solver correctly maps constraint positions
+ to constraint labels when constraints are masked (some rows skipped).
+ The enumeration creates positions [0, 1, 2, ...] that should correspond
+ to the actual constraint labels which may have gaps like [0, 2, 4, 6].
+ """
+ if solver not in available_solvers:
+ pytest.skip(f"{solver} not available")
+
+ m = Model()
+
+ time = pd.RangeIndex(8, name="time")
+ x = m.add_variables(lower=0, upper=5, coords=[time], name="x")
+ y = m.add_variables(lower=0, upper=5, coords=[time], name="y")
+
+ # Create a mask that keeps only even time indices (0, 2, 4, 6)
+ mask = pd.Series([i % 2 == 0 for i in range(len(time))])
+ m.add_constraints(x + y >= 10, name="sum_lower", mask=mask)
+
+ mask = pd.Series([False] * (len(time) // 2) + [True] * (len(time) // 2))
+ m.add_constraints(x <= 4, name="x_upper", mask=mask)
+
+ m.add_objective(x.sum() + y.sum())
+ status, condition = m.solve(solver_name=solver)
+
+ assert status == "warning"
+ assert "infeasible" in condition
+
+ labels = m.compute_infeasibilities()
+ assert labels
+
+ positions = [
+ cast(tuple[str, dict[str, int]], m.constraints.get_label_position(label))
+ for label in labels
+ ]
+ grouped_coords: dict[str, set[int]] = {"sum_lower": set(), "x_upper": set()}
+ for name, coord in positions:
+ assert name in grouped_coords
+ grouped_coords[name].add(coord["time"])
+
+ assert grouped_coords["sum_lower"]
+ assert grouped_coords["sum_lower"] == grouped_coords["x_upper"]
+
+ m.print_infeasibilities()
+ output = capsys.readouterr().out
+ for time_coord in grouped_coords["sum_lower"]:
+ assert f"sum_lower[{time_coord}]" in output
+ assert f"x_upper[{time_coord}]" in output
diff --git a/test/test_io.py b/test/test_io.py
index e8ded144..15bdcde7 100644
--- a/test/test_io.py
+++ b/test/test_io.py
@@ -6,6 +6,8 @@
"""
import pickle
+import sys
+import types
from pathlib import Path
import numpy as np
@@ -15,7 +17,7 @@
import xarray as xr
from linopy import LESS_EQUAL, Model, available_solvers, read_netcdf
-from linopy.io import signed_number
+from linopy.io import signed_number, to_xpress
from linopy.testing import assert_model_equal
@@ -204,6 +206,114 @@ def test_to_highspy(model: Model) -> None:
model.to_highspy()
+class _FakeXpressProblem:
+ def __init__(self) -> None:
+ self.calls: list[tuple[str, dict]] = []
+
+ def loadLP(self, **kwargs: object) -> None:
+ self.calls.append(("loadLP", kwargs))
+
+ def loadQP(self, **kwargs: object) -> None:
+ self.calls.append(("loadQP", kwargs))
+
+ def loadMIQP(self, **kwargs: object) -> None:
+ self.calls.append(("loadMIQP", kwargs))
+
+ def chgObjSense(self, objsense: object) -> None:
+ self.calls.append(("chgObjSense", {"objsense": objsense}))
+
+ def addNames(
+ self, namespace_type: int, names: list[str], first: int, last: int
+ ) -> None:
+ self.calls.append(
+ (
+ "addNames",
+ {
+ "type": namespace_type,
+ "names": names,
+ "first": first,
+ "last": last,
+ },
+ )
+ )
+
+
+def _install_fake_xpress(monkeypatch: pytest.MonkeyPatch) -> _FakeXpressProblem:
+ problem = _FakeXpressProblem()
+ fake_xpress = types.ModuleType("xpress")
+ fake_xpress.infinity = 1.0e20 # type: ignore[attr-defined]
+ fake_xpress.ObjSense = types.SimpleNamespace(MAXIMIZE="MAX", MINIMIZE="MIN") # type: ignore[attr-defined]
+ fake_xpress.Namespaces = types.SimpleNamespace(ROW=1, COLUMN=2) # type: ignore[attr-defined]
+ fake_xpress.problem = lambda: problem # type: ignore[attr-defined]
+ monkeypatch.setitem(sys.modules, "xpress", fake_xpress)
+ return problem
+
+
+def test_to_xpress_loadlp_and_maxsense(monkeypatch: pytest.MonkeyPatch) -> None:
+ fake_problem = _install_fake_xpress(monkeypatch)
+
+ m = Model()
+ x = m.add_variables(lower=0, upper=10, name="x")
+ y = m.add_variables(lower=0, upper=5, name="y")
+ m.add_constraints(2 * x + y, LESS_EQUAL, 12, name="c")
+ m.add_objective(3 * x + y, sense="max")
+
+ solver_model = to_xpress(m, explicit_coordinate_names=True)
+
+ assert solver_model is fake_problem
+ method_names = [name for name, _ in fake_problem.calls]
+ assert "loadLP" in method_names
+ assert "chgObjSense" in method_names
+ assert method_names.count("addNames") == 2
+
+ call_map = {name: kwargs for name, kwargs in fake_problem.calls}
+ lp_kwargs = call_map["loadLP"]
+ assert lp_kwargs["start"].dtype == np.int32
+ assert lp_kwargs["rowind"].dtype == np.int32
+
+
+def test_to_xpress_loadmiqp(monkeypatch: pytest.MonkeyPatch) -> None:
+ fake_problem = _install_fake_xpress(monkeypatch)
+
+ m = Model()
+ x = m.add_variables(lower=0, upper=10, name="x")
+ y = m.add_variables(binary=True, name="y")
+ m.add_constraints(x + y, LESS_EQUAL, 3, name="c")
+ m.add_objective(x * x + 2 * y)
+
+ to_xpress(m)
+
+ call_map = {name: kwargs for name, kwargs in fake_problem.calls}
+ assert "loadMIQP" in call_map
+ miqp_kwargs = call_map["loadMIQP"]
+ assert miqp_kwargs["entind"] is not None
+ assert miqp_kwargs["objqcoef"] is not None
+ assert miqp_kwargs["entind"].dtype == np.int32
+ assert miqp_kwargs["objqcol1"].dtype == np.int32
+ assert miqp_kwargs["objqcol2"].dtype == np.int32
+ assert "addNames" not in call_map
+
+
+def test_to_xpress_progress_logging(monkeypatch: pytest.MonkeyPatch) -> None:
+ _install_fake_xpress(monkeypatch)
+ messages: list[str] = []
+
+ def _log_info(msg: str, *args: object, **kwargs: object) -> None:
+ messages.append(msg % args if args else msg)
+
+ monkeypatch.setattr("linopy.io.logger.info", _log_info)
+
+ m = Model()
+ x = m.add_variables(lower=0, upper=10, coords=[range(3)], name="x")
+ m.add_constraints(x.sum(), LESS_EQUAL, 10, name="c")
+ m.add_objective((2 * x).sum())
+
+ to_xpress(m, progress=True)
+
+ assert any("prepared linear constraint matrix" in msg for msg in messages)
+ assert any("finished direct model build" in msg for msg in messages)
+
+
def test_to_blocks(tmp_path: Path) -> None:
m: Model = Model()
diff --git a/test/test_linear_expression.py b/test/test_linear_expression.py
index 0da9ec7f..d3b8d426 100644
--- a/test/test_linear_expression.py
+++ b/test/test_linear_expression.py
@@ -7,6 +7,8 @@
from __future__ import annotations
+from typing import Any
+
import numpy as np
import pandas as pd
import polars as pl
@@ -21,46 +23,6 @@
from linopy.variables import ScalarVariable
-@pytest.fixture
-def m() -> Model:
- m = Model()
-
- m.add_variables(pd.Series([0, 0]), 1, name="x")
- m.add_variables(4, pd.Series([8, 10]), name="y")
- m.add_variables(0, pd.DataFrame([[1, 2], [3, 4], [5, 6]]).T, name="z")
- m.add_variables(coords=[pd.RangeIndex(20, name="dim_2")], name="v")
-
- idx = pd.MultiIndex.from_product([[1, 2], ["a", "b"]], names=("level1", "level2"))
- idx.name = "dim_3"
- m.add_variables(coords=[idx], name="u")
- return m
-
-
-@pytest.fixture
-def x(m: Model) -> Variable:
- return m.variables["x"]
-
-
-@pytest.fixture
-def y(m: Model) -> Variable:
- return m.variables["y"]
-
-
-@pytest.fixture
-def z(m: Model) -> Variable:
- return m.variables["z"]
-
-
-@pytest.fixture
-def v(m: Model) -> Variable:
- return m.variables["v"]
-
-
-@pytest.fixture
-def u(m: Model) -> Variable:
- return m.variables["u"]
-
-
def test_empty_linexpr(m: Model) -> None:
LinearExpression(None, m)
@@ -575,6 +537,498 @@ def test_linear_expression_multiplication_invalid(
expr / x
+class TestCoordinateAlignment:
+ @pytest.fixture(params=["da", "series"])
+ def subset(self, request: Any) -> xr.DataArray | pd.Series:
+ if request.param == "da":
+ return xr.DataArray([10.0, 30.0], dims=["dim_2"], coords={"dim_2": [1, 3]})
+ return pd.Series([10.0, 30.0], index=pd.Index([1, 3], name="dim_2"))
+
+ @pytest.fixture(params=["da", "series"])
+ def superset(self, request: Any) -> xr.DataArray | pd.Series:
+ if request.param == "da":
+ return xr.DataArray(
+ np.arange(25, dtype=float),
+ dims=["dim_2"],
+ coords={"dim_2": range(25)},
+ )
+ return pd.Series(
+ np.arange(25, dtype=float), index=pd.Index(range(25), name="dim_2")
+ )
+
+ @pytest.fixture
+ def expected_fill(self) -> np.ndarray:
+ arr = np.zeros(20)
+ arr[1] = 10.0
+ arr[3] = 30.0
+ return arr
+
+ @pytest.fixture(params=["xarray", "pandas_series"], ids=["da", "series"])
+ def nan_constant(self, request: Any) -> xr.DataArray | pd.Series:
+ vals = np.arange(20, dtype=float)
+ vals[0] = np.nan
+ vals[5] = np.nan
+ vals[19] = np.nan
+ if request.param == "xarray":
+ return xr.DataArray(vals, dims=["dim_2"], coords={"dim_2": range(20)})
+ return pd.Series(vals, index=pd.Index(range(20), name="dim_2"))
+
+ class TestSubset:
+ @pytest.mark.parametrize("operand", ["var", "expr"])
+ def test_mul_subset_fills_zeros(
+ self,
+ v: Variable,
+ subset: xr.DataArray,
+ expected_fill: np.ndarray,
+ operand: str,
+ ) -> None:
+ target = v if operand == "var" else 1 * v
+ result = target * subset
+ assert result.sizes["dim_2"] == v.sizes["dim_2"]
+ assert not np.isnan(result.coeffs.values).any()
+ np.testing.assert_array_equal(result.coeffs.squeeze().values, expected_fill)
+
+ @pytest.mark.parametrize("operand", ["var", "expr"])
+ def test_add_subset_fills_zeros(
+ self,
+ v: Variable,
+ subset: xr.DataArray,
+ expected_fill: np.ndarray,
+ operand: str,
+ ) -> None:
+ if operand == "var":
+ result = v + subset
+ expected = expected_fill
+ else:
+ result = (v + 5) + subset
+ expected = expected_fill + 5
+ assert result.sizes["dim_2"] == v.sizes["dim_2"]
+ assert not np.isnan(result.const.values).any()
+ np.testing.assert_array_equal(result.const.values, expected)
+
+ @pytest.mark.parametrize("operand", ["var", "expr"])
+ def test_sub_subset_fills_negated(
+ self,
+ v: Variable,
+ subset: xr.DataArray,
+ expected_fill: np.ndarray,
+ operand: str,
+ ) -> None:
+ if operand == "var":
+ result = v - subset
+ expected = -expected_fill
+ else:
+ result = (v + 5) - subset
+ expected = 5 - expected_fill
+ assert result.sizes["dim_2"] == v.sizes["dim_2"]
+ assert not np.isnan(result.const.values).any()
+ np.testing.assert_array_equal(result.const.values, expected)
+
+ @pytest.mark.parametrize("operand", ["var", "expr"])
+ def test_div_subset_inverts_nonzero(
+ self, v: Variable, subset: xr.DataArray, operand: str
+ ) -> None:
+ target = v if operand == "var" else 1 * v
+ result = target / subset
+ assert result.sizes["dim_2"] == v.sizes["dim_2"]
+ assert not np.isnan(result.coeffs.values).any()
+ assert result.coeffs.squeeze().sel(dim_2=1).item() == pytest.approx(0.1)
+ assert result.coeffs.squeeze().sel(dim_2=0).item() == pytest.approx(1.0)
+
+ def test_subset_add_var_coefficients(
+ self, v: Variable, subset: xr.DataArray
+ ) -> None:
+ result = subset + v
+ np.testing.assert_array_equal(result.coeffs.squeeze().values, np.ones(20))
+
+ def test_subset_sub_var_coefficients(
+ self, v: Variable, subset: xr.DataArray
+ ) -> None:
+ result = subset - v
+ np.testing.assert_array_equal(result.coeffs.squeeze().values, -np.ones(20))
+
+ class TestSuperset:
+ def test_add_superset_pins_to_lhs_coords(
+ self, v: Variable, superset: xr.DataArray
+ ) -> None:
+ result = v + superset
+ assert result.sizes["dim_2"] == v.sizes["dim_2"]
+ assert not np.isnan(result.const.values).any()
+
+ def test_add_var_commutative(self, v: Variable, superset: xr.DataArray) -> None:
+ assert_linequal(superset + v, v + superset)
+
+ def test_sub_var_commutative(self, v: Variable, superset: xr.DataArray) -> None:
+ assert_linequal(superset - v, -v + superset)
+
+ def test_mul_var_commutative(self, v: Variable, superset: xr.DataArray) -> None:
+ assert_linequal(superset * v, v * superset)
+
+ def test_mul_superset_pins_to_lhs_coords(
+ self, v: Variable, superset: xr.DataArray
+ ) -> None:
+ result = v * superset
+ assert result.sizes["dim_2"] == v.sizes["dim_2"]
+ assert not np.isnan(result.coeffs.values).any()
+
+ def test_div_superset_pins_to_lhs_coords(self, v: Variable) -> None:
+ superset_nonzero = xr.DataArray(
+ np.arange(1, 26, dtype=float),
+ dims=["dim_2"],
+ coords={"dim_2": range(25)},
+ )
+ result = v / superset_nonzero
+ assert result.sizes["dim_2"] == v.sizes["dim_2"]
+ assert not np.isnan(result.coeffs.values).any()
+
+ class TestDisjoint:
+ def test_add_disjoint_fills_zeros(self, v: Variable) -> None:
+ disjoint = xr.DataArray(
+ [100.0, 200.0], dims=["dim_2"], coords={"dim_2": [50, 60]}
+ )
+ result = v + disjoint
+ assert result.sizes["dim_2"] == v.sizes["dim_2"]
+ assert not np.isnan(result.const.values).any()
+ np.testing.assert_array_equal(result.const.values, np.zeros(20))
+
+ def test_mul_disjoint_fills_zeros(self, v: Variable) -> None:
+ disjoint = xr.DataArray(
+ [10.0, 20.0], dims=["dim_2"], coords={"dim_2": [50, 60]}
+ )
+ result = v * disjoint
+ assert result.sizes["dim_2"] == v.sizes["dim_2"]
+ assert not np.isnan(result.coeffs.values).any()
+ np.testing.assert_array_equal(result.coeffs.squeeze().values, np.zeros(20))
+
+ def test_div_disjoint_preserves_coeffs(self, v: Variable) -> None:
+ disjoint = xr.DataArray(
+ [10.0, 20.0], dims=["dim_2"], coords={"dim_2": [50, 60]}
+ )
+ result = v / disjoint
+ assert result.sizes["dim_2"] == v.sizes["dim_2"]
+ assert not np.isnan(result.coeffs.values).any()
+ np.testing.assert_array_equal(result.coeffs.squeeze().values, np.ones(20))
+
+ class TestCommutativity:
+ @pytest.mark.parametrize(
+ "make_lhs,make_rhs",
+ [
+ (lambda v, s: s * v, lambda v, s: v * s),
+ (lambda v, s: s * (1 * v), lambda v, s: (1 * v) * s),
+ (lambda v, s: s + v, lambda v, s: v + s),
+ (lambda v, s: s + (v + 5), lambda v, s: (v + 5) + s),
+ ],
+ ids=["subset*var", "subset*expr", "subset+var", "subset+expr"],
+ )
+ def test_commutativity(
+ self,
+ v: Variable,
+ subset: xr.DataArray,
+ make_lhs: Any,
+ make_rhs: Any,
+ ) -> None:
+ assert_linequal(make_lhs(v, subset), make_rhs(v, subset))
+
+ def test_sub_var_anticommutative(
+ self, v: Variable, subset: xr.DataArray
+ ) -> None:
+ assert_linequal(subset - v, -v + subset)
+
+ def test_sub_expr_anticommutative(
+ self, v: Variable, subset: xr.DataArray
+ ) -> None:
+ expr = v + 5
+ assert_linequal(subset - expr, -(expr - subset))
+
+ def test_add_commutativity_full_coords(self, v: Variable) -> None:
+ full = xr.DataArray(
+ np.arange(20, dtype=float),
+ dims=["dim_2"],
+ coords={"dim_2": range(20)},
+ )
+ assert_linequal(v + full, full + v)
+
+ class TestQuadratic:
+ def test_quadexpr_add_subset(
+ self,
+ v: Variable,
+ subset: xr.DataArray,
+ expected_fill: np.ndarray,
+ ) -> None:
+ qexpr = v * v
+ result = qexpr + subset
+ assert isinstance(result, QuadraticExpression)
+ assert result.sizes["dim_2"] == v.sizes["dim_2"]
+ assert not np.isnan(result.const.values).any()
+ np.testing.assert_array_equal(result.const.values, expected_fill)
+
+ def test_quadexpr_sub_subset(
+ self,
+ v: Variable,
+ subset: xr.DataArray,
+ expected_fill: np.ndarray,
+ ) -> None:
+ qexpr = v * v
+ result = qexpr - subset
+ assert isinstance(result, QuadraticExpression)
+ assert result.sizes["dim_2"] == v.sizes["dim_2"]
+ assert not np.isnan(result.const.values).any()
+ np.testing.assert_array_equal(result.const.values, -expected_fill)
+
+ def test_quadexpr_mul_subset(
+ self,
+ v: Variable,
+ subset: xr.DataArray,
+ expected_fill: np.ndarray,
+ ) -> None:
+ qexpr = v * v
+ result = qexpr * subset
+ assert isinstance(result, QuadraticExpression)
+ assert result.sizes["dim_2"] == v.sizes["dim_2"]
+ assert not np.isnan(result.coeffs.values).any()
+ np.testing.assert_array_equal(result.coeffs.squeeze().values, expected_fill)
+
+ def test_subset_mul_quadexpr(
+ self,
+ v: Variable,
+ subset: xr.DataArray,
+ expected_fill: np.ndarray,
+ ) -> None:
+ qexpr = v * v
+ result = subset * qexpr
+ assert isinstance(result, QuadraticExpression)
+ assert result.sizes["dim_2"] == v.sizes["dim_2"]
+ assert not np.isnan(result.coeffs.values).any()
+ np.testing.assert_array_equal(result.coeffs.squeeze().values, expected_fill)
+
+ def test_subset_add_quadexpr(self, v: Variable, subset: xr.DataArray) -> None:
+ qexpr = v * v
+ assert_quadequal(subset + qexpr, qexpr + subset)
+
+ class TestMissingValues:
+ """
+ Same shape as variable but with NaN entries in the constant.
+
+ NaN values are filled with operation-specific neutral elements:
+ - Addition/subtraction: NaN -> 0 (additive identity)
+ - Multiplication: NaN -> 0 (zeroes out the variable)
+ - Division: NaN -> 1 (multiplicative identity, no scaling)
+ """
+
+ NAN_POSITIONS = [0, 5, 19]
+
+ @pytest.mark.parametrize("operand", ["var", "expr"])
+ def test_add_nan_filled(
+ self,
+ v: Variable,
+ nan_constant: xr.DataArray | pd.Series,
+ operand: str,
+ ) -> None:
+ base_const = 0.0 if operand == "var" else 5.0
+ target = v if operand == "var" else v + 5
+ result = target + nan_constant
+ assert result.sizes["dim_2"] == 20
+ assert not np.isnan(result.const.values).any()
+ # At NaN positions, const should be unchanged (added 0)
+ for i in self.NAN_POSITIONS:
+ assert result.const.values[i] == base_const
+
+ @pytest.mark.parametrize("operand", ["var", "expr"])
+ def test_sub_nan_filled(
+ self,
+ v: Variable,
+ nan_constant: xr.DataArray | pd.Series,
+ operand: str,
+ ) -> None:
+ base_const = 0.0 if operand == "var" else 5.0
+ target = v if operand == "var" else v + 5
+ result = target - nan_constant
+ assert result.sizes["dim_2"] == 20
+ assert not np.isnan(result.const.values).any()
+ # At NaN positions, const should be unchanged (subtracted 0)
+ for i in self.NAN_POSITIONS:
+ assert result.const.values[i] == base_const
+
+ @pytest.mark.parametrize("operand", ["var", "expr"])
+ def test_mul_nan_filled(
+ self,
+ v: Variable,
+ nan_constant: xr.DataArray | pd.Series,
+ operand: str,
+ ) -> None:
+ target = v if operand == "var" else 1 * v
+ result = target * nan_constant
+ assert result.sizes["dim_2"] == 20
+ assert not np.isnan(result.coeffs.squeeze().values).any()
+ # At NaN positions, coeffs should be 0 (variable zeroed out)
+ for i in self.NAN_POSITIONS:
+ assert result.coeffs.squeeze().values[i] == 0.0
+
+ @pytest.mark.parametrize("operand", ["var", "expr"])
+ def test_div_nan_filled(
+ self,
+ v: Variable,
+ nan_constant: xr.DataArray | pd.Series,
+ operand: str,
+ ) -> None:
+ target = v if operand == "var" else 1 * v
+ result = target / nan_constant
+ assert result.sizes["dim_2"] == 20
+ assert not np.isnan(result.coeffs.squeeze().values).any()
+ # At NaN positions, coeffs should be unchanged (divided by 1)
+ original_coeffs = (1 * v).coeffs.squeeze().values
+ for i in self.NAN_POSITIONS:
+ assert result.coeffs.squeeze().values[i] == original_coeffs[i]
+
+ def test_add_commutativity(
+ self,
+ v: Variable,
+ nan_constant: xr.DataArray | pd.Series,
+ ) -> None:
+ result_a = v + nan_constant
+ result_b = nan_constant + v
+ assert not np.isnan(result_a.const.values).any()
+ assert not np.isnan(result_b.const.values).any()
+ np.testing.assert_array_equal(result_a.const.values, result_b.const.values)
+ np.testing.assert_array_equal(
+ result_a.coeffs.values, result_b.coeffs.values
+ )
+
+ def test_mul_commutativity(
+ self,
+ v: Variable,
+ nan_constant: xr.DataArray | pd.Series,
+ ) -> None:
+ result_a = v * nan_constant
+ result_b = nan_constant * v
+ assert not np.isnan(result_a.coeffs.values).any()
+ assert not np.isnan(result_b.coeffs.values).any()
+ np.testing.assert_array_equal(
+ result_a.coeffs.values, result_b.coeffs.values
+ )
+
+ def test_quadexpr_add_nan(
+ self,
+ v: Variable,
+ nan_constant: xr.DataArray | pd.Series,
+ ) -> None:
+ qexpr = v * v
+ result = qexpr + nan_constant
+ assert isinstance(result, QuadraticExpression)
+ assert result.sizes["dim_2"] == 20
+ assert not np.isnan(result.const.values).any()
+
+ class TestExpressionWithNaN:
+ """Test that NaN in expression's own const/coeffs doesn't propagate."""
+
+ def test_shifted_expr_add_scalar(self, v: Variable) -> None:
+ expr = (1 * v).shift(dim_2=1)
+ result = expr + 5
+ assert not np.isnan(result.const.values).any()
+ assert result.const.values[0] == 5.0
+
+ def test_shifted_expr_mul_scalar(self, v: Variable) -> None:
+ expr = (1 * v).shift(dim_2=1)
+ result = expr * 2
+ assert not np.isnan(result.coeffs.squeeze().values).any()
+ assert result.coeffs.squeeze().values[0] == 0.0
+
+ def test_shifted_expr_add_array(self, v: Variable) -> None:
+ arr = np.arange(v.sizes["dim_2"], dtype=float)
+ expr = (1 * v).shift(dim_2=1)
+ result = expr + arr
+ assert not np.isnan(result.const.values).any()
+ assert result.const.values[0] == 0.0
+
+ def test_shifted_expr_mul_array(self, v: Variable) -> None:
+ arr = np.arange(v.sizes["dim_2"], dtype=float) + 1
+ expr = (1 * v).shift(dim_2=1)
+ result = expr * arr
+ assert not np.isnan(result.coeffs.squeeze().values).any()
+ assert result.coeffs.squeeze().values[0] == 0.0
+
+ def test_shifted_expr_div_scalar(self, v: Variable) -> None:
+ expr = (1 * v).shift(dim_2=1)
+ result = expr / 2
+ assert not np.isnan(result.coeffs.squeeze().values).any()
+ assert result.coeffs.squeeze().values[0] == 0.0
+
+ def test_shifted_expr_sub_scalar(self, v: Variable) -> None:
+ expr = (1 * v).shift(dim_2=1)
+ result = expr - 3
+ assert not np.isnan(result.const.values).any()
+ assert result.const.values[0] == -3.0
+
+ def test_shifted_expr_div_array(self, v: Variable) -> None:
+ arr = np.arange(v.sizes["dim_2"], dtype=float) + 1
+ expr = (1 * v).shift(dim_2=1)
+ result = expr / arr
+ assert not np.isnan(result.coeffs.squeeze().values).any()
+ assert result.coeffs.squeeze().values[0] == 0.0
+
+ def test_variable_to_linexpr_nan_coefficient(self, v: Variable) -> None:
+ nan_coeff = np.ones(v.sizes["dim_2"])
+ nan_coeff[0] = np.nan
+ result = v.to_linexpr(nan_coeff)
+ assert not np.isnan(result.coeffs.squeeze().values).any()
+ assert result.coeffs.squeeze().values[0] == 0.0
+
+ class TestMultiDim:
+ def test_multidim_subset_mul(self, m: Model) -> None:
+ coords_a = pd.RangeIndex(4, name="a")
+ coords_b = pd.RangeIndex(5, name="b")
+ w = m.add_variables(coords=[coords_a, coords_b], name="w")
+
+ subset_2d = xr.DataArray(
+ [[2.0, 3.0], [4.0, 5.0]],
+ dims=["a", "b"],
+ coords={"a": [1, 3], "b": [0, 4]},
+ )
+ result = w * subset_2d
+ assert result.sizes["a"] == 4
+ assert result.sizes["b"] == 5
+ assert not np.isnan(result.coeffs.values).any()
+ assert result.coeffs.squeeze().sel(a=1, b=0).item() == pytest.approx(2.0)
+ assert result.coeffs.squeeze().sel(a=3, b=4).item() == pytest.approx(5.0)
+ assert result.coeffs.squeeze().sel(a=0, b=0).item() == pytest.approx(0.0)
+ assert result.coeffs.squeeze().sel(a=1, b=2).item() == pytest.approx(0.0)
+
+ def test_multidim_subset_add(self, m: Model) -> None:
+ coords_a = pd.RangeIndex(4, name="a")
+ coords_b = pd.RangeIndex(5, name="b")
+ w = m.add_variables(coords=[coords_a, coords_b], name="w")
+
+ subset_2d = xr.DataArray(
+ [[2.0, 3.0], [4.0, 5.0]],
+ dims=["a", "b"],
+ coords={"a": [1, 3], "b": [0, 4]},
+ )
+ result = w + subset_2d
+ assert result.sizes["a"] == 4
+ assert result.sizes["b"] == 5
+ assert not np.isnan(result.const.values).any()
+ assert result.const.sel(a=1, b=0).item() == pytest.approx(2.0)
+ assert result.const.sel(a=3, b=4).item() == pytest.approx(5.0)
+ assert result.const.sel(a=0, b=0).item() == pytest.approx(0.0)
+
+ class TestXarrayCompat:
+ def test_da_eq_da_still_works(self) -> None:
+ da1 = xr.DataArray([1, 2, 3])
+ da2 = xr.DataArray([1, 2, 3])
+ result = da1 == da2
+ assert result.values.all()
+
+ def test_da_eq_scalar_still_works(self) -> None:
+ da = xr.DataArray([1, 2, 3])
+ result = da == 2
+ np.testing.assert_array_equal(result.values, [False, True, False])
+
+ def test_da_truediv_var_raises(self, v: Variable) -> None:
+ da = xr.DataArray(np.ones(20), dims=["dim_2"], coords={"dim_2": range(20)})
+ with pytest.raises(TypeError):
+ da / v # type: ignore[operator]
+
+
def test_expression_inherited_properties(x: Variable, y: Variable) -> None:
expr = 10 * x + y
assert isinstance(expr.attrs, dict)
@@ -1399,3 +1853,308 @@ def test_constant_only_expression_mul_linexpr_with_vars_and_const(
assert not result_rev.is_constant
assert (result_rev.coeffs == expected_coeffs).all()
assert (result_rev.const == expected_const).all()
+
+
+class TestJoinParameter:
+ @pytest.fixture
+ def m2(self) -> Model:
+ m = Model()
+ m.add_variables(coords=[pd.Index([0, 1, 2], name="i")], name="a")
+ m.add_variables(coords=[pd.Index([1, 2, 3], name="i")], name="b")
+ m.add_variables(coords=[pd.Index([0, 1, 2], name="i")], name="c")
+ return m
+
+ @pytest.fixture
+ def a(self, m2: Model) -> Variable:
+ return m2.variables["a"]
+
+ @pytest.fixture
+ def b(self, m2: Model) -> Variable:
+ return m2.variables["b"]
+
+ @pytest.fixture
+ def c(self, m2: Model) -> Variable:
+ return m2.variables["c"]
+
+ class TestAddition:
+ def test_add_join_none_preserves_default(
+ self, a: Variable, b: Variable
+ ) -> None:
+ result_default = a.to_linexpr() + b.to_linexpr()
+ result_none = a.to_linexpr().add(b.to_linexpr(), join=None)
+ assert_linequal(result_default, result_none)
+
+ def test_add_expr_join_inner(self, a: Variable, b: Variable) -> None:
+ result = a.to_linexpr().add(b.to_linexpr(), join="inner")
+ assert list(result.data.indexes["i"]) == [1, 2]
+
+ def test_add_expr_join_outer(self, a: Variable, b: Variable) -> None:
+ result = a.to_linexpr().add(b.to_linexpr(), join="outer")
+ assert list(result.data.indexes["i"]) == [0, 1, 2, 3]
+
+ def test_add_expr_join_left(self, a: Variable, b: Variable) -> None:
+ result = a.to_linexpr().add(b.to_linexpr(), join="left")
+ assert list(result.data.indexes["i"]) == [0, 1, 2]
+
+ def test_add_expr_join_right(self, a: Variable, b: Variable) -> None:
+ result = a.to_linexpr().add(b.to_linexpr(), join="right")
+ assert list(result.data.indexes["i"]) == [1, 2, 3]
+
+ def test_add_constant_join_inner(self, a: Variable) -> None:
+ const = xr.DataArray([10, 20, 30], dims=["i"], coords={"i": [1, 2, 3]})
+ result = a.to_linexpr().add(const, join="inner")
+ assert list(result.data.indexes["i"]) == [1, 2]
+
+ def test_add_constant_join_outer(self, a: Variable) -> None:
+ const = xr.DataArray([10, 20, 30], dims=["i"], coords={"i": [1, 2, 3]})
+ result = a.to_linexpr().add(const, join="outer")
+ assert list(result.data.indexes["i"]) == [0, 1, 2, 3]
+
+ def test_add_constant_join_override(self, a: Variable, c: Variable) -> None:
+ expr = a.to_linexpr()
+ const = xr.DataArray([10, 20, 30], dims=["i"], coords={"i": [0, 1, 2]})
+ result = expr.add(const, join="override")
+ assert list(result.data.indexes["i"]) == [0, 1, 2]
+ assert (result.const.values == const.values).all()
+
+ def test_add_same_coords_all_joins(self, a: Variable, c: Variable) -> None:
+ expr_a = 1 * a + 5
+ const = xr.DataArray([1, 2, 3], dims=["i"], coords={"i": [0, 1, 2]})
+ for join in ["override", "outer", "inner"]:
+ result = expr_a.add(const, join=join)
+ assert list(result.coords["i"].values) == [0, 1, 2]
+ np.testing.assert_array_equal(result.const.values, [6, 7, 8])
+
+ def test_add_scalar_with_explicit_join(self, a: Variable) -> None:
+ expr = 1 * a + 5
+ result = expr.add(10, join="override")
+ np.testing.assert_array_equal(result.const.values, [15, 15, 15])
+ assert list(result.coords["i"].values) == [0, 1, 2]
+
+ class TestSubtraction:
+ def test_sub_expr_join_inner(self, a: Variable, b: Variable) -> None:
+ result = a.to_linexpr().sub(b.to_linexpr(), join="inner")
+ assert list(result.data.indexes["i"]) == [1, 2]
+
+ def test_sub_constant_override(self, a: Variable) -> None:
+ expr = 1 * a + 5
+ other = xr.DataArray([10, 20, 30], dims=["i"], coords={"i": [5, 6, 7]})
+ result = expr.sub(other, join="override")
+ assert list(result.coords["i"].values) == [0, 1, 2]
+ np.testing.assert_array_equal(result.const.values, [-5, -15, -25])
+
+ class TestMultiplication:
+ def test_mul_constant_join_inner(self, a: Variable) -> None:
+ const = xr.DataArray([2, 3, 4], dims=["i"], coords={"i": [1, 2, 3]})
+ result = a.to_linexpr().mul(const, join="inner")
+ assert list(result.data.indexes["i"]) == [1, 2]
+
+ def test_mul_constant_join_outer(self, a: Variable) -> None:
+ const = xr.DataArray([2, 3, 4], dims=["i"], coords={"i": [1, 2, 3]})
+ result = a.to_linexpr().mul(const, join="outer")
+ assert list(result.data.indexes["i"]) == [0, 1, 2, 3]
+ assert result.coeffs.sel(i=0).item() == 0
+ assert result.coeffs.sel(i=1).item() == 2
+ assert result.coeffs.sel(i=2).item() == 3
+
+ def test_mul_expr_with_join_raises(self, a: Variable, b: Variable) -> None:
+ with pytest.raises(TypeError, match="join parameter is not supported"):
+ a.to_linexpr().mul(b.to_linexpr(), join="inner")
+
+ class TestDivision:
+ def test_div_constant_join_inner(self, a: Variable) -> None:
+ const = xr.DataArray([2, 3, 4], dims=["i"], coords={"i": [1, 2, 3]})
+ result = a.to_linexpr().div(const, join="inner")
+ assert list(result.data.indexes["i"]) == [1, 2]
+
+ def test_div_constant_join_outer(self, a: Variable) -> None:
+ const = xr.DataArray([2, 3, 4], dims=["i"], coords={"i": [1, 2, 3]})
+ result = a.to_linexpr().div(const, join="outer")
+ assert list(result.data.indexes["i"]) == [0, 1, 2, 3]
+
+ def test_div_expr_with_join_raises(self, a: Variable, b: Variable) -> None:
+ with pytest.raises(TypeError):
+ a.to_linexpr().div(b.to_linexpr(), join="outer")
+
+ class TestVariableOperations:
+ def test_variable_add_join(self, a: Variable, b: Variable) -> None:
+ result = a.add(b, join="inner")
+ assert list(result.data.indexes["i"]) == [1, 2]
+
+ def test_variable_sub_join(self, a: Variable, b: Variable) -> None:
+ result = a.sub(b, join="inner")
+ assert list(result.data.indexes["i"]) == [1, 2]
+
+ def test_variable_mul_join(self, a: Variable) -> None:
+ const = xr.DataArray([2, 3, 4], dims=["i"], coords={"i": [1, 2, 3]})
+ result = a.mul(const, join="inner")
+ assert list(result.data.indexes["i"]) == [1, 2]
+
+ def test_variable_div_join(self, a: Variable) -> None:
+ const = xr.DataArray([2, 3, 4], dims=["i"], coords={"i": [1, 2, 3]})
+ result = a.div(const, join="inner")
+ assert list(result.data.indexes["i"]) == [1, 2]
+
+ def test_variable_add_outer_values(self, a: Variable, b: Variable) -> None:
+ result = a.add(b, join="outer")
+ assert isinstance(result, LinearExpression)
+ assert set(result.coords["i"].values) == {0, 1, 2, 3}
+ assert result.nterm == 2
+
+ def test_variable_mul_override(self, a: Variable) -> None:
+ other = xr.DataArray([2, 3, 4], dims=["i"], coords={"i": [5, 6, 7]})
+ result = a.mul(other, join="override")
+ assert isinstance(result, LinearExpression)
+ assert list(result.coords["i"].values) == [0, 1, 2]
+ np.testing.assert_array_equal(result.coeffs.squeeze().values, [2, 3, 4])
+
+ def test_variable_div_override(self, a: Variable) -> None:
+ other = xr.DataArray([2.0, 5.0, 10.0], dims=["i"], coords={"i": [5, 6, 7]})
+ result = a.div(other, join="override")
+ assert isinstance(result, LinearExpression)
+ assert list(result.coords["i"].values) == [0, 1, 2]
+ np.testing.assert_array_almost_equal(
+ result.coeffs.squeeze().values, [0.5, 0.2, 0.1]
+ )
+
+ def test_same_shape_add_join_override(self, a: Variable, c: Variable) -> None:
+ result = a.to_linexpr().add(c.to_linexpr(), join="override")
+ assert list(result.data.indexes["i"]) == [0, 1, 2]
+
+ class TestMerge:
+ def test_merge_join_parameter(self, a: Variable, b: Variable) -> None:
+ result: LinearExpression = merge(
+ [a.to_linexpr(), b.to_linexpr()], join="inner"
+ )
+ assert list(result.data.indexes["i"]) == [1, 2]
+
+ def test_merge_outer_join(self, a: Variable, b: Variable) -> None:
+ result: LinearExpression = merge(
+ [a.to_linexpr(), b.to_linexpr()], join="outer"
+ )
+ assert set(result.coords["i"].values) == {0, 1, 2, 3}
+
+ def test_merge_join_left(self, a: Variable, b: Variable) -> None:
+ result: LinearExpression = merge(
+ [a.to_linexpr(), b.to_linexpr()], join="left"
+ )
+ assert list(result.data.indexes["i"]) == [0, 1, 2]
+
+ def test_merge_join_right(self, a: Variable, b: Variable) -> None:
+ result: LinearExpression = merge(
+ [a.to_linexpr(), b.to_linexpr()], join="right"
+ )
+ assert list(result.data.indexes["i"]) == [1, 2, 3]
+
+ class TestValueVerification:
+ def test_add_expr_outer_const_values(self, a: Variable, b: Variable) -> None:
+ expr_a = 1 * a + 5
+ expr_b = 2 * b + 10
+ result = expr_a.add(expr_b, join="outer")
+ assert set(result.coords["i"].values) == {0, 1, 2, 3}
+ assert result.const.sel(i=0).item() == 5
+ assert result.const.sel(i=1).item() == 15
+ assert result.const.sel(i=2).item() == 15
+ assert result.const.sel(i=3).item() == 10
+
+ def test_add_expr_inner_const_values(self, a: Variable, b: Variable) -> None:
+ expr_a = 1 * a + 5
+ expr_b = 2 * b + 10
+ result = expr_a.add(expr_b, join="inner")
+ assert list(result.coords["i"].values) == [1, 2]
+ assert result.const.sel(i=1).item() == 15
+ assert result.const.sel(i=2).item() == 15
+
+ def test_add_constant_outer_fill_values(self, a: Variable) -> None:
+ expr = 1 * a + 5
+ const = xr.DataArray([10, 20], dims=["i"], coords={"i": [1, 3]})
+ result = expr.add(const, join="outer")
+ assert set(result.coords["i"].values) == {0, 1, 2, 3}
+ assert result.const.sel(i=0).item() == 5
+ assert result.const.sel(i=1).item() == 15
+ assert result.const.sel(i=2).item() == 5
+ assert result.const.sel(i=3).item() == 20
+
+ def test_add_constant_inner_fill_values(self, a: Variable) -> None:
+ expr = 1 * a + 5
+ const = xr.DataArray([10, 20], dims=["i"], coords={"i": [1, 3]})
+ result = expr.add(const, join="inner")
+ assert list(result.coords["i"].values) == [1]
+ assert result.const.sel(i=1).item() == 15
+
+ def test_add_constant_override_positional(self, a: Variable) -> None:
+ expr = 1 * a + 5
+ other = xr.DataArray([10, 20, 30], dims=["i"], coords={"i": [5, 6, 7]})
+ result = expr.add(other, join="override")
+ assert list(result.coords["i"].values) == [0, 1, 2]
+ np.testing.assert_array_equal(result.const.values, [15, 25, 35])
+
+ def test_sub_expr_outer_const_values(self, a: Variable, b: Variable) -> None:
+ expr_a = 1 * a + 5
+ expr_b = 2 * b + 10
+ result = expr_a.sub(expr_b, join="outer")
+ assert set(result.coords["i"].values) == {0, 1, 2, 3}
+ assert result.const.sel(i=0).item() == 5
+ assert result.const.sel(i=1).item() == -5
+ assert result.const.sel(i=2).item() == -5
+ assert result.const.sel(i=3).item() == -10
+
+ def test_mul_constant_override_positional(self, a: Variable) -> None:
+ expr = 1 * a + 5
+ other = xr.DataArray([2, 3, 4], dims=["i"], coords={"i": [5, 6, 7]})
+ result = expr.mul(other, join="override")
+ assert list(result.coords["i"].values) == [0, 1, 2]
+ np.testing.assert_array_equal(result.const.values, [10, 15, 20])
+ np.testing.assert_array_equal(result.coeffs.squeeze().values, [2, 3, 4])
+
+ def test_mul_constant_outer_fill_values(self, a: Variable) -> None:
+ expr = 1 * a + 5
+ other = xr.DataArray([2, 3], dims=["i"], coords={"i": [1, 3]})
+ result = expr.mul(other, join="outer")
+ assert set(result.coords["i"].values) == {0, 1, 2, 3}
+ assert result.const.sel(i=0).item() == 0
+ assert result.const.sel(i=1).item() == 10
+ assert result.const.sel(i=2).item() == 0
+ assert result.const.sel(i=3).item() == 0
+ assert result.coeffs.squeeze().sel(i=1).item() == 2
+ assert result.coeffs.squeeze().sel(i=0).item() == 0
+
+ def test_div_constant_override_positional(self, a: Variable) -> None:
+ expr = 1 * a + 10
+ other = xr.DataArray([2.0, 5.0, 10.0], dims=["i"], coords={"i": [5, 6, 7]})
+ result = expr.div(other, join="override")
+ assert list(result.coords["i"].values) == [0, 1, 2]
+ np.testing.assert_array_equal(result.const.values, [5.0, 2.0, 1.0])
+
+ def test_div_constant_outer_fill_values(self, a: Variable) -> None:
+ expr = 1 * a + 10
+ other = xr.DataArray([2.0, 5.0], dims=["i"], coords={"i": [1, 3]})
+ result = expr.div(other, join="outer")
+ assert set(result.coords["i"].values) == {0, 1, 2, 3}
+ assert result.const.sel(i=1).item() == pytest.approx(5.0)
+ assert result.coeffs.squeeze().sel(i=1).item() == pytest.approx(0.5)
+ assert result.const.sel(i=0).item() == pytest.approx(10.0)
+ assert result.coeffs.squeeze().sel(i=0).item() == pytest.approx(1.0)
+
+ class TestQuadratic:
+ def test_quadratic_add_constant_join_inner(
+ self, a: Variable, b: Variable
+ ) -> None:
+ quad = a.to_linexpr() * b.to_linexpr()
+ const = xr.DataArray([10, 20, 30], dims=["i"], coords={"i": [1, 2, 3]})
+ result = quad.add(const, join="inner")
+ assert list(result.data.indexes["i"]) == [1, 2, 3]
+
+ def test_quadratic_add_expr_join_inner(self, a: Variable) -> None:
+ quad = a.to_linexpr() * a.to_linexpr()
+ const = xr.DataArray([10, 20], dims=["i"], coords={"i": [0, 1]})
+ result = quad.add(const, join="inner")
+ assert list(result.data.indexes["i"]) == [0, 1]
+
+ def test_quadratic_mul_constant_join_inner(
+ self, a: Variable, b: Variable
+ ) -> None:
+ quad = a.to_linexpr() * b.to_linexpr()
+ const = xr.DataArray([2, 3, 4], dims=["i"], coords={"i": [1, 2, 3]})
+ result = quad.mul(const, join="inner")
+ assert list(result.data.indexes["i"]) == [1, 2, 3]
diff --git a/test/test_matrices.py b/test/test_matrices.py
index 98a73564..58ada40b 100644
--- a/test/test_matrices.py
+++ b/test/test_matrices.py
@@ -77,3 +77,18 @@ def test_matrices_float_c() -> None:
c = m.matrices.c
assert np.all(c == np.array([1.5, 1.5]))
+
+
+def test_matrices_A_with_sparse_constraint_labels() -> None:
+ m = Model()
+
+ x = m.add_variables(0, 1, coords=[range(6)], name="x")
+ mask = xr.DataArray([True, False, True, False, True, False], dims=["dim_0"])
+ m.add_constraints(x, GREATER_EQUAL, 0, mask=mask)
+ m.add_objective(x.sum())
+
+ A_full = m.constraints.to_matrix(filter_missings=False)
+ expected = A_full[m.matrices.clabels][:, m.matrices.vlabels]
+
+ assert m.matrices.A is not None
+ assert (m.matrices.A != expected).nnz == 0
diff --git a/test/test_optimization.py b/test/test_optimization.py
index 492d703a..cdac8e61 100644
--- a/test/test_optimization.py
+++ b/test/test_optimization.py
@@ -55,7 +55,7 @@
params.append(("mosek", "lp", True))
-# Note: Platform-specific solver bugs (e.g., SCIP quadratic on Windows) are now
+# Note: Platform-specific solver bugs are now
# handled in linopy/solver_capabilities.py by adjusting the registry at import time.
feasible_quadratic_solvers: list[str] = list(quadratic_solvers)
@@ -530,7 +530,7 @@ def test_solver_time_limit_options(
"cplex": {"timelimit": 1},
"xpress": {"maxtime": 1},
"highs": {"time_limit": 1},
- "scip": {"limits/time": 1},
+ "scip": {"limits/time": 10}, # increase time limit to avoid race condition
"mosek": {"MSK_DPAR_OPTIMIZER_MAX_TIME": 1},
"mindopt": {"MaxTime": 1},
"copt": {"TimeLimit": 1},
diff --git a/test/test_piecewise_constraints.py b/test/test_piecewise_constraints.py
index aeb76ec7..ab8e1f09 100644
--- a/test/test_piecewise_constraints.py
+++ b/test/test_piecewise_constraints.py
@@ -1,4 +1,4 @@
-"""Tests for piecewise linear constraints."""
+"""Tests for the new piecewise linear constraints API."""
from __future__ import annotations
@@ -9,2119 +9,1485 @@
import pytest
import xarray as xr
-from linopy import Model, available_solvers, breakpoints
+from linopy import (
+ Model,
+ available_solvers,
+ breakpoints,
+ piecewise,
+ segments,
+ slopes_to_points,
+)
from linopy.constants import (
+ BREAKPOINT_DIM,
+ LP_SEG_DIM,
+ PWL_ACTIVE_BOUND_SUFFIX,
+ PWL_AUX_SUFFIX,
PWL_BINARY_SUFFIX,
PWL_CONVEX_SUFFIX,
PWL_DELTA_SUFFIX,
PWL_FILL_SUFFIX,
+ PWL_INC_BINARY_SUFFIX,
+ PWL_INC_LINK_SUFFIX,
+ PWL_INC_ORDER_SUFFIX,
PWL_LAMBDA_SUFFIX,
- PWL_LINK_SUFFIX,
+ PWL_LP_DOMAIN_SUFFIX,
+ PWL_LP_SUFFIX,
PWL_SELECT_SUFFIX,
+ PWL_X_LINK_SUFFIX,
+ PWL_Y_LINK_SUFFIX,
+ SEGMENT_DIM,
+)
+from linopy.piecewise import (
+ PiecewiseConstraintDescriptor,
+ PiecewiseExpression,
)
from linopy.solver_capabilities import SolverFeature, get_available_solvers_with_feature
+_sos2_solvers = get_available_solvers_with_feature(
+ SolverFeature.SOS_CONSTRAINTS, available_solvers
+)
+_any_solvers = [
+ s for s in ["highs", "gurobi", "glpk", "cplex"] if s in available_solvers
+]
-class TestBasicSingleVariable:
- """Tests for single variable piecewise constraints."""
- def test_basic_single_variable(self) -> None:
- """Test basic piecewise constraint with a single variable."""
- m = Model()
- x = m.add_variables(name="x")
+# ===========================================================================
+# slopes_to_points
+# ===========================================================================
- breakpoints = xr.DataArray(
- [0, 10, 50, 100], dims=["bp"], coords={"bp": [0, 1, 2, 3]}
- )
- m.add_piecewise_constraints(x, breakpoints, dim="bp")
+class TestSlopesToPoints:
+ def test_basic(self) -> None:
+ assert slopes_to_points([0, 1, 2], [1, 2], 0) == [0, 1, 3]
- # Check lambda variables were created
- assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
+ def test_negative_slopes(self) -> None:
+ result = slopes_to_points([0, 10, 20], [-0.5, -1.0], 10)
+ assert result == [10, 5, -5]
- # Check constraints were created
- assert f"pwl0{PWL_CONVEX_SUFFIX}" in m.constraints
- assert f"pwl0{PWL_LINK_SUFFIX}" in m.constraints
+ def test_wrong_length_raises(self) -> None:
+ with pytest.raises(ValueError, match="len\\(slopes\\)"):
+ slopes_to_points([0, 1, 2], [1], 0)
- # Check SOS2 constraint was added
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- assert lambda_var.attrs.get("sos_type") == 2
- assert lambda_var.attrs.get("sos_dim") == "bp"
- def test_single_variable_with_coords(self) -> None:
- """Test piecewise constraint with a variable that has coordinates."""
- m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- x = m.add_variables(coords=[generators], name="x")
+# ===========================================================================
+# breakpoints() factory
+# ===========================================================================
- bp_coords = [0, 1, 2]
- breakpoints = xr.DataArray(
- [[0, 50, 100], [0, 30, 80]],
- dims=["generator", "bp"],
- coords={"generator": generators, "bp": bp_coords},
- )
- m.add_piecewise_constraints(x, breakpoints, dim="bp")
+class TestBreakpointsFactory:
+ def test_list(self) -> None:
+ bp = breakpoints([0, 50, 100])
+ assert bp.dims == (BREAKPOINT_DIM,)
+ assert list(bp.values) == [0.0, 50.0, 100.0]
- # Lambda should have both generator and bp dimensions
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- assert "generator" in lambda_var.dims
- assert "bp" in lambda_var.dims
+ def test_dict(self) -> None:
+ bp = breakpoints({"gen1": [0, 50, 100], "gen2": [0, 30]}, dim="generator")
+ assert set(bp.dims) == {"generator", BREAKPOINT_DIM}
+ assert bp.sizes[BREAKPOINT_DIM] == 3
+ assert np.isnan(bp.sel(generator="gen2").sel({BREAKPOINT_DIM: 2}))
+ def test_dict_without_dim_raises(self) -> None:
+ with pytest.raises(ValueError, match="'dim' is required"):
+ breakpoints({"a": [0, 50], "b": [0, 30]})
-class TestDictOfVariables:
- """Tests for dict of variables (multiple linked variables)."""
+ def test_slopes_list(self) -> None:
+ bp = breakpoints(slopes=[1, 2], x_points=[0, 1, 2], y0=0)
+ expected = breakpoints([0, 1, 3])
+ xr.testing.assert_equal(bp, expected)
- def test_dict_of_variables(self) -> None:
- """Test piecewise constraint with multiple linked variables."""
- m = Model()
- power = m.add_variables(name="power")
- efficiency = m.add_variables(name="efficiency")
+ def test_slopes_dict(self) -> None:
+ bp = breakpoints(
+ slopes={"a": [1, 0.5], "b": [2, 1]},
+ x_points={"a": [0, 10, 50], "b": [0, 20, 80]},
+ y0={"a": 0, "b": 10},
+ dim="gen",
+ )
+ assert set(bp.dims) == {"gen", BREAKPOINT_DIM}
+ # a: [0, 10, 30], b: [10, 50, 110]
+ np.testing.assert_allclose(bp.sel(gen="a").values, [0, 10, 30])
+ np.testing.assert_allclose(bp.sel(gen="b").values, [10, 50, 110])
- breakpoints = xr.DataArray(
- [[0, 50, 100], [0.8, 0.95, 0.9]],
- dims=["var", "bp"],
- coords={"var": ["power", "efficiency"], "bp": [0, 1, 2]},
+ def test_slopes_dict_shared_xpoints(self) -> None:
+ bp = breakpoints(
+ slopes={"a": [1, 2], "b": [3, 4]},
+ x_points=[0, 1, 2],
+ y0={"a": 0, "b": 0},
+ dim="gen",
)
+ np.testing.assert_allclose(bp.sel(gen="a").values, [0, 1, 3])
+ np.testing.assert_allclose(bp.sel(gen="b").values, [0, 3, 7])
- m.add_piecewise_constraints(
- {"power": power, "efficiency": efficiency},
- breakpoints,
- dim="bp",
+ def test_slopes_dict_shared_y0(self) -> None:
+ bp = breakpoints(
+ slopes={"a": [1, 2], "b": [3, 4]},
+ x_points={"a": [0, 1, 2], "b": [0, 1, 2]},
+ y0=5.0,
+ dim="gen",
)
+ np.testing.assert_allclose(bp.sel(gen="a").values, [5, 6, 8])
- # Check single linking constraint was created for all variables
- assert f"pwl0{PWL_LINK_SUFFIX}" in m.constraints
+ def test_values_and_slopes_raises(self) -> None:
+ with pytest.raises(ValueError, match="mutually exclusive"):
+ breakpoints([0, 1], slopes=[1], x_points=[0, 1], y0=0)
- def test_dict_with_coordinates(self) -> None:
- """Test dict of variables with additional coordinates."""
- m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- power = m.add_variables(coords=[generators], name="power")
- efficiency = m.add_variables(coords=[generators], name="efficiency")
+ def test_slopes_without_xpoints_raises(self) -> None:
+ with pytest.raises(ValueError, match="requires both"):
+ breakpoints(slopes=[1], y0=0)
- breakpoints = xr.DataArray(
- [[[0, 50, 100], [0.8, 0.95, 0.9]], [[0, 30, 80], [0.75, 0.9, 0.85]]],
- dims=["generator", "var", "bp"],
- coords={
- "generator": generators,
- "var": ["power", "efficiency"],
- "bp": [0, 1, 2],
- },
- )
+ def test_slopes_without_y0_raises(self) -> None:
+ with pytest.raises(ValueError, match="requires both"):
+ breakpoints(slopes=[1], x_points=[0, 1])
- m.add_piecewise_constraints(
- {"power": power, "efficiency": efficiency},
- breakpoints,
- dim="bp",
- )
+ def test_xpoints_with_values_raises(self) -> None:
+ with pytest.raises(ValueError, match="forbidden"):
+ breakpoints([0, 1], x_points=[0, 1])
- # Lambda should have generator and bp dimensions (not var)
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- assert "generator" in lambda_var.dims
- assert "bp" in lambda_var.dims
- assert "var" not in lambda_var.dims
+ def test_y0_with_values_raises(self) -> None:
+ with pytest.raises(ValueError, match="forbidden"):
+ breakpoints([0, 1], y0=5)
+ # --- pandas and xarray inputs ---
-class TestAutoDetectLinkDim:
- """Tests for auto-detection of linking dimension."""
+ def test_series(self) -> None:
+ bp = breakpoints(pd.Series([0, 50, 100]))
+ assert bp.dims == (BREAKPOINT_DIM,)
+ assert list(bp.values) == [0.0, 50.0, 100.0]
- def test_auto_detect_linking_dim(self) -> None:
- """Test that linking dimension is auto-detected from breakpoints."""
- m = Model()
- power = m.add_variables(name="power")
- efficiency = m.add_variables(name="efficiency")
+ def test_dataframe(self) -> None:
+ df = pd.DataFrame(
+ {"gen1": [0, 50, 100], "gen2": [0, 30, np.nan]}
+ ).T # rows=entities, cols=breakpoints
+ bp = breakpoints(df, dim="generator")
+ assert set(bp.dims) == {"generator", BREAKPOINT_DIM}
+ assert bp.sizes[BREAKPOINT_DIM] == 3
+ np.testing.assert_allclose(bp.sel(generator="gen1").values, [0, 50, 100])
+ assert np.isnan(bp.sel(generator="gen2").values[2])
+
+ def test_dataframe_without_dim_raises(self) -> None:
+ df = pd.DataFrame({"a": [0, 50], "b": [0, 30]}).T
+ with pytest.raises(ValueError, match="'dim' is required"):
+ breakpoints(df)
- breakpoints = xr.DataArray(
- [[0, 50, 100], [0.8, 0.95, 0.9]],
- dims=["var", "bp"],
- coords={"var": ["power", "efficiency"], "bp": [0, 1, 2]},
+ def test_dataarray_passthrough(self) -> None:
+ da = xr.DataArray(
+ [0, 50, 100],
+ dims=[BREAKPOINT_DIM],
+ coords={BREAKPOINT_DIM: np.arange(3)},
)
+ bp = breakpoints(da)
+ xr.testing.assert_equal(bp, da)
- # Should auto-detect linking dim="var"
- m.add_piecewise_constraints(
- {"power": power, "efficiency": efficiency},
- breakpoints,
- dim="bp",
- )
+ def test_dataarray_missing_dim_raises(self) -> None:
+ da = xr.DataArray([0, 50, 100], dims=["foo"])
+ with pytest.raises(ValueError, match="must have a"):
+ breakpoints(da)
- assert f"pwl0{PWL_LINK_SUFFIX}" in m.constraints
+ def test_slopes_series(self) -> None:
+ bp = breakpoints(
+ slopes=pd.Series([1, 2]),
+ x_points=pd.Series([0, 1, 2]),
+ y0=0,
+ )
+ expected = breakpoints([0, 1, 3])
+ xr.testing.assert_equal(bp, expected)
+
+ def test_slopes_dataarray(self) -> None:
+ slopes_da = xr.DataArray(
+ [[1, 2], [3, 4]],
+ dims=["gen", BREAKPOINT_DIM],
+ coords={"gen": ["a", "b"], BREAKPOINT_DIM: [0, 1]},
+ )
+ xp_da = xr.DataArray(
+ [[0, 1, 2], [0, 1, 2]],
+ dims=["gen", BREAKPOINT_DIM],
+ coords={"gen": ["a", "b"], BREAKPOINT_DIM: [0, 1, 2]},
+ )
+ y0_da = xr.DataArray([0, 5], dims=["gen"], coords={"gen": ["a", "b"]})
+ bp = breakpoints(slopes=slopes_da, x_points=xp_da, y0=y0_da, dim="gen")
+ np.testing.assert_allclose(bp.sel(gen="a").values, [0, 1, 3])
+ np.testing.assert_allclose(bp.sel(gen="b").values, [5, 8, 12])
+
+ def test_slopes_dataframe(self) -> None:
+ slopes_df = pd.DataFrame({"a": [1, 0.5], "b": [2, 1]}).T
+ xp_df = pd.DataFrame({"a": [0, 10, 50], "b": [0, 20, 80]}).T
+ y0_series = pd.Series({"a": 0, "b": 10})
+ bp = breakpoints(slopes=slopes_df, x_points=xp_df, y0=y0_series, dim="gen")
+ np.testing.assert_allclose(bp.sel(gen="a").values, [0, 10, 30])
+ np.testing.assert_allclose(bp.sel(gen="b").values, [10, 50, 110])
+
+
+# ===========================================================================
+# segments() factory
+# ===========================================================================
+
+
+class TestSegmentsFactory:
+ def test_list(self) -> None:
+ bp = segments([[0, 10], [50, 100]])
+ assert set(bp.dims) == {SEGMENT_DIM, BREAKPOINT_DIM}
+ assert bp.sizes[SEGMENT_DIM] == 2
+ assert bp.sizes[BREAKPOINT_DIM] == 2
+
+ def test_dict(self) -> None:
+ bp = segments(
+ {"a": [[0, 10], [50, 100]], "b": [[0, 20], [60, 90]]},
+ dim="gen",
+ )
+ assert "gen" in bp.dims
+ assert SEGMENT_DIM in bp.dims
+ assert BREAKPOINT_DIM in bp.dims
+
+ def test_ragged(self) -> None:
+ bp = segments([[0, 5, 10], [50, 100]])
+ assert bp.sizes[BREAKPOINT_DIM] == 3
+ assert np.isnan(bp.sel({SEGMENT_DIM: 1, BREAKPOINT_DIM: 2}))
+
+ def test_dict_without_dim_raises(self) -> None:
+ with pytest.raises(ValueError, match="'dim' is required"):
+ segments({"a": [[0, 10]], "b": [[50, 100]]})
+
+ def test_dataframe(self) -> None:
+ df = pd.DataFrame([[0, 10], [50, 100]]) # rows=segments, cols=breakpoints
+ bp = segments(df)
+ assert set(bp.dims) == {SEGMENT_DIM, BREAKPOINT_DIM}
+ assert bp.sizes[SEGMENT_DIM] == 2
+ assert bp.sizes[BREAKPOINT_DIM] == 2
+ np.testing.assert_allclose(bp.sel({SEGMENT_DIM: 0}).values, [0, 10])
+ np.testing.assert_allclose(bp.sel({SEGMENT_DIM: 1}).values, [50, 100])
+
+ def test_dataarray_passthrough(self) -> None:
+ da = xr.DataArray(
+ [[0, 10], [50, 100]],
+ dims=[SEGMENT_DIM, BREAKPOINT_DIM],
+ coords={SEGMENT_DIM: [0, 1], BREAKPOINT_DIM: [0, 1]},
+ )
+ bp = segments(da)
+ xr.testing.assert_equal(bp, da)
- def test_auto_detect_fails_with_no_match(self) -> None:
- """Test that auto-detection fails when no dimension matches keys."""
- m = Model()
- power = m.add_variables(name="power")
- efficiency = m.add_variables(name="efficiency")
+ def test_dataarray_missing_dim_raises(self) -> None:
+ da_no_seg = xr.DataArray(
+ [[0, 10], [50, 100]],
+ dims=["foo", BREAKPOINT_DIM],
+ )
+ with pytest.raises(ValueError, match="must have both"):
+ segments(da_no_seg)
- # Dimension 'wrong' doesn't match variable keys
- breakpoints = xr.DataArray(
- [[0, 50, 100], [0.8, 0.95, 0.9]],
- dims=["wrong", "bp"],
- coords={"wrong": ["a", "b"], "bp": [0, 1, 2]},
+ da_no_bp = xr.DataArray(
+ [[0, 10], [50, 100]],
+ dims=[SEGMENT_DIM, "bar"],
)
+ with pytest.raises(ValueError, match="must have both"):
+ segments(da_no_bp)
- with pytest.raises(ValueError, match="Could not auto-detect linking dimension"):
- m.add_piecewise_constraints(
- {"power": power, "efficiency": efficiency},
- breakpoints,
- dim="bp",
- )
+# ===========================================================================
+# piecewise() and operator overloading
+# ===========================================================================
-class TestMasking:
- """Tests for masking functionality."""
- def test_nan_masking(self) -> None:
- """Test that NaN values in breakpoints create masked constraints."""
+class TestPiecewiseFunction:
+ def test_returns_expression(self) -> None:
m = Model()
x = m.add_variables(name="x")
+ pw = piecewise(x, x_points=[0, 10, 50], y_points=[5, 2, 20])
+ assert isinstance(pw, PiecewiseExpression)
- # Third breakpoint is NaN
- breakpoints = xr.DataArray(
- [0, 10, np.nan, 100],
- dims=["bp"],
- coords={"bp": [0, 1, 2, 3]},
- )
-
- m.add_piecewise_constraints(x, breakpoints, dim="bp")
-
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- # Non-NaN breakpoints (0, 1, 3) should have valid labels
- assert int(lambda_var.labels.sel(bp=0)) != -1
- assert int(lambda_var.labels.sel(bp=1)) != -1
- assert int(lambda_var.labels.sel(bp=3)) != -1
- # NaN breakpoint (2) should be masked
- assert int(lambda_var.labels.sel(bp=2)) == -1
-
- def test_explicit_mask(self) -> None:
- """Test user-provided mask."""
+ def test_series_inputs(self) -> None:
m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- x = m.add_variables(coords=[generators], name="x")
-
- breakpoints = xr.DataArray(
- [[0, 50, 100], [0, 30, 80]],
- dims=["generator", "bp"],
- coords={"generator": generators, "bp": [0, 1, 2]},
- )
-
- # Mask out gen2
- mask = xr.DataArray(
- [[True, True, True], [False, False, False]],
- dims=["generator", "bp"],
- coords={"generator": generators, "bp": [0, 1, 2]},
- )
-
- m.add_piecewise_constraints(x, breakpoints, dim="bp", mask=mask)
-
- # Should still create variables and constraints
- assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
+ x = m.add_variables(name="x")
+ pw = piecewise(x, pd.Series([0, 10, 50]), pd.Series([5, 2, 20]))
+ assert isinstance(pw, PiecewiseExpression)
- def test_skip_nan_check(self) -> None:
- """Test skip_nan_check parameter for performance."""
+ def test_tuple_inputs(self) -> None:
m = Model()
x = m.add_variables(name="x")
+ pw = piecewise(x, (0, 10, 50), (5, 2, 20))
+ assert isinstance(pw, PiecewiseExpression)
- # Breakpoints with no NaNs
- breakpoints = xr.DataArray([0, 10, 50], dims=["bp"], coords={"bp": [0, 1, 2]})
-
- # Should work with skip_nan_check=True
- m.add_piecewise_constraints(x, breakpoints, dim="bp", skip_nan_check=True)
-
- # All lambda variables should be valid (no masking)
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- assert (lambda_var.labels != -1).all()
-
- def test_dict_mask_without_linking_dim(self) -> None:
- """Test dict case accepts broadcastable mask without linking dimension."""
+ def test_eq_returns_descriptor(self) -> None:
m = Model()
- power = m.add_variables(name="power")
- efficiency = m.add_variables(name="efficiency")
-
- breakpoints = xr.DataArray(
- [[0, 50, 100], [0.8, 0.95, 0.9]],
- dims=["var", "bp"],
- coords={"var": ["power", "efficiency"], "bp": [0, 1, 2]},
- )
-
- # Mask over bp only; should broadcast across var
- mask = xr.DataArray([True, False, True], dims=["bp"], coords={"bp": [0, 1, 2]})
-
- m.add_piecewise_constraints(
- {"power": power, "efficiency": efficiency},
- breakpoints,
- dim="bp",
- mask=mask,
- )
-
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- assert (lambda_var.labels.sel(bp=0) != -1).all()
- assert (lambda_var.labels.sel(bp=1) == -1).all()
- assert (lambda_var.labels.sel(bp=2) != -1).all()
-
-
-class TestMultiDimensional:
- """Tests for multi-dimensional piecewise constraints."""
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ desc = piecewise(x, [0, 10, 50], [5, 2, 20]) == y
+ assert isinstance(desc, PiecewiseConstraintDescriptor)
+ assert desc.sign == "=="
- def test_multi_dimensional(self) -> None:
- """Test piecewise constraint with multiple loop dimensions."""
+ def test_ge_returns_le_descriptor(self) -> None:
+ """Pw >= y means y <= pw"""
m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- timesteps = pd.Index([0, 1, 2], name="time")
- x = m.add_variables(coords=[generators, timesteps], name="x")
-
- rng = np.random.default_rng(42)
- breakpoints = xr.DataArray(
- rng.random((2, 3, 4)) * 100,
- dims=["generator", "time", "bp"],
- coords={"generator": generators, "time": timesteps, "bp": [0, 1, 2, 3]},
- )
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ desc = piecewise(x, [0, 10, 50], [5, 2, 20]) >= y
+ assert isinstance(desc, PiecewiseConstraintDescriptor)
+ assert desc.sign == "<="
- m.add_piecewise_constraints(x, breakpoints, dim="bp")
+ def test_le_returns_ge_descriptor(self) -> None:
+ """Pw <= y means y >= pw"""
+ m = Model()
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ desc = piecewise(x, [0, 10, 50], [5, 2, 20]) <= y
+ assert isinstance(desc, PiecewiseConstraintDescriptor)
+ assert desc.sign == ">="
+
+ @pytest.mark.parametrize(
+ ("operator", "expected_sign"),
+ [("==", "=="), ("<=", "<="), (">=", ">=")],
+ )
+ def test_rhs_piecewise_returns_descriptor(
+ self, operator: str, expected_sign: str
+ ) -> None:
+ m = Model()
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ pw = piecewise(x, [0, 10, 50], [5, 2, 20])
+
+ if operator == "==":
+ desc = y == pw
+ elif operator == "<=":
+ desc = y <= pw
+ else:
+ desc = y >= pw
+
+ assert isinstance(desc, PiecewiseConstraintDescriptor)
+ assert desc.sign == expected_sign
+ assert desc.piecewise_func is pw
+
+ @pytest.mark.parametrize(
+ ("operator", "expected_sign"),
+ [("==", "=="), ("<=", "<="), (">=", ">=")],
+ )
+ def test_rhs_piecewise_linear_expression_returns_descriptor(
+ self, operator: str, expected_sign: str
+ ) -> None:
+ m = Model()
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ z = m.add_variables(name="z")
+ lhs = 2 * y + z
+ pw = piecewise(x, [0, 10, 50], [5, 2, 20])
- # Lambda should have all dimensions
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- assert "generator" in lambda_var.dims
- assert "time" in lambda_var.dims
- assert "bp" in lambda_var.dims
+ if operator == "==":
+ desc = lhs == pw
+ elif operator == "<=":
+ desc = lhs <= pw
+ else:
+ desc = lhs >= pw
+ assert isinstance(desc, PiecewiseConstraintDescriptor)
+ assert desc.sign == expected_sign
+ assert desc.lhs is lhs
+ assert desc.piecewise_func is pw
-class TestValidationErrors:
- """Tests for input validation."""
+ def test_rhs_piecewise_add_constraint(self) -> None:
+ m = Model()
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(y == piecewise(x, [0, 10, 50], [5, 2, 20]))
+ assert len(m.constraints) > 0
- def test_invalid_vars_type(self) -> None:
- """Test error when expr is not Variable, LinearExpression, or dict."""
+ def test_mismatched_sizes_raises(self) -> None:
m = Model()
+ x = m.add_variables(name="x")
+ with pytest.raises(ValueError, match="same size"):
+ piecewise(x, [0, 10, 50, 100], [5, 2, 20])
- breakpoints = xr.DataArray([0, 10, 50], dims=["bp"], coords={"bp": [0, 1, 2]})
+ def test_missing_breakpoint_dim_raises(self) -> None:
+ m = Model()
+ x = m.add_variables(name="x")
+ xp = xr.DataArray([0, 10, 50], dims=["knot"])
+ yp = xr.DataArray([5, 2, 20], dims=["knot"])
+ with pytest.raises(ValueError, match="must have a breakpoint dimension"):
+ piecewise(x, xp, yp)
+ def test_missing_breakpoint_dim_x_only_raises(self) -> None:
+ m = Model()
+ x = m.add_variables(name="x")
+ xp = xr.DataArray([0, 10, 50], dims=["knot"])
+ yp = xr.DataArray([5, 2, 20], dims=[BREAKPOINT_DIM])
with pytest.raises(
- TypeError, match="must be a Variable, LinearExpression, or dict"
+ ValueError, match="x_points is missing the breakpoint dimension"
):
- m.add_piecewise_constraints("invalid", breakpoints, dim="bp") # type: ignore
+ piecewise(x, xp, yp)
- def test_invalid_dict_value_type(self) -> None:
+ def test_missing_breakpoint_dim_y_only_raises(self) -> None:
m = Model()
- bp = xr.DataArray(
- [[0, 50], [0, 10]],
- dims=["var", "bp"],
- coords={"var": ["x", "y"], "bp": [0, 1]},
- )
- with pytest.raises(TypeError, match="dict value for key 'x'"):
- m.add_piecewise_constraints({"x": "bad", "y": "bad"}, bp, dim="bp") # type: ignore
+ x = m.add_variables(name="x")
+ xp = xr.DataArray([0, 10, 50], dims=[BREAKPOINT_DIM])
+ yp = xr.DataArray([5, 2, 20], dims=["knot"])
+ with pytest.raises(
+ ValueError, match="y_points is missing the breakpoint dimension"
+ ):
+ piecewise(x, xp, yp)
- def test_missing_dim(self) -> None:
- """Test error when breakpoints don't have the required dim."""
+ def test_segment_dim_mismatch_raises(self) -> None:
m = Model()
x = m.add_variables(name="x")
+ xp = segments([[0, 10], [50, 100]])
+ yp = xr.DataArray([0, 5], dims=[BREAKPOINT_DIM])
+ with pytest.raises(ValueError, match="segment.*dimension.*both must"):
+ piecewise(x, xp, yp)
- breakpoints = xr.DataArray([0, 10, 50], dims=["wrong"])
-
- with pytest.raises(ValueError, match="must have dimension"):
- m.add_piecewise_constraints(x, breakpoints, dim="bp")
-
- def test_non_numeric_dim(self) -> None:
- """Test error when dim coordinates are not numeric."""
+ def test_detects_disjunctive(self) -> None:
m = Model()
x = m.add_variables(name="x")
+ pw = piecewise(x, segments([[0, 10], [50, 100]]), segments([[0, 5], [20, 80]]))
+ assert pw.disjunctive is True
- breakpoints = xr.DataArray(
- [0, 10, 50],
- dims=["bp"],
- coords={"bp": ["a", "b", "c"]}, # Non-numeric
- )
-
- with pytest.raises(ValueError, match="numeric coordinates"):
- m.add_piecewise_constraints(x, breakpoints, dim="bp")
-
- def test_expression_support(self) -> None:
- """Test that LinearExpression is supported as input."""
+ def test_detects_continuous(self) -> None:
m = Model()
x = m.add_variables(name="x")
- y = m.add_variables(name="y")
+ pw = piecewise(x, [0, 10, 50], [5, 2, 20])
+ assert pw.disjunctive is False
- breakpoints = xr.DataArray([0, 10, 50], dims=["bp"], coords={"bp": [0, 1, 2]})
- # Should work with a LinearExpression
- m.add_piecewise_constraints(x + y, breakpoints, dim="bp")
+# ===========================================================================
+# Continuous piecewise – equality
+# ===========================================================================
- # Check constraints were created
- assert f"pwl0{PWL_LINK_SUFFIX}" in m.constraints
- def test_no_matching_linking_dim(self) -> None:
- """Test error when no breakpoints dimension matches dict keys."""
+class TestContinuousEquality:
+ def test_sos2(self) -> None:
m = Model()
- power = m.add_variables(name="power")
- efficiency = m.add_variables(name="efficiency")
-
- breakpoints = xr.DataArray([0, 50, 100], dims=["bp"], coords={"bp": [0, 1, 2]})
-
- with pytest.raises(ValueError, match="Could not auto-detect linking dimension"):
- m.add_piecewise_constraints(
- {"power": power, "efficiency": efficiency},
- breakpoints,
- dim="bp",
- )
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 10, 50, 100], [5, 2, 20, 80]) == y,
+ method="sos2",
+ )
+ assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
+ assert f"pwl0{PWL_CONVEX_SUFFIX}" in m.constraints
+ assert f"pwl0{PWL_X_LINK_SUFFIX}" in m.constraints
+ assert f"pwl0{PWL_Y_LINK_SUFFIX}" in m.constraints
+ lam = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
+ assert lam.attrs.get("sos_type") == 2
- def test_linking_dim_coords_mismatch(self) -> None:
- """Test error when breakpoint dimension coords don't match dict keys."""
+ def test_auto_selects_incremental_for_monotonic(self) -> None:
m = Model()
- power = m.add_variables(name="power")
- efficiency = m.add_variables(name="efficiency")
-
- breakpoints = xr.DataArray(
- [[0, 50, 100], [0.8, 0.95, 0.9]],
- dims=["var", "bp"],
- coords={"var": ["wrong1", "wrong2"], "bp": [0, 1, 2]},
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 10, 50, 100], [5, 2, 20, 80]) == y,
)
+ assert f"pwl0{PWL_DELTA_SUFFIX}" in m.variables
+ assert f"pwl0{PWL_LAMBDA_SUFFIX}" not in m.variables
- with pytest.raises(ValueError, match="Could not auto-detect linking dimension"):
- m.add_piecewise_constraints(
- {"power": power, "efficiency": efficiency},
- breakpoints,
- dim="bp",
- )
-
-
-class TestNameGeneration:
- """Tests for automatic name generation."""
-
- def test_auto_name_generation(self) -> None:
- """Test that names are auto-generated correctly."""
+ def test_auto_nonmonotonic_falls_back_to_sos2(self) -> None:
m = Model()
x = m.add_variables(name="x")
y = m.add_variables(name="y")
-
- bp1 = xr.DataArray([0, 10, 50], dims=["bp"], coords={"bp": [0, 1, 2]})
- bp2 = xr.DataArray([0, 20, 80], dims=["bp"], coords={"bp": [0, 1, 2]})
-
- m.add_piecewise_constraints(x, bp1, dim="bp")
- m.add_piecewise_constraints(y, bp2, dim="bp")
-
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 30, 100], [5, 20, 15, 80]) == y,
+ )
assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
- assert f"pwl1{PWL_LAMBDA_SUFFIX}" in m.variables
+ assert f"pwl0{PWL_DELTA_SUFFIX}" not in m.variables
- def test_custom_name(self) -> None:
- """Test using a custom name."""
+ def test_multi_dimensional(self) -> None:
m = Model()
- x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray([0, 10, 50], dims=["bp"], coords={"bp": [0, 1, 2]})
-
- m.add_piecewise_constraints(x, breakpoints, dim="bp", name="my_pwl")
-
- assert f"my_pwl{PWL_LAMBDA_SUFFIX}" in m.variables
- assert f"my_pwl{PWL_CONVEX_SUFFIX}" in m.constraints
- assert f"my_pwl{PWL_LINK_SUFFIX}" in m.constraints
-
-
-class TestLPFileOutput:
- """Tests for LP file output with piecewise constraints."""
+ gens = pd.Index(["gen_a", "gen_b"], name="generator")
+ x = m.add_variables(coords=[gens], name="x")
+ y = m.add_variables(coords=[gens], name="y")
+ m.add_piecewise_constraints(
+ piecewise(
+ x,
+ breakpoints(
+ {"gen_a": [0, 10, 50], "gen_b": [0, 20, 80]}, dim="generator"
+ ),
+ breakpoints(
+ {"gen_a": [0, 5, 30], "gen_b": [0, 8, 50]}, dim="generator"
+ ),
+ )
+ == y,
+ )
+ delta = m.variables[f"pwl0{PWL_DELTA_SUFFIX}"]
+ assert "generator" in delta.dims
- def test_piecewise_written_to_lp(self, tmp_path: Path) -> None:
- """Test that piecewise constraints are properly written to LP file."""
+ def test_with_slopes(self) -> None:
m = Model()
x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [0.0, 10.0, 50.0],
- dims=["bp"],
- coords={"bp": [0, 1, 2]},
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(
+ piecewise(
+ x,
+ [0, 10, 50, 100],
+ breakpoints(slopes=[-0.3, 0.45, 1.2], x_points=[0, 10, 50, 100], y0=5),
+ )
+ == y,
)
-
- m.add_piecewise_constraints(x, breakpoints, dim="bp")
-
- # Add a simple objective to make it a valid LP
- m.add_objective(x)
-
- fn = tmp_path / "pwl.lp"
- m.to_file(fn, io_api="lp")
- content = fn.read_text()
-
- # Should contain SOS2 section
- assert "\nsos\n" in content.lower()
- assert "s2" in content.lower()
+ assert f"pwl0{PWL_DELTA_SUFFIX}" in m.variables
-@pytest.mark.skipif("gurobi" not in available_solvers, reason="Gurobi not installed")
-class TestSolverIntegration:
- """Integration tests with Gurobi solver."""
+# ===========================================================================
+# Continuous piecewise – inequality
+# ===========================================================================
- def test_solve_single_variable(self) -> None:
- """Test solving a model with piecewise constraint."""
- gurobipy = pytest.importorskip("gurobipy")
+class TestContinuousInequality:
+ def test_concave_le_uses_lp(self) -> None:
+ """Y <= concave f(x) → LP tangent lines"""
m = Model()
- # Variable that should be between 0 and 100
- x = m.add_variables(lower=0, upper=100, name="x")
-
- # Piecewise linear cost function: cost = f(x)
- # f(0) = 0, f(50) = 10, f(100) = 50
- cost = m.add_variables(name="cost")
-
- breakpoints = xr.DataArray(
- [[0, 50, 100], [0, 10, 50]],
- dims=["var", "bp"],
- coords={"var": ["x", "cost"], "bp": [0, 1, 2]},
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ # Concave: slopes 0.8, 0.4 (decreasing)
+ # pw >= y means y <= pw (sign="<=")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 100], [0, 40, 60]) >= y,
)
+ assert f"pwl0{PWL_LP_SUFFIX}" in m.constraints
+ assert f"pwl0{PWL_LAMBDA_SUFFIX}" not in m.variables
+ assert f"pwl0{PWL_AUX_SUFFIX}" not in m.variables
- m.add_piecewise_constraints({"x": x, "cost": cost}, breakpoints, dim="bp")
-
- # Minimize cost, but need x >= 50 to make it interesting
- m.add_constraints(x >= 50, name="x_min")
- m.add_objective(cost)
-
- try:
- status, cond = m.solve(solver_name="gurobi", io_api="direct")
- except gurobipy.GurobiError as exc:
- pytest.skip(f"Gurobi environment unavailable: {exc}")
-
- assert status == "ok"
- # At x=50, cost should be 10
- assert np.isclose(x.solution.values, 50, atol=1e-5)
- assert np.isclose(cost.solution.values, 10, atol=1e-5)
-
- def test_solve_efficiency_curve(self) -> None:
- """Test solving with a realistic efficiency curve."""
- gurobipy = pytest.importorskip("gurobipy")
-
+ def test_convex_le_uses_sos2_aux(self) -> None:
+ """Y <= convex f(x) → SOS2 + aux"""
m = Model()
- power = m.add_variables(lower=0, upper=100, name="power")
- efficiency = m.add_variables(name="efficiency")
-
- # Efficiency curve: starts low, peaks, then decreases
- # power: 0 25 50 75 100
- # efficiency: 0.7 0.85 0.95 0.9 0.8
- breakpoints = xr.DataArray(
- [[0, 25, 50, 75, 100], [0.7, 0.85, 0.95, 0.9, 0.8]],
- dims=["var", "bp"],
- coords={"var": ["power", "efficiency"], "bp": [0, 1, 2, 3, 4]},
- )
-
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ # Convex: slopes 0.2, 1.0 (increasing)
m.add_piecewise_constraints(
- {"power": power, "efficiency": efficiency},
- breakpoints,
- dim="bp",
+ piecewise(x, [0, 50, 100], [0, 10, 60]) >= y,
)
+ assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
+ assert f"pwl0{PWL_AUX_SUFFIX}" in m.variables
- # Maximize efficiency
- m.add_objective(efficiency, sense="max")
-
- try:
- status, cond = m.solve(solver_name="gurobi", io_api="direct")
- except gurobipy.GurobiError as exc:
- pytest.skip(f"Gurobi environment unavailable: {exc}")
-
- assert status == "ok"
- # Maximum efficiency is at power=50
- assert np.isclose(power.solution.values, 50, atol=1e-5)
- assert np.isclose(efficiency.solution.values, 0.95, atol=1e-5)
-
- def test_solve_multi_generator(self) -> None:
- """Test with multiple generators each with different curves."""
- gurobipy = pytest.importorskip("gurobipy")
-
- m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- power = m.add_variables(lower=0, upper=100, coords=[generators], name="power")
- cost = m.add_variables(coords=[generators], name="cost")
-
- # Different cost curves for each generator
- # gen1: cheaper at low power, expensive at high
- # gen2: more expensive at low power, cheaper at high
- breakpoints = xr.DataArray(
- [
- [[0, 50, 100], [0, 5, 30]], # gen1: power, cost
- [[0, 50, 100], [0, 15, 20]], # gen2: power, cost
- ],
- dims=["generator", "var", "bp"],
- coords={
- "generator": generators,
- "var": ["power", "cost"],
- "bp": [0, 1, 2],
- },
+ def test_convex_ge_uses_lp(self) -> None:
+ """Y >= convex f(x) → LP tangent lines"""
+ m = Model()
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ # Convex: slopes 0.2, 1.0 (increasing)
+ # pw <= y means y >= pw (sign=">=")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 100], [0, 10, 60]) <= y,
)
+ assert f"pwl0{PWL_LP_SUFFIX}" in m.constraints
+ assert f"pwl0{PWL_LAMBDA_SUFFIX}" not in m.variables
+ assert f"pwl0{PWL_AUX_SUFFIX}" not in m.variables
+ def test_concave_ge_uses_sos2_aux(self) -> None:
+ """Y >= concave f(x) → SOS2 + aux"""
+ m = Model()
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ # Concave: slopes 0.8, 0.4 (decreasing)
m.add_piecewise_constraints(
- {"power": power, "cost": cost}, breakpoints, dim="bp"
+ piecewise(x, [0, 50, 100], [0, 40, 60]) <= y,
)
+ assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
+ assert f"pwl0{PWL_AUX_SUFFIX}" in m.variables
- # Need total power of 120
- m.add_constraints(power.sum() >= 120, name="demand")
-
- # Minimize total cost
- m.add_objective(cost.sum())
-
- try:
- status, cond = m.solve(solver_name="gurobi", io_api="direct")
- except gurobipy.GurobiError as exc:
- pytest.skip(f"Gurobi environment unavailable: {exc}")
-
- assert status == "ok"
- # gen1 should provide ~50 (cheap up to 50), gen2 provides rest
- total_power = power.solution.sum().values
- assert np.isclose(total_power, 120, atol=1e-5)
-
-
-class TestIncrementalFormulation:
- """Tests for the incremental (delta) piecewise formulation."""
-
- def test_single_variable_incremental(self) -> None:
- """Test incremental formulation with a single variable."""
+ def test_mixed_uses_sos2(self) -> None:
m = Model()
x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [0, 10, 50, 100], dims=["bp"], coords={"bp": [0, 1, 2, 3]}
+ y = m.add_variables(name="y")
+ # Mixed: slopes 0.5, 0.3, 0.9 (down then up)
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 30, 60, 100], [0, 15, 24, 60]) >= y,
)
+ assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
+ assert f"pwl0{PWL_AUX_SUFFIX}" in m.variables
- m.add_piecewise_constraints(x, breakpoints, dim="bp", method="incremental")
-
- # Check delta variables created
- assert f"pwl0{PWL_DELTA_SUFFIX}" in m.variables
- # 3 segments → 3 delta vars
- delta_var = m.variables[f"pwl0{PWL_DELTA_SUFFIX}"]
- assert "bp_seg" in delta_var.dims
- assert len(delta_var.coords["bp_seg"]) == 3
-
- # Check filling-order constraint (single vectorized constraint)
- assert f"pwl0{PWL_FILL_SUFFIX}" in m.constraints
-
- # Check link constraint
- assert f"pwl0{PWL_LINK_SUFFIX}" in m.constraints
+ def test_method_lp_wrong_convexity_raises(self) -> None:
+ m = Model()
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ # Convex function + y <= pw + method="lp" should fail
+ with pytest.raises(ValueError, match="convex"):
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 100], [0, 10, 60]) >= y,
+ method="lp",
+ )
- # No SOS2 or lambda variables
- assert f"pwl0{PWL_LAMBDA_SUFFIX}" not in m.variables
+ def test_method_lp_decreasing_breakpoints_raises(self) -> None:
+ m = Model()
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ with pytest.raises(ValueError, match="strictly increasing x_points"):
+ m.add_piecewise_constraints(
+ piecewise(x, [100, 50, 0], [60, 10, 0]) <= y,
+ method="lp",
+ )
- def test_two_breakpoints_incremental(self) -> None:
- """Test incremental with only 2 breakpoints (1 segment, no fill constraints)."""
+ def test_auto_inequality_decreasing_breakpoints_raises(self) -> None:
m = Model()
x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ with pytest.raises(ValueError, match="strictly increasing x_points"):
+ m.add_piecewise_constraints(
+ piecewise(x, [100, 50, 0], [60, 10, 0]) <= y,
+ )
- breakpoints = xr.DataArray([0, 100], dims=["bp"], coords={"bp": [0, 1]})
+ def test_method_lp_equality_raises(self) -> None:
+ m = Model()
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ with pytest.raises(ValueError, match="equality"):
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 100], [0, 40, 60]) == y,
+ method="lp",
+ )
- m.add_piecewise_constraints(x, breakpoints, dim="bp", method="incremental")
- # 1 segment → 1 delta var, no filling constraints
- delta_var = m.variables[f"pwl0{PWL_DELTA_SUFFIX}"]
- assert len(delta_var.coords["bp_seg"]) == 1
+# ===========================================================================
+# Incremental formulation
+# ===========================================================================
- # Link constraint should exist
- assert f"pwl0{PWL_LINK_SUFFIX}" in m.constraints
- def test_dict_incremental(self) -> None:
- """Test incremental formulation with dict of variables."""
+class TestIncremental:
+ def test_creates_delta_vars(self) -> None:
m = Model()
- power = m.add_variables(name="power")
- cost = m.add_variables(name="cost")
-
- # Both power and cost breakpoints are strictly increasing
- breakpoints = xr.DataArray(
- [[0, 50, 100], [0, 10, 50]],
- dims=["var", "bp"],
- coords={"var": ["power", "cost"], "bp": [0, 1, 2]},
- )
-
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
m.add_piecewise_constraints(
- {"power": power, "cost": cost},
- breakpoints,
- dim="bp",
+ piecewise(x, [0, 10, 50, 100], [5, 2, 20, 80]) == y,
method="incremental",
)
-
assert f"pwl0{PWL_DELTA_SUFFIX}" in m.variables
- assert f"pwl0{PWL_LINK_SUFFIX}" in m.constraints
+ delta = m.variables[f"pwl0{PWL_DELTA_SUFFIX}"]
+ assert delta.labels.sizes[LP_SEG_DIM] == 3
+ assert f"pwl0{PWL_FILL_SUFFIX}" in m.constraints
+ assert f"pwl0{PWL_LAMBDA_SUFFIX}" not in m.variables
- def test_non_monotonic_raises(self) -> None:
- """Test that non-monotonic breakpoints raise ValueError for incremental."""
+ def test_nonmonotonic_raises(self) -> None:
m = Model()
x = m.add_variables(name="x")
-
- # Not monotonic: 0, 50, 30
- breakpoints = xr.DataArray([0, 50, 30], dims=["bp"], coords={"bp": [0, 1, 2]})
-
+ y = m.add_variables(name="y")
with pytest.raises(ValueError, match="strictly monotonic"):
- m.add_piecewise_constraints(x, breakpoints, dim="bp", method="incremental")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 30, 100], [5, 20, 15, 80]) == y,
+ method="incremental",
+ )
- def test_decreasing_monotonic_works(self) -> None:
- """Test that strictly decreasing breakpoints work for incremental."""
+ def test_sos2_nonmonotonic_succeeds(self) -> None:
m = Model()
x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [100, 50, 10, 0], dims=["bp"], coords={"bp": [0, 1, 2, 3]}
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 30, 100], [5, 20, 15, 80]) == y,
+ method="sos2",
)
+ assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
+ assert f"pwl0{PWL_DELTA_SUFFIX}" not in m.variables
- m.add_piecewise_constraints(x, breakpoints, dim="bp", method="incremental")
- assert f"pwl0{PWL_DELTA_SUFFIX}" in m.variables
-
- def test_opposite_directions_in_dict(self) -> None:
- """Test that dict with opposite monotonic directions works."""
+ def test_two_breakpoints_no_fill(self) -> None:
m = Model()
- power = m.add_variables(name="power")
- eff = m.add_variables(name="eff")
-
- # power increasing, efficiency decreasing
- breakpoints = xr.DataArray(
- [[0, 50, 100], [0.95, 0.9, 0.8]],
- dims=["var", "bp"],
- coords={"var": ["power", "eff"], "bp": [0, 1, 2]},
- )
-
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
m.add_piecewise_constraints(
- {"power": power, "eff": eff},
- breakpoints,
- dim="bp",
+ piecewise(x, [0, 100], [5, 80]) == y,
method="incremental",
)
+ delta = m.variables[f"pwl0{PWL_DELTA_SUFFIX}"]
+ assert delta.labels.sizes[LP_SEG_DIM] == 1
+ assert f"pwl0{PWL_X_LINK_SUFFIX}" in m.constraints
+ assert f"pwl0{PWL_Y_LINK_SUFFIX}" in m.constraints
- assert f"pwl0{PWL_DELTA_SUFFIX}" in m.variables
- assert f"pwl0{PWL_LINK_SUFFIX}" in m.constraints
-
- def test_nan_breakpoints_monotonic(self) -> None:
- """Test that trailing NaN breakpoints don't break monotonicity check."""
+ def test_creates_binary_indicator_vars(self) -> None:
m = Model()
x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [0, 10, 100, np.nan], dims=["bp"], coords={"bp": [0, 1, 2, 3]}
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 10, 50, 100], [5, 2, 20, 80]) == y,
+ method="incremental",
)
+ assert f"pwl0{PWL_INC_BINARY_SUFFIX}" in m.variables
+ binary = m.variables[f"pwl0{PWL_INC_BINARY_SUFFIX}"]
+ assert binary.labels.sizes[LP_SEG_DIM] == 3
+ assert f"pwl0{PWL_INC_LINK_SUFFIX}" in m.constraints
- m.add_piecewise_constraints(x, breakpoints, dim="bp", method="auto")
- assert f"pwl0{PWL_DELTA_SUFFIX}" in m.variables
-
- def test_auto_selects_incremental(self) -> None:
- """Test method='auto' selects incremental for monotonic breakpoints."""
+ def test_creates_order_constraints(self) -> None:
m = Model()
x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [0, 10, 50, 100], dims=["bp"], coords={"bp": [0, 1, 2, 3]}
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 10, 50, 100], [5, 2, 20, 80]) == y,
+ method="incremental",
)
+ assert f"pwl0{PWL_INC_ORDER_SUFFIX}" in m.constraints
- m.add_piecewise_constraints(x, breakpoints, dim="bp", method="auto")
-
- # Should use incremental (delta vars, no lambda)
- assert f"pwl0{PWL_DELTA_SUFFIX}" in m.variables
- assert f"pwl0{PWL_LAMBDA_SUFFIX}" not in m.variables
-
- def test_auto_selects_sos2(self) -> None:
- """Test method='auto' falls back to sos2 for non-monotonic breakpoints."""
+ def test_two_breakpoints_no_order_constraint(self) -> None:
+ """With only one segment, there's no order constraint needed."""
m = Model()
x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 100], [5, 80]) == y,
+ method="incremental",
+ )
+ assert f"pwl0{PWL_INC_BINARY_SUFFIX}" in m.variables
+ assert f"pwl0{PWL_INC_LINK_SUFFIX}" in m.constraints
+ assert f"pwl0{PWL_INC_ORDER_SUFFIX}" not in m.constraints
- # Non-monotonic across the full array (dict case would have linking dimension)
- # For single expr, breakpoints along dim are [0, 50, 30]
- breakpoints = xr.DataArray([0, 50, 30], dims=["bp"], coords={"bp": [0, 1, 2]})
-
- m.add_piecewise_constraints(x, breakpoints, dim="bp", method="auto")
-
- # Should use sos2 (lambda vars, no delta)
- assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
- assert f"pwl0{PWL_DELTA_SUFFIX}" not in m.variables
-
- def test_invalid_method_raises(self) -> None:
- """Test that an invalid method raises ValueError."""
+ def test_decreasing_monotonic(self) -> None:
m = Model()
x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray([0, 10, 50], dims=["bp"], coords={"bp": [0, 1, 2]})
-
- with pytest.raises(ValueError, match="method must be"):
- m.add_piecewise_constraints(x, breakpoints, dim="bp", method="invalid") # type: ignore[arg-type]
-
- def test_incremental_with_coords(self) -> None:
- """Test incremental formulation with extra coordinates."""
- m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- x = m.add_variables(coords=[generators], name="x")
-
- breakpoints = xr.DataArray(
- [[0, 50, 100], [0, 30, 80]],
- dims=["generator", "bp"],
- coords={"generator": generators, "bp": [0, 1, 2]},
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(
+ piecewise(x, [100, 50, 10, 0], [80, 20, 2, 5]) == y,
+ method="incremental",
)
+ assert f"pwl0{PWL_DELTA_SUFFIX}" in m.variables
- m.add_piecewise_constraints(x, breakpoints, dim="bp", method="incremental")
-
- delta_var = m.variables[f"pwl0{PWL_DELTA_SUFFIX}"]
- assert "generator" in delta_var.dims
- assert "bp_seg" in delta_var.dims
-
-
-# ===== Disjunctive Piecewise Linear Constraint Tests =====
+# ===========================================================================
+# Disjunctive piecewise
+# ===========================================================================
-class TestDisjunctiveBasicSingleVariable:
- """Tests for single variable disjunctive piecewise constraints."""
- def test_two_equal_segments(self) -> None:
- """Test with two equal-length segments."""
+class TestDisjunctive:
+ def test_equality_creates_binary(self) -> None:
m = Model()
x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [[0, 10], [50, 100]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(
+ piecewise(x, segments([[0, 10], [50, 100]]), segments([[0, 5], [20, 80]]))
+ == y,
)
-
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
-
- # Binary variables created
assert f"pwl0{PWL_BINARY_SUFFIX}" in m.variables
- # Selection constraint
assert f"pwl0{PWL_SELECT_SUFFIX}" in m.constraints
- # Lambda variables
assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
- # Convexity constraint
assert f"pwl0{PWL_CONVEX_SUFFIX}" in m.constraints
- # Link constraint
- assert f"pwl0{PWL_LINK_SUFFIX}" in m.constraints
- # SOS2 on lambda
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- assert lambda_var.attrs.get("sos_type") == 2
- assert lambda_var.attrs.get("sos_dim") == "breakpoint"
+ lam = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
+ assert lam.attrs.get("sos_type") == 2
- def test_uneven_segments_with_nan(self) -> None:
- """Test segments of different lengths with NaN padding."""
+ def test_inequality_creates_aux(self) -> None:
m = Model()
x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [[0, 5, 10], [50, 100, np.nan]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1, 2]},
- )
-
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
-
- # Lambda for NaN breakpoint should be masked
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- assert "segment" in lambda_var.dims
- assert "breakpoint" in lambda_var.dims
-
- def test_single_breakpoint_segment(self) -> None:
- """Test with a segment that has only one valid breakpoint (point segment)."""
- m = Model()
- x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [[0, 10], [42, np.nan]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(
+ piecewise(x, segments([[0, 10], [50, 100]]), segments([[0, 5], [20, 80]]))
+ >= y,
)
-
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
+ assert f"pwl0{PWL_AUX_SUFFIX}" in m.variables
assert f"pwl0{PWL_BINARY_SUFFIX}" in m.variables
+ assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
- def test_single_variable_with_coords(self) -> None:
- """Test coordinates are preserved on binary and lambda variables."""
- m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- x = m.add_variables(coords=[generators], name="x")
-
- breakpoints = xr.DataArray(
- [
- [[0, 10], [50, 100]],
- [[0, 20], [60, 90]],
- ],
- dims=["generator", "segment", "breakpoint"],
- coords={
- "generator": generators,
- "segment": [0, 1],
- "breakpoint": [0, 1],
- },
- )
-
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
-
- binary_var = m.variables[f"pwl0{PWL_BINARY_SUFFIX}"]
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
-
- # Both should preserve generator coordinates
- assert list(binary_var.coords["generator"].values) == ["gen1", "gen2"]
- assert list(lambda_var.coords["generator"].values) == ["gen1", "gen2"]
-
- # Binary has (generator, segment), lambda has (generator, segment, breakpoint)
- assert set(binary_var.dims) == {"generator", "segment"}
- assert set(lambda_var.dims) == {"generator", "segment", "breakpoint"}
-
- def test_return_value_is_selection_constraint(self) -> None:
- """Test the return value is the selection constraint."""
+ def test_method_lp_raises(self) -> None:
m = Model()
x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ with pytest.raises(ValueError, match="disjunctive"):
+ m.add_piecewise_constraints(
+ piecewise(
+ x, segments([[0, 10], [50, 100]]), segments([[0, 5], [20, 80]])
+ )
+ >= y,
+ method="lp",
+ )
- breakpoints = xr.DataArray(
- [[0, 10], [50, 100]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
- )
-
- result = m.add_disjunctive_piecewise_constraints(x, breakpoints)
-
- # Return value should be the selection constraint
- assert result is not None
- select_name = f"pwl0{PWL_SELECT_SUFFIX}"
- assert select_name in m.constraints
-
-
-class TestDisjunctiveDictOfVariables:
- """Tests for dict of variables with disjunctive constraints."""
-
- def test_dict_with_two_segments(self) -> None:
- """Test dict of variables with two segments."""
- m = Model()
- power = m.add_variables(name="power")
- cost = m.add_variables(name="cost")
-
- breakpoints = xr.DataArray(
- [[[0, 50], [0, 10]], [[80, 100], [20, 50]]],
- dims=["segment", "var", "breakpoint"],
- coords={
- "segment": [0, 1],
- "var": ["power", "cost"],
- "breakpoint": [0, 1],
- },
- )
-
- m.add_disjunctive_piecewise_constraints(
- {"power": power, "cost": cost},
- breakpoints,
- )
-
- assert f"pwl0{PWL_BINARY_SUFFIX}" in m.variables
- assert f"pwl0{PWL_LINK_SUFFIX}" in m.constraints
-
- def test_auto_detect_linking_dim_with_segment_dim(self) -> None:
- """Test auto-detection of linking dimension when segment_dim is also present."""
- m = Model()
- power = m.add_variables(name="power")
- cost = m.add_variables(name="cost")
-
- breakpoints = xr.DataArray(
- [[[0, 50], [0, 10]], [[80, 100], [20, 50]]],
- dims=["segment", "var", "breakpoint"],
- coords={
- "segment": [0, 1],
- "var": ["power", "cost"],
- "breakpoint": [0, 1],
- },
- )
-
- # Should auto-detect linking dim="var" (not segment)
- m.add_disjunctive_piecewise_constraints(
- {"power": power, "cost": cost},
- breakpoints,
- )
-
- assert f"pwl0{PWL_LINK_SUFFIX}" in m.constraints
-
-
-class TestDisjunctiveExtraDimensions:
- """Tests for extra dimensions on disjunctive constraints."""
-
- def test_extra_generator_dimension(self) -> None:
- """Test with an extra generator dimension."""
- m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- x = m.add_variables(coords=[generators], name="x")
-
- breakpoints = xr.DataArray(
- [
- [[0, 10], [50, 100]],
- [[0, 20], [60, 90]],
- ],
- dims=["generator", "segment", "breakpoint"],
- coords={
- "generator": generators,
- "segment": [0, 1],
- "breakpoint": [0, 1],
- },
- )
-
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
-
- # Binary and lambda should have generator dimension
- binary_var = m.variables[f"pwl0{PWL_BINARY_SUFFIX}"]
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- assert "generator" in binary_var.dims
- assert "generator" in lambda_var.dims
- assert "segment" in binary_var.dims
- assert "segment" in lambda_var.dims
-
- def test_multi_dimensional_generator_time(self) -> None:
- """Test variable with generator + time coords, verify all dims present."""
+ def test_method_incremental_raises(self) -> None:
m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- timesteps = pd.Index([0, 1, 2], name="time")
- x = m.add_variables(coords=[generators, timesteps], name="x")
-
- rng = np.random.default_rng(42)
- bp_data = rng.random((2, 3, 2, 2)) * 100
- # Sort breakpoints within each segment
- bp_data = np.sort(bp_data, axis=-1)
-
- breakpoints = xr.DataArray(
- bp_data,
- dims=["generator", "time", "segment", "breakpoint"],
- coords={
- "generator": generators,
- "time": timesteps,
- "segment": [0, 1],
- "breakpoint": [0, 1],
- },
- )
-
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
-
- binary_var = m.variables[f"pwl0{PWL_BINARY_SUFFIX}"]
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
-
- # All extra dims should be present
- for dim_name in ["generator", "time", "segment"]:
- assert dim_name in binary_var.dims
- for dim_name in ["generator", "time", "segment", "breakpoint"]:
- assert dim_name in lambda_var.dims
+ x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ with pytest.raises(ValueError, match="disjunctive"):
+ m.add_piecewise_constraints(
+ piecewise(
+ x, segments([[0, 10], [50, 100]]), segments([[0, 5], [20, 80]])
+ )
+ == y,
+ method="incremental",
+ )
- def test_dict_with_additional_coords(self) -> None:
- """Test dict of variables with extra generator dim, binary/lambda exclude linking dimension."""
+ def test_multi_dimensional(self) -> None:
m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- power = m.add_variables(coords=[generators], name="power")
- cost = m.add_variables(coords=[generators], name="cost")
-
- breakpoints = xr.DataArray(
- [
- [[[0, 50], [0, 10]], [[80, 100], [20, 30]]],
- [[[0, 40], [0, 8]], [[70, 90], [15, 25]]],
- ],
- dims=["generator", "segment", "var", "breakpoint"],
- coords={
- "generator": generators,
- "segment": [0, 1],
- "var": ["power", "cost"],
- "breakpoint": [0, 1],
- },
- )
-
- m.add_disjunctive_piecewise_constraints(
- {"power": power, "cost": cost},
- breakpoints,
+ gens = pd.Index(["gen_a", "gen_b"], name="generator")
+ x = m.add_variables(coords=[gens], name="x")
+ y = m.add_variables(coords=[gens], name="y")
+ m.add_piecewise_constraints(
+ piecewise(
+ x,
+ segments(
+ {"gen_a": [[0, 10], [50, 100]], "gen_b": [[0, 20], [60, 90]]},
+ dim="generator",
+ ),
+ segments(
+ {"gen_a": [[0, 5], [20, 80]], "gen_b": [[0, 8], [30, 70]]},
+ dim="generator",
+ ),
+ )
+ == y,
)
+ binary = m.variables[f"pwl0{PWL_BINARY_SUFFIX}"]
+ lam = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
+ assert "generator" in binary.dims
+ assert "generator" in lam.dims
- binary_var = m.variables[f"pwl0{PWL_BINARY_SUFFIX}"]
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
-
- # linking dimension (var) should NOT be in binary or lambda dims
- assert "var" not in binary_var.dims
- assert "var" not in lambda_var.dims
- # generator should be present
- assert "generator" in binary_var.dims
- assert "generator" in lambda_var.dims
+# ===========================================================================
+# Validation
+# ===========================================================================
-class TestDisjunctiveMasking:
- """Tests for masking functionality in disjunctive constraints."""
-
- def test_nan_masking_labels(self) -> None:
- """Test NaN breakpoints mask lambda labels to -1."""
+class TestValidation:
+ def test_non_descriptor_raises(self) -> None:
m = Model()
x = m.add_variables(name="x")
+ with pytest.raises(TypeError, match="PiecewiseConstraintDescriptor"):
+ m.add_piecewise_constraints(x) # type: ignore
- breakpoints = xr.DataArray(
- [[0, 5, 10], [50, 100, np.nan]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1, 2]},
- )
-
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
-
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- # Segment 0: all 3 breakpoints valid (labels != -1)
- seg0_labels = lambda_var.labels.sel(segment=0)
- assert (seg0_labels != -1).all()
- # Segment 1: breakpoint 2 is NaN → masked (label == -1)
- seg1_bp2_label = lambda_var.labels.sel(segment=1, breakpoint=2)
- assert int(seg1_bp2_label) == -1
-
- # Binary: both segments have at least one valid breakpoint
- binary_var = m.variables[f"pwl0{PWL_BINARY_SUFFIX}"]
- assert (binary_var.labels != -1).all()
-
- def test_nan_masking_partial_segment(self) -> None:
- """Test partial NaN — lambda masked but segment binary still valid."""
+ def test_invalid_method_raises(self) -> None:
m = Model()
x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ with pytest.raises(ValueError, match="method must be"):
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 10, 50], [5, 2, 20]) == y,
+ method="invalid", # type: ignore
+ )
- # Segment 0 has 3 valid breakpoints, segment 1 has 2 valid + 1 NaN
- breakpoints = xr.DataArray(
- [[0, 5, 10], [50, 100, np.nan]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1, 2]},
- )
-
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
-
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- binary_var = m.variables[f"pwl0{PWL_BINARY_SUFFIX}"]
- # Segment 1 binary is still valid (has 2 valid breakpoints)
- assert int(binary_var.labels.sel(segment=1)) != -1
+# ===========================================================================
+# Name generation
+# ===========================================================================
- # Segment 1 valid lambdas (breakpoint 0, 1) should be valid
- assert int(lambda_var.labels.sel(segment=1, breakpoint=0)) != -1
- assert int(lambda_var.labels.sel(segment=1, breakpoint=1)) != -1
- def test_explicit_mask(self) -> None:
- """Test user-provided mask disables specific entries."""
+class TestNameGeneration:
+ def test_auto_name(self) -> None:
m = Model()
x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ z = m.add_variables(name="z")
+ m.add_piecewise_constraints(piecewise(x, [0, 10, 50], [5, 2, 20]) == y)
+ m.add_piecewise_constraints(piecewise(x, [0, 20, 80], [10, 15, 50]) == z)
+ assert f"pwl0{PWL_DELTA_SUFFIX}" in m.variables
+ assert f"pwl1{PWL_DELTA_SUFFIX}" in m.variables
- breakpoints = xr.DataArray(
- [[0, 10], [50, 100]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
- )
-
- # Mask out entire segment 1
- mask = xr.DataArray(
- [[True, True], [False, False]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
- )
-
- m.add_disjunctive_piecewise_constraints(x, breakpoints, mask=mask)
-
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- binary_var = m.variables[f"pwl0{PWL_BINARY_SUFFIX}"]
-
- # Segment 0 lambdas should be valid
- assert (lambda_var.labels.sel(segment=0) != -1).all()
- # Segment 1 lambdas should be masked
- assert (lambda_var.labels.sel(segment=1) == -1).all()
- # Segment 1 binary should be masked (no valid breakpoints)
- assert int(binary_var.labels.sel(segment=1)) == -1
-
- def test_skip_nan_check(self) -> None:
- """Test skip_nan_check=True treats all breakpoints as valid."""
+ def test_custom_name(self) -> None:
m = Model()
x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [[0, 5, 10], [50, 100, np.nan]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1, 2]},
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 10, 50], [5, 2, 20]) == y,
+ name="my_pwl",
)
+ assert f"my_pwl{PWL_DELTA_SUFFIX}" in m.variables
+ assert f"my_pwl{PWL_X_LINK_SUFFIX}" in m.constraints
+ assert f"my_pwl{PWL_Y_LINK_SUFFIX}" in m.constraints
- m.add_disjunctive_piecewise_constraints(x, breakpoints, skip_nan_check=True)
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- # All labels should be valid (no masking)
- assert (lambda_var.labels != -1).all()
+# ===========================================================================
+# Broadcasting
+# ===========================================================================
- def test_dict_mask_without_linking_dim(self) -> None:
- """Test dict case accepts mask that omits linking dimension but is broadcastable."""
- m = Model()
- power = m.add_variables(name="power")
- cost = m.add_variables(name="cost")
-
- breakpoints = xr.DataArray(
- [[[0, 50], [0, 10]], [[80, 100], [20, 30]]],
- dims=["segment", "var", "breakpoint"],
- coords={
- "segment": [0, 1],
- "var": ["power", "cost"],
- "breakpoint": [0, 1],
- },
- )
-
- # Mask over segment/breakpoint only; should broadcast across var
- mask = xr.DataArray(
- [[True, True], [False, False]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
- )
- m.add_disjunctive_piecewise_constraints(
- {"power": power, "cost": cost},
- breakpoints,
- mask=mask,
+class TestBroadcasting:
+ def test_broadcast_over_extra_dims(self) -> None:
+ m = Model()
+ gens = pd.Index(["gen_a", "gen_b"], name="generator")
+ times = pd.Index([0, 1, 2], name="time")
+ x = m.add_variables(coords=[gens, times], name="x")
+ y = m.add_variables(coords=[gens, times], name="y")
+ # Points only have generator dim → broadcast over time
+ m.add_piecewise_constraints(
+ piecewise(
+ x,
+ breakpoints(
+ {"gen_a": [0, 10, 50], "gen_b": [0, 20, 80]}, dim="generator"
+ ),
+ breakpoints(
+ {"gen_a": [0, 5, 30], "gen_b": [0, 8, 50]}, dim="generator"
+ ),
+ )
+ == y,
)
+ delta = m.variables[f"pwl0{PWL_DELTA_SUFFIX}"]
+ assert "generator" in delta.dims
+ assert "time" in delta.dims
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- assert (lambda_var.labels.sel(segment=0) != -1).all()
- assert (lambda_var.labels.sel(segment=1) == -1).all()
+# ===========================================================================
+# NaN masking
+# ===========================================================================
-class TestDisjunctiveValidationErrors:
- """Tests for validation errors in disjunctive constraints."""
-
- def test_missing_dim(self) -> None:
- """Test error when breakpoints don't have dim."""
- m = Model()
- x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [[0, 10], [50, 100]],
- dims=["segment", "wrong"],
- coords={"segment": [0, 1], "wrong": [0, 1]},
- )
-
- with pytest.raises(ValueError, match="must have dimension"):
- m.add_disjunctive_piecewise_constraints(x, breakpoints, dim="breakpoint")
- def test_missing_segment_dim(self) -> None:
- """Test error when breakpoints don't have segment_dim."""
+class TestNaNMasking:
+ def test_nan_masks_lambda_labels(self) -> None:
+ """NaN in y_points produces masked labels in SOS2 formulation."""
m = Model()
x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [0, 10, 50],
- dims=["breakpoint"],
- coords={"breakpoint": [0, 1, 2]},
+ y = m.add_variables(name="y")
+ x_pts = xr.DataArray([0, 10, 50, np.nan], dims=[BREAKPOINT_DIM])
+ y_pts = xr.DataArray([0, 5, 20, np.nan], dims=[BREAKPOINT_DIM])
+ m.add_piecewise_constraints(
+ piecewise(x, x_pts, y_pts) == y,
+ method="sos2",
)
+ lam = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
+ # First 3 should be valid, last masked
+ assert (lam.labels.isel({BREAKPOINT_DIM: slice(None, 3)}) != -1).all()
+ assert int(lam.labels.isel({BREAKPOINT_DIM: 3})) == -1
- with pytest.raises(ValueError, match="must have dimension"):
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
-
- def test_same_dim_segment_dim(self) -> None:
- """Test error when dim == segment_dim."""
+ def test_skip_nan_check_with_nan_raises(self) -> None:
+ """skip_nan_check=True with NaN breakpoints raises ValueError."""
m = Model()
x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [[0, 10], [50, 100]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
- )
-
- with pytest.raises(ValueError, match="must be different"):
- m.add_disjunctive_piecewise_constraints(
- x, breakpoints, dim="segment", segment_dim="segment"
+ y = m.add_variables(name="y")
+ x_pts = xr.DataArray([0, 10, 50, np.nan], dims=[BREAKPOINT_DIM])
+ y_pts = xr.DataArray([0, 5, 20, np.nan], dims=[BREAKPOINT_DIM])
+ with pytest.raises(ValueError, match="skip_nan_check=True but breakpoints"):
+ m.add_piecewise_constraints(
+ piecewise(x, x_pts, y_pts) == y,
+ method="sos2",
+ skip_nan_check=True,
)
- def test_non_numeric_coords(self) -> None:
- """Test error when dim coordinates are not numeric."""
+ def test_skip_nan_check_without_nan(self) -> None:
+ """skip_nan_check=True without NaN works fine (no mask computed)."""
m = Model()
x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [[0, 10], [50, 100]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": ["a", "b"]},
- )
-
- with pytest.raises(ValueError, match="numeric coordinates"):
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
-
- def test_invalid_expr(self) -> None:
- """Test error when expr is invalid type."""
- m = Model()
-
- breakpoints = xr.DataArray(
- [[0, 10], [50, 100]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
+ y = m.add_variables(name="y")
+ x_pts = xr.DataArray([0, 10, 50, 100], dims=[BREAKPOINT_DIM])
+ y_pts = xr.DataArray([0, 5, 20, 40], dims=[BREAKPOINT_DIM])
+ m.add_piecewise_constraints(
+ piecewise(x, x_pts, y_pts) == y,
+ method="sos2",
+ skip_nan_check=True,
)
+ lam = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
+ assert (lam.labels != -1).all()
- with pytest.raises(
- TypeError, match="must be a Variable, LinearExpression, or dict"
- ):
- m.add_disjunctive_piecewise_constraints("invalid", breakpoints) # type: ignore
-
- def test_expression_support(self) -> None:
- """Test that LinearExpression (x + y) works as input."""
+ def test_sos2_interior_nan_raises(self) -> None:
+ """SOS2 with interior NaN breakpoints raises ValueError."""
m = Model()
x = m.add_variables(name="x")
y = m.add_variables(name="y")
+ x_pts = xr.DataArray([0, np.nan, 50, 100], dims=[BREAKPOINT_DIM])
+ y_pts = xr.DataArray([0, np.nan, 20, 40], dims=[BREAKPOINT_DIM])
+ with pytest.raises(ValueError, match="non-trailing NaN"):
+ m.add_piecewise_constraints(
+ piecewise(x, x_pts, y_pts) == y,
+ method="sos2",
+ )
- breakpoints = xr.DataArray(
- [[0, 10], [50, 100]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
- )
- m.add_disjunctive_piecewise_constraints(x + y, breakpoints)
+# ===========================================================================
+# Convexity detection edge cases
+# ===========================================================================
- assert f"pwl0{PWL_BINARY_SUFFIX}" in m.variables
- assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
- assert f"pwl0{PWL_LINK_SUFFIX}" in m.constraints
- def test_no_matching_linking_dim(self) -> None:
- """Test error when no breakpoints dimension matches dict keys."""
+class TestConvexityDetection:
+ def test_linear_uses_lp_both_directions(self) -> None:
+ """Linear function uses LP for both <= and >= inequalities."""
m = Model()
- power = m.add_variables(name="power")
- cost = m.add_variables(name="cost")
-
- breakpoints = xr.DataArray(
- [[0, 50], [80, 100]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
+ x = m.add_variables(lower=0, upper=100, name="x")
+ y1 = m.add_variables(name="y1")
+ y2 = m.add_variables(name="y2")
+ # y1 >= f(x) → LP
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 100], [0, 25, 50]) <= y1,
)
-
- with pytest.raises(ValueError, match="Could not auto-detect linking dimension"):
- m.add_disjunctive_piecewise_constraints(
- {"power": power, "cost": cost},
- breakpoints,
- )
-
- def test_linking_dim_coords_mismatch(self) -> None:
- """Test error when breakpoint dimension coords don't match dict keys."""
- m = Model()
- power = m.add_variables(name="power")
- cost = m.add_variables(name="cost")
-
- breakpoints = xr.DataArray(
- [[[0, 50], [0, 10]], [[80, 100], [20, 30]]],
- dims=["segment", "var", "breakpoint"],
- coords={
- "segment": [0, 1],
- "var": ["wrong1", "wrong2"],
- "breakpoint": [0, 1],
- },
+ assert f"pwl0{PWL_LP_SUFFIX}" in m.constraints
+ # y2 <= f(x) → also LP (linear is both convex and concave)
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 100], [0, 25, 50]) >= y2,
)
+ assert f"pwl1{PWL_LP_SUFFIX}" in m.constraints
- with pytest.raises(ValueError, match="Could not auto-detect linking dimension"):
- m.add_disjunctive_piecewise_constraints(
- {"power": power, "cost": cost},
- breakpoints,
- )
-
-
-class TestDisjunctiveNameGeneration:
- """Tests for name generation in disjunctive constraints."""
-
- def test_shared_counter_with_continuous(self) -> None:
- """Test that disjunctive and continuous PWL share the counter."""
+ def test_single_segment_uses_lp(self) -> None:
+ """A single segment (2 breakpoints) is linear; uses LP."""
m = Model()
- x = m.add_variables(name="x")
+ x = m.add_variables(lower=0, upper=100, name="x")
y = m.add_variables(name="y")
-
- bp_continuous = xr.DataArray([0, 10, 50], dims=["bp"], coords={"bp": [0, 1, 2]})
- m.add_piecewise_constraints(x, bp_continuous, dim="bp")
-
- bp_disjunctive = xr.DataArray(
- [[0, 10], [50, 100]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 100], [0, 50]) <= y,
)
- m.add_disjunctive_piecewise_constraints(y, bp_disjunctive)
-
- # First is pwl0, second is pwl1
- assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
- assert f"pwl1{PWL_BINARY_SUFFIX}" in m.variables
+ assert f"pwl0{PWL_LP_SUFFIX}" in m.constraints
- def test_custom_name(self) -> None:
- """Test custom name for disjunctive constraints."""
+ def test_mixed_convexity_uses_sos2(self) -> None:
+ """Mixed convexity should fall back to SOS2 for inequalities."""
m = Model()
- x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [[0, 10], [50, 100]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
+ x = m.add_variables(lower=0, upper=100, name="x")
+ y = m.add_variables(name="y")
+ # Mixed: slope goes up then down → neither convex nor concave
+ # y <= f(x) → piecewise >= y → sign="<=" internally
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 30, 60, 100], [0, 40, 30, 50]) >= y,
)
+ assert f"pwl0{PWL_AUX_SUFFIX}" in m.variables
+ assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
- m.add_disjunctive_piecewise_constraints(x, breakpoints, name="my_dpwl")
-
- assert f"my_dpwl{PWL_BINARY_SUFFIX}" in m.variables
- assert f"my_dpwl{PWL_SELECT_SUFFIX}" in m.constraints
- assert f"my_dpwl{PWL_LAMBDA_SUFFIX}" in m.variables
- assert f"my_dpwl{PWL_CONVEX_SUFFIX}" in m.constraints
- assert f"my_dpwl{PWL_LINK_SUFFIX}" in m.constraints
+# ===========================================================================
+# LP file output
+# ===========================================================================
-class TestDisjunctiveLPFileOutput:
- """Tests for LP file output with disjunctive piecewise constraints."""
- def test_lp_contains_sos2_and_binary(self, tmp_path: Path) -> None:
- """Test LP file contains SOS2 section and binary variables."""
+class TestLPFileOutput:
+ def test_sos2_equality(self, tmp_path: Path) -> None:
m = Model()
- x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [[0.0, 10.0], [50.0, 100.0]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
+ x = m.add_variables(name="x", lower=0, upper=100)
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(
+ piecewise(x, [0.0, 10.0, 50.0, 100.0], [5.0, 2.0, 20.0, 80.0]) == y,
+ method="sos2",
)
-
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
- m.add_objective(x)
-
- fn = tmp_path / "dpwl.lp"
+ m.add_objective(y)
+ fn = tmp_path / "pwl_eq.lp"
m.to_file(fn, io_api="lp")
- content = fn.read_text()
-
- # Should contain SOS2 section
- assert "\nsos\n" in content.lower()
- assert "s2" in content.lower()
-
- # Should contain binary section
- assert "binary" in content.lower() or "binaries" in content.lower()
-
+ content = fn.read_text().lower()
+ assert "sos" in content
+ assert "s2" in content
-class TestDisjunctiveMultiBreakpointSegments:
- """Tests for segments with multiple breakpoints (unique to disjunctive formulation)."""
-
- def test_three_breakpoints_per_segment(self) -> None:
- """Test segments with 3 breakpoints each — verify lambda shape."""
+ def test_lp_formulation_no_sos2(self, tmp_path: Path) -> None:
m = Model()
- x = m.add_variables(name="x")
-
- # 2 segments, each with 3 breakpoints
- breakpoints = xr.DataArray(
- [[0, 5, 10], [50, 75, 100]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1, 2]},
+ x = m.add_variables(name="x", lower=0, upper=100)
+ y = m.add_variables(name="y")
+ # Concave: pw >= y uses LP
+ m.add_piecewise_constraints(
+ piecewise(x, [0.0, 50.0, 100.0], [0.0, 40.0, 60.0]) >= y,
)
+ m.add_objective(y)
+ fn = tmp_path / "pwl_lp.lp"
+ m.to_file(fn, io_api="lp")
+ content = fn.read_text().lower()
+ assert "s2" not in content
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
-
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- # Lambda should have shape (2 segments, 3 breakpoints)
- assert lambda_var.labels.sizes["segment"] == 2
- assert lambda_var.labels.sizes["breakpoint"] == 3
- # All labels valid (no NaN)
- assert (lambda_var.labels != -1).all()
-
- def test_mixed_segment_lengths_nan_padding(self) -> None:
- """Test one segment with 4 breakpoints, another with 2 (NaN-padded)."""
+ def test_disjunctive_sos2_and_binary(self, tmp_path: Path) -> None:
m = Model()
- x = m.add_variables(name="x")
-
- # Segment 0: 4 valid breakpoints
- # Segment 1: 2 valid breakpoints + 2 NaN
- breakpoints = xr.DataArray(
- [[0, 5, 10, 15], [50, 100, np.nan, np.nan]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1, 2, 3]},
+ x = m.add_variables(name="x", lower=0, upper=100)
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(
+ piecewise(
+ x,
+ segments([[0.0, 10.0], [50.0, 100.0]]),
+ segments([[0.0, 5.0], [20.0, 80.0]]),
+ )
+ == y,
)
+ m.add_objective(y)
+ fn = tmp_path / "pwl_disj.lp"
+ m.to_file(fn, io_api="lp")
+ content = fn.read_text().lower()
+ assert "s2" in content
+ assert "binary" in content or "binaries" in content
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
-
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- binary_var = m.variables[f"pwl0{PWL_BINARY_SUFFIX}"]
-
- # Lambda shape: (2 segments, 4 breakpoints)
- assert lambda_var.labels.sizes["segment"] == 2
- assert lambda_var.labels.sizes["breakpoint"] == 4
-
- # Segment 0: all 4 lambdas valid
- assert (lambda_var.labels.sel(segment=0) != -1).all()
-
- # Segment 1: first 2 valid, last 2 masked
- assert (lambda_var.labels.sel(segment=1, breakpoint=0) != -1).item()
- assert (lambda_var.labels.sel(segment=1, breakpoint=1) != -1).item()
- assert (lambda_var.labels.sel(segment=1, breakpoint=2) == -1).item()
- assert (lambda_var.labels.sel(segment=1, breakpoint=3) == -1).item()
-
- # Both segment binaries valid (both have at least one valid breakpoint)
- assert (binary_var.labels != -1).all()
-
-
-_disjunctive_solvers = get_available_solvers_with_feature(
- SolverFeature.SOS_CONSTRAINTS, available_solvers
-)
+# ===========================================================================
+# Solver integration – SOS2 capable
+# ===========================================================================
-@pytest.mark.skipif(
- len(_disjunctive_solvers) == 0,
- reason="No solver with SOS constraint support installed",
-)
-class TestDisjunctiveSolverIntegration:
- """Integration tests for disjunctive piecewise constraints."""
- @pytest.fixture(params=_disjunctive_solvers)
+@pytest.mark.skipif(len(_sos2_solvers) == 0, reason="No solver with SOS2 support")
+class TestSolverSOS2:
+ @pytest.fixture(params=_sos2_solvers)
def solver_name(self, request: pytest.FixtureRequest) -> str:
return request.param
- def test_minimize_picks_low_segment(self, solver_name: str) -> None:
- """Test minimizing x picks the lower segment."""
- m = Model()
- x = m.add_variables(name="x")
-
- # Two segments: [0, 10] and [50, 100]
- breakpoints = xr.DataArray(
- [[0.0, 10.0], [50.0, 100.0]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
- )
-
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
- m.add_objective(x)
-
- status, cond = m.solve(solver_name=solver_name)
-
- assert status == "ok"
- # Should pick x=0 (minimum of low segment)
- assert np.isclose(x.solution.values, 0.0, atol=1e-5)
-
- def test_maximize_picks_high_segment(self, solver_name: str) -> None:
- """Test maximizing x picks the upper segment."""
+ def test_equality_minimize_cost(self, solver_name: str) -> None:
m = Model()
- x = m.add_variables(name="x")
-
- # Two segments: [0, 10] and [50, 100]
- breakpoints = xr.DataArray(
- [[0.0, 10.0], [50.0, 100.0]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
- )
-
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
- m.add_objective(x, sense="max")
-
- status, cond = m.solve(solver_name=solver_name)
-
- assert status == "ok"
- # Should pick x=100 (maximum of high segment)
- assert np.isclose(x.solution.values, 100.0, atol=1e-5)
-
- def test_dict_case_solver(self, solver_name: str) -> None:
- """Test disjunctive with dict of variables and solver."""
- m = Model()
- power = m.add_variables(name="power")
+ x = m.add_variables(lower=0, upper=100, name="x")
cost = m.add_variables(name="cost")
-
- # Two operating regions:
- # Region 0: power [0,50], cost [0,10]
- # Region 1: power [80,100], cost [20,30]
- breakpoints = xr.DataArray(
- [[[0.0, 50.0], [0.0, 10.0]], [[80.0, 100.0], [20.0, 30.0]]],
- dims=["segment", "var", "breakpoint"],
- coords={
- "segment": [0, 1],
- "var": ["power", "cost"],
- "breakpoint": [0, 1],
- },
- )
-
- m.add_disjunctive_piecewise_constraints(
- {"power": power, "cost": cost},
- breakpoints,
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 100], [0, 10, 50]) == cost,
)
-
- # Minimize cost
+ m.add_constraints(x >= 50, name="x_min")
m.add_objective(cost)
-
- status, cond = m.solve(solver_name=solver_name)
-
+ status, _ = m.solve(solver_name=solver_name)
assert status == "ok"
- # Should pick region 0, minimum cost = 0
- assert np.isclose(cost.solution.values, 0.0, atol=1e-5)
- assert np.isclose(power.solution.values, 0.0, atol=1e-5)
+ np.testing.assert_allclose(x.solution.values, 50, atol=1e-4)
+ np.testing.assert_allclose(cost.solution.values, 10, atol=1e-4)
- def test_three_segments_min(self, solver_name: str) -> None:
- """Test 3 segments, minimize picks lowest."""
+ def test_equality_maximize_efficiency(self, solver_name: str) -> None:
m = Model()
- x = m.add_variables(name="x")
-
- # Three segments: [0, 10], [30, 50], [80, 100]
- breakpoints = xr.DataArray(
- [[0.0, 10.0], [30.0, 50.0], [80.0, 100.0]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1, 2], "breakpoint": [0, 1]},
+ power = m.add_variables(lower=0, upper=100, name="power")
+ eff = m.add_variables(name="eff")
+ m.add_piecewise_constraints(
+ piecewise(power, [0, 25, 50, 75, 100], [0.7, 0.85, 0.95, 0.9, 0.8]) == eff,
)
-
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
- m.add_objective(x)
-
- status, cond = m.solve(solver_name=solver_name)
-
+ m.add_objective(eff, sense="max")
+ status, _ = m.solve(solver_name=solver_name)
assert status == "ok"
- assert np.isclose(x.solution.values, 0.0, atol=1e-5)
+ np.testing.assert_allclose(power.solution.values, 50, atol=1e-4)
+ np.testing.assert_allclose(eff.solution.values, 0.95, atol=1e-4)
- def test_constrained_mid_segment(self, solver_name: str) -> None:
- """Test constraint forcing x into middle of a segment, verify interpolation."""
+ def test_disjunctive_solve(self, solver_name: str) -> None:
m = Model()
x = m.add_variables(name="x")
-
- # Two segments: [0, 10] and [50, 100]
- breakpoints = xr.DataArray(
- [[0.0, 10.0], [50.0, 100.0]],
- dims=["segment", "breakpoint"],
- coords={"segment": [0, 1], "breakpoint": [0, 1]},
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(
+ piecewise(
+ x,
+ segments([[0.0, 10.0], [50.0, 100.0]]),
+ segments([[0.0, 5.0], [20.0, 80.0]]),
+ )
+ == y,
)
-
- m.add_disjunctive_piecewise_constraints(x, breakpoints)
-
- # Force x >= 60, so must be in segment 1
- m.add_constraints(x >= 60, name="x_lower")
- m.add_objective(x)
-
- status, cond = m.solve(solver_name=solver_name)
-
+ m.add_constraints(x >= 60, name="x_min")
+ m.add_objective(y)
+ status, _ = m.solve(solver_name=solver_name)
assert status == "ok"
- # Minimum in segment 1 with x >= 60 → x = 60
- assert np.isclose(x.solution.values, 60.0, atol=1e-5)
-
- def test_multi_breakpoint_segment_solver(self, solver_name: str) -> None:
- """Test segment with 3 breakpoints, verify correct interpolated value."""
- m = Model()
- power = m.add_variables(name="power")
- cost = m.add_variables(name="cost")
+ # x=60 on second segment: y = 20 + (80-20)/(100-50)*(60-50) = 32
+ np.testing.assert_allclose(float(x.solution.values), 60, atol=1e-4)
+ np.testing.assert_allclose(float(y.solution.values), 32, atol=1e-4)
- # Both segments have 3 breakpoints (no NaN padding needed)
- # Segment 0: 3-breakpoint curve (power [0,50,100], cost [0,10,50])
- # Segment 1: 3-breakpoint curve (power [200,250,300], cost [80,90,100])
- breakpoints = xr.DataArray(
- [
- [[0.0, 50.0, 100.0], [0.0, 10.0, 50.0]],
- [[200.0, 250.0, 300.0], [80.0, 90.0, 100.0]],
- ],
- dims=["segment", "var", "breakpoint"],
- coords={
- "segment": [0, 1],
- "var": ["power", "cost"],
- "breakpoint": [0, 1, 2],
- },
- )
-
- m.add_disjunctive_piecewise_constraints(
- {"power": power, "cost": cost},
- breakpoints,
- )
-
- # Constraint: power >= 50, minimize cost → picks segment 0, power=50, cost=10
- m.add_constraints(power >= 50, name="power_min")
- m.add_constraints(power <= 150, name="power_max")
- m.add_objective(cost)
- status, cond = m.solve(solver_name=solver_name)
+# ===========================================================================
+# Solver integration – LP formulation (any solver)
+# ===========================================================================
- assert status == "ok"
- assert np.isclose(power.solution.values, 50.0, atol=1e-5)
- assert np.isclose(cost.solution.values, 10.0, atol=1e-5)
-
- def test_multi_generator_solver(self, solver_name: str) -> None:
- """Test multiple generators with different disjunctive segments."""
- m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- power = m.add_variables(lower=0, coords=[generators], name="power")
- cost = m.add_variables(coords=[generators], name="cost")
-
- # gen1: two operating regions
- # Region 0: power [0,50], cost [0,15]
- # Region 1: power [80,100], cost [30,50]
- # gen2: two operating regions
- # Region 0: power [0,60], cost [0,10]
- # Region 1: power [70,100], cost [12,40]
- breakpoints = xr.DataArray(
- [
- [[[0.0, 50.0], [0.0, 15.0]], [[80.0, 100.0], [30.0, 50.0]]],
- [[[0.0, 60.0], [0.0, 10.0]], [[70.0, 100.0], [12.0, 40.0]]],
- ],
- dims=["generator", "segment", "var", "breakpoint"],
- coords={
- "generator": generators,
- "segment": [0, 1],
- "var": ["power", "cost"],
- "breakpoint": [0, 1],
- },
- )
-
- m.add_disjunctive_piecewise_constraints(
- {"power": power, "cost": cost},
- breakpoints,
- )
-
- # Total power demand >= 100
- m.add_constraints(power.sum() >= 100, name="demand")
- m.add_objective(cost.sum())
-
- status, cond = m.solve(solver_name=solver_name)
-
- assert status == "ok"
- total_power = power.solution.sum().values
- assert total_power >= 100 - 1e-5
-
-
-_incremental_solvers = [s for s in ["gurobi", "highs"] if s in available_solvers]
-
-
-@pytest.mark.skipif(
- len(_incremental_solvers) == 0,
- reason="No supported solver (gurobi/highs) installed",
-)
-class TestIncrementalSolverIntegrationMultiSolver:
- """Integration tests for incremental formulation across solvers."""
- @pytest.fixture(params=_incremental_solvers)
+@pytest.mark.skipif(len(_any_solvers) == 0, reason="No solver available")
+class TestSolverLP:
+ @pytest.fixture(params=_any_solvers)
def solver_name(self, request: pytest.FixtureRequest) -> str:
return request.param
- def test_solve_incremental_single(self, solver_name: str) -> None:
+ def test_concave_le(self, solver_name: str) -> None:
+ """Y <= concave f(x), maximize y"""
m = Model()
x = m.add_variables(lower=0, upper=100, name="x")
- cost = m.add_variables(name="cost")
-
- breakpoints = xr.DataArray(
- [[0, 50, 100], [0, 10, 50]],
- dims=["var", "bp"],
- coords={"var": ["x", "cost"], "bp": [0, 1, 2]},
- )
-
+ y = m.add_variables(name="y")
+ # Concave: [0,0],[50,40],[100,60]
m.add_piecewise_constraints(
- {"x": x, "cost": cost},
- breakpoints,
- dim="bp",
- method="incremental",
+ piecewise(x, [0, 50, 100], [0, 40, 60]) >= y,
)
-
- m.add_constraints(x >= 50, name="x_min")
- m.add_objective(cost)
-
- status, cond = m.solve(solver_name=solver_name)
-
+ m.add_constraints(x <= 75, name="x_max")
+ m.add_objective(y, sense="max")
+ status, _ = m.solve(solver_name=solver_name)
assert status == "ok"
- assert np.isclose(x.solution.values, 50, atol=1e-5)
- assert np.isclose(cost.solution.values, 10, atol=1e-5)
-
-
-class TestIncrementalDecreasingBreakpointsSolver:
- """Solver test for incremental formulation with decreasing breakpoints."""
-
- @pytest.fixture(params=_incremental_solvers)
- def solver_name(self, request: pytest.FixtureRequest) -> str:
- return request.param
+ # At x=75: y = 40 + 0.4*(75-50) = 50
+ np.testing.assert_allclose(float(x.solution.values), 75, atol=1e-4)
+ np.testing.assert_allclose(float(y.solution.values), 50, atol=1e-4)
- def test_decreasing_breakpoints_solver(self, solver_name: str) -> None:
+ def test_convex_ge(self, solver_name: str) -> None:
+ """Y >= convex f(x), minimize y"""
m = Model()
x = m.add_variables(lower=0, upper=100, name="x")
- cost = m.add_variables(name="cost")
-
- breakpoints = xr.DataArray(
- [[100, 50, 0], [50, 10, 0]],
- dims=["var", "bp"],
- coords={"var": ["x", "cost"], "bp": [0, 1, 2]},
- )
-
+ y = m.add_variables(name="y")
+ # Convex: [0,0],[50,10],[100,60]
m.add_piecewise_constraints(
- {"x": x, "cost": cost},
- breakpoints,
- dim="bp",
- method="incremental",
+ piecewise(x, [0, 50, 100], [0, 10, 60]) <= y,
)
-
- m.add_constraints(x >= 50, name="x_min")
- m.add_objective(cost)
-
- status, cond = m.solve(solver_name=solver_name)
-
+ m.add_constraints(x >= 25, name="x_min")
+ m.add_objective(y)
+ status, _ = m.solve(solver_name=solver_name)
assert status == "ok"
- assert np.isclose(x.solution.values, 50, atol=1e-5)
- assert np.isclose(cost.solution.values, 10, atol=1e-5)
-
-
-class TestIncrementalNonMonotonicDictRaises:
- """Test that non-monotonic breakpoints in a dict raise ValueError."""
-
- def test_non_monotonic_in_dict_raises(self) -> None:
- m = Model()
- x = m.add_variables(name="x")
- y = m.add_variables(name="y")
-
- breakpoints = xr.DataArray(
- [[0, 50, 100], [0, 30, 10]],
- dims=["var", "bp"],
- coords={"var": ["x", "y"], "bp": [0, 1, 2]},
- )
-
- with pytest.raises(ValueError, match="strictly monotonic"):
- m.add_piecewise_constraints(
- {"x": x, "y": y},
- breakpoints,
- dim="bp",
- method="incremental",
+ # At x=25: y = 0.2*25 = 5
+ np.testing.assert_allclose(float(x.solution.values), 25, atol=1e-4)
+ np.testing.assert_allclose(float(y.solution.values), 5, atol=1e-4)
+
+ def test_slopes_equivalence(self, solver_name: str) -> None:
+ """Same model with y_points vs slopes produces identical solutions."""
+ # Model 1: direct y_points
+ m1 = Model()
+ x1 = m1.add_variables(lower=0, upper=100, name="x")
+ y1 = m1.add_variables(name="y")
+ m1.add_piecewise_constraints(
+ piecewise(x1, [0, 50, 100], [0, 40, 60]) >= y1,
+ )
+ m1.add_constraints(x1 <= 75, name="x_max")
+ m1.add_objective(y1, sense="max")
+ s1, _ = m1.solve(solver_name=solver_name)
+
+ # Model 2: slopes
+ m2 = Model()
+ x2 = m2.add_variables(lower=0, upper=100, name="x")
+ y2 = m2.add_variables(name="y")
+ m2.add_piecewise_constraints(
+ piecewise(
+ x2,
+ [0, 50, 100],
+ breakpoints(slopes=[0.8, 0.4], x_points=[0, 50, 100], y0=0),
)
+ >= y2,
+ )
+ m2.add_constraints(x2 <= 75, name="x_max")
+ m2.add_objective(y2, sense="max")
+ s2, _ = m2.solve(solver_name=solver_name)
-
-class TestAdditionalEdgeCases:
- """Additional edge case tests identified in review."""
-
- def test_nan_breakpoints_delta_mask(self) -> None:
- """Verify delta mask correctly masks segments adjacent to trailing NaN breakpoints."""
- m = Model()
- x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [0, 10, np.nan, np.nan], dims=["bp"], coords={"bp": [0, 1, 2, 3]}
+ assert s1 == "ok"
+ assert s2 == "ok"
+ np.testing.assert_allclose(
+ float(y1.solution.values), float(y2.solution.values), atol=1e-4
)
- m.add_piecewise_constraints(x, breakpoints, dim="bp", method="incremental")
- delta_var = m.variables[f"pwl0{PWL_DELTA_SUFFIX}"]
- assert delta_var.labels.sel(bp_seg=0).values != -1
- assert delta_var.labels.sel(bp_seg=1).values == -1
- assert delta_var.labels.sel(bp_seg=2).values == -1
+class TestLPDomainConstraints:
+ """Tests for LP domain bound constraints."""
- def test_dict_with_linear_expressions(self) -> None:
- """Test _build_stacked_expr with LinearExpression values (not just Variable)."""
+ def test_lp_domain_constraints_created(self) -> None:
+ """LP method creates domain bound constraints."""
m = Model()
x = m.add_variables(name="x")
y = m.add_variables(name="y")
-
- breakpoints = xr.DataArray(
- [[0, 50, 100], [0, 10, 50]],
- dims=["var", "bp"],
- coords={"var": ["expr_a", "expr_b"], "bp": [0, 1, 2]},
- )
-
+ # Concave: slopes decreasing → y <= pw uses LP
m.add_piecewise_constraints(
- {"expr_a": 2 * x, "expr_b": 3 * y},
- breakpoints,
- dim="bp",
+ piecewise(x, [0, 50, 100], [0, 40, 60]) >= y,
)
+ assert f"pwl0{PWL_LP_DOMAIN_SUFFIX}_lo" in m.constraints
+ assert f"pwl0{PWL_LP_DOMAIN_SUFFIX}_hi" in m.constraints
- assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
- assert f"pwl0{PWL_LINK_SUFFIX}" in m.constraints
-
- def test_pwl_counter_increments(self) -> None:
- """Test that _pwlCounter increments and produces unique names."""
+ def test_lp_domain_constraints_multidim(self) -> None:
+ """Domain constraints have entity dimension for per-entity breakpoints."""
m = Model()
- x = m.add_variables(name="x")
- y = m.add_variables(name="y")
- breakpoints = xr.DataArray([0, 10, 50], dims=["bp"], coords={"bp": [0, 1, 2]})
-
- m.add_piecewise_constraints(x, breakpoints, dim="bp")
- assert m._pwlCounter == 1
-
- m.add_piecewise_constraints(y, breakpoints, dim="bp")
- assert m._pwlCounter == 2
- assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
- assert f"pwl1{PWL_LAMBDA_SUFFIX}" in m.variables
-
- def test_auto_with_mixed_monotonicity_dict(self) -> None:
- """Test method='auto' with opposite-direction slices in dict."""
- m = Model()
- power = m.add_variables(name="power")
- eff = m.add_variables(name="eff")
-
- breakpoints = xr.DataArray(
- [[0, 50, 100], [0.95, 0.9, 0.8]],
- dims=["var", "bp"],
- coords={"var": ["power", "eff"], "bp": [0, 1, 2]},
- )
-
+ x = m.add_variables(coords=[pd.Index(["a", "b"], name="entity")], name="x")
+ y = m.add_variables(coords=[pd.Index(["a", "b"], name="entity")], name="y")
+ x_pts = breakpoints({"a": [0, 50, 100], "b": [10, 60, 110]}, dim="entity")
+ y_pts = breakpoints({"a": [0, 40, 60], "b": [5, 35, 55]}, dim="entity")
m.add_piecewise_constraints(
- {"power": power, "eff": eff},
- breakpoints,
- dim="bp",
- method="auto",
+ piecewise(x, x_pts, y_pts) >= y,
)
+ lo_name = f"pwl0{PWL_LP_DOMAIN_SUFFIX}_lo"
+ hi_name = f"pwl0{PWL_LP_DOMAIN_SUFFIX}_hi"
+ assert lo_name in m.constraints
+ assert hi_name in m.constraints
+ # Domain constraints should have the entity dimension
+ assert "entity" in m.constraints[lo_name].labels.dims
+ assert "entity" in m.constraints[hi_name].labels.dims
- assert f"pwl0{PWL_DELTA_SUFFIX}" in m.variables
- assert f"pwl0{PWL_LAMBDA_SUFFIX}" not in m.variables
- def test_custom_segment_dim(self) -> None:
- """Test disjunctive with custom segment_dim name."""
- m = Model()
- x = m.add_variables(name="x")
+# ===========================================================================
+# Active parameter (commitment binary)
+# ===========================================================================
- breakpoints = xr.DataArray(
- [[0.0, 10.0], [50.0, 100.0]],
- dims=["zone", "breakpoint"],
- coords={"zone": [0, 1], "breakpoint": [0, 1]},
- )
- m.add_disjunctive_piecewise_constraints(x, breakpoints, segment_dim="zone")
+class TestActiveParameter:
+ """Tests for the ``active`` parameter in piecewise constraints."""
- assert f"pwl0{PWL_BINARY_SUFFIX}" in m.variables
- assert f"pwl0{PWL_SELECT_SUFFIX}" in m.constraints
-
- def test_sos2_return_value_is_convexity_constraint(self) -> None:
- """Test that add_piecewise_constraints (SOS2) returns the convexity constraint."""
+ def test_incremental_creates_active_bound(self) -> None:
m = Model()
x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ u = m.add_variables(binary=True, name="u")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 10, 50, 100], [5, 2, 20, 80], active=u) == y,
+ method="incremental",
+ )
+ assert f"pwl0{PWL_ACTIVE_BOUND_SUFFIX}" in m.constraints
+ assert f"pwl0{PWL_DELTA_SUFFIX}" in m.variables
- breakpoints = xr.DataArray([0, 10, 50], dims=["bp"], coords={"bp": [0, 1, 2]})
-
- result = m.add_piecewise_constraints(x, breakpoints, dim="bp")
- assert result.name == f"pwl0{PWL_CONVEX_SUFFIX}"
-
- def test_incremental_lp_no_sos2(self, tmp_path: Path) -> None:
- """Test that incremental formulation LP file has no SOS2 section."""
+ def test_active_none_is_default(self) -> None:
+ """Without active, formulation is identical to before."""
m = Model()
x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [0.0, 10.0, 50.0], dims=["bp"], coords={"bp": [0, 1, 2]}
+ y = m.add_variables(name="y")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 10, 50], [0, 5, 30]) == y,
+ method="incremental",
)
+ assert f"pwl0{PWL_ACTIVE_BOUND_SUFFIX}" not in m.constraints
- m.add_piecewise_constraints(x, breakpoints, dim="bp", method="incremental")
- m.add_objective(x)
-
- fn = tmp_path / "inc.lp"
- m.to_file(fn, io_api="lp")
- content = fn.read_text()
-
- assert "\nsos\n" not in content.lower()
- assert "s2" not in content.lower()
-
- def test_two_breakpoints_no_fill_constraint(self) -> None:
- """Test 2-breakpoint incremental produces no fill constraint."""
+ def test_active_with_lp_method_raises(self) -> None:
m = Model()
x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ u = m.add_variables(binary=True, name="u")
+ with pytest.raises(ValueError, match="not supported with method='lp'"):
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 100], [0, 40, 60], active=u) >= y,
+ method="lp",
+ )
- breakpoints = xr.DataArray([0, 100], dims=["bp"], coords={"bp": [0, 1]})
- m.add_piecewise_constraints(x, breakpoints, dim="bp", method="incremental")
-
- assert f"pwl0{PWL_FILL_SUFFIX}" not in m.constraints
- assert f"pwl0{PWL_LINK_SUFFIX}" in m.constraints
-
- def test_non_trailing_nan_incremental_raises(self) -> None:
- """Non-trailing NaN breakpoints raise ValueError with method='incremental'."""
+ def test_active_with_auto_lp_raises(self) -> None:
+ """Auto selects LP for concave >=, but active is incompatible."""
m = Model()
x = m.add_variables(name="x")
+ y = m.add_variables(name="y")
+ u = m.add_variables(binary=True, name="u")
+ with pytest.raises(ValueError, match="not supported with method='lp'"):
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 100], [0, 40, 60], active=u) >= y,
+ )
- breakpoints = xr.DataArray(
- [0, np.nan, 50, 100], dims=["bp"], coords={"bp": [0, 1, 2, 3]}
- )
-
- with pytest.raises(ValueError, match="non-trailing NaN"):
- m.add_piecewise_constraints(x, breakpoints, dim="bp", method="incremental")
-
- def test_non_trailing_nan_incremental_dict_raises(self) -> None:
- """Dict case with one variable having non-trailing NaN raises."""
+ def test_incremental_inequality_with_active(self) -> None:
+ """Inequality + active creates aux variable and active bound."""
m = Model()
x = m.add_variables(name="x")
y = m.add_variables(name="y")
-
- breakpoints = xr.DataArray(
- [[0, 50, np.nan, 100], [0, 10, 50, 80]],
- dims=["var", "bp"],
- coords={"var": ["x", "y"], "bp": [0, 1, 2, 3]},
+ u = m.add_variables(binary=True, name="u")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 100], [0, 10, 50], active=u) >= y,
+ method="incremental",
)
+ assert f"pwl0{PWL_AUX_SUFFIX}" in m.variables
+ assert f"pwl0{PWL_ACTIVE_BOUND_SUFFIX}" in m.constraints
+ assert "pwl0_ineq" in m.constraints
- with pytest.raises(ValueError, match="non-trailing NaN"):
- m.add_piecewise_constraints(
- {"x": x, "y": y},
- breakpoints,
- dim="bp",
- method="incremental",
- )
-
- def test_non_trailing_nan_falls_back_to_sos2(self) -> None:
- """method='auto' falls back to SOS2 for non-trailing NaN."""
+ def test_active_with_linear_expression(self) -> None:
+ """Active can be a LinearExpression, not just a Variable."""
m = Model()
x = m.add_variables(name="x")
-
- breakpoints = xr.DataArray(
- [0, np.nan, 50, 100], dims=["bp"], coords={"bp": [0, 1, 2, 3]}
+ y = m.add_variables(name="y")
+ u = m.add_variables(binary=True, name="u")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 100], [0, 10, 50], active=1 * u) == y,
+ method="incremental",
)
+ assert f"pwl0{PWL_ACTIVE_BOUND_SUFFIX}" in m.constraints
- m.add_piecewise_constraints(x, breakpoints, dim="bp", method="auto")
- assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
- assert f"pwl0{PWL_DELTA_SUFFIX}" not in m.variables
+# ===========================================================================
+# Solver integration – active parameter
+# ===========================================================================
-class TestBreakpointsFactory:
- def test_positional_list(self) -> None:
- bp = breakpoints([0, 50, 100])
- assert bp.dims == ("breakpoint",)
- assert list(bp.values) == [0.0, 50.0, 100.0]
- assert list(bp.coords["breakpoint"].values) == [0, 1, 2]
-
- def test_positional_dict(self) -> None:
- bp = breakpoints({"gen1": [0, 50, 100], "gen2": [0, 30]}, dim="generator")
- assert set(bp.dims) == {"generator", "breakpoint"}
- assert bp.sizes["generator"] == 2
- assert bp.sizes["breakpoint"] == 3
- assert np.isnan(bp.sel(generator="gen2", breakpoint=2))
-
- def test_positional_dict_without_dim_raises(self) -> None:
- with pytest.raises(ValueError, match="'dim' is required"):
- breakpoints({"gen1": [0, 50], "gen2": [0, 30]})
+@pytest.mark.skipif(len(_any_solvers) == 0, reason="No solver available")
+class TestSolverActive:
+ @pytest.fixture(params=_any_solvers)
+ def solver_name(self, request: pytest.FixtureRequest) -> str:
+ return request.param
- def test_kwargs_uniform(self) -> None:
- bp = breakpoints(power=[0, 50, 100], fuel=[10, 20, 30])
- assert "var" in bp.dims
- assert "breakpoint" in bp.dims
- assert list(bp.coords["var"].values) == ["power", "fuel"]
- assert bp.sizes["breakpoint"] == 3
+ def test_incremental_active_on(self, solver_name: str) -> None:
+ """When u=1 (forced on), normal PWL domain is active."""
+ m = Model()
+ x = m.add_variables(lower=0, upper=100, name="x")
+ y = m.add_variables(name="y")
+ u = m.add_variables(binary=True, name="u")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 100], [0, 10, 50], active=u) == y,
+ method="incremental",
+ )
+ m.add_constraints(u >= 1, name="force_on")
+ m.add_constraints(x >= 50, name="x_min")
+ m.add_objective(y)
+ status, _ = m.solve(solver_name=solver_name)
+ assert status == "ok"
+ np.testing.assert_allclose(float(x.solution.values), 50, atol=1e-4)
+ np.testing.assert_allclose(float(y.solution.values), 10, atol=1e-4)
- def test_kwargs_per_entity(self) -> None:
- bp = breakpoints(
- power={"gen1": [0, 50, 100], "gen2": [0, 30]},
- cost={"gen1": [0, 10, 50], "gen2": [0, 8]},
- dim="generator",
+ def test_incremental_active_off(self, solver_name: str) -> None:
+ """When u=0 (forced off), x and y must be zero."""
+ m = Model()
+ x = m.add_variables(lower=0, upper=100, name="x")
+ y = m.add_variables(name="y")
+ u = m.add_variables(binary=True, name="u")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 100], [0, 10, 50], active=u) == y,
+ method="incremental",
)
- assert "generator" in bp.dims
- assert "var" in bp.dims
- assert "breakpoint" in bp.dims
+ m.add_constraints(u <= 0, name="force_off")
+ m.add_objective(y, sense="max")
+ status, _ = m.solve(solver_name=solver_name)
+ assert status == "ok"
+ np.testing.assert_allclose(float(x.solution.values), 0, atol=1e-4)
+ np.testing.assert_allclose(float(y.solution.values), 0, atol=1e-4)
- def test_kwargs_mixed_list_and_dict(self) -> None:
- bp = breakpoints(
- power={"gen1": [0, 50], "gen2": [0, 30]},
- fuel=[10, 20],
- dim="generator",
- )
- assert "generator" in bp.dims
- assert "var" in bp.dims
- assert bp.sel(var="fuel", generator="gen1", breakpoint=0) == 10
- assert bp.sel(var="fuel", generator="gen2", breakpoint=0) == 10
-
- def test_kwargs_dataarray_passthrough(self) -> None:
- power_da = xr.DataArray([0, 50, 100], dims=["breakpoint"])
- bp = breakpoints(power=power_da, fuel=[10, 20, 30])
- assert "var" in bp.dims
- assert bp.sel(var="power", breakpoint=0) == 0
-
- def test_both_positional_and_kwargs_raises(self) -> None:
- with pytest.raises(ValueError, match="Cannot pass both"):
- breakpoints([0, 50], power=[10, 20])
-
- def test_neither_raises(self) -> None:
- with pytest.raises(ValueError, match="Must pass either"):
- breakpoints()
-
- def test_invalid_values_type_raises(self) -> None:
- with pytest.raises(TypeError, match="must be a list or dict"):
- breakpoints(42) # type: ignore
-
- def test_invalid_kwarg_type_raises(self) -> None:
- with pytest.raises(ValueError, match="must be a list, dict, or DataArray"):
- breakpoints(power=42) # type: ignore
-
- def test_kwargs_dict_without_dim_raises(self) -> None:
- with pytest.raises(ValueError, match="'dim' is required"):
- breakpoints(power={"gen1": [0, 50]}, cost=[10, 20])
+ def test_incremental_nonzero_base_active_off(self, solver_name: str) -> None:
+ """
+ Non-zero base (x₀=20, y₀=5) with u=0 must still force zero.
- def test_factory_output_works_with_piecewise(self) -> None:
+ Tests the x₀*u / y₀*u base term multiplication — would fail if
+ base terms aren't multiplied by active.
+ """
m = Model()
- x = m.add_variables(name="x")
- bp = breakpoints([0, 10, 50])
- m.add_piecewise_constraints(x, bp, dim="breakpoint")
- assert f"pwl0{PWL_LAMBDA_SUFFIX}" in m.variables
+ x = m.add_variables(lower=0, upper=100, name="x")
+ y = m.add_variables(name="y")
+ u = m.add_variables(binary=True, name="u")
+ m.add_piecewise_constraints(
+ piecewise(x, [20, 60, 100], [5, 20, 50], active=u) == y,
+ method="incremental",
+ )
+ m.add_constraints(u <= 0, name="force_off")
+ m.add_objective(y, sense="max")
+ status, _ = m.solve(solver_name=solver_name)
+ assert status == "ok"
+ np.testing.assert_allclose(float(x.solution.values), 0, atol=1e-4)
+ np.testing.assert_allclose(float(y.solution.values), 0, atol=1e-4)
- def test_factory_dict_output_works_with_piecewise(self) -> None:
+ def test_incremental_inequality_active_off(self, solver_name: str) -> None:
+ """Inequality with active=0: aux variable is 0, so y <= 0."""
m = Model()
- power = m.add_variables(name="power")
- cost = m.add_variables(name="cost")
- bp = breakpoints(power=[0, 50, 100], cost=[0, 10, 50])
+ x = m.add_variables(lower=0, upper=100, name="x")
+ y = m.add_variables(lower=0, name="y")
+ u = m.add_variables(binary=True, name="u")
m.add_piecewise_constraints(
- {"power": power, "cost": cost}, bp, dim="breakpoint"
+ piecewise(x, [0, 50, 100], [0, 10, 50], active=u) >= y,
+ method="incremental",
)
- assert f"pwl0{PWL_LINK_SUFFIX}" in m.constraints
-
+ m.add_constraints(u <= 0, name="force_off")
+ m.add_objective(y, sense="max")
+ status, _ = m.solve(solver_name=solver_name)
+ assert status == "ok"
+ np.testing.assert_allclose(float(y.solution.values), 0, atol=1e-4)
-class TestBreakpointsSegments:
- def test_list_of_tuples(self) -> None:
- bp = breakpoints.segments([(0, 10), (50, 100)])
- assert set(bp.dims) == {"segment", "breakpoint"}
- assert bp.sizes["segment"] == 2
- assert bp.sizes["breakpoint"] == 2
+ def test_unit_commitment_pattern(self, solver_name: str) -> None:
+ """Solver decides to commit: verifies correct fuel at operating point."""
+ m = Model()
+ p_min, p_max = 20.0, 100.0
+ fuel_at_pmin, fuel_at_pmax = 10.0, 60.0
- def test_ragged_segments(self) -> None:
- bp = breakpoints.segments([(0, 5, 10), (50, 100)])
- assert bp.sizes["breakpoint"] == 3
- assert np.isnan(bp.sel(segment=1, breakpoint=2))
+ power = m.add_variables(lower=0, upper=p_max, name="power")
+ fuel = m.add_variables(name="fuel")
+ u = m.add_variables(binary=True, name="commit")
- def test_per_entity_dict(self) -> None:
- bp = breakpoints.segments(
- {"gen1": [(0, 10), (50, 100)], "gen2": [(0, 20), (60, 90)]},
- dim="generator",
+ m.add_piecewise_constraints(
+ piecewise(power, [p_min, p_max], [fuel_at_pmin, fuel_at_pmax], active=u)
+ == fuel,
+ method="incremental",
)
- assert "generator" in bp.dims
- assert "segment" in bp.dims
- assert "breakpoint" in bp.dims
+ m.add_constraints(power >= 50, name="demand")
+ m.add_objective(fuel + 5 * u)
- def test_kwargs_multi_variable(self) -> None:
- bp = breakpoints.segments(
- power=[(0, 50), (80, 100)],
- cost=[(0, 10), (20, 30)],
+ status, _ = m.solve(solver_name=solver_name)
+ assert status == "ok"
+ np.testing.assert_allclose(float(u.solution.values), 1, atol=1e-4)
+ np.testing.assert_allclose(float(power.solution.values), 50, atol=1e-4)
+ # fuel = 10 + (60-10)/(100-20) * (50-20) = 28.75
+ np.testing.assert_allclose(float(fuel.solution.values), 28.75, atol=1e-4)
+
+ def test_multi_dimensional_solver(self, solver_name: str) -> None:
+ """Per-entity on/off: gen_a on at x=50, gen_b off at x=0."""
+ m = Model()
+ gens = pd.Index(["a", "b"], name="gen")
+ x = m.add_variables(lower=0, upper=100, coords=[gens], name="x")
+ y = m.add_variables(coords=[gens], name="y")
+ u = m.add_variables(binary=True, coords=[gens], name="u")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 100], [0, 10, 50], active=u) == y,
+ method="incremental",
)
- assert "segment" in bp.dims
- assert "var" in bp.dims
- assert "breakpoint" in bp.dims
-
- def test_segments_invalid_values_type_raises(self) -> None:
- with pytest.raises(TypeError, match="must be a list or dict"):
- breakpoints.segments(42) # type: ignore
-
- def test_segments_both_positional_and_kwargs_raises(self) -> None:
- with pytest.raises(ValueError, match="Cannot pass both"):
- breakpoints.segments([(0, 10)], power=[(0, 10)])
-
- def test_segments_neither_raises(self) -> None:
- with pytest.raises(ValueError, match="Must pass either"):
- breakpoints.segments()
-
- def test_segments_invalid_kwarg_type_raises(self) -> None:
- with pytest.raises(ValueError, match="must be a list, dict, or DataArray"):
- breakpoints.segments(power=42) # type: ignore
-
- def test_segments_kwargs_dict_without_dim_raises(self) -> None:
- with pytest.raises(ValueError, match="'dim' is required"):
- breakpoints.segments(power={"gen1": [(0, 50)]}, cost=[(10, 20)])
-
- def test_segments_dict_without_dim_raises(self) -> None:
- with pytest.raises(ValueError, match="'dim' is required"):
- breakpoints.segments({"gen1": [(0, 10)], "gen2": [(50, 100)]})
-
- def test_segments_works_with_disjunctive(self) -> None:
- m = Model()
- x = m.add_variables(name="x")
- bp = breakpoints.segments([(0, 10), (50, 100)])
- m.add_disjunctive_piecewise_constraints(x, bp)
- assert f"pwl0{PWL_BINARY_SUFFIX}" in m.variables
+ m.add_constraints(u.sel(gen="a") >= 1, name="a_on")
+ m.add_constraints(u.sel(gen="b") <= 0, name="b_off")
+ m.add_constraints(x.sel(gen="a") >= 50, name="a_min")
+ m.add_objective(y.sum())
+ status, _ = m.solve(solver_name=solver_name)
+ assert status == "ok"
+ np.testing.assert_allclose(float(x.solution.sel(gen="a")), 50, atol=1e-4)
+ np.testing.assert_allclose(float(y.solution.sel(gen="a")), 10, atol=1e-4)
+ np.testing.assert_allclose(float(x.solution.sel(gen="b")), 0, atol=1e-4)
+ np.testing.assert_allclose(float(y.solution.sel(gen="b")), 0, atol=1e-4)
-class TestAutobroadcast:
- def test_1d_breakpoints_2d_variable(self) -> None:
- m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- x = m.add_variables(coords=[generators], name="x")
- bp = breakpoints([0, 10, 50])
- m.add_piecewise_constraints(x, bp, dim="breakpoint")
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- assert "generator" in lambda_var.dims
- assert "breakpoint" in lambda_var.dims
+@pytest.mark.skipif(len(_sos2_solvers) == 0, reason="No SOS2-capable solver")
+class TestSolverActiveSOS2:
+ @pytest.fixture(params=_sos2_solvers)
+ def solver_name(self, request: pytest.FixtureRequest) -> str:
+ return request.param
- def test_already_matching_dims_noop(self) -> None:
+ def test_sos2_active_off(self, solver_name: str) -> None:
+ """SOS2: u=0 forces Σλ=0, collapsing x=0, y=0."""
m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- x = m.add_variables(coords=[generators], name="x")
- bp = xr.DataArray(
- [[0, 50, 100], [0, 30, 80]],
- dims=["generator", "bp"],
- coords={"generator": generators, "bp": [0, 1, 2]},
+ x = m.add_variables(lower=0, upper=100, name="x")
+ y = m.add_variables(name="y")
+ u = m.add_variables(binary=True, name="u")
+ m.add_piecewise_constraints(
+ piecewise(x, [0, 50, 100], [0, 10, 50], active=u) == y,
+ method="sos2",
)
- m.add_piecewise_constraints(x, bp, dim="bp")
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- assert "generator" in lambda_var.dims
+ m.add_constraints(u <= 0, name="force_off")
+ m.add_objective(y, sense="max")
+ status, _ = m.solve(solver_name=solver_name)
+ assert status == "ok"
+ np.testing.assert_allclose(float(x.solution.values), 0, atol=1e-4)
+ np.testing.assert_allclose(float(y.solution.values), 0, atol=1e-4)
- def test_dict_expr_broadcast(self) -> None:
+ def test_disjunctive_active_off(self, solver_name: str) -> None:
+ """Disjunctive: u=0 forces Σz_k=0, collapsing x=0, y=0."""
m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- power = m.add_variables(coords=[generators], name="power")
- cost = m.add_variables(coords=[generators], name="cost")
- bp = breakpoints(power=[0, 50, 100], cost=[0, 10, 50])
+ x = m.add_variables(lower=0, upper=100, name="x")
+ y = m.add_variables(name="y")
+ u = m.add_variables(binary=True, name="u")
m.add_piecewise_constraints(
- {"power": power, "cost": cost}, bp, dim="breakpoint"
- )
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- assert "generator" in lambda_var.dims
-
- def test_disjunctive_broadcast(self) -> None:
- m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- x = m.add_variables(coords=[generators], name="x")
- bp = breakpoints.segments([(0, 10), (50, 100)])
- m.add_disjunctive_piecewise_constraints(x, bp)
- binary_var = m.variables[f"pwl0{PWL_BINARY_SUFFIX}"]
- assert "generator" in binary_var.dims
-
- def test_broadcast_multi_dim(self) -> None:
- m = Model()
- generators = pd.Index(["gen1", "gen2"], name="generator")
- timesteps = pd.Index([0, 1, 2], name="time")
- x = m.add_variables(coords=[generators, timesteps], name="x")
- bp = breakpoints([0, 10, 50])
- m.add_piecewise_constraints(x, bp, dim="breakpoint")
- lambda_var = m.variables[f"pwl0{PWL_LAMBDA_SUFFIX}"]
- assert "generator" in lambda_var.dims
- assert "time" in lambda_var.dims
+ piecewise(
+ x,
+ segments([[0.0, 10.0], [50.0, 100.0]]),
+ segments([[0.0, 5.0], [20.0, 80.0]]),
+ active=u,
+ )
+ == y,
+ )
+ m.add_constraints(u <= 0, name="force_off")
+ m.add_objective(y, sense="max")
+ status, _ = m.solve(solver_name=solver_name)
+ assert status == "ok"
+ np.testing.assert_allclose(float(x.solution.values), 0, atol=1e-4)
+ np.testing.assert_allclose(float(y.solution.values), 0, atol=1e-4)
diff --git a/test/test_semi_continuous.py b/test/test_semi_continuous.py
new file mode 100644
index 00000000..f529c428
--- /dev/null
+++ b/test/test_semi_continuous.py
@@ -0,0 +1,180 @@
+"""Tests for semi-continuous variable support."""
+
+from pathlib import Path
+
+import numpy as np
+import pandas as pd
+import pytest
+
+from linopy import Model, available_solvers
+
+
+def test_add_semi_continuous_variable() -> None:
+ """Semi-continuous variable is created with correct attributes."""
+ m = Model()
+ x = m.add_variables(lower=1, upper=10, name="x", semi_continuous=True)
+ assert x.attrs["semi_continuous"] is True
+ assert not x.attrs["binary"]
+ assert not x.attrs["integer"]
+
+
+def test_semi_continuous_mutual_exclusivity() -> None:
+ """Semi-continuous cannot be combined with binary or integer."""
+ m = Model()
+ with pytest.raises(ValueError, match="only be one of"):
+ m.add_variables(lower=1, upper=10, binary=True, semi_continuous=True)
+ with pytest.raises(ValueError, match="only be one of"):
+ m.add_variables(lower=1, upper=10, integer=True, semi_continuous=True)
+
+
+def test_semi_continuous_requires_positive_lb() -> None:
+ """Semi-continuous variables require a positive lower bound."""
+ m = Model()
+ with pytest.raises(ValueError, match="positive scalar lower bound"):
+ m.add_variables(lower=-1, upper=10, semi_continuous=True)
+ with pytest.raises(ValueError, match="positive scalar lower bound"):
+ m.add_variables(lower=0, upper=10, semi_continuous=True)
+
+
+def test_semi_continuous_collection_property() -> None:
+ """Variables.semi_continuous filters correctly."""
+ m = Model()
+ m.add_variables(lower=1, upper=10, name="x", semi_continuous=True)
+ m.add_variables(lower=0, upper=5, name="y")
+ m.add_variables(name="z", binary=True)
+
+ assert list(m.variables.semi_continuous) == ["x"]
+ assert "x" not in m.variables.continuous
+ assert "y" in m.variables.continuous
+ assert "z" not in m.variables.continuous
+
+
+def test_semi_continuous_repr() -> None:
+ """Semi-continuous annotation appears in repr."""
+ m = Model()
+ m.add_variables(lower=1, upper=10, name="x", semi_continuous=True)
+ r = repr(m.variables)
+ assert "semi-continuous" in r
+
+
+def test_semi_continuous_vtypes() -> None:
+ """Matrices vtypes returns 'S' for semi-continuous variables."""
+ m = Model()
+ m.add_variables(lower=1, upper=10, name="x", semi_continuous=True)
+ m.add_variables(lower=0, upper=5, name="y")
+ m.add_variables(name="z", binary=True)
+ # Add a dummy constraint and objective so the model is valid
+ m.add_constraints(m.variables["y"] >= 0, name="dummy")
+ m.add_objective(m.variables["y"])
+
+ vtypes = m.matrices.vtypes
+ # x is semi-continuous -> "S", y is continuous -> "C", z is binary -> "B"
+ assert "S" in vtypes
+ assert "C" in vtypes
+ assert "B" in vtypes
+
+
+def test_semi_continuous_lp_file(tmp_path: Path) -> None:
+ """LP file contains semi-continuous section."""
+ m = Model()
+ m.add_variables(lower=1, upper=10, name="x", semi_continuous=True)
+ m.add_variables(lower=0, upper=5, name="y")
+ m.add_constraints(m.variables["y"] >= 0, name="dummy")
+ m.add_objective(m.variables["y"])
+
+ fn = tmp_path / "test.lp"
+ m.to_file(fn)
+ content = fn.read_text()
+ assert "semi-continuous" in content
+
+
+def test_semi_continuous_with_coords() -> None:
+    """Semi-continuous variables work with labeled coordinate dimensions."""
+ m = Model()
+ idx = pd.RangeIndex(5, name="i")
+ x = m.add_variables(lower=2, upper=20, coords=[idx], name="x", semi_continuous=True)
+ assert x.attrs["semi_continuous"] is True
+ assert list(m.variables.semi_continuous) == ["x"]
+
+
+@pytest.mark.skipif("gurobi" not in available_solvers, reason="Gurobi not installed")
+def test_semi_continuous_solve_gurobi() -> None:
+ """
+ Semi-continuous variable solves correctly with Gurobi.
+
+ Maximize x subject to x <= 0.5, x semi-continuous in [1, 10].
+ Since x can be 0 or in [1, 10], and x <= 0.5 prevents [1, 10],
+ the optimal x should be 0.
+ """
+ m = Model()
+ x = m.add_variables(lower=1, upper=10, name="x", semi_continuous=True)
+ m.add_constraints(x <= 0.5, name="ub")
+ m.add_objective(x, sense="max")
+ m.solve(solver_name="gurobi")
+ assert m.objective.value is not None
+ assert np.isclose(m.objective.value, 0, atol=1e-6)
+
+
+@pytest.mark.skipif("gurobi" not in available_solvers, reason="Gurobi not installed")
+def test_semi_continuous_solve_gurobi_active() -> None:
+ """
+ Semi-continuous variable takes value in [lb, ub] when beneficial.
+
+ Maximize x subject to x <= 5, x semi-continuous in [1, 10].
+ Optimal x should be 5.
+ """
+ m = Model()
+ x = m.add_variables(lower=1, upper=10, name="x", semi_continuous=True)
+ m.add_constraints(x <= 5, name="ub")
+ m.add_objective(x, sense="max")
+ m.solve(solver_name="gurobi")
+ assert m.objective.value is not None
+ assert np.isclose(m.objective.value, 5, atol=1e-6)
+
+
+def test_unsupported_solver_raises() -> None:
+ """Solvers without semi-continuous support raise ValueError."""
+ m = Model()
+ m.add_variables(lower=1, upper=10, name="x", semi_continuous=True)
+ m.add_constraints(m.variables["x"] <= 5, name="ub")
+ m.add_objective(m.variables["x"])
+
+ for solver in ["glpk", "mosek", "mindopt"]:
+ if solver in available_solvers:
+ with pytest.raises(ValueError, match="does not support semi-continuous"):
+ m.solve(solver_name=solver)
+
+
+@pytest.mark.skipif("highs" not in available_solvers, reason="HiGHS not installed")
+def test_semi_continuous_solve_highs() -> None:
+ """
+ Semi-continuous variable solves correctly with HiGHS.
+
+ Maximize x subject to x <= 0.5, x semi-continuous in [1, 10].
+ Since x can be 0 or in [1, 10], and x <= 0.5 prevents [1, 10],
+ the optimal x should be 0.
+ """
+ m = Model()
+ x = m.add_variables(lower=1, upper=10, name="x", semi_continuous=True)
+ m.add_constraints(x <= 0.5, name="ub")
+ m.add_objective(x, sense="max")
+ m.solve(solver_name="highs")
+ assert m.objective.value is not None
+ assert np.isclose(m.objective.value, 0, atol=1e-6)
+
+
+@pytest.mark.skipif("highs" not in available_solvers, reason="HiGHS not installed")
+def test_semi_continuous_solve_highs_active() -> None:
+ """
+ Semi-continuous variable takes value in [lb, ub] when beneficial with HiGHS.
+
+ Maximize x subject to x <= 5, x semi-continuous in [1, 10].
+ Optimal x should be 5.
+ """
+ m = Model()
+ x = m.add_variables(lower=1, upper=10, name="x", semi_continuous=True)
+ m.add_constraints(x <= 5, name="ub")
+ m.add_objective(x, sense="max")
+ m.solve(solver_name="highs")
+ assert m.objective.value is not None
+ assert np.isclose(m.objective.value, 5, atol=1e-6)