Skip to content
Open
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions linopy/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,8 @@
PWL_LAMBDA_SUFFIX = "_lambda"
PWL_CONVEX_SUFFIX = "_convex"
PWL_LINK_SUFFIX = "_link"
PWL_DELTA_SUFFIX = "_delta"
PWL_FILL_SUFFIX = "_fill"
DEFAULT_BREAKPOINT_DIM = "breakpoint"
GROUPED_TERM_DIM = "_grouped_term"
GROUP_DIM = "_group"
Expand Down
236 changes: 192 additions & 44 deletions linopy/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,8 @@
HELPER_DIMS,
LESS_EQUAL,
PWL_CONVEX_SUFFIX,
PWL_DELTA_SUFFIX,
PWL_FILL_SUFFIX,
PWL_LAMBDA_SUFFIX,
PWL_LINK_SUFFIX,
TERM_DIM,
Expand Down Expand Up @@ -606,18 +608,20 @@ def add_piecewise_constraints(
mask: DataArray | None = None,
name: str | None = None,
skip_nan_check: bool = False,
method: Literal["sos2", "incremental", "auto"] = "sos2",
) -> Constraint:
"""
Add a piecewise linear constraint using SOS2 formulation.
Add a piecewise linear constraint using SOS2 or incremental formulation.

This method creates a piecewise linear constraint that links one or more
variables/expressions together via a set of breakpoints. It uses the SOS2
(Special Ordered Set of type 2) formulation with lambda (interpolation)
variables.
variables/expressions together via a set of breakpoints. It supports two
formulations:

The SOS2 formulation ensures that at most two adjacent lambda variables
can be non-zero, effectively selecting a segment of the piecewise linear
function.
- **SOS2** (default): Uses SOS2 (Special Ordered Set of type 2) with lambda
(interpolation) variables. Works for any breakpoints.
- **Incremental**: Uses delta variables with filling-order constraints.
Pure LP formulation (no SOS2 or binary variables), but requires strictly
monotonic breakpoints.

Parameters
----------
Expand All @@ -638,7 +642,8 @@ def add_piecewise_constraints(
will attempt to auto-detect from breakpoints dimensions.
dim : str, default "breakpoint"
The dimension in breakpoints that represents the breakpoint index.
This dimension's coordinates must be numeric (used as SOS2 weights).
This dimension's coordinates must be numeric (used as SOS2 weights
for the SOS2 method).
mask : xr.DataArray, optional
Boolean mask indicating which piecewise constraints are valid.
If None, auto-detected from NaN values in breakpoints (unless
Expand All @@ -649,15 +654,20 @@ def add_piecewise_constraints(
skip_nan_check : bool, default False
If True, skip automatic NaN detection in breakpoints. Use this
when you know breakpoints contain no NaN values for better performance.
method : str, default "sos2"
Formulation method. One of:
- ``"sos2"``: SOS2 formulation with lambda variables (default).
- ``"incremental"``: Incremental (delta) formulation. Requires strictly
monotonic breakpoints. Pure LP, no SOS2 or binary variables.
- ``"auto"``: Automatically selects ``"incremental"`` if breakpoints are
strictly monotonic, otherwise falls back to ``"sos2"``.

Returns
-------
Constraint
The convexity constraint (sum of lambda = 1). Lambda variables
and other constraints can be accessed via:
- `model.variables[f"{name}_lambda"]`
- `model.constraints[f"{name}_convex"]`
- `model.constraints[f"{name}_link"]`
For SOS2: the convexity constraint (sum of lambda = 1).
For incremental: the first filling-order constraint (or the link
constraint if only 2 breakpoints).

Raises
------
Expand All @@ -666,7 +676,9 @@ def add_piecewise_constraints(
If breakpoints doesn't have the required dim dimension.
If link_dim cannot be auto-detected when expr is a dict.
If link_dim coordinates don't match dict keys.
If dim coordinates are not numeric.
If dim coordinates are not numeric (SOS2 method only).
If breakpoints are not strictly monotonic (incremental method).
If method is not one of 'sos2', 'incremental', 'auto'.

Examples
--------
Expand Down Expand Up @@ -702,23 +714,53 @@ def add_piecewise_constraints(
... dim="bp",
... )

Incremental formulation (no SOS2, pure LP):

>>> m = Model()
>>> x = m.add_variables(name="x")
>>> breakpoints = xr.DataArray([0, 10, 50, 100], dims=["bp"])
>>> _ = m.add_piecewise_constraints(
... x, breakpoints, dim="bp", method="incremental"
... )

Notes
-----
The piecewise linear constraint is formulated using SOS2 variables:
**SOS2 formulation:**

1. Lambda variables λ_i with bounds [0, 1] are created for each breakpoint
2. SOS2 constraint ensures at most two adjacent λ_i can be non-zero
3. Convexity constraint: Σ λ_i = 1
4. Linking constraints: expr = Σ λ_i × breakpoint_i (for each expression)

**Incremental formulation** (for strictly monotonic breakpoints bp₀ < bp₁ < ... < bpₙ):

1. Delta variables δᵢ ∈ [0, 1] for i = 1, ..., n (one per segment)
2. Filling-order constraints: δᵢ₊₁ ≤ δᵢ for i = 1, ..., n-1
3. Linking constraint: expr = bp₀ + Σᵢ δᵢ × (bpᵢ - bpᵢ₋₁)
"""
if method not in ("sos2", "incremental", "auto"):
raise ValueError(
f"method must be 'sos2', 'incremental', or 'auto', got '{method}'"
)

# --- Input validation ---
if dim not in breakpoints.dims:
raise ValueError(
f"breakpoints must have dimension '{dim}', "
f"but only has dimensions {list(breakpoints.dims)}"
)

if not pd.api.types.is_numeric_dtype(breakpoints.coords[dim]):
# Resolve method for 'auto'
if method == "auto":
if self._check_strict_monotonicity(breakpoints, dim):
method = "incremental"
else:
method = "sos2"

# Numeric coordinates only required for SOS2
if method == "sos2" and not pd.api.types.is_numeric_dtype(
breakpoints.coords[dim]
):
raise ValueError(
f"Breakpoint dimension '{dim}' must have numeric coordinates "
f"for SOS2 weights, but got {breakpoints.coords[dim].dtype}"
Expand All @@ -729,11 +771,7 @@ def add_piecewise_constraints(
name = f"pwl{self._pwlCounter}"
self._pwlCounter += 1

lambda_name = f"{name}{PWL_LAMBDA_SUFFIX}"
convex_name = f"{name}{PWL_CONVEX_SUFFIX}"
link_name = f"{name}{PWL_LINK_SUFFIX}"

# --- Determine lambda coordinates, mask, and target expression ---
# --- Determine target expression and related info ---
is_single = isinstance(expr, Variable | LinearExpression)
is_dict = isinstance(expr, dict)

Expand All @@ -744,39 +782,69 @@ def add_piecewise_constraints(
)

if is_single:
# Single expression case
assert isinstance(expr, Variable | LinearExpression)
target_expr = self._to_linexpr(expr)
# Build lambda coordinates from breakpoints dimensions
lambda_coords = [
pd.Index(breakpoints.coords[d].values, name=d) for d in breakpoints.dims
]
lambda_mask = self._compute_pwl_mask(mask, breakpoints, skip_nan_check)

resolved_link_dim = None
computed_mask = self._compute_pwl_mask(mask, breakpoints, skip_nan_check)
lambda_mask = computed_mask
else:
# Dict case - need to validate link_dim and build stacked expression
assert isinstance(expr, dict)
expr_dict: dict[str, Variable | LinearExpression] = expr
expr_keys = set(expr_dict.keys())
resolved_link_dim = self._resolve_pwl_link_dim(
link_dim, breakpoints, dim, expr_keys
)
computed_mask = self._compute_pwl_mask(mask, breakpoints, skip_nan_check)
lambda_mask = (
computed_mask.any(dim=resolved_link_dim)
if computed_mask is not None
else None
)
target_expr = self._build_stacked_expr(
expr_dict, breakpoints, resolved_link_dim
)

# Auto-detect or validate link_dim
link_dim = self._resolve_pwl_link_dim(link_dim, breakpoints, dim, expr_keys)

# Build lambda coordinates (exclude link_dim)
lambda_coords = [
pd.Index(breakpoints.coords[d].values, name=d)
for d in breakpoints.dims
if d != link_dim
]
# Build coordinate lists excluding special dimensions
exclude_dims = {dim, resolved_link_dim} - {None}
extra_coords = [
pd.Index(breakpoints.coords[d].values, name=d)
for d in breakpoints.dims
if d not in exclude_dims
]
lambda_coords = extra_coords + [
pd.Index(breakpoints.coords[dim].values, name=dim)
]

# Compute mask
base_mask = self._compute_pwl_mask(mask, breakpoints, skip_nan_check)
lambda_mask = base_mask.any(dim=link_dim) if base_mask is not None else None
# --- Dispatch to formulation ---
if method == "sos2":
return self._add_pwl_sos2(
name, breakpoints, dim, target_expr, lambda_coords, lambda_mask
)
else:
return self._add_pwl_incremental(
name,
breakpoints,
dim,
target_expr,
extra_coords,
computed_mask,
resolved_link_dim,
)

# Build stacked expression from dict
target_expr = self._build_stacked_expr(expr_dict, breakpoints, link_dim)
def _add_pwl_sos2(
self,
name: str,
breakpoints: DataArray,
dim: str,
target_expr: LinearExpression,
lambda_coords: list[pd.Index],
lambda_mask: DataArray | None,
) -> Constraint:
"""Create piecewise linear constraint using SOS2 formulation."""
lambda_name = f"{name}{PWL_LAMBDA_SUFFIX}"
convex_name = f"{name}{PWL_CONVEX_SUFFIX}"
link_name = f"{name}{PWL_LINK_SUFFIX}"

# --- Common: Create lambda, SOS2, convexity, and linking constraints ---
lambda_var = self.add_variables(
lower=0, upper=1, coords=lambda_coords, name=lambda_name, mask=lambda_mask
)
Expand All @@ -792,6 +860,86 @@ def add_piecewise_constraints(

return convex_con

def _add_pwl_incremental(
self,
name: str,
breakpoints: DataArray,
dim: str,
target_expr: LinearExpression,
extra_coords: list[pd.Index],
mask: DataArray | None,
link_dim: str | None,
) -> Constraint:
"""
Create piecewise linear constraint using incremental formulation.

For strictly monotonic breakpoints bp₀, bp₁, ..., bpₙ:
- Delta variables δᵢ ∈ [0, 1] for i = 1, ..., n
- Filling-order: δᵢ₊₁ ≤ δᵢ
- Link: expr = bp₀ + Σᵢ δᵢ × (bpᵢ - bpᵢ₋₁)

Parameters
----------
name : str
    Base name; delta/fill/link variable and constraint names are
    derived by appending PWL_DELTA_SUFFIX, PWL_FILL_SUFFIX and
    PWL_LINK_SUFFIX respectively.
breakpoints : DataArray
    Breakpoint values containing dimension ``dim`` (and optionally
    ``link_dim`` plus extra broadcast dimensions).
dim : str
    The breakpoint dimension of ``breakpoints``.
target_expr : LinearExpression
    Expression to be linked to the piecewise curve.
extra_coords : list[pd.Index]
    Coordinates of ``breakpoints`` excluding ``dim`` and
    ``link_dim``; the segment index is appended to these to form
    the delta variable's coordinates.
mask : DataArray | None
    Per-breakpoint validity mask (may still contain ``link_dim``),
    or None when no masking applies.
link_dim : str | None
    Dimension distinguishing the linked expressions in the dict
    case, or None for a single expression.

Returns
-------
Constraint
    The first filling-order constraint, or the link constraint
    when there are fewer than 2 segments (i.e. only 2 breakpoints).

Raises
------
ValueError
    If breakpoints are not strictly monotonic along ``dim``.
"""
# Guard: the incremental formulation is only valid for strictly
# monotonic breakpoints (the segment steps must not change sign
# or be zero within a slice).
if not self._check_strict_monotonicity(breakpoints, dim):
raise ValueError(
"Incremental method requires strictly monotonic breakpoints "
"along the breakpoint dimension."
)

delta_name = f"{name}{PWL_DELTA_SUFFIX}"
fill_name = f"{name}{PWL_FILL_SUFFIX}"
link_name = f"{name}{PWL_LINK_SUFFIX}"

# One delta variable per segment between consecutive breakpoints.
n_segments = breakpoints.sizes[dim] - 1
seg_dim = f"{dim}_seg"
seg_index = pd.Index(range(n_segments), name=seg_dim)
delta_coords = extra_coords + [seg_index]

# Compute step sizes: bp[i+1] - bp[i] for each segment
# (diff shrinks dim by one; rename + reindex gives the segment axis).
steps = breakpoints.diff(dim).rename({dim: seg_dim})
steps[seg_dim] = seg_index

# Compute delta mask from breakpoints mask
if mask is not None:
bp_mask = mask
if link_dim is not None:
# NOTE(review): reduces with .all over link_dim here, while the
# SOS2 lambda mask path uses .any — confirm this asymmetry is
# intentional (a segment is dropped if ANY linked expression
# masks one of its breakpoints).
bp_mask = bp_mask.all(dim=link_dim)
# Segment valid if both adjacent breakpoints valid
mask_lo = bp_mask.isel({dim: slice(None, -1)}).rename({dim: seg_dim})
mask_hi = bp_mask.isel({dim: slice(1, None)}).rename({dim: seg_dim})
mask_lo[seg_dim] = seg_index
mask_hi[seg_dim] = seg_index
delta_mask: DataArray | None = mask_lo & mask_hi
else:
delta_mask = None

# Create delta variables δᵢ ∈ [0, 1]
delta_var = self.add_variables(
lower=0, upper=1, coords=delta_coords, name=delta_name, mask=delta_mask
)

# Filling-order constraints: δ[i+1] ≤ δ[i] (vectorized)
# Only meaningful with at least two segments; with a single segment
# the delta variable's [0, 1] bounds suffice.
fill_con: Constraint | None = None
if n_segments >= 2:
# Slice adjacent pairs and drop coords so they align
delta_lo = delta_var.isel({seg_dim: slice(None, -1)}, drop=True)
delta_hi = delta_var.isel({seg_dim: slice(1, None)}, drop=True)
fill_con = self.add_constraints(delta_hi <= delta_lo, name=fill_name)

# Linking constraint: expr = bp₀ + Σᵢ δᵢ × step_i
bp0 = breakpoints.isel({dim: 0})
weighted_sum = (delta_var * steps).sum(dim=seg_dim) + bp0
link_con = self.add_constraints(target_expr == weighted_sum, name=link_name)

# Return the fill constraint when it exists, matching the documented
# contract; otherwise fall back to the link constraint.
return fill_con if fill_con is not None else link_con

@staticmethod
def _check_strict_monotonicity(breakpoints: DataArray, dim: str) -> bool:
    """
    Check that breakpoints are strictly monotonic along ``dim``.

    Monotonicity is evaluated independently for each one-dimensional
    slice along ``dim`` (e.g. per linked variable when a link dimension
    is present), so one slice may be strictly increasing while another
    is strictly decreasing. NaN entries — used to mask out unused
    breakpoints — are ignored.

    Parameters
    ----------
    breakpoints : DataArray
        Breakpoint values, containing the dimension ``dim``.
    dim : str
        The breakpoint dimension along which monotonicity is checked.

    Returns
    -------
    bool
        True if every slice is strictly increasing or strictly
        decreasing (ignoring NaN diffs), False otherwise.
    """
    diffs = breakpoints.diff(dim)
    valid = diffs.notnull()
    # Each slice passes if all of its non-NaN diffs share one strict sign;
    # reducing only over `dim` keeps the per-slice verdicts separate.
    all_positive = ((diffs > 0) | ~valid).all(dim=dim)
    all_negative = ((diffs < 0) | ~valid).all(dim=dim)
    return bool((all_positive | all_negative).all())
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

🧩 Analysis chain

🏁 Script executed:

# Find and examine the file
fd -t f "model.py" linopy/

Repository: FBumann/linopy

Length of output: 73


🏁 Script executed:

# Get the specific lines mentioned in the review
cat -n linopy/model.py | sed -n '930,945p'

Repository: FBumann/linopy

Length of output: 946


🏁 Script executed:

# Search for context around this function and its usage
rg "_check_strict_monotonicity" -B 5 -A 15

Repository: FBumann/linopy

Length of output: 3546


🏁 Script executed:

# Check if there are any tests for this function
rg "_check_strict_monotonicity" --type py -l

Repository: FBumann/linopy

Length of output: 73


🏁 Script executed:

# Get more context around the function calls
cat -n linopy/model.py | sed -n '750,780p'

Repository: FBumann/linopy

Length of output: 1423


🏁 Script executed:

# Check the full _add_pwl_link function to understand the context better
cat -n linopy/model.py | sed -n '750,820p'

Repository: FBumann/linopy

Length of output: 3246


🏁 Script executed:

# Look for tests related to PWL and monotonicity
fd -t f "test.*\.py" | xargs rg "pwl|monotonic" -l

Repository: FBumann/linopy

Length of output: 92


🏁 Script executed:

# Check imports to understand what DataArray is
head -50 linopy/model.py

Repository: FBumann/linopy

Length of output: 1157


🏁 Script executed:

# Look at the PWL tests to understand the use cases
cat test/test_piecewise_constraints.py | head -100

Repository: FBumann/linopy

Length of output: 3313


🏁 Script executed:

# Search for tests specifically about monotonicity
rg "monotonic|opposite" test/test_piecewise_constraints.py -B 2 -A 5

Repository: FBumann/linopy

Length of output: 1619


🏁 Script executed:

# Check if there are any tests with NaNs in breakpoints
rg "nan|NaN" test/test_piecewise_constraints.py -B 2 -A 5

Repository: FBumann/linopy

Length of output: 1424


🏁 Script executed:

# Look at the full _add_pwl_link signature to understand the dict case
rg "def _add_pwl_link" -A 30 linopy/model.py

Repository: FBumann/linopy

Length of output: 40


🏁 Script executed:

# Get the full signature of _add_pwl_link
cat -n linopy/model.py | sed -n '665,680p'

Repository: FBumann/linopy

Length of output: 876


🏁 Script executed:

# Look for dict test with opposite monotonicity
rg "opposite|decreasing.*increasing|increasing.*decreasing" test/test_piecewise_constraints.py -B 5 -A 10

Repository: FBumann/linopy

Length of output: 40


🏁 Script executed:

# Check if there's an existing test for dict with different monotonicities
rg "dict_of_variables" -A 20 test/test_piecewise_constraints.py | head -60

Repository: FBumann/linopy

Length of output: 801


🏁 Script executed:

# Look at tests showing what happens with dict and non-monotonic
cat test/test_piecewise_constraints.py | sed -n '100,250p'

Repository: FBumann/linopy

Length of output: 5197


🏁 Script executed:

# Look for any tests that show opposite monotonic directions
cat test/test_piecewise_constraints.py | sed -n '60,130p'

Repository: FBumann/linopy

Length of output: 2503


🏁 Script executed:

# Verify the xarray API for diff, notnull, and the proposed operations
python3 << 'EOF'
import xarray as xr
import numpy as np

# Simulate the proposed fix logic
breakpoints = xr.DataArray(
    [[0, 50, 100], [100, 50, 0]],  # power increasing, efficiency decreasing
    dims=["var", "bp"],
    coords={"var": ["power", "efficiency"], "bp": [0, 1, 2]},
)

print("Breakpoints shape:", breakpoints.shape)
print("Breakpoints:\n", breakpoints.values)

diffs = breakpoints.diff("bp")
print("\nDiffs shape:", diffs.shape)
print("Diffs:\n", diffs.values)

valid = diffs.notnull()
print("\nValid shape:", valid.shape)
print("Valid:\n", valid.values)

all_positive = ((diffs > 0) | ~valid).all(dim="bp")
print("\nAll positive (shape", all_positive.shape, "):\n", all_positive.values)

all_negative = ((diffs < 0) | ~valid).all(dim="bp")
print("\nAll negative (shape", all_negative.shape, "):\n", all_negative.values)

result = bool((all_positive | all_negative).all())
print("\nFinal result (opposite directions):", result)

# Now test with NaNs
breakpoints_nan = xr.DataArray(
    [[0, 50, np.nan, 100], [100, 50, 30, np.nan]],
    dims=["var", "bp"],
    coords={"var": ["power", "efficiency"], "bp": [0, 1, 2, 3]},
)

print("\n\n=== Test with NaNs ===")
print("Breakpoints shape:", breakpoints_nan.shape)
print("Breakpoints:\n", breakpoints_nan.values)

diffs_nan = breakpoints_nan.diff("bp")
print("\nDiffs shape:", diffs_nan.shape)
print("Diffs:\n", diffs_nan.values)

valid_nan = diffs_nan.notnull()
all_positive_nan = ((diffs_nan > 0) | ~valid_nan).all(dim="bp")
all_negative_nan = ((diffs_nan < 0) | ~valid_nan).all(dim="bp")

print("\nAll positive:\n", all_positive_nan.values)
print("All negative:\n", all_negative_nan.values)

result_nan = bool((all_positive_nan | all_negative_nan).all())
print("\nFinal result (with NaNs):", result_nan)
EOF

Repository: FBumann/linopy

Length of output: 176


🏁 Script executed:

# Check the current behavior with opposite directions to confirm the issue
python3 << 'EOF'
import xarray as xr
import numpy as np

# Current implementation test
breakpoints = xr.DataArray(
    [[0, 50, 100], [100, 50, 0]],  # power increasing, efficiency decreasing
    dims=["var", "bp"],
    coords={"var": ["power", "efficiency"], "bp": [0, 1, 2]},
)

diffs = breakpoints.diff("bp")
print("Diffs:\n", diffs.values)

# Current implementation
all_positive = bool((diffs > 0).all())
all_negative = bool((diffs < 0).all())
current_result = all_positive or all_negative

print(f"\nCurrent implementation: all_positive={all_positive}, all_negative={all_negative}")
print(f"Current result (would be used for incremental): {current_result}")
print("This shows the issue: opposite directions are rejected!")
EOF

Repository: FBumann/linopy

Length of output: 176


🏁 Script executed:

# Install xarray and numpy first
pip install xarray numpy -q && python3 << 'EOF'
import xarray as xr
import numpy as np

# Current implementation test
breakpoints = xr.DataArray(
    [[0, 50, 100], [100, 50, 0]],  # power increasing, efficiency decreasing
    dims=["var", "bp"],
    coords={"var": ["power", "efficiency"], "bp": [0, 1, 2]},
)

diffs = breakpoints.diff("bp")
print("Diffs:\n", diffs.values)

# Current implementation
all_positive = bool((diffs > 0).all())
all_negative = bool((diffs < 0).all())
current_result = all_positive or all_negative

print(f"\nCurrent implementation: all_positive={all_positive}, all_negative={all_negative}")
print(f"Current result (rejects opposite directions): {current_result}")

# Proposed implementation
valid = diffs.notnull()
all_positive_proposed = ((diffs > 0) | ~valid).all(dim="bp")
all_negative_proposed = ((diffs < 0) | ~valid).all(dim="bp")
proposed_result = bool((all_positive_proposed | all_negative_proposed).all())

print(f"\nProposed implementation:")
print(f"  all_positive per row: {all_positive_proposed.values}")
print(f"  all_negative per row: {all_negative_proposed.values}")
print(f"  final result (accepts opposite directions): {proposed_result}")
EOF

Repository: FBumann/linopy

Length of output: 602


🏁 Script executed:

# Test with NaNs to verify the proposed fix handles them correctly
pip install xarray numpy -q && python3 << 'EOF'
import xarray as xr
import numpy as np

# Test with NaNs
breakpoints_nan = xr.DataArray(
    [[0, 50, np.nan, 100], [100, 50, 30, np.nan]],
    dims=["var", "bp"],
    coords={"var": ["power", "efficiency"], "bp": [0, 1, 2, 3]},
)

print("Breakpoints with NaNs:")
print(breakpoints_nan.values)

diffs_nan = breakpoints_nan.diff("bp")
print("\nDiffs with NaNs:")
print(diffs_nan.values)

# Current implementation - NaNs break it
all_positive_curr = bool((diffs_nan > 0).all())
all_negative_curr = bool((diffs_nan < 0).all())
print(f"\nCurrent: all_positive={all_positive_curr}, all_negative={all_negative_curr}, result={all_positive_curr or all_negative_curr}")

# Proposed implementation
valid = diffs_nan.notnull()
all_positive_prop = ((diffs_nan > 0) | ~valid).all(dim="bp")
all_negative_prop = ((diffs_nan < 0) | ~valid).all(dim="bp")
proposed_result = bool((all_positive_prop | all_negative_prop).all())

print(f"\nProposed:")
print(f"  valid mask per row: {valid.values}")
print(f"  all_positive per row: {all_positive_prop.values}")
print(f"  all_negative per row: {all_negative_prop.values}")
print(f"  final result (handles NaNs): {proposed_result}")
EOF

Repository: FBumann/linopy

Length of output: 678


🏁 Script executed:

# Look for any test that shows the dict case with opposite monotonicity issue
cat test/test_piecewise_constraints.py | grep -A 40 "test_dict_of_variables"

Repository: FBumann/linopy

Length of output: 1574


Fix monotonicity check to support per-variable directions and handle masked NaNs.

The current implementation incorrectly rejects valid cases where:

  1. Different variables in a dict have opposite monotonic directions (e.g., power increasing while efficiency decreases)
  2. NaN values used for masking are present in the diffs

The check evaluates monotonicity globally across the full DataArray, but it should evaluate each one-dimensional slice along dim independently, so that different slices may have opposite monotonic directions. NaN diffs (arising from masked breakpoints) should be ignored.

🛠️ Suggested fix
-        diffs = breakpoints.diff(dim)
-        # All diffs must be either all positive or all negative (strictly monotonic)
-        all_positive = bool((diffs > 0).all())
-        all_negative = bool((diffs < 0).all())
-        return all_positive or all_negative
+        diffs = breakpoints.diff(dim)
+        valid = diffs.notnull()
+        # Each slice must be strictly increasing or strictly decreasing; ignore NaNs
+        all_positive = ((diffs > 0) | ~valid).all(dim=dim)
+        all_negative = ((diffs < 0) | ~valid).all(dim=dim)
+        return bool((all_positive | all_negative).all())
🤖 Prompt for AI Agents
In `@linopy/model.py` around lines 934 - 941, The _check_strict_monotonicity
function currently evaluates monotonicity over the entire DataArray at once and
fails when different variables have opposite directions or when diffs contain
NaNs; change it to compute diffs = breakpoints.diff(dim) and then iterate (or
apply a vectorized reduction) over all orthogonal slices along dim (e.g.,
per-variable slices) ignoring NaNs in each slice, and for each slice consider it
strictly monotonic if all non-NaN diffs are either >0 or <0; return True only if
every slice meets that condition. Ensure you reference
_check_strict_monotonicity, the breakpoints DataArray and dim when locating and
updating the logic.


def _to_linexpr(self, expr: Variable | LinearExpression) -> LinearExpression:
"""Convert Variable or LinearExpression to LinearExpression."""
if isinstance(expr, LinearExpression):
Expand Down
Loading