Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions PEPit/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from .wrapper import Wrapper
from .pep import PEP
from .point import Point, null_point
from .tools.symbolic_scalar import SymbolicScalar

__all__ = ['block_partition', 'BlockPartition',
'examples',
Expand All @@ -24,4 +25,5 @@
'pep', 'PEP',
'point', 'Point', 'null_point',
'wrapper', 'Wrapper',
'SymbolicScalar',
]
11 changes: 6 additions & 5 deletions PEPit/expression.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from PEPit.constraint import Constraint

from PEPit.tools.dict_operations import merge_dict, prune_dict
from PEPit.tools.symbolic_scalar import is_scalar, evaluate_scalar


class Expression(object):
Expand Down Expand Up @@ -151,7 +152,7 @@ def __add__(self, other):
if isinstance(other, Expression):
merged_decomposition_dict = merge_dict(self.decomposition_dict, other.decomposition_dict)
# If other is a scalar constant, add it to the decomposition_dict of self
elif isinstance(other, int) or isinstance(other, float):
elif is_scalar(other):
merged_decomposition_dict = merge_dict(self.decomposition_dict, {1: other})
# Raise an Exception in any other scenario
else:
Expand Down Expand Up @@ -249,7 +250,7 @@ def __rmul__(self, other):
"""

# Verify other is a scalar constant
assert isinstance(other, int) or isinstance(other, float)
assert is_scalar(other)

# Multiply uniformly self's decomposition_dict by other
new_decomposition_dict = dict()
Expand Down Expand Up @@ -414,14 +415,14 @@ def eval(self):
# Distinguish 3 cases: function values, inner products, and constant values
if type(key) == Expression:
assert key.get_is_leaf()
value += weight * key.eval()
value += evaluate_scalar(weight) * key.eval()
elif type(key) == tuple:
point1, point2 = key
assert point1.get_is_leaf()
assert point2.get_is_leaf()
value += weight * np.dot(point1.eval(), point2.eval())
value += evaluate_scalar(weight) * np.dot(point1.eval(), point2.eval())
elif key == 1:
value += weight
value += evaluate_scalar(weight)
# Raise Exception out of those 3 cases
else:
raise TypeError("Expressions are made of function values, inner products and constants only!"
Expand Down
5 changes: 3 additions & 2 deletions PEPit/function.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from PEPit.psd_matrix import PSDMatrix

from PEPit.tools.dict_operations import merge_dict, prune_dict
from PEPit.tools.symbolic_scalar import is_scalar


class Function(object):
Expand Down Expand Up @@ -241,7 +242,7 @@ def __rmul__(self, other):
"""

# Verify other is a scalar constant
assert isinstance(other, float) or isinstance(other, int)
assert is_scalar(other)

# Multiply uniformly self's decomposition_dict by other
new_decomposition_dict = dict()
Expand Down Expand Up @@ -493,7 +494,7 @@ def get_class_constraints_duals(self):
if isinstance(element, Constraint):
new_row.append(element.eval_dual())
# If there is a scalar number, simply return it.
elif isinstance(element, float) or isinstance(element, int):
elif is_scalar(element):
new_row.append(element)
else:
raise TypeError("The elements of table of constraints must either be Constraints objects"
Expand Down
5 changes: 3 additions & 2 deletions PEPit/pep.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from PEPit.function import Function
from PEPit.psd_matrix import PSDMatrix
from PEPit.block_partition import BlockPartition
from PEPit.tools.symbolic_scalar import evaluate_scalar


class PEP(object):
Expand Down Expand Up @@ -832,11 +833,11 @@ def check_feasibility(self, wc_value, verbose=1):
)
# Get the actual dual_objective from its dict
if 1 in dual_objective_expression_decomposition_dict.keys():
dual_objective = dual_objective_expression_decomposition_dict[1]
dual_objective = evaluate_scalar(dual_objective_expression_decomposition_dict[1])
else:
dual_objective = 0.
# Compute the remaining terms, that should be small and only due to numerical stability errors
remaining_terms = np.sum(np.abs([value for key, value in dual_objective_expression_decomposition_dict.items()
remaining_terms = np.sum(np.abs([evaluate_scalar(value) for key, value in dual_objective_expression_decomposition_dict.items()
if key != 1]))
if verbose:
message = "(PEPit) The worst-case guarantee proof is perfectly reconstituted"
Expand Down
5 changes: 3 additions & 2 deletions PEPit/point.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from PEPit.expression import Expression

from PEPit.tools.dict_operations import merge_dict, prune_dict, multiply_dicts
from PEPit.tools.symbolic_scalar import is_scalar, evaluate_scalar


class Point(object):
Expand Down Expand Up @@ -200,7 +201,7 @@ def __rmul__(self, other):
"""

# Multiplying by a scalar value is applying a homothety
if isinstance(other, int) or isinstance(other, float):
if is_scalar(other):
# Build the decomposition of the new point
new_decomposition_dict = dict()
for key, value in self.decomposition_dict.items():
Expand Down Expand Up @@ -293,7 +294,7 @@ def eval(self):
else:
value = np.zeros(Point.counter)
for point, weight in self.decomposition_dict.items():
value += weight * point.eval()
value += evaluate_scalar(weight) * point.eval()
self._value = value

return self._value
Expand Down
3 changes: 2 additions & 1 deletion PEPit/psd_matrix.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import numpy as np

from PEPit.expression import Expression
from PEPit.tools.symbolic_scalar import is_scalar


class PSDMatrix(object):
Expand Down Expand Up @@ -141,7 +142,7 @@ def _store(matrix_of_expressions):
if isinstance(matrix[i, j], Expression):
pass
# ... or a python scalar. If so, store it as an Expression
elif isinstance(matrix[i, j], int) or isinstance(matrix[i, j], float):
elif is_scalar(matrix[i, j]):
matrix[i, j] = Expression(is_leaf=False, decomposition_dict={1: matrix[i, j]})
# Raise an Exception in any other scenario
else:
Expand Down
9 changes: 6 additions & 3 deletions PEPit/tools/dict_operations.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
from PEPit.tools.symbolic_scalar import evaluate_scalar


def merge_dict(dict1, dict2):
"""
Merge keys of dict1 and dict2.
Expand Down Expand Up @@ -48,11 +51,11 @@ def prune_dict(my_dict):
pruned_dict = dict()

# Add all entry of dict1 that does not have a null value
for key in my_dict.keys():
for key, value in my_dict.items():

if my_dict[key] != 0:
if evaluate_scalar(value) != 0:

pruned_dict[key] = my_dict[key]
pruned_dict[key] = value

# Return pruned dict
return pruned_dict
Expand Down
15 changes: 8 additions & 7 deletions PEPit/tools/expressions_to_matrices.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

from PEPit.point import Point
from PEPit.expression import Expression
from PEPit.tools.symbolic_scalar import evaluate_scalar


def expression_to_matrices(expression):
Expand Down Expand Up @@ -34,16 +35,16 @@ def expression_to_matrices(expression):
# Function values are stored in F
if type(key) == Expression:
assert key.get_is_leaf()
Fweights[key.counter] = weight
Fweights[key.counter] = evaluate_scalar(weight)
# Inner products are stored in G
elif type(key) == tuple:
point1, point2 = key
assert point1.get_is_leaf()
assert point2.get_is_leaf()
Gweights[point1.counter, point2.counter] = weight
Gweights[point1.counter, point2.counter] = evaluate_scalar(weight)
# Constants are simply constants
elif key == 1:
cons = weight
cons = evaluate_scalar(weight)
# Others don't exist and raise an Exception
else:
raise TypeError("Expressions are made of function values, inner products and constants only!")
Expand Down Expand Up @@ -91,7 +92,7 @@ def expression_to_sparse_matrices(expression):
if type(key) == Expression:
assert key.get_is_leaf()
Fweights_ind.append(key.counter)
Fweights_val.append(weight)
Fweights_val.append(evaluate_scalar(weight))
# Inner products are stored in G
elif type(key) == tuple:
point1, point2 = key
Expand All @@ -102,16 +103,16 @@ def expression_to_sparse_matrices(expression):
if (point2, point1) in expression.decomposition_dict:
if point1.counter >= point2.counter: # if both entry and symmetrical entry: only append in one case
weight_sym = expression.decomposition_dict[(point2, point1)]
Gweights_val.append((weight + weight_sym) / 2)
Gweights_val.append((evaluate_scalar(weight) + evaluate_scalar(weight_sym)) / 2)
Gweights_indi.append(point1.counter)
Gweights_indj.append(point2.counter)
else:
Gweights_val.append((weight + weight_sym) / 2)
Gweights_val.append((evaluate_scalar(weight) + weight_sym) / 2)
Gweights_indi.append(max(point1.counter, point2.counter))
Gweights_indj.append(min(point1.counter, point2.counter))
# Constants are simply constants
elif key == 1:
cons_val = weight
cons_val = evaluate_scalar(weight)
# Others don't exist and raise an Exception
else:
raise TypeError("Expressions are made of function values, inner products and constants only!")
Expand Down
163 changes: 163 additions & 0 deletions PEPit/tools/symbolic_scalar.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,163 @@
from fractions import Fraction
import numbers

import sympy as sp


def _to_sympy_number(value, strict=False):
    """Convert ``value`` into an exact sympy number.

    Booleans, integers, ``fractions.Fraction`` and ``sympy.Rational`` are always
    accepted. Other real numbers (floats) are accepted only when ``strict`` is
    False, in which case they are converted through their decimal string form.

    Raises:
        TypeError: if ``value`` is inexact in strict mode, or not a real scalar.
    """
    # bool is a subclass of int: handle it first so True/False map to 1/0.
    if isinstance(value, bool):
        return sp.Integer(int(value))
    if isinstance(value, sp.Rational):
        return value
    if isinstance(value, Fraction):
        return sp.Rational(value.numerator, value.denominator)
    if isinstance(value, numbers.Integral):
        return sp.Integer(int(value))
    if not strict and isinstance(value, numbers.Real):
        # Go through the decimal string so e.g. 0.1 becomes Rational(1, 10).
        return sp.Rational(str(float(value)))
    if strict:
        raise TypeError(
            "SymbolicScalar only accepts exact values: int, fractions.Fraction, or sympy.Rational."
        )
    raise TypeError("Unsupported scalar type: {}".format(type(value)))


class SymbolicScalar:
    """
    Symbolic scalar with deferred numeric evaluation.

    Arithmetic operations only manipulate symbolic expressions. Numeric values are
    computed when calling ``evaluate`` (directly or through ``evaluate_scalar``).
    """

    def __init__(self, value, symbol=None, _expr=None, _defaults=None):
        """Instantiate an exact constant, or a named symbol with a default value.

        Args:
            value: exact value (int, fractions.Fraction, or sympy.Rational);
                floats are rejected here to keep the scalar exact.
            symbol (optional): if provided, the scalar is the symbol ``str(symbol)``
                and ``value`` is recorded as its default substitution.
            _expr (optional): internal - a pre-built sympy expression
                (used by arithmetic results; bypasses ``value``/``symbol``).
            _defaults (optional): internal - mapping symbol name -> sympy number.
        """
        if _expr is not None:
            # Internal path used by _from_op: wrap an existing sympy expression.
            self._expr = sp.simplify(_expr)
            self._defaults = dict(_defaults or {})
            return

        if symbol is None:
            # Anonymous exact constant.
            self._expr = _to_sympy_number(value, strict=True)
            self._defaults = {}
        else:
            # Named symbol; remember its default numeric value for evaluation.
            name = str(symbol)
            self._expr = sp.Symbol(name)
            self._defaults = {name: _to_sympy_number(value, strict=True)}

    @property
    def symbol(self):
        """str: human-readable form of the (simplified) underlying expression."""
        return str(sp.simplify(self._expr))

    @staticmethod
    def _is_numeric(value):
        """Return True if ``value`` can participate in arithmetic with a SymbolicScalar."""
        return isinstance(value, (SymbolicScalar, numbers.Real))

    @staticmethod
    def _merge_defaults(defaults_1, defaults_2):
        """Merge two default-substitution dicts.

        Raises:
            ValueError: if the same symbol name maps to two different values.
        """
        merged = dict(defaults_1)
        for key, value in defaults_2.items():
            if key in merged and sp.simplify(merged[key] - value) != 0:
                raise ValueError("Conflicting default value for symbol '{}'.".format(key))
            merged[key] = value
        return merged

    @classmethod
    def _coerce(cls, value):
        """Return ``(sympy expression, defaults dict)`` for any supported operand."""
        if isinstance(value, SymbolicScalar):
            return value._expr, dict(value._defaults)
        return _to_sympy_number(value, strict=False), {}

    @classmethod
    def _from_op(cls, expr, defaults):
        """Build a SymbolicScalar directly from a sympy expression and defaults."""
        return cls(0, _expr=expr, _defaults=defaults)

    @staticmethod
    def _format_substitutions(substitutions):
        """Normalize a substitutions mapping to ``{str name: sympy number}``."""
        if substitutions is None:
            return {}
        formatted = {}
        for key, value in substitutions.items():
            name = str(key)
            formatted[name] = _to_sympy_number(value, strict=False)
        return formatted

    def evaluate(self, substitutions=None):
        """Numerically evaluate the expression and return it as a float.

        Default values recorded at construction are applied first; entries of
        ``substitutions`` override them.

        Raises:
            ValueError: if some symbols remain without a substitution.
        """
        subs = dict(self._defaults)
        subs.update(self._format_substitutions(substitutions))

        sympy_subs = {sp.Symbol(name): value for name, value in subs.items()}
        evaluated = sp.simplify(self._expr.subs(sympy_subs))
        if evaluated.free_symbols:
            raise ValueError(
                "Missing substitutions for symbols: {}".format(
                    ", ".join(sorted(str(s) for s in evaluated.free_symbols))
                )
            )
        return float(sp.N(evaluated))

    def _binary_op(self, other, operation, reflected=False):
        """Shared implementation of the binary arithmetic dunder methods.

        Args:
            other: the other operand (SymbolicScalar or real number).
            operation: two-argument callable applied to the sympy expressions.
            reflected (bool): True for ``__r*__`` variants, i.e. when ``self``
                is the right-hand operand.

        Returns:
            A new SymbolicScalar combining both operands, or NotImplemented so
            Python can fall back to the other operand's reflected method.
        """
        if not self._is_numeric(other):
            return NotImplemented
        other_expr, other_defaults = self._coerce(other)
        if reflected:
            defaults = self._merge_defaults(other_defaults, self._defaults)
            expr = operation(other_expr, self._expr)
        else:
            defaults = self._merge_defaults(self._defaults, other_defaults)
            expr = operation(self._expr, other_expr)
        return self._from_op(expr, defaults)

    def __add__(self, other):
        return self._binary_op(other, lambda a, b: a + b)

    def __radd__(self, other):
        # Addition is commutative: reuse __add__.
        return self.__add__(other)

    def __sub__(self, other):
        return self._binary_op(other, lambda a, b: a - b)

    def __rsub__(self, other):
        return self._binary_op(other, lambda a, b: a - b, reflected=True)

    def __mul__(self, other):
        return self._binary_op(other, lambda a, b: a * b)

    def __rmul__(self, other):
        # Multiplication is commutative: reuse __mul__.
        return self.__mul__(other)

    def __truediv__(self, other):
        return self._binary_op(other, lambda a, b: a / b)

    def __rtruediv__(self, other):
        return self._binary_op(other, lambda a, b: a / b, reflected=True)

    def __neg__(self):
        """Return the additive inverse, keeping the same defaults."""
        return self._from_op(-self._expr, self._defaults)

    def __float__(self):
        """Float conversion; requires every symbol to have a default value."""
        return self.evaluate()

    def __repr__(self):
        return "SymbolicScalar({})".format(self.symbol)


def is_scalar(value):
    """Return True if ``value`` behaves as a real scalar (plain number or SymbolicScalar)."""
    if isinstance(value, SymbolicScalar):
        return True
    return isinstance(value, numbers.Real)


def evaluate_scalar(value, substitutions=None):
    """Return ``value`` as a plain float, evaluating SymbolicScalar instances.

    Args:
        value: a SymbolicScalar or any real number.
        substitutions (dict, optional): symbol-name -> value overrides forwarded
            to ``SymbolicScalar.evaluate``; ignored for plain numbers.
    """
    if not isinstance(value, SymbolicScalar):
        return float(value)
    return value.evaluate(substitutions=substitutions)
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ It relies on the following Python modules:
- Numpy
- Scipy
- Cvxpy
- Sympy
- Matplotlib (for the demo notebook)


Expand Down
Loading