From 0f6fe16a4a46f2147dec052345de4d9291594313 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Thu, 28 Mar 2024 11:45:57 -0600 Subject: [PATCH 01/46] Adding initial draft of nonlinear to pwl transformation --- pyomo/contrib/piecewise/__init__.py | 3 + .../piecewise/transform/nonlinear_to_pwl.py | 380 ++++++++++++++++++ 2 files changed, 383 insertions(+) create mode 100644 pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py diff --git a/pyomo/contrib/piecewise/__init__.py b/pyomo/contrib/piecewise/__init__.py index 37873c83b3b..b5452dd2bd5 100644 --- a/pyomo/contrib/piecewise/__init__.py +++ b/pyomo/contrib/piecewise/__init__.py @@ -33,3 +33,6 @@ from pyomo.contrib.piecewise.transform.convex_combination import ( ConvexCombinationTransformation, ) +from pyomo.contrib.piecewise.transform.nonlinear_to_pwl import ( + NonlinearToPWL, +) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py new file mode 100644 index 00000000000..519736723f6 --- /dev/null +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -0,0 +1,380 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ + +import numpy as np +import itertools + +from pyomo.environ import ( + TransformationFactory, + Transformation, + Var, + Constraint, + Objective, + Any, + value, +) +from pyomo.core.expr.numeric_expr import SumExpression +from pyomo.core.expr import identify_variables +from pyomo.repn.quadratic import QuadraticRepnVisitor +from pyomo.core.expr import SumExpression +from pyomo.contrib.piecewise import PiecewiseLinearExpression + + +# TODO remove +MAX_DIM = 5 + +# This should be safe to use many times; declare it globally +_quadratic_repn_visitor = QuadraticRepnVisitor( + subexpression_cache={}, var_map={}, var_order={}, sorter=None +) + + +def get_pwl_function_approximation(func, method, n, bounds, **kwargs): + """ + Get a piecewise-linear approximation to a function, given: + + func: function to approximate + method: method to use for the approximation, current options are: + - 'simple_random_point_grid' + - 'simple_uniform_point_grid' + - 'naive_lmt' + n: parameter controlling fineness of the approximation based on the specified method + bounds: list of tuples giving upper and lower bounds for each of func's arguments + kwargs: additional arguments to be specified to the method used + """ + + points = None + match (method): + case 'simple_random_point_grid': + points = get_simple_random_point_grid(bounds, n) + case 'simple_uniform_point_grid': + points = get_simple_uniform_point_grid(bounds, n) + case 'naive_lmt': + points = get_points_naive_lmt(bounds, n, func, randomize=True) + case 'naive_lmt_uniform': + points = get_points_naive_lmt(bounds, n, func, randomize=False) + case _: + raise NotImplementedError(f"Invalid method: {method}") + + # Default path: after getting the points, construct PWLF using the + # function-and-list-of-points constructor + + # DUCT TAPE WARNING: work around deficiency in PiecewiseLinearFunction constructor. 
TODO + dim = len(points[0]) + if dim == 1: + points = [pt[0] for pt in points] + + print( + f" Constructing PWLF with {len(points)} points, each of which are {dim}-dimensional" + ) + return PiecewiseLinearFunction(points=points, function=func) + + +def get_simple_random_point_grid(bounds, n, seed=42): + # Generate randomized grid of points + linspaces = [] + for b in bounds: + np.random.seed(seed) + linspaces.append(np.random.uniform(b[0], b[1], n)) + return list(itertools.product(*linspaces)) + + +def get_simple_uniform_point_grid(bounds, n): + # Generate non-randomized grid of points + linspaces = [] + for b in bounds: + # Issues happen when exactly using the boundary + nudge = (b[1] - b[0]) * 1e-4 + linspaces.append( + # np.linspace(b[0], b[1], n) + np.linspace(b[0] + nudge, b[1] - nudge, n) + ) + return list(itertools.product(*linspaces)) + + +# TODO this was copypasted from shumeng; make it better +def get_points_naive_lmt(bounds, n, func, seed=42, randomize=True): + from lineartree import LinearTreeRegressor + from sklearn.linear_model import LinearRegression + from sklearn.metrics import mean_squared_error + from sklearn.model_selection import train_test_split + import PWLTransformation.lmt as lmtutils + + points = None + if randomize: + points = get_simple_random_point_grid(bounds, n, seed=seed) + else: + points = get_simple_uniform_point_grid(bounds, n) + # perturb(points, 0.01) + x_list = np.array(points) + y_list = [] + for point in points: + y_list.append(func(*point)) + regr = LinearTreeRegressor( + LinearRegression(), + criterion='mse', + max_bins=120, + min_samples_leaf=4, + max_depth=5, + ) + + # Using train_test_split is silly. TODO: remove this and just sample my own + # extra points if I want to estimate the error. + X_train, X_test, y_train, y_test = train_test_split( + x_list, y_list, test_size=0.2, random_state=seed + ) + regr.fit(X_train, y_train) + y_pred = regr.predict(X_test) + error = mean_squared_error(y_test, y_pred) + + leaves, splits, ths = lmtutils.parse_linear_tree_regressor(regr, bounds) + + # This was originally part of the LMT_Model_component and used to calculate + # avg_leaves for the output data. TODO: get this back + # self.total_leaves += len(leaves) + + # bound_point_list = lmt.generate_bound(leaves) + bound_point_list = lmtutils.generate_bound_points(leaves, bounds) + # duct tape to fix possible issues from unknown bugs. TODO should this go + # here? + return bound_point_list + + +@TransformationFactory.register( + 'contrib.piecewise.nonlinear_to_pwl', + doc="Convert nonlinear constraints and objectives to piecewise-linear approximations.", +) +class NonlinearToPWL(Transformation): + """ + Convert nonlinear constraints and objectives to piecewise-linear approximations. + """ + + def __init__(self): + super(Transformation).__init__() + + def _apply_to( + self, + model, + n=3, + method='simple_uniform_point_grid', + allow_quadratic_cons=True, + allow_quadratic_objs=True, + additively_decompose=True, + ): + """Apply the transformation""" + + # Check ahead of time whether there are any unbounded variables. If + # there are, we'll have to bail out + # But deactivated variables can be left alone -- or should they be? + # Let's not, for now. 
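        # ------------------------------------------------------------------
        # [Editor's aside -- illustrative sketch, not part of the original
        # patch.] The bound check below exists because every point-grid
        # helper above needs finite bounds to sample from. For reference,
        # get_simple_uniform_point_grid on a single variable with bounds
        # (1, 10) and n=3 yields exactly the breakpoints the tests added
        # later in this series expect:
        #
        #     import itertools
        #     import numpy as np
        #
        #     lb, ub, n = 1, 10, 3
        #     nudge = (ub - lb) * 1e-4
        #     pts = list(itertools.product(np.linspace(lb + nudge, ub - nudge, n)))
        #     # pts == [(1.0009,), (5.5,), (9.9991,)]
        # ------------------------------------------------------------------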
+ for v in model.component_objects(Var): + if None in v.bounds: + print( + "Error: cannot apply transformation to model with unbounded variables" + ) + raise NotImplementedError( + "Cannot apply transformation to model with unbounded variables" + ) + + # Upcoming steps will trash the values of the vars, since I don't know + # a better way. But what if the user set them with initialize= ? We'd + # better restore them after we're done. + orig_var_map = {id(var): var.value for var in model.component_objects(Var)} + + # Now we are ready to start + original_cons = list(model.component_data_objects(Constraint)) + original_objs = list(model.component_data_objects(Objective)) + + model._pwl_quadratic_count = 0 + model._pwl_nonlinear_count = 0 + + # Let's put all our new constraints in one big index + model._pwl_cons = Constraint(Any) + + for con in original_cons: + repn = _quadratic_repn_visitor.walk_expression(con.body) + if repn.nonlinear is None: + if repn.quadratic is None: + # Linear constraint. Always skip. + continue + else: + model._pwl_quadratic_count += 1 + if allow_quadratic_cons: + continue + else: + model._pwl_nonlinear_count += 1 + _replace_con( + model, con, method, n, allow_quadratic_cons, additively_decompose + ) + + # And do the same for objectives + for obj in original_objs: + repn = _quadratic_repn_visitor.walk_expression(obj) + if repn.nonlinear is None: + if repn.quadratic is None: + # Linear objective. Skip. + continue + else: + model._pwl_quadratic_count += 1 + if allow_quadratic_objs: + continue + else: + model._pwl_nonlinear_count += 1 + _replace_obj( + model, obj, method, n, allow_quadratic_objs, additively_decompose + ) + + # Before we're done, replace the old variable values + for var in model.component_objects(Var): + var.value = orig_var_map[id(var)] + + +# Check whether a term should be skipped for approximation. Do not touch +# model's quadratic or nonlinear counts; those are only for top-level +# expressions which were already checked +def _check_skip_approx(expr, allow_quadratic, model): + repn = _quadratic_repn_visitor.walk_expression(expr) + if repn.nonlinear is None: + if repn.quadratic is None: + # Linear expression. Skip. + return True + else: + # model._pwl_quadratic_count += 1 + if allow_quadratic: + return True + else: + pass + # model._pwl_nonlinear_count += 1 + dim = len(list(identify_variables(expr))) + if dim > MAX_DIM: + print(f"Refusing to approximate function with {dim}-dimensional component.") + raise RuntimeError( + f"Refusing to approximate function with {dim}-dimensional component." + ) + return False + + +def _replace_con(model, con, method, n, allow_quadratic_cons, additively_decompose): + vars = list(identify_variables(con.body)) + bounds = [(v.bounds[0], v.bounds[1]) for v in vars] + + # Alright, let's do it like this. Additively decompose con.body and work on the pieces + func_pieces = [] + for k, expr in enumerate( + _additively_decompose_expr(con.body) if additively_decompose else [con.body] + ): + # First, check if we actually need to do anything + if _check_skip_approx(expr, allow_quadratic_cons, model): + # We're skipping this term. 
Just add expr directly to the pieces + func_pieces.append(expr) + continue + + vars_inner = list(identify_variables(expr)) + bounds = [(v.bounds[0], v.bounds[1]) for v in vars_inner] + + def eval_con_func(*args): + # sanity check + assert len(args) == len( + vars_inner + ), f"eval_con_func was called with {len(args)} arguments, but expected {len(vars_inner)}" + for i, v in enumerate(vars_inner): + v.value = args[i] + return value(con.body) + + pwlf = get_pwl_function_approximation(eval_con_func, method, n, bounds) + + con_name = con.getname(fully_qualified=False) + model.add_component(f"_pwle_{con_name}_{k}", pwlf) + # func_pieces.append(pwlf(*vars_inner).expr) + func_pieces.append(pwlf(*vars_inner)) + + pwl_func = sum(func_pieces) + + # Change the constraint. This is hard to do in-place, so I'll + # remake it and deactivate the old one as was done originally. + + # Now we need a ton of if statements to properly set up the constraint + if con.equality: + model._pwl_cons[str(con)] = pwl_func == con.ub + elif con.strict_lower: + model._pwl_cons[str(con)] = pwl_func > con.lb + elif con.strict_upper: + model._pwl_cons[str(con)] = pwl_func < con.ub + elif con.has_lb(): + if con.has_ub(): # constraint is of the form lb <= expr <= ub + model._pwl_cons[str(con)] = (con.lb, pwl_func, con.ub) + else: + model._pwl_cons[str(con)] = pwl_func >= con.lb + elif con.has_ub(): + model._pwl_cons[str(con)] = pwl_func <= con.ub + else: + assert ( + False + ), f"unreachable: original Constraint '{con_name}' did not have any upper or lower bound" + con.deactivate() + + +def _replace_obj(model, obj, method, n, allow_quadratic_obj, additively_decompose): + vars = list(identify_variables(obj)) + bounds = [(v.bounds[0], v.bounds[1]) for v in vars] + + func_pieces = [] + for k, expr in enumerate( + _additively_decompose_expr(obj.expr) if additively_decompose else [obj.expr] + ): + # First, check if we actually need to do anything + if _check_skip_approx(expr, allow_quadratic_obj, model): + # We're skipping this term. Just add expr directly to the pieces + func_pieces.append(expr) + continue + + vars_inner = list(identify_variables(expr)) + bounds = [(v.bounds[0], v.bounds[1]) for v in vars_inner] + + def eval_obj_func(*args): + # sanity check + assert len(args) == len( + vars_inner + ), f"eval_obj_func was called with {len(args)} arguments, but expected {len(vars_inner)}" + for i, v in enumerate(vars_inner): + v.value = args[i] + return value(obj) + + pwlf = get_pwl_function_approximation(eval_obj_func, method, n, bounds) + + obj_name = obj.getname(fully_qualified=False) + model.add_component(f"_pwle_{obj_name}_{k}", pwlf) + func_pieces.append(pwlf(*vars_inner)) + + pwl_func = sum(func_pieces[1:], func_pieces[0]) + + # Add the new objective + obj_name = obj.getname(fully_qualified=False) + # model.add_component(f"_pwle_{obj_name}", pwl_func) + model.add_component( + f"_pwl_obj_{obj_name}", Objective(expr=pwl_func, sense=obj.sense) + ) + obj.deactivate() + + +# Copypasted from gdp/plugins/partition_disjuncts.py for now. This is the +# stupid approach that will not properly catch all additive separability; to do +# it better we need a walker. 
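
# [Editor's aside -- a hedged illustration, not part of the original patch, of
# what the class check in _additively_decompose_expr (below) does and does not
# catch. Exact expression types can vary between Pyomo versions, so treat this
# as a sketch rather than a guarantee.]
#
#     from pyomo.environ import ConcreteModel, Var, log
#
#     m = ConcreteModel()
#     m.x = Var()
#     m.y = Var()
#     e1 = log(m.x) + m.y**2        # typically a SumExpression: split into
#                                   # two summands
#     e2 = 2 * (log(m.x) + m.y**2)  # product at the root: returned as a single
#                                   # piece even though it is additively
#                                   # separable -- hence the "need a walker" note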
+def _additively_decompose_expr(input_expr): + if input_expr.__class__ is not SumExpression: + # print(f"couldn't decompose: input_expr.__class__ was {input_expr.__class__}, not SumExpression") + # This isn't separable, so we just have the one expression + return [input_expr] + # else, it was a SumExpression, and we will break it into the summands + summands = list(input_expr.args) + # print(f"len(summands) is {len(summands)}") + # print(f"summands is {summands}") + return summands From b34820b8f78f1ecb7e83fd49224a4c3c79d3474f Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Thu, 28 Mar 2024 14:19:43 -0600 Subject: [PATCH 02/46] Not screaming about fixed Var bounds, but this still doesn't work because the space isn't full-dimensional --- .../piecewise/transform/nonlinear_to_pwl.py | 216 ++++++++++++++---- 1 file changed, 174 insertions(+), 42 deletions(-) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index 519736723f6..3db29af6784 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -9,9 +9,13 @@ # This software is distributed under the 3-clause BSD License. # ___________________________________________________________________________ -import numpy as np import itertools +from lineartree import LinearTreeRegressor +import lineartree + +import numpy as np + from pyomo.environ import ( TransformationFactory, Transformation, @@ -25,7 +29,15 @@ from pyomo.core.expr import identify_variables from pyomo.repn.quadratic import QuadraticRepnVisitor from pyomo.core.expr import SumExpression -from pyomo.contrib.piecewise import PiecewiseLinearExpression +from pyomo.contrib.piecewise import ( + PiecewiseLinearExpression, + PiecewiseLinearFunction +) + +from sklearn.linear_model import LinearRegression +import random +from sklearn.metrics import mean_squared_error +from sklearn.model_selection import train_test_split # TODO remove @@ -36,7 +48,6 @@ subexpression_cache={}, var_map={}, var_order={}, sorter=None ) - def get_pwl_function_approximation(func, method, n, bounds, **kwargs): """ Get a piecewise-linear approximation to a function, given: @@ -81,33 +92,28 @@ def get_pwl_function_approximation(func, method, n, bounds, **kwargs): def get_simple_random_point_grid(bounds, n, seed=42): # Generate randomized grid of points linspaces = [] - for b in bounds: + for (lb, ub) in bounds: np.random.seed(seed) - linspaces.append(np.random.uniform(b[0], b[1], n)) + linspaces.append(np.random.uniform(lb, ub, n)) return list(itertools.product(*linspaces)) def get_simple_uniform_point_grid(bounds, n): # Generate non-randomized grid of points linspaces = [] - for b in bounds: + for (lb, ub) in bounds: # Issues happen when exactly using the boundary - nudge = (b[1] - b[0]) * 1e-4 + nudge = (ub - lb) * 1e-4 linspaces.append( # np.linspace(b[0], b[1], n) - np.linspace(b[0] + nudge, b[1] - nudge, n) + np.linspace(lb + nudge, ub - nudge, n) ) return list(itertools.product(*linspaces)) # TODO this was copypasted from shumeng; make it better def get_points_naive_lmt(bounds, n, func, seed=42, randomize=True): - from lineartree import LinearTreeRegressor - from sklearn.linear_model import LinearRegression - from sklearn.metrics import mean_squared_error - from sklearn.model_selection import train_test_split - import PWLTransformation.lmt as lmtutils - + points = None if randomize: points = get_simple_random_point_grid(bounds, n, seed=seed) @@ -135,19 +141,149 @@ def 
get_points_naive_lmt(bounds, n, func, seed=42, randomize=True): y_pred = regr.predict(X_test) error = mean_squared_error(y_test, y_pred) - leaves, splits, ths = lmtutils.parse_linear_tree_regressor(regr, bounds) + leaves, splits, ths = parse_linear_tree_regressor(regr, bounds) # This was originally part of the LMT_Model_component and used to calculate # avg_leaves for the output data. TODO: get this back # self.total_leaves += len(leaves) # bound_point_list = lmt.generate_bound(leaves) - bound_point_list = lmtutils.generate_bound_points(leaves, bounds) + bound_point_list = generate_bound_points(leaves, bounds) # duct tape to fix possible issues from unknown bugs. TODO should this go # here? return bound_point_list +# TODO: this is still horrible. Maybe I should put these back together into +# a wrapper class again, but better this time? + + +# Given a leaves dict (as generated by parse_tree) and a list of tuples +# representing variable bounds, generate the set of vertices separating each +# subset of the domain +def generate_bound_points(leaves, bounds): + bound_points = [] + for leaf in leaves.values(): + lower_corner_list = [] + upper_corner_list = [] + for var_bound in leaf['bounds'].values(): + lower_corner_list.append(var_bound[0]) + upper_corner_list.append(var_bound[1]) + + # Duct tape to fix issues from unknown bugs + for pt in [lower_corner_list, upper_corner_list]: + for i in range(len(pt)): + # clamp within bounds range + pt[i] = max(pt[i], bounds[i][0]) + pt[i] = min(pt[i], bounds[i][1]) + + if tuple(lower_corner_list) not in bound_points: + bound_points.append(tuple(lower_corner_list)) + if tuple(upper_corner_list) not in bound_points: + bound_points.append(tuple(upper_corner_list)) + + # This process should have gotten every interior bound point. However, all + # but two of the corners of the overall bounding box should have been + # missed. Let's fix that now. + for outer_corner in itertools.product(*bounds): + if outer_corner not in bound_points: + bound_points.append(outer_corner) + return bound_points + + +# Parse a LinearTreeRegressor and identify features such as bounds, slope, and +# intercept for leaves. Return some dicts. 
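
# [Editor's aside -- not part of the original patch.] For orientation, the
# "some dicts" these helpers pass around look roughly like the sketch below.
# The keys come from the code in this patch; the concrete values are invented.
#
#     example_leaf = {
#         'models': fitted_linear_regression,  # sklearn LinearRegression for this leaf
#         'slope': [0.182],                    # list(models.coef_), one entry per feature
#         'intercept': 1.70,                   # models.intercept_
#         'bounds': {0: [1.0, 5.5]},           # per-feature [lower, upper]; None until filled in
#     }
#     example_split = {
#         'col': 0,               # feature index the node splits on
#         'th': 5.5,              # splitting threshold
#         'children': [1, 2],     # ids of the left and right child nodes
#         'left_leaves': [1],     # populated by parse_linear_tree_regressor
#         'right_leaves': [2],
#     }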
+def parse_linear_tree_regressor(linear_tree_regressor, bounds): + leaves = linear_tree_regressor.summary(only_leaves=True) + splits = linear_tree_regressor.summary() + + for key, leaf in leaves.items(): + del splits[key] + leaf['bounds'] = {} + leaf['slope'] = list(leaf['models'].coef_) + leaf['intercept'] = leaf['models'].intercept_ + + L = np.array(list(leaves.keys())) + features = np.arange(0, len(leaves[L[0]]['slope'])) + + for node in splits.values(): + left_child_node = node['children'][0] # find its left child + right_child_node = node['children'][1] # find its right child + # create the list to save leaves + node['left_leaves'], node['right_leaves'] = [], [] + if left_child_node in leaves: # if left child is a leaf node + node['left_leaves'].append(left_child_node) + else: # traverse its left node by calling function to find all the leaves from its left node + node['left_leaves'] = find_leaves(splits, leaves, splits[left_child_node]) + if right_child_node in leaves: # if right child is a leaf node + node['right_leaves'].append(right_child_node) + else: # traverse its right node by calling function to find all the leaves from its right node + node['right_leaves'] = find_leaves(splits, leaves, splits[right_child_node]) + + # For each feature in each leaf, initialize lower and upper bounds to None + for th in features: + for leaf in leaves: + leaves[leaf]['bounds'][th] = [None, None] + for split in splits: + var = splits[split]['col'] + for leaf in splits[split]['left_leaves']: + leaves[leaf]['bounds'][var][1] = splits[split]['th'] + + for leaf in splits[split]['right_leaves']: + leaves[leaf]['bounds'][var][0] = splits[split]['th'] + + leaves_new = reassign_none_bounds(leaves, bounds) + splitting_thresholds = {} + for split in splits: + var = splits[split]['col'] + splitting_thresholds[var] = {} + for split in splits: + var = splits[split]['col'] + splitting_thresholds[var][split] = splits[split]['th'] + # Make sure every nested dictionary in the splitting_thresholds dictionary + # is sorted by value + for var in splitting_thresholds: + splitting_thresholds[var] = dict( + sorted(splitting_thresholds[var].items(), key=lambda x: x[1]) + ) + + return leaves_new, splits, splitting_thresholds + + +# Populate the "None" bounds with the bounding box bounds for a leaves-dict-tree +# amalgamation. 
+def reassign_none_bounds(leaves, input_bounds): + L = np.array(list(leaves.keys())) + features = np.arange(0, len(leaves[L[0]]['slope'])) + + for l in L: + for f in features: + if leaves[l]['bounds'][f][0] == None: + leaves[l]['bounds'][f][0] = input_bounds[f][0] + if leaves[l]['bounds'][f][1] == None: + leaves[l]['bounds'][f][1] = input_bounds[f][1] + return leaves + + +def find_leaves(splits, leaves, input_node): + root_node = input_node + leaves_list = [] + queue = [root_node] + while queue: + node = queue.pop() + node_left = node['children'][0] + node_right = node['children'][1] + if node_left in leaves: + leaves_list.append(node_left) + else: + queue.append(splits[node_left]) + if node_right in leaves: + leaves_list.append(node_right) + else: + queue.append(splits[node_right]) + return leaves_list + + @TransformationFactory.register( 'contrib.piecewise.nonlinear_to_pwl', doc="Convert nonlinear constraints and objectives to piecewise-linear approximations.", @@ -159,7 +295,7 @@ class NonlinearToPWL(Transformation): def __init__(self): super(Transformation).__init__() - + # TODO: ConfigDict def _apply_to( self, model, @@ -169,25 +305,12 @@ def _apply_to( allow_quadratic_objs=True, additively_decompose=True, ): - """Apply the transformation""" - - # Check ahead of time whether there are any unbounded variables. If - # there are, we'll have to bail out - # But deactivated variables can be left alone -- or should they be? - # Let's not, for now. - for v in model.component_objects(Var): - if None in v.bounds: - print( - "Error: cannot apply transformation to model with unbounded variables" - ) - raise NotImplementedError( - "Cannot apply transformation to model with unbounded variables" - ) + """TODO: docstring""" # Upcoming steps will trash the values of the vars, since I don't know # a better way. But what if the user set them with initialize= ? We'd # better restore them after we're done. - orig_var_map = {id(var): var.value for var in model.component_objects(Var)} + orig_var_map = {id(var): var.value for var in model.component_data_objects(Var)} # Now we are ready to start original_cons = list(model.component_data_objects(Constraint)) @@ -233,7 +356,7 @@ def _apply_to( ) # Before we're done, replace the old variable values - for var in model.component_objects(Var): + for var in model.component_data_objects(Var): var.value = orig_var_map[id(var)] @@ -262,11 +385,23 @@ def _check_skip_approx(expr, allow_quadratic, model): return False -def _replace_con(model, con, method, n, allow_quadratic_cons, additively_decompose): - vars = list(identify_variables(con.body)) - bounds = [(v.bounds[0], v.bounds[1]) for v in vars] +def _generate_bounds_list(vars_inner, con): + bounds = [] + for v in vars_inner: + if v.fixed: + bounds.append((value(v), value(v))) + elif None in v.bounds: + raise ValueError( + "Cannot automatically approximate constraints with unbounded " + "variables. Var '%s' appearining in component '%s' is missing " + "at least one bound" % (con.name, v.name)) + else: + bounds.append(v.bounds) + return bounds + - # Alright, let's do it like this. 
Additively decompose con.body and work on the pieces +def _replace_con(model, con, method, n, allow_quadratic_cons, additively_decompose): + # Additively decompose con.body and work on the pieces func_pieces = [] for k, expr in enumerate( _additively_decompose_expr(con.body) if additively_decompose else [con.body] @@ -278,7 +413,7 @@ def _replace_con(model, con, method, n, allow_quadratic_cons, additively_decompo continue vars_inner = list(identify_variables(expr)) - bounds = [(v.bounds[0], v.bounds[1]) for v in vars_inner] + bounds = _generate_bounds_list(vars_inner, con) def eval_con_func(*args): # sanity check @@ -323,9 +458,6 @@ def eval_con_func(*args): def _replace_obj(model, obj, method, n, allow_quadratic_obj, additively_decompose): - vars = list(identify_variables(obj)) - bounds = [(v.bounds[0], v.bounds[1]) for v in vars] - func_pieces = [] for k, expr in enumerate( _additively_decompose_expr(obj.expr) if additively_decompose else [obj.expr] @@ -337,7 +469,7 @@ def _replace_obj(model, obj, method, n, allow_quadratic_obj, additively_decompos continue vars_inner = list(identify_variables(expr)) - bounds = [(v.bounds[0], v.bounds[1]) for v in vars_inner] + bounds = _generate_bounds_list(vars_inner, obj) def eval_obj_func(*args): # sanity check From 33fd778cce713598dbcd84296b3922c2bf9ffa59 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Fri, 29 Mar 2024 15:09:56 -0600 Subject: [PATCH 03/46] Complete rewrite of nonlinear to piecewise linear --- .../piecewise/transform/nonlinear_to_pwl.py | 645 ++++++++++-------- 1 file changed, 378 insertions(+), 267 deletions(-) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index 3db29af6784..fde08103446 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -9,11 +9,12 @@ # This software is distributed under the 3-clause BSD License. 
# ___________________________________________________________________________ +import enum import itertools from lineartree import LinearTreeRegressor import lineartree - +import logging import numpy as np from pyomo.environ import ( @@ -24,72 +25,54 @@ Objective, Any, value, + BooleanVar, + Connector, + Expression, + Suffix, + Param, + Set, + SetOf, + RangeSet, + Block, + ExternalFunction, + SortComponents, + LogicalConstraint ) +from pyomo.common.collections import ComponentMap, ComponentSet +from pyomo.common.config import ConfigDict, ConfigValue, PositiveInt, InEnum +from pyomo.common.modeling import unique_component_name from pyomo.core.expr.numeric_expr import SumExpression from pyomo.core.expr import identify_variables -from pyomo.repn.quadratic import QuadraticRepnVisitor from pyomo.core.expr import SumExpression +from pyomo.core.util import target_list from pyomo.contrib.piecewise import ( PiecewiseLinearExpression, PiecewiseLinearFunction ) +from pyomo.gdp import Disjunct, Disjunction +from pyomo.network import Port +from pyomo.repn.quadratic import QuadraticRepnVisitor from sklearn.linear_model import LinearRegression import random from sklearn.metrics import mean_squared_error from sklearn.model_selection import train_test_split +logger = logging.getLogger(__name__) -# TODO remove -MAX_DIM = 5 +class DomainPartitioningMethod(enum.IntEnum): + RANDOM_GRID = 1 + UNIFORM_GRID = 2 + LINEAR_MODEL_TREE_UNIFORM = 3 + LINEAR_MODEL_TREE_RANDOM = 4 # This should be safe to use many times; declare it globally _quadratic_repn_visitor = QuadraticRepnVisitor( subexpression_cache={}, var_map={}, var_order={}, sorter=None ) -def get_pwl_function_approximation(func, method, n, bounds, **kwargs): - """ - Get a piecewise-linear approximation to a function, given: - - func: function to approximate - method: method to use for the approximation, current options are: - - 'simple_random_point_grid' - - 'simple_uniform_point_grid' - - 'naive_lmt' - n: parameter controlling fineness of the approximation based on the specified method - bounds: list of tuples giving upper and lower bounds for each of func's arguments - kwargs: additional arguments to be specified to the method used - """ - - points = None - match (method): - case 'simple_random_point_grid': - points = get_simple_random_point_grid(bounds, n) - case 'simple_uniform_point_grid': - points = get_simple_uniform_point_grid(bounds, n) - case 'naive_lmt': - points = get_points_naive_lmt(bounds, n, func, randomize=True) - case 'naive_lmt_uniform': - points = get_points_naive_lmt(bounds, n, func, randomize=False) - case _: - raise NotImplementedError(f"Invalid method: {method}") - - # Default path: after getting the points, construct PWLF using the - # function-and-list-of-points constructor - - # DUCT TAPE WARNING: work around deficiency in PiecewiseLinearFunction constructor. 
TODO - dim = len(points[0]) - if dim == 1: - points = [pt[0] for pt in points] - - print( - f" Constructing PWLF with {len(points)} points, each of which are {dim}-dimensional" - ) - return PiecewiseLinearFunction(points=points, function=func) - -def get_simple_random_point_grid(bounds, n, seed=42): +def get_random_point_grid(bounds, n, func, seed=42): # Generate randomized grid of points linspaces = [] for (lb, ub) in bounds: @@ -98,7 +81,7 @@ def get_simple_random_point_grid(bounds, n, seed=42): return list(itertools.product(*linspaces)) -def get_simple_uniform_point_grid(bounds, n): +def get_uniform_point_grid(bounds, n, func): # Generate non-randomized grid of points linspaces = [] for (lb, ub) in bounds: @@ -111,15 +94,17 @@ def get_simple_uniform_point_grid(bounds, n): return list(itertools.product(*linspaces)) -# TODO this was copypasted from shumeng; make it better -def get_points_naive_lmt(bounds, n, func, seed=42, randomize=True): - - points = None - if randomize: - points = get_simple_random_point_grid(bounds, n, seed=seed) - else: - points = get_simple_uniform_point_grid(bounds, n) - # perturb(points, 0.01) +def get_points_lmt_random_sample(bounds, n, func, seed=42): + points = get_random_point_grid(bounds, n, func, seed=seed) + return get_points_lmt(points, bounds, func, seed) + + +def get_points_lmt_uniform_sample(bounds, n, func, seed=42): + points = get_uniform_point_grid(bounds, n, func) + return get_points_lmt(points, bounds, func, seed) + + +def get_points_lmt(points, bounds, func, seed): x_list = np.array(points) y_list = [] for point in points: @@ -153,6 +138,36 @@ def get_points_naive_lmt(bounds, n, func, seed=42, randomize=True): # here? return bound_point_list +_partition_method_dispatcher = { + DomainPartitioningMethod.RANDOM_GRID: get_random_point_grid, + DomainPartitioningMethod.UNIFORM_GRID: get_uniform_point_grid, + DomainPartitioningMethod.LINEAR_MODEL_TREE_UNIFORM: get_points_lmt_uniform_sample, + DomainPartitioningMethod.LINEAR_MODEL_TREE_RANDOM: get_points_lmt_random_sample, +} + +def get_pwl_function_approximation(func, method, n, bounds): + """ + Get a piecewise-linear approximation of a function, given: + + func: function to approximate + method: method to use for the approximation, member of DomainPartitioningMethod + n: parameter controlling fineness of the approximation based on the specified method + bounds: list of tuples giving upper and lower bounds for each of func's arguments + """ + points = _partition_method_dispatcher[method](bounds, n, func) + + # DUCT TAPE WARNING: work around deficiency in PiecewiseLinearFunction + # constructor. TODO + dim = len(points[0]) + if dim == 1: + points = [pt[0] for pt in points] + + # After getting the points, construct PWLF using the + # function-and-list-of-points constructor + logger.debug(f"Constructing PWLF with {len(points)} points, each of which " + f"are {dim}-dimensional") + return PiecewiseLinearFunction(points=points, function=func) + # TODO: this is still horrible. Maybe I should put these back together into # a wrapper class again, but better this time? 
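
# [Editor's aside -- a minimal usage sketch for the reworked helper above; not
# part of the original patch, and only names defined in this file are assumed.
# The returned PiecewiseLinearFunction component is later attached to the model
# by the transformation.]
#
#     import math
#
#     log_pwlf = get_pwl_function_approximation(
#         func=lambda x: math.log(x),
#         method=DomainPartitioningMethod.UNIFORM_GRID,
#         n=3,
#         bounds=[(1, 10)],
#     )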
@@ -213,11 +228,13 @@ def parse_linear_tree_regressor(linear_tree_regressor, bounds): node['left_leaves'], node['right_leaves'] = [], [] if left_child_node in leaves: # if left child is a leaf node node['left_leaves'].append(left_child_node) - else: # traverse its left node by calling function to find all the leaves from its left node + else: # traverse its left node by calling function to find all the + # leaves from its left node node['left_leaves'] = find_leaves(splits, leaves, splits[left_child_node]) if right_child_node in leaves: # if right child is a leaf node node['right_leaves'].append(right_child_node) - else: # traverse its right node by calling function to find all the leaves from its right node + else: # traverse its right node by calling function to find all the + # leaves from its right node node['right_leaves'] = find_leaves(splits, leaves, splits[right_child_node]) # For each feature in each leaf, initialize lower and upper bounds to None @@ -250,6 +267,16 @@ def parse_linear_tree_regressor(linear_tree_regressor, bounds): return leaves_new, splits, splitting_thresholds +# This doesn't catch all additively separable expressions--we really need a +# walker (as does gdp.partition_disjuncts) +def _additively_decompose_expr(input_expr): + if input_expr.__class__ is not SumExpression: + # This isn't separable, so we just have the one expression + return [input_expr] + # else, it was a SumExpression, and we will break it into the summands + return list(input_expr.args) + + # Populate the "None" bounds with the bounding box bounds for a leaves-dict-tree # amalgamation. def reassign_none_bounds(leaves, input_bounds): @@ -286,227 +313,311 @@ def find_leaves(splits, leaves, input_node): @TransformationFactory.register( 'contrib.piecewise.nonlinear_to_pwl', - doc="Convert nonlinear constraints and objectives to piecewise-linear approximations.", + doc="Convert nonlinear constraints and objectives to piecewise-linear " + "approximations.", ) class NonlinearToPWL(Transformation): """ Convert nonlinear constraints and objectives to piecewise-linear approximations. """ - + CONFIG = ConfigDict('contrib.piecewise.nonlinear_to_pwl') + CONFIG.declare( + 'targets', + ConfigValue( + default=None, + domain=target_list, + description="target or list of targets that will be approximated", + doc=""" + This specifies the list of components to approximate. If None (default), + the entire model is transformed. Note that if the transformation is + done out of place, the list of targets should be attached to the model + before it is cloned, and the list will specify the targets on the cloned + instance.""", + ), + ) + CONFIG.declare( + 'num_points', + ConfigValue( + default=3, + domain=PositiveInt, + description="Number of breakpoints for each piecewise-linear approximation", + doc=""" + Specifies the number of points in each function domain to triangulate in + order to construct the piecewise-linear approximation. Must be an integer + greater than 1.""", + ), + ) + CONFIG.declare( + 'domain_partitioning_method', + ConfigValue( + default=DomainPartitioningMethod.UNIFORM_GRID, + domain=InEnum(DomainPartitioningMethod), + description="Method for sampling points that will partition function " + "domains.", + doc=""" + The method by which the points used to partition each function domain + are selected. 
By default, the range of each variable is partitioned + uniformly, however it is possible to sample randomly or to use the + partitions from training a linear model tree based on either uniform + or random samples of the ranges.""", + ), + ) + CONFIG.declare( + 'approximate_quadratic_constraints', + ConfigValue( + default=True, + domain=bool, + description="Whether or not to approximate quadratic constraints.", + doc=""" + Whether or not to calculate piecewise-linear approximations for + quadratic constraints. If True, the resulting approximation will be + a mixed-integer linear program. If False, the resulting approximation + will be a mixed-integer quadratic program.""", + ), + ) + CONFIG.declare( + 'approximate_quadratic_objectives', + ConfigValue( + default=True, + domain=bool, + description="Whether or not to approximate quadratic objectives.", + doc=""" + Whether or not to calculate piecewise-linear approximations for + quadratic objectives. If True, the resulting approximation will be + a mixed-integer linear program. If False, the resulting approximation + will be a mixed-integer quadratic program.""", + ), + ) + CONFIG.declare( + 'additively_decompose', + ConfigValue( + default=False, + domain=bool, + description="Whether or not to additively decompose constraints and " + "approximate the summands separately.", + doc=""" + If False, each nonlinear constraint expression will be approximated by + exactly one piecewise-linear function. If True, constraints will be + additively decomposed, and each of the resulting summands will be + approximated by a separate piecewise-linear function. + + It is recommended to leave this False as long as no nonlinear constraint + involves more than about 5-6 variables. For constraints with higher- + dimmensional nonlinear functions, additive decomposition will improve + the scalability of the approximation (since paritioning the domain is + subject to the curse of dimensionality).""", + ), + ) + CONFIG.declare( + 'max_dimension', + ConfigValue( + default=5, + domain=PositiveInt, + description="The maximum dimension of functions that will be approximated.", + doc=""" + Specifies the maximum dimension function the transformation should + attempt to approximate. If a nonlinear function dimension exceeds + 'max_dimension' the transformation will log a warning and leave the + expression as-is. For functions with dimension significantly the default + (5), it is likely that this transformation will stall triangulating the + points in order to partition the function domain.""", + ), + ) def __init__(self): super(Transformation).__init__() - # TODO: ConfigDict - def _apply_to( - self, - model, - n=3, - method='simple_uniform_point_grid', - allow_quadratic_cons=True, - allow_quadratic_objs=True, - additively_decompose=True, - ): - """TODO: docstring""" - - # Upcoming steps will trash the values of the vars, since I don't know - # a better way. But what if the user set them with initialize= ? We'd - # better restore them after we're done. 
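        # ------------------------------------------------------------------
        # [Editor's aside -- not part of the original patch.] With the
        # ConfigDict declared above, the transformation is now driven by
        # keyword options; a typical call (mirroring the tests added later
        # in this series) looks like:
        #
        #     TransformationFactory('contrib.piecewise.nonlinear_to_pwl').apply_to(
        #         m,
        #         num_points=3,
        #         domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID,
        #         additively_decompose=False,
        #     )
        #
        # where 'm' is a Pyomo model whose relevant variables are bounded.
        # ------------------------------------------------------------------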
- orig_var_map = {id(var): var.value for var in model.component_data_objects(Var)} - - # Now we are ready to start - original_cons = list(model.component_data_objects(Constraint)) - original_objs = list(model.component_data_objects(Objective)) - - model._pwl_quadratic_count = 0 - model._pwl_nonlinear_count = 0 - - # Let's put all our new constraints in one big index - model._pwl_cons = Constraint(Any) - - for con in original_cons: - repn = _quadratic_repn_visitor.walk_expression(con.body) - if repn.nonlinear is None: - if repn.quadratic is None: - # Linear constraint. Always skip. - continue - else: - model._pwl_quadratic_count += 1 - if allow_quadratic_cons: - continue + self._handlers = { + Constraint: self._transform_constraint, + Objective: self._transform_objective, + Var: False, + BooleanVar: False, + Connector: False, + Expression: False, + Suffix: False, + Param: False, + Set: False, + SetOf: False, + RangeSet: False, + Disjunction: False, + Disjunct: self._transform_block_components, + Block: self._transform_block_components, + ExternalFunction: False, + Port: False, + PiecewiseLinearFunction: False, + LogicalConstraint: False, + } + self._transformation_blocks = {} + self._transformation_block_set = ComponentSet() + + def _apply_to(self, instance, **kwds): + try: + self._apply_to_impl(instance, **kwds) + finally: + self._transformation_blocks.clear() + self._transformation_block_set.clear() + + def _apply_to_impl( self, model, **kwds): + config = self.CONFIG(kwds.pop('options', {})) + config.set_value(kwds) + + targets = config.targets + if targets is None: + targets = (model,) + + for target in targets: + if target.ctype is Block or target.ctype is Disjunct: + self._transform_block_components(target, config) + elif target.ctype is Constraint: + self._transform_constraint(target, config) + elif target.ctype is Objective: + self._transform_objective(target, config) else: - model._pwl_nonlinear_count += 1 - _replace_con( - model, con, method, n, allow_quadratic_cons, additively_decompose - ) + raise ValueError( + "Target '%s' is not a Block, Constraint, or Objective. It " + "is of type '%s' and cannot be transformed." + % (target.name, type(t)) + ) + + def _get_transformation_block(self, parent): + if parent in self._transformation_blocks: + return self._transformation_blocks[parent] + + nm = unique_component_name( + parent, '_pyomo_contrib_nonlinear_to_pwl' + ) + self._transformation_blocks[parent] = transBlock = Block() + parent.add_component(nm, transBlock) + self._transformation_block_set.add(transBlock) - # And do the same for objectives - for obj in original_objs: - repn = _quadratic_repn_visitor.walk_expression(obj) - if repn.nonlinear is None: - if repn.quadratic is None: - # Linear objective. Skip. + transBlock._pwl_cons = Constraint(Any) + return transBlock + + def _transform_block_components(self, block, config): + blocks = block.values() if block.is_indexed() else (block,) + for b in blocks: + for obj in b.component_objects( + active=True, + descend_into=False, + sort=SortComponents.deterministic + ): + if obj in self._transformation_block_set: + # This is a Block we created--we know we don't need to look + # on it. + continue + handler = self._handlers.get(obj.ctype, None) + if not handler: + if handler is None: + raise RuntimeError( + "No transformation handler registered for modeling " + "components of type '%s'." 
% obj.ctype + ) continue - else: - model._pwl_quadratic_count += 1 - if allow_quadratic_objs: - continue + handler(obj, config) + + def _transform_constraint(self, cons, config): + trans_block = self._get_transformation_block(cons.parent_block()) + constraints = cons.values() if cons.is_indexed() else (cons,) + for c in constraints: + pw_approx = self._approximate_expression( + c.body, c, trans_block, config, + config.approximate_quadratic_constraints) + + if pw_approx is None: + # Didn't need approximated, nothing to do + continue + + trans_block._pwl_cons[c.name, len(trans_block._pwl_cons)] = (c.lower, + pw_approx, + c.upper) + # deactivate original + c.deactivate() + + def _transform_objective(self, objective, config): + trans_block = self._get_transformation_block(objective.parent_block()) + objectives = objective.values() if objective.is_indexed() else (objective,) + for obj in objectives: + pw_approx = self._approximate_expression( + obj.expr, obj, trans_block, config, + config.approximate_quadratic_objectives) + + if pw_approx is None: + # Didn't need approximated, nothing to do + continue + + trans_block.add_component( + unique_component_name(trans_block, obj.name), + Objective(expr=pw_approx, sense=obj.sense) + ) + obj.deactivate() + + def _get_bounds_list(self, var_list, parent_component): + bounds = [] + for v in var_list: + if None in v.bounds: + raise ValueError( + "Cannot automatically approximate constraints with unbounded " + "variables. Var '%s' appearining in component '%s' is missing " + "at least one bound" % (con.name, v.name)) else: - model._pwl_nonlinear_count += 1 - _replace_obj( - model, obj, method, n, allow_quadratic_objs, additively_decompose + bounds.append(v.bounds) + return bounds + + def _needs_approximating(self, expr, approximate_quadratic): + repn = _quadratic_repn_visitor.walk_expression(expr) + if repn.nonlinear is None: + if repn.quadratic is None: + # Linear constraint. Always skip. + return False + else: + if not approximate_quadratic: + # Didn't need approximated, nothing to do + return False + return True + + def _approximate_expression(self, obj, parent_component, trans_block, + config, approximate_quadratic): + if not self._needs_approximating(obj, approximate_quadratic): + return + + # Additively decompose obj and work on the pieces + pwl_func = 0 + for k, expr in enumerate(_additively_decompose_expr(obj) if + config.additively_decompose else (obj,)): + # First check is this is a good idea + expr_vars = list(identify_variables(expr, include_fixed=False)) + orig_values = ComponentMap((v, v.value) for v in expr_vars) + + dim = len(expr_vars) + if dim > config.max_dimension: + logger.warning( + "Not approximating expression for component '%s' as " + "it exceeds the maximum dimension of %s. Try increasing " + "'max_dimension' or additively separating the expression." 
+ % (parent_component.name, config.max_dimension)) + pwl_func += expr + continue + elif not self._needs_approximating(expr, approximate_quadratic): + pwl_func += expr + continue + + def eval_expr(*args): + for i, v in enumerate(expr_vars): + v.value = args[i] + return value(expr) + + pwlf = get_pwl_function_approximation( + eval_expr, config.domain_partitioning_method, + config.num_points, + self._get_bounds_list(expr_vars, parent_component) ) + name = unique_component_name( + trans_block, + parent_component.getname(fully_qualified=False) + ) + trans_block.add_component(f"_pwle_{name}_{k}", pwlf) + pwl_func += pwlf(*expr_vars) - # Before we're done, replace the old variable values - for var in model.component_data_objects(Var): - var.value = orig_var_map[id(var)] - - -# Check whether a term should be skipped for approximation. Do not touch -# model's quadratic or nonlinear counts; those are only for top-level -# expressions which were already checked -def _check_skip_approx(expr, allow_quadratic, model): - repn = _quadratic_repn_visitor.walk_expression(expr) - if repn.nonlinear is None: - if repn.quadratic is None: - # Linear expression. Skip. - return True - else: - # model._pwl_quadratic_count += 1 - if allow_quadratic: - return True - else: - pass - # model._pwl_nonlinear_count += 1 - dim = len(list(identify_variables(expr))) - if dim > MAX_DIM: - print(f"Refusing to approximate function with {dim}-dimensional component.") - raise RuntimeError( - f"Refusing to approximate function with {dim}-dimensional component." - ) - return False - - -def _generate_bounds_list(vars_inner, con): - bounds = [] - for v in vars_inner: - if v.fixed: - bounds.append((value(v), value(v))) - elif None in v.bounds: - raise ValueError( - "Cannot automatically approximate constraints with unbounded " - "variables. Var '%s' appearining in component '%s' is missing " - "at least one bound" % (con.name, v.name)) - else: - bounds.append(v.bounds) - return bounds - - -def _replace_con(model, con, method, n, allow_quadratic_cons, additively_decompose): - # Additively decompose con.body and work on the pieces - func_pieces = [] - for k, expr in enumerate( - _additively_decompose_expr(con.body) if additively_decompose else [con.body] - ): - # First, check if we actually need to do anything - if _check_skip_approx(expr, allow_quadratic_cons, model): - # We're skipping this term. Just add expr directly to the pieces - func_pieces.append(expr) - continue - - vars_inner = list(identify_variables(expr)) - bounds = _generate_bounds_list(vars_inner, con) - - def eval_con_func(*args): - # sanity check - assert len(args) == len( - vars_inner - ), f"eval_con_func was called with {len(args)} arguments, but expected {len(vars_inner)}" - for i, v in enumerate(vars_inner): - v.value = args[i] - return value(con.body) - - pwlf = get_pwl_function_approximation(eval_con_func, method, n, bounds) - - con_name = con.getname(fully_qualified=False) - model.add_component(f"_pwle_{con_name}_{k}", pwlf) - # func_pieces.append(pwlf(*vars_inner).expr) - func_pieces.append(pwlf(*vars_inner)) - - pwl_func = sum(func_pieces) - - # Change the constraint. This is hard to do in-place, so I'll - # remake it and deactivate the old one as was done originally. 
- - # Now we need a ton of if statements to properly set up the constraint - if con.equality: - model._pwl_cons[str(con)] = pwl_func == con.ub - elif con.strict_lower: - model._pwl_cons[str(con)] = pwl_func > con.lb - elif con.strict_upper: - model._pwl_cons[str(con)] = pwl_func < con.ub - elif con.has_lb(): - if con.has_ub(): # constraint is of the form lb <= expr <= ub - model._pwl_cons[str(con)] = (con.lb, pwl_func, con.ub) - else: - model._pwl_cons[str(con)] = pwl_func >= con.lb - elif con.has_ub(): - model._pwl_cons[str(con)] = pwl_func <= con.ub - else: - assert ( - False - ), f"unreachable: original Constraint '{con_name}' did not have any upper or lower bound" - con.deactivate() - - -def _replace_obj(model, obj, method, n, allow_quadratic_obj, additively_decompose): - func_pieces = [] - for k, expr in enumerate( - _additively_decompose_expr(obj.expr) if additively_decompose else [obj.expr] - ): - # First, check if we actually need to do anything - if _check_skip_approx(expr, allow_quadratic_obj, model): - # We're skipping this term. Just add expr directly to the pieces - func_pieces.append(expr) - continue - - vars_inner = list(identify_variables(expr)) - bounds = _generate_bounds_list(vars_inner, obj) - - def eval_obj_func(*args): - # sanity check - assert len(args) == len( - vars_inner - ), f"eval_obj_func was called with {len(args)} arguments, but expected {len(vars_inner)}" - for i, v in enumerate(vars_inner): - v.value = args[i] - return value(obj) - - pwlf = get_pwl_function_approximation(eval_obj_func, method, n, bounds) - - obj_name = obj.getname(fully_qualified=False) - model.add_component(f"_pwle_{obj_name}_{k}", pwlf) - func_pieces.append(pwlf(*vars_inner)) - - pwl_func = sum(func_pieces[1:], func_pieces[0]) - - # Add the new objective - obj_name = obj.getname(fully_qualified=False) - # model.add_component(f"_pwle_{obj_name}", pwl_func) - model.add_component( - f"_pwl_obj_{obj_name}", Objective(expr=pwl_func, sense=obj.sense) - ) - obj.deactivate() - + # restore var values + for v, val in orig_values.items(): + v.value = val -# Copypasted from gdp/plugins/partition_disjuncts.py for now. This is the -# stupid approach that will not properly catch all additive separability; to do -# it better we need a walker. 
-def _additively_decompose_expr(input_expr): - if input_expr.__class__ is not SumExpression: - # print(f"couldn't decompose: input_expr.__class__ was {input_expr.__class__}, not SumExpression") - # This isn't separable, so we just have the one expression - return [input_expr] - # else, it was a SumExpression, and we will break it into the summands - summands = list(input_expr.args) - # print(f"len(summands) is {len(summands)}") - # print(f"summands is {summands}") - return summands + return pwl_func From 3271a0ead174d17ad2135fb1bf44b65cf163b024 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Fri, 29 Mar 2024 15:10:46 -0600 Subject: [PATCH 04/46] Adding handler to safely ignore LogicalConstraints in piecewise linear to GDP transformations --- .../piecewise/transform/piecewise_to_gdp_transformation.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyomo/contrib/piecewise/transform/piecewise_to_gdp_transformation.py b/pyomo/contrib/piecewise/transform/piecewise_to_gdp_transformation.py index 2e056c47a15..f36c222b4e0 100644 --- a/pyomo/contrib/piecewise/transform/piecewise_to_gdp_transformation.py +++ b/pyomo/contrib/piecewise/transform/piecewise_to_gdp_transformation.py @@ -31,6 +31,7 @@ Connector, SortComponents, Any, + LogicalConstraint, ) from pyomo.core.base import Transformation from pyomo.core.base.block import _BlockData, Block @@ -102,6 +103,7 @@ def __init__(self): ExternalFunction: False, Port: False, PiecewiseLinearFunction: self._transform_piecewise_linear_function, + LogicalConstraint: False, } self._transformation_blocks = {} From 159766a4e39e00190fa3da0c501a1e04549b2441 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Fri, 29 Mar 2024 17:01:01 -0600 Subject: [PATCH 05/46] Starting to add tests for nonlinear to pw linear transformation --- .../piecewise/tests/test_nonlinear_to_pwl.py | 88 +++++++++++++++++++ 1 file changed, 88 insertions(+) create mode 100644 pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py new file mode 100644 index 00000000000..4d7af9dc7a6 --- /dev/null +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -0,0 +1,88 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. 
+# ___________________________________________________________________________ + +import pyomo.common.unittest as unittest +from pyomo.contrib.piecewise import PiecewiseLinearFunction +from pyomo.contrib.piecewise.transform.nonlinear_to_pwl import ( + NonlinearToPWL, + DomainPartitioningMethod +) +from pyomo.core.expr.compare import ( + assertExpressionsStructurallyEqual, +) +from pyomo.environ import ( + ConcreteModel, + Var, + Constraint, + TransformationFactory, + log, +) + +## debug +from pytest import set_trace + +class TestNonlinearToPWL_1D(unittest.TestCase): + def make_model(self): + m = ConcreteModel() + m.x = Var(bounds=(1, 10)) + m.cons = Constraint(expr=log(m.x) >= 0.35) + + return m + + def test_log_constraint_uniform_grid(self): + m = self.make_model() + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + n_to_pwl.apply_to( + m, + num_points=3, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, + ) + + # cons is transformed + self.assertFalse(m.cons.active) + + pwlf = list(m.component_data_objects(PiecewiseLinearFunction, + descend_into=True)) + self.assertEqual(len(pwlf), 1) + pwlf = pwlf[0] + + points = [(1.0009,), (5.5,), (9.9991,)] + self.assertEqual(pwlf._simplices, [(0, 1), (1, 2)]) + self.assertEqual(pwlf._points, points) + self.assertEqual(len(pwlf._linear_functions), 2) + + x1 = 1.0009 + x2 = 5.5 + assertExpressionsStructurallyEqual( + self, + pwlf._linear_functions[0](m.x), + ((log(x2) - log(x1))/(x2 - x1))*m.x + + (log(x2) - ((log(x2) - log(x1))/(x2 - x1))*x2), + places=7 + ) + x1 = 5.5 + x2 = 9.9991 + assertExpressionsStructurallyEqual( + self, + pwlf._linear_functions[1](m.x), + ((log(x2) - log(x1))/(x2 - x1))*m.x + + (log(x2) - ((log(x2) - log(x1))/(x2 - x1))*x2), + places=7 + ) + + self.assertEqual(len(pwlf._expressions), 1) + new_cons = n_to_pwl.get_transformed_component(m.cons) + self.assertTrue(new_cons.active) + self.assertIs(new_cons.body, pwlf._expressions[id(new_cons.body.expr)]) + self.assertIsNone(new_cons.ub) + self.assertEqual(new_cons.lb, 0.35) + self.assertIs(n_to_pwl.get_src_component(new_cons), m.cons) From f295490fa203e5963f70a47204e15c2bcc98f8bf Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Tue, 2 Apr 2024 21:23:15 -0600 Subject: [PATCH 06/46] Adding mapping between original and transformed components and starting on more tests --- .../piecewise/tests/test_nonlinear_to_pwl.py | 30 +++++++++++++ .../piecewise/transform/nonlinear_to_pwl.py | 45 +++++++++++++++++-- 2 files changed, 71 insertions(+), 4 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 4d7af9dc7a6..020edee3924 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -36,6 +36,10 @@ def make_model(self): m.cons = Constraint(expr=log(m.x) >= 0.35) return m + + def check_pw_linear_log_x(self, m, points): + x1 = points[0][0] + x2 = points def test_log_constraint_uniform_grid(self): m = self.make_model() @@ -86,3 +90,29 @@ def test_log_constraint_uniform_grid(self): self.assertIsNone(new_cons.ub) self.assertEqual(new_cons.lb, 0.35) self.assertIs(n_to_pwl.get_src_component(new_cons), m.cons) + + def test_log_constraint_random_grid(self): + m = self.make_model() + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + # [ESJ 3/30/24]: The seed is actually set in the function for getting + # the points right now, so this will be deterministic. 
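        # [Editor's aside -- not part of the original patch.] The breakpoints
        # asserted below can be reproduced directly, since the random-grid
        # helper seeds numpy before sampling uniformly within the bounds:
        #
        #     import numpy as np
        #     np.random.seed(42)
        #     np.random.uniform(1, 10, 3)
        #     # array([4.37086107, 9.55642876, 7.58794548])
        #
        # which the test checks below in sorted order.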
+ n_to_pwl.apply_to( + m, + num_points=3, + domain_partitioning_method=DomainPartitioningMethod.RANDOM_GRID, + ) + + # cons is transformed + self.assertFalse(m.cons.active) + + pwlf = list(m.component_data_objects(PiecewiseLinearFunction, + descend_into=True)) + self.assertEqual(len(pwlf), 1) + pwlf = pwlf[0] + + set_trace() + points = [(4.370861069626263,), (7.587945476302646,), (9.556428757689245,)] + self.assertEqual(pwlf._simplices, [(0, 1), (1, 2)]) + self.assertEqual(pwlf._points, points) + self.assertEqual(len(pwlf._linear_functions), 2) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index fde08103446..d819e893d5f 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -38,6 +38,7 @@ SortComponents, LogicalConstraint ) +from pyomo.common.autoslots import AutoSlots from pyomo.common.collections import ComponentMap, ComponentSet from pyomo.common.config import ConfigDict, ConfigValue, PositiveInt, InEnum from pyomo.common.modeling import unique_component_name @@ -71,6 +72,13 @@ class DomainPartitioningMethod(enum.IntEnum): subexpression_cache={}, var_map={}, var_order={}, sorter=None ) +class _NonlinearToPWLTransformationData(AutoSlots.Mixin): + __slots__ = ('transformed_component', 'src_component') + + def __init__(self): + self.transformed_component = ComponentMap() + self.src_component = ComponentMap() +Block.register_private_data_initializer(_NonlinearToPWLTransformationData) def get_random_point_grid(bounds, n, func, seed=42): # Generate randomized grid of points @@ -515,6 +523,8 @@ def _transform_block_components(self, block, config): def _transform_constraint(self, cons, config): trans_block = self._get_transformation_block(cons.parent_block()) + trans_data_dict = trans_block.private_data() + src_data_dict = cons.parent_block().private_data() constraints = cons.values() if cons.is_indexed() else (cons,) for c in constraints: pw_approx = self._approximate_expression( @@ -525,15 +535,20 @@ def _transform_constraint(self, cons, config): # Didn't need approximated, nothing to do continue - trans_block._pwl_cons[c.name, len(trans_block._pwl_cons)] = (c.lower, - pw_approx, - c.upper) + idx = len(trans_block._pwl_cons) + trans_block._pwl_cons[c.name, idx] = (c.lower, pw_approx, c.upper) + new_cons = trans_block._pwl_cons[c.name, idx] + trans_data_dict.src_component[new_cons] = c + src_data_dict.transformed_component[c] = new_cons + # deactivate original c.deactivate() def _transform_objective(self, objective, config): trans_block = self._get_transformation_block(objective.parent_block()) + trans_data_dict = trans_block.private_data() objectives = objective.values() if objective.is_indexed() else (objective,) + src_data_dict = objective.parent_block().private_data() for obj in objectives: pw_approx = self._approximate_expression( obj.expr, obj, trans_block, config, @@ -543,10 +558,14 @@ def _transform_objective(self, objective, config): # Didn't need approximated, nothing to do continue + new_obj = Objective(expr=pw_approx, sense=obj.sense) trans_block.add_component( unique_component_name(trans_block, obj.name), - Objective(expr=pw_approx, sense=obj.sense) + new_obj ) + trans_data_dict.src_component[new_obj] = obj + src_data_dict.transformed_component[obj] = new_obj + obj.deactivate() def _get_bounds_list(self, var_list, parent_component): @@ -621,3 +640,21 @@ def eval_expr(*args): v.value = val return pwl_func + + def 
get_src_component(self, cons): + data = cons.parent_block().private_data().src_component + if cons in data: + return data[cons] + else: + raise ValueError( + "It does not appear that '%s' is a transformed Constraint " + "created by the 'nonlinear_to_pwl' transformation." % cons.name) + + def get_transformed_component(self, cons): + data = cons.parent_block().private_data().transformed_component + if cons in data: + return data[cons] + else: + raise ValueError( + "It does not appear that '%s' is a Constraint that was " + "transformed by the 'nonlinear_to_pwl' transformation." % cons.name) From 48118832f1e286b3be40c1347807d629c340384e Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Thu, 18 Jul 2024 11:07:15 -0600 Subject: [PATCH 07/46] whoops, fixing typo from last commit --- pyomo/contrib/piecewise/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pyomo/contrib/piecewise/__init__.py b/pyomo/contrib/piecewise/__init__.py index e5a0a541657..67596a709e3 100644 --- a/pyomo/contrib/piecewise/__init__.py +++ b/pyomo/contrib/piecewise/__init__.py @@ -35,6 +35,7 @@ ) from pyomo.contrib.piecewise.transform.nonlinear_to_pwl import ( NonlinearToPWL, +) from pyomo.contrib.piecewise.transform.nested_inner_repn import ( NestedInnerRepresentationGDPTransformation, ) From 02cef2d00b68b07be4bfae9090b07745363a6070 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Thu, 18 Jul 2024 11:23:55 -0600 Subject: [PATCH 08/46] Putting common part of log x tests into helper function for tests --- .../piecewise/tests/test_nonlinear_to_pwl.py | 86 ++++++++++++------- 1 file changed, 55 insertions(+), 31 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 020edee3924..d0f5acc2ff1 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -37,35 +37,14 @@ def make_model(self): return m - def check_pw_linear_log_x(self, m, points): - x1 = points[0][0] - x2 = points - - def test_log_constraint_uniform_grid(self): - m = self.make_model() - + def check_pw_linear_log_x(self, m, pwlf, x1, x2, x3): n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') - n_to_pwl.apply_to( - m, - num_points=3, - domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, - ) - - # cons is transformed - self.assertFalse(m.cons.active) - pwlf = list(m.component_data_objects(PiecewiseLinearFunction, - descend_into=True)) - self.assertEqual(len(pwlf), 1) - pwlf = pwlf[0] - - points = [(1.0009,), (5.5,), (9.9991,)] + points = [(x1,), (x2,), (x3,)] self.assertEqual(pwlf._simplices, [(0, 1), (1, 2)]) self.assertEqual(pwlf._points, points) self.assertEqual(len(pwlf._linear_functions), 2) - x1 = 1.0009 - x2 = 5.5 assertExpressionsStructurallyEqual( self, pwlf._linear_functions[0](m.x), @@ -73,13 +52,11 @@ def test_log_constraint_uniform_grid(self): (log(x2) - ((log(x2) - log(x1))/(x2 - x1))*x2), places=7 ) - x1 = 5.5 - x2 = 9.9991 assertExpressionsStructurallyEqual( self, pwlf._linear_functions[1](m.x), - ((log(x2) - log(x1))/(x2 - x1))*m.x + - (log(x2) - ((log(x2) - log(x1))/(x2 - x1))*x2), + ((log(x3) - log(x2))/(x3 - x2))*m.x + + (log(x3) - ((log(x3) - log(x2))/(x3 - x2))*x3), places=7 ) @@ -90,6 +67,28 @@ def test_log_constraint_uniform_grid(self): self.assertIsNone(new_cons.ub) self.assertEqual(new_cons.lb, 0.35) self.assertIs(n_to_pwl.get_src_component(new_cons), m.cons) + + def test_log_constraint_uniform_grid(self): + m = self.make_model() + + 
n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + n_to_pwl.apply_to( + m, + num_points=3, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, + ) + + # cons is transformed + self.assertFalse(m.cons.active) + + pwlf = list(m.component_data_objects(PiecewiseLinearFunction, + descend_into=True)) + self.assertEqual(len(pwlf), 1) + pwlf = pwlf[0] + + points = [(1.0009,), (5.5,), (9.9991,)] + (x1, x2, x3) = 1.0009, 5.5, 9.9991 + self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) def test_log_constraint_random_grid(self): m = self.make_model() @@ -110,9 +109,34 @@ def test_log_constraint_random_grid(self): descend_into=True)) self.assertEqual(len(pwlf), 1) pwlf = pwlf[0] + + x1 = 4.370861069626263 + x2 = 7.587945476302646 + x3 = 9.556428757689245 + self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) + + + def test_log_constraint_lmt_uniform_sample(self): + m = self.make_model() + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + n_to_pwl.apply_to( + m, + num_points=3, + domain_partitioning_method=DomainPartitioningMethod.LINEAR_MODEL_TREE_UNIFORM, + ) + + # cons is transformed + self.assertFalse(m.cons.active) + + pwlf = list(m.component_data_objects(PiecewiseLinearFunction, + descend_into=True)) + self.assertEqual(len(pwlf), 1) + pwlf = pwlf[0] + set_trace() - points = [(4.370861069626263,), (7.587945476302646,), (9.556428757689245,)] - self.assertEqual(pwlf._simplices, [(0, 1), (1, 2)]) - self.assertEqual(pwlf._points, points) - self.assertEqual(len(pwlf._linear_functions), 2) + + x1 = 4.370861069626263 + x2 = 7.587945476302646 + x3 = 9.556428757689245 + self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) From a619cf68d6523366c9bfd44f9ad02a091be852f1 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Thu, 18 Jul 2024 14:05:38 -0600 Subject: [PATCH 09/46] Removing hard dependencies --- .../piecewise/tests/test_nonlinear_to_pwl.py | 47 ++++++++++--------- .../piecewise/transform/nonlinear_to_pwl.py | 28 ++++------- 2 files changed, 34 insertions(+), 41 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index d0f5acc2ff1..6b1e3f9f89f 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -116,27 +116,28 @@ def test_log_constraint_random_grid(self): self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) - def test_log_constraint_lmt_uniform_sample(self): - m = self.make_model() + # def test_log_constraint_lmt_uniform_sample(self): + # m = self.make_model() - n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') - n_to_pwl.apply_to( - m, - num_points=3, - domain_partitioning_method=DomainPartitioningMethod.LINEAR_MODEL_TREE_UNIFORM, - ) - - # cons is transformed - self.assertFalse(m.cons.active) - - pwlf = list(m.component_data_objects(PiecewiseLinearFunction, - descend_into=True)) - self.assertEqual(len(pwlf), 1) - pwlf = pwlf[0] - - set_trace() - - x1 = 4.370861069626263 - x2 = 7.587945476302646 - x3 = 9.556428757689245 - self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) + # n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + # n_to_pwl.apply_to( + # m, + # num_points=3, + # domain_partitioning_method=DomainPartitioningMethod.LINEAR_MODEL_TREE_UNIFORM, + # ) + + # # cons is transformed + # self.assertFalse(m.cons.active) + + # pwlf = list(m.component_data_objects(PiecewiseLinearFunction, + # descend_into=True)) + # self.assertEqual(len(pwlf), 1) + # pwlf = 
pwlf[0] + + # set_trace() + + # # TODO + # x1 = 4.370861069626263 + # x2 = 7.587945476302646 + # x3 = 9.556428757689245 + # self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index d819e893d5f..408efdb4b32 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -12,10 +12,7 @@ import enum import itertools -from lineartree import LinearTreeRegressor -import lineartree import logging -import numpy as np from pyomo.environ import ( TransformationFactory, @@ -41,6 +38,8 @@ from pyomo.common.autoslots import AutoSlots from pyomo.common.collections import ComponentMap, ComponentSet from pyomo.common.config import ConfigDict, ConfigValue, PositiveInt, InEnum +from pyomo.common.dependencies import attempt_import +from pyomo.common.dependencies import numpy as np from pyomo.common.modeling import unique_component_name from pyomo.core.expr.numeric_expr import SumExpression from pyomo.core.expr import identify_variables @@ -54,13 +53,12 @@ from pyomo.network import Port from pyomo.repn.quadratic import QuadraticRepnVisitor -from sklearn.linear_model import LinearRegression -import random -from sklearn.metrics import mean_squared_error -from sklearn.model_selection import train_test_split +lineartree, lineartree_available = attempt_import('lineartree') +sklearn_lm, sklearn_available = attempt_import('sklearn.linear_model') logger = logging.getLogger(__name__) + class DomainPartitioningMethod(enum.IntEnum): RANDOM_GRID = 1 UNIFORM_GRID = 2 @@ -117,22 +115,15 @@ def get_points_lmt(points, bounds, func, seed): y_list = [] for point in points: y_list.append(func(*point)) - regr = LinearTreeRegressor( - LinearRegression(), + # ESJ: Do we really need the sklearn dependency to get LinearRegression?? + regr = lineartree.LinearTreeRegressor( + sklearn_lm.LinearRegression(), criterion='mse', max_bins=120, min_samples_leaf=4, max_depth=5, ) - - # Using train_test_split is silly. TODO: remove this and just sample my own - # extra points if I want to estimate the error. - X_train, X_test, y_train, y_test = train_test_split( - x_list, y_list, test_size=0.2, random_state=seed - ) - regr.fit(X_train, y_train) - y_pred = regr.predict(X_test) - error = mean_squared_error(y_test, y_pred) + regr.fit(x_list, y_list) leaves, splits, ths = parse_linear_tree_regressor(regr, bounds) @@ -572,6 +563,7 @@ def _get_bounds_list(self, var_list, parent_component): bounds = [] for v in var_list: if None in v.bounds: + # ESJ TODO: Con is undefined--this is a bug! raise ValueError( "Cannot automatically approximate constraints with unbounded " "variables. 
Var '%s' appearining in component '%s' is missing " From 5ccd890d7b449bbe79fe7bf7704a2f90357ea4c5 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Tue, 23 Jul 2024 15:53:01 -0600 Subject: [PATCH 10/46] Generalizing log tests, adding chaos test that doesn't do anything yet --- pyomo/contrib/piecewise/__init__.py | 4 +- .../piecewise/tests/test_nonlinear_to_pwl.py | 108 +++++++++++++----- .../piecewise/transform/nonlinear_to_pwl.py | 95 ++++++++------- 3 files changed, 137 insertions(+), 70 deletions(-) diff --git a/pyomo/contrib/piecewise/__init__.py b/pyomo/contrib/piecewise/__init__.py index 67596a709e3..5fc75dcd091 100644 --- a/pyomo/contrib/piecewise/__init__.py +++ b/pyomo/contrib/piecewise/__init__.py @@ -33,9 +33,7 @@ from pyomo.contrib.piecewise.transform.convex_combination import ( ConvexCombinationTransformation, ) -from pyomo.contrib.piecewise.transform.nonlinear_to_pwl import ( - NonlinearToPWL, -) +from pyomo.contrib.piecewise.transform.nonlinear_to_pwl import NonlinearToPWL from pyomo.contrib.piecewise.transform.nested_inner_repn import ( NestedInnerRepresentationGDPTransformation, ) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 6b1e3f9f89f..3a1b5270aea 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -13,22 +13,15 @@ from pyomo.contrib.piecewise import PiecewiseLinearFunction from pyomo.contrib.piecewise.transform.nonlinear_to_pwl import ( NonlinearToPWL, - DomainPartitioningMethod -) -from pyomo.core.expr.compare import ( - assertExpressionsStructurallyEqual, -) -from pyomo.environ import ( - ConcreteModel, - Var, - Constraint, - TransformationFactory, - log, + DomainPartitioningMethod, ) +from pyomo.core.expr.compare import assertExpressionsStructurallyEqual +from pyomo.environ import ConcreteModel, Var, Constraint, TransformationFactory, log ## debug from pytest import set_trace + class TestNonlinearToPWL_1D(unittest.TestCase): def make_model(self): m = ConcreteModel() @@ -48,16 +41,16 @@ def check_pw_linear_log_x(self, m, pwlf, x1, x2, x3): assertExpressionsStructurallyEqual( self, pwlf._linear_functions[0](m.x), - ((log(x2) - log(x1))/(x2 - x1))*m.x + - (log(x2) - ((log(x2) - log(x1))/(x2 - x1))*x2), - places=7 + ((log(x2) - log(x1)) / (x2 - x1)) * m.x + + (log(x2) - ((log(x2) - log(x1)) / (x2 - x1)) * x2), + places=7, ) assertExpressionsStructurallyEqual( self, pwlf._linear_functions[1](m.x), - ((log(x3) - log(x2))/(x3 - x2))*m.x + - (log(x3) - ((log(x3) - log(x2))/(x3 - x2))*x3), - places=7 + ((log(x3) - log(x2)) / (x3 - x2)) * m.x + + (log(x3) - ((log(x3) - log(x2)) / (x3 - x2)) * x3), + places=7, ) self.assertEqual(len(pwlf._expressions), 1) @@ -67,7 +60,7 @@ def check_pw_linear_log_x(self, m, pwlf, x1, x2, x3): self.assertIsNone(new_cons.ub) self.assertEqual(new_cons.lb, 0.35) self.assertIs(n_to_pwl.get_src_component(new_cons), m.cons) - + def test_log_constraint_uniform_grid(self): m = self.make_model() @@ -81,18 +74,19 @@ def test_log_constraint_uniform_grid(self): # cons is transformed self.assertFalse(m.cons.active) - pwlf = list(m.component_data_objects(PiecewiseLinearFunction, - descend_into=True)) + pwlf = list( + m.component_data_objects(PiecewiseLinearFunction, descend_into=True) + ) self.assertEqual(len(pwlf), 1) pwlf = pwlf[0] - + points = [(1.0009,), (5.5,), (9.9991,)] (x1, x2, x3) = 1.0009, 5.5, 9.9991 self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) def 
test_log_constraint_random_grid(self): m = self.make_model() - + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') # [ESJ 3/30/24]: The seed is actually set in the function for getting # the points right now, so this will be deterministic. @@ -105,8 +99,9 @@ def test_log_constraint_random_grid(self): # cons is transformed self.assertFalse(m.cons.active) - pwlf = list(m.component_data_objects(PiecewiseLinearFunction, - descend_into=True)) + pwlf = list( + m.component_data_objects(PiecewiseLinearFunction, descend_into=True) + ) self.assertEqual(len(pwlf), 1) pwlf = pwlf[0] @@ -114,11 +109,10 @@ def test_log_constraint_random_grid(self): x2 = 7.587945476302646 x3 = 9.556428757689245 self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) - # def test_log_constraint_lmt_uniform_sample(self): # m = self.make_model() - + # n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') # n_to_pwl.apply_to( # m, @@ -141,3 +135,65 @@ def test_log_constraint_random_grid(self): # x2 = 7.587945476302646 # x3 = 9.556428757689245 # self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) + + +class TestNonlinearToPWLIntegration(unittest.TestCase): + def test_Ali_example(self): + m = ConcreteModel() + m.flow_super_heated_vapor = Var() + m.flow_super_heated_vapor.fix(0.4586949988166174) + m.super_heated_vapor_temperature = Var(bounds=(31, 200), initialize=45) + m.evaporator_condensate_temperature = Var( + bounds=(29, 120.8291392028045), initialize=30 + ) + m.LMTD = Var(bounds=(0, 130.61608989795093), initialize=1) + m.evaporator_condensate_enthalpy = Var( + bounds=(-15836.847, -15510.210751855624), initialize=100 + ) + m.evaporator_condensate_vapor_enthalpy = Var( + bounds=(-13416.64, -13247.674383866839), initialize=100 + ) + m.heat_transfer_coef = Var( + bounds=(1.9936854577372858, 5.995319594088982), initialize=0.1 + ) + m.evaporator_brine_temperature = Var( + bounds=(27, 118.82913920280366), initialize=35 + ) + m.each_evaporator_area = Var() + + m.c = Constraint( + expr=m.each_evaporator_area + == ( + 1.873 + * m.flow_super_heated_vapor + * ( + m.super_heated_vapor_temperature + - m.evaporator_condensate_temperature + ) + / (100 * m.LMTD) + + m.flow_super_heated_vapor + * ( + m.evaporator_condensate_vapor_enthalpy + - m.evaporator_condensate_enthalpy + ) + / ( + m.heat_transfer_coef + * ( + m.evaporator_condensate_temperature + - m.evaporator_brine_temperature + ) + ) + ) + ) + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + n_to_pwl.apply_to( + m, + num_points=3, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, + ) + + m.pprint() + + from pyomo.environ import SolverFactory + SolverFactory('gurobi').solve(m, tee=True) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index 408efdb4b32..798eaafdddf 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -33,7 +33,7 @@ Block, ExternalFunction, SortComponents, - LogicalConstraint + LogicalConstraint, ) from pyomo.common.autoslots import AutoSlots from pyomo.common.collections import ComponentMap, ComponentSet @@ -45,10 +45,7 @@ from pyomo.core.expr import identify_variables from pyomo.core.expr import SumExpression from pyomo.core.util import target_list -from pyomo.contrib.piecewise import ( - PiecewiseLinearExpression, - PiecewiseLinearFunction -) +from pyomo.contrib.piecewise import PiecewiseLinearExpression, PiecewiseLinearFunction from pyomo.gdp 
import Disjunct, Disjunction from pyomo.network import Port from pyomo.repn.quadratic import QuadraticRepnVisitor @@ -65,23 +62,28 @@ class DomainPartitioningMethod(enum.IntEnum): LINEAR_MODEL_TREE_UNIFORM = 3 LINEAR_MODEL_TREE_RANDOM = 4 + # This should be safe to use many times; declare it globally _quadratic_repn_visitor = QuadraticRepnVisitor( subexpression_cache={}, var_map={}, var_order={}, sorter=None ) + class _NonlinearToPWLTransformationData(AutoSlots.Mixin): __slots__ = ('transformed_component', 'src_component') def __init__(self): self.transformed_component = ComponentMap() self.src_component = ComponentMap() + + Block.register_private_data_initializer(_NonlinearToPWLTransformationData) + def get_random_point_grid(bounds, n, func, seed=42): # Generate randomized grid of points linspaces = [] - for (lb, ub) in bounds: + for lb, ub in bounds: np.random.seed(seed) linspaces.append(np.random.uniform(lb, ub, n)) return list(itertools.product(*linspaces)) @@ -90,7 +92,7 @@ def get_random_point_grid(bounds, n, func, seed=42): def get_uniform_point_grid(bounds, n, func): # Generate non-randomized grid of points linspaces = [] - for (lb, ub) in bounds: + for lb, ub in bounds: # Issues happen when exactly using the boundary nudge = (ub - lb) * 1e-4 linspaces.append( @@ -137,6 +139,7 @@ def get_points_lmt(points, bounds, func, seed): # here? return bound_point_list + _partition_method_dispatcher = { DomainPartitioningMethod.RANDOM_GRID: get_random_point_grid, DomainPartitioningMethod.UNIFORM_GRID: get_uniform_point_grid, @@ -144,6 +147,7 @@ def get_points_lmt(points, bounds, func, seed): DomainPartitioningMethod.LINEAR_MODEL_TREE_RANDOM: get_points_lmt_random_sample, } + def get_pwl_function_approximation(func, method, n, bounds): """ Get a piecewise-linear approximation of a function, given: @@ -163,8 +167,10 @@ def get_pwl_function_approximation(func, method, n, bounds): # After getting the points, construct PWLF using the # function-and-list-of-points constructor - logger.debug(f"Constructing PWLF with {len(points)} points, each of which " - f"are {dim}-dimensional") + logger.debug( + f"Constructing PWLF with {len(points)} points, each of which " + f"are {dim}-dimensional" + ) return PiecewiseLinearFunction(points=points, function=func) @@ -183,7 +189,7 @@ def generate_bound_points(leaves, bounds): for var_bound in leaf['bounds'].values(): lower_corner_list.append(var_bound[0]) upper_corner_list.append(var_bound[1]) - + # Duct tape to fix issues from unknown bugs for pt in [lower_corner_list, upper_corner_list]: for i in range(len(pt)): @@ -228,12 +234,12 @@ def parse_linear_tree_regressor(linear_tree_regressor, bounds): if left_child_node in leaves: # if left child is a leaf node node['left_leaves'].append(left_child_node) else: # traverse its left node by calling function to find all the - # leaves from its left node + # leaves from its left node node['left_leaves'] = find_leaves(splits, leaves, splits[left_child_node]) if right_child_node in leaves: # if right child is a leaf node node['right_leaves'].append(right_child_node) else: # traverse its right node by calling function to find all the - # leaves from its right node + # leaves from its right node node['right_leaves'] = find_leaves(splits, leaves, splits[right_child_node]) # For each feature in each leaf, initialize lower and upper bounds to None @@ -319,6 +325,7 @@ class NonlinearToPWL(Transformation): """ Convert nonlinear constraints and objectives to piecewise-linear approximations. 
""" + CONFIG = ConfigDict('contrib.piecewise.nonlinear_to_pwl') CONFIG.declare( 'targets', @@ -422,6 +429,7 @@ class NonlinearToPWL(Transformation): points in order to partition the function domain.""", ), ) + def __init__(self): super(Transformation).__init__() self._handlers = { @@ -454,7 +462,7 @@ def _apply_to(self, instance, **kwds): self._transformation_blocks.clear() self._transformation_block_set.clear() - def _apply_to_impl( self, model, **kwds): + def _apply_to_impl(self, model, **kwds): config = self.CONFIG(kwds.pop('options', {})) config.set_value(kwds) @@ -480,23 +488,19 @@ def _get_transformation_block(self, parent): if parent in self._transformation_blocks: return self._transformation_blocks[parent] - nm = unique_component_name( - parent, '_pyomo_contrib_nonlinear_to_pwl' - ) + nm = unique_component_name(parent, '_pyomo_contrib_nonlinear_to_pwl') self._transformation_blocks[parent] = transBlock = Block() parent.add_component(nm, transBlock) self._transformation_block_set.add(transBlock) transBlock._pwl_cons = Constraint(Any) return transBlock - + def _transform_block_components(self, block, config): blocks = block.values() if block.is_indexed() else (block,) for b in blocks: for obj in b.component_objects( - active=True, - descend_into=False, - sort=SortComponents.deterministic + active=True, descend_into=False, sort=SortComponents.deterministic ): if obj in self._transformation_block_set: # This is a Block we created--we know we don't need to look @@ -519,8 +523,8 @@ def _transform_constraint(self, cons, config): constraints = cons.values() if cons.is_indexed() else (cons,) for c in constraints: pw_approx = self._approximate_expression( - c.body, c, trans_block, config, - config.approximate_quadratic_constraints) + c.body, c, trans_block, config, config.approximate_quadratic_constraints + ) if pw_approx is None: # Didn't need approximated, nothing to do @@ -531,7 +535,7 @@ def _transform_constraint(self, cons, config): new_cons = trans_block._pwl_cons[c.name, idx] trans_data_dict.src_component[new_cons] = c src_data_dict.transformed_component[c] = new_cons - + # deactivate original c.deactivate() @@ -542,8 +546,12 @@ def _transform_objective(self, objective, config): src_data_dict = objective.parent_block().private_data() for obj in objectives: pw_approx = self._approximate_expression( - obj.expr, obj, trans_block, config, - config.approximate_quadratic_objectives) + obj.expr, + obj, + trans_block, + config, + config.approximate_quadratic_objectives, + ) if pw_approx is None: # Didn't need approximated, nothing to do @@ -551,12 +559,11 @@ def _transform_objective(self, objective, config): new_obj = Objective(expr=pw_approx, sense=obj.sense) trans_block.add_component( - unique_component_name(trans_block, obj.name), - new_obj + unique_component_name(trans_block, obj.name), new_obj ) trans_data_dict.src_component[new_obj] = obj src_data_dict.transformed_component[obj] = new_obj - + obj.deactivate() def _get_bounds_list(self, var_list, parent_component): @@ -567,7 +574,8 @@ def _get_bounds_list(self, var_list, parent_component): raise ValueError( "Cannot automatically approximate constraints with unbounded " "variables. 
Var '%s' appearining in component '%s' is missing " - "at least one bound" % (con.name, v.name)) + "at least one bound" % (con.name, v.name) + ) else: bounds.append(v.bounds) return bounds @@ -584,15 +592,17 @@ def _needs_approximating(self, expr, approximate_quadratic): return False return True - def _approximate_expression(self, obj, parent_component, trans_block, - config, approximate_quadratic): + def _approximate_expression( + self, obj, parent_component, trans_block, config, approximate_quadratic + ): if not self._needs_approximating(obj, approximate_quadratic): return - + # Additively decompose obj and work on the pieces pwl_func = 0 - for k, expr in enumerate(_additively_decompose_expr(obj) if - config.additively_decompose else (obj,)): + for k, expr in enumerate( + _additively_decompose_expr(obj) if config.additively_decompose else (obj,) + ): # First check is this is a good idea expr_vars = list(identify_variables(expr, include_fixed=False)) orig_values = ComponentMap((v, v.value) for v in expr_vars) @@ -603,7 +613,8 @@ def _approximate_expression(self, obj, parent_component, trans_block, "Not approximating expression for component '%s' as " "it exceeds the maximum dimension of %s. Try increasing " "'max_dimension' or additively separating the expression." - % (parent_component.name, config.max_dimension)) + % (parent_component.name, config.max_dimension) + ) pwl_func += expr continue elif not self._needs_approximating(expr, approximate_quadratic): @@ -616,13 +627,13 @@ def eval_expr(*args): return value(expr) pwlf = get_pwl_function_approximation( - eval_expr, config.domain_partitioning_method, + eval_expr, + config.domain_partitioning_method, config.num_points, - self._get_bounds_list(expr_vars, parent_component) + self._get_bounds_list(expr_vars, parent_component), ) name = unique_component_name( - trans_block, - parent_component.getname(fully_qualified=False) + trans_block, parent_component.getname(fully_qualified=False) ) trans_block.add_component(f"_pwle_{name}_{k}", pwlf) pwl_func += pwlf(*expr_vars) @@ -640,7 +651,8 @@ def get_src_component(self, cons): else: raise ValueError( "It does not appear that '%s' is a transformed Constraint " - "created by the 'nonlinear_to_pwl' transformation." % cons.name) + "created by the 'nonlinear_to_pwl' transformation." % cons.name + ) def get_transformed_component(self, cons): data = cons.parent_block().private_data().transformed_component @@ -649,4 +661,5 @@ def get_transformed_component(self, cons): else: raise ValueError( "It does not appear that '%s' is a Constraint that was " - "transformed by the 'nonlinear_to_pwl' transformation." % cons.name) + "transformed by the 'nonlinear_to_pwl' transformation." 
% cons.name + ) From 2c6b76072150f344fc27089fdfd15c367e5dba2c Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Tue, 30 Jul 2024 15:04:24 -0600 Subject: [PATCH 11/46] Adding in an API to get the transformed linear and general nonlinear constraints --- .../piecewise/tests/test_nonlinear_to_pwl.py | 6 +++ .../piecewise/transform/nonlinear_to_pwl.py | 50 ++++++++++++------- 2 files changed, 39 insertions(+), 17 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 3a1b5270aea..a06c74b2a51 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -61,6 +61,12 @@ def check_pw_linear_log_x(self, m, pwlf, x1, x2, x3): self.assertEqual(new_cons.lb, 0.35) self.assertIs(n_to_pwl.get_src_component(new_cons), m.cons) + quadratic = n_to_pwl.get_transformed_quadratic_constraints(m) + self.assertEqual(len(quadratic), 0) + nonlinear = n_to_pwl.get_transformed_nonlinear_constraints(m) + self.assertEqual(len(nonlinear), 1) + self.assertIn(m.cons, nonlinear) + def test_log_constraint_uniform_grid(self): m = self.make_model() diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index 798eaafdddf..bc6ea3a1f7d 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -9,6 +9,7 @@ # This software is distributed under the 3-clause BSD License. # ___________________________________________________________________________ +from collections import defaultdict import enum import itertools @@ -49,6 +50,7 @@ from pyomo.gdp import Disjunct, Disjunction from pyomo.network import Port from pyomo.repn.quadratic import QuadraticRepnVisitor +from pyomo.repn.util import ExprType lineartree, lineartree_available = attempt_import('lineartree') sklearn_lm, sklearn_available = attempt_import('sklearn.linear_model') @@ -70,11 +72,12 @@ class DomainPartitioningMethod(enum.IntEnum): class _NonlinearToPWLTransformationData(AutoSlots.Mixin): - __slots__ = ('transformed_component', 'src_component') + __slots__ = ('transformed_component', 'src_component', 'transformed_constraints') def __init__(self): self.transformed_component = ComponentMap() self.src_component = ComponentMap() + self.transformed_constraints = defaultdict(ComponentSet) Block.register_private_data_initializer(_NonlinearToPWLTransformationData) @@ -585,26 +588,33 @@ def _needs_approximating(self, expr, approximate_quadratic): if repn.nonlinear is None: if repn.quadratic is None: # Linear constraint. Always skip. 
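                # Note on the new return contract: instead of a bare bool, the
                # method now reports an (ExprType, needs_approximation) pair,
                # roughly (a sketch, not exhaustive):
                #
                #   linear body                        -> (ExprType.LINEAR, False)
                #   quadratic body, approximation off  -> (ExprType.QUADRATIC, False)
                #   quadratic body, approximation on   -> (ExprType.QUADRATIC, True)
                #   general nonlinear body             -> (ExprType.GENERAL, True)
                #
                # so callers can skip what does not need approximating while
                # still recording the component under the right ExprType bucket.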
- return False + return ExprType.LINEAR, False else: if not approximate_quadratic: # Didn't need approximated, nothing to do - return False - return True + return ExprType.QUADRATIC, False + return ExprType.QUADRATIC, True + return ExprType.GENERAL, True def _approximate_expression( - self, obj, parent_component, trans_block, config, approximate_quadratic + self, expr, obj, trans_block, config, approximate_quadratic ): - if not self._needs_approximating(obj, approximate_quadratic): + expr_type, needs_approximating = self._needs_approximating( + expr, + approximate_quadratic + ) + if not needs_approximating: return - # Additively decompose obj and work on the pieces + obj.model().private_data().transformed_constraints[expr_type].add(obj) + + # Additively decompose expr and work on the pieces pwl_func = 0 - for k, expr in enumerate( - _additively_decompose_expr(obj) if config.additively_decompose else (obj,) + for k, subexpr in enumerate( + _additively_decompose_expr(expr) if config.additively_decompose else (expr,) ): # First check is this is a good idea - expr_vars = list(identify_variables(expr, include_fixed=False)) + expr_vars = list(identify_variables(subexpr, include_fixed=False)) orig_values = ComponentMap((v, v.value) for v in expr_vars) dim = len(expr_vars) @@ -613,27 +623,27 @@ def _approximate_expression( "Not approximating expression for component '%s' as " "it exceeds the maximum dimension of %s. Try increasing " "'max_dimension' or additively separating the expression." - % (parent_component.name, config.max_dimension) + % (obj.name, config.max_dimension) ) - pwl_func += expr + pwl_func += subexpr continue - elif not self._needs_approximating(expr, approximate_quadratic): - pwl_func += expr + elif not self._needs_approximating(expr, approximate_quadratic)[1]: + pwl_func += subexpr continue def eval_expr(*args): for i, v in enumerate(expr_vars): v.value = args[i] - return value(expr) + return value(subexpr) pwlf = get_pwl_function_approximation( eval_expr, config.domain_partitioning_method, config.num_points, - self._get_bounds_list(expr_vars, parent_component), + self._get_bounds_list(expr_vars, obj), ) name = unique_component_name( - trans_block, parent_component.getname(fully_qualified=False) + trans_block, obj.getname(fully_qualified=False) ) trans_block.add_component(f"_pwle_{name}_{k}", pwlf) pwl_func += pwlf(*expr_vars) @@ -663,3 +673,9 @@ def get_transformed_component(self, cons): "It does not appear that '%s' is a Constraint that was " "transformed by the 'nonlinear_to_pwl' transformation." 
% cons.name ) + + def get_transformed_nonlinear_constraints(self, model): + return model.private_data().transformed_constraints[ExprType.GENERAL] + + def get_transformed_quadratic_constraints(self, model): + return model.private_data().transformed_constraints[ExprType.QUADRATIC] From 5b85af7000b5a12b4c6153b6e16fd32d54df84d5 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Wed, 31 Jul 2024 15:21:05 -0600 Subject: [PATCH 12/46] Adding APIs for getting transformed nonlinear and quadratic Constraints and Objectives, fixing a few bugs --- .../piecewise/transform/nonlinear_to_pwl.py | 57 ++++++++++++++----- 1 file changed, 44 insertions(+), 13 deletions(-) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index bc6ea3a1f7d..0adca4c8d80 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -72,12 +72,14 @@ class DomainPartitioningMethod(enum.IntEnum): class _NonlinearToPWLTransformationData(AutoSlots.Mixin): - __slots__ = ('transformed_component', 'src_component', 'transformed_constraints') + __slots__ = ('transformed_component', 'src_component', 'transformed_constraints', + 'transformed_objectives') def __init__(self): self.transformed_component = ComponentMap() self.src_component = ComponentMap() self.transformed_constraints = defaultdict(ComponentSet) + self.transformed_objectives = defaultdict(ComponentSet) Block.register_private_data_initializer(_NonlinearToPWLTransformationData) @@ -432,6 +434,10 @@ class NonlinearToPWL(Transformation): points in order to partition the function domain.""", ), ) + # TODO: Minimum dimension to additively decompose--(Only decompose if the + # dimension exceeds this.) + + # TODO: incorporate Bashar's linear tree changes. def __init__(self): super(Transformation).__init__() @@ -484,7 +490,7 @@ def _apply_to_impl(self, model, **kwds): raise ValueError( "Target '%s' is not a Block, Constraint, or Objective. It " "is of type '%s' and cannot be transformed." 
- % (target.name, type(t)) + % (target.name, type(target)) ) def _get_transformation_block(self, parent): @@ -525,13 +531,14 @@ def _transform_constraint(self, cons, config): src_data_dict = cons.parent_block().private_data() constraints = cons.values() if cons.is_indexed() else (cons,) for c in constraints: - pw_approx = self._approximate_expression( + pw_approx, expr_type = self._approximate_expression( c.body, c, trans_block, config, config.approximate_quadratic_constraints ) if pw_approx is None: # Didn't need approximated, nothing to do continue + c.model().private_data().transformed_constraints[expr_type].add(c) idx = len(trans_block._pwl_cons) trans_block._pwl_cons[c.name, idx] = (c.lower, pw_approx, c.upper) @@ -548,7 +555,7 @@ def _transform_objective(self, objective, config): objectives = objective.values() if objective.is_indexed() else (objective,) src_data_dict = objective.parent_block().private_data() for obj in objectives: - pw_approx = self._approximate_expression( + pw_approx, expr_type = self._approximate_expression( obj.expr, obj, trans_block, @@ -559,6 +566,7 @@ def _transform_objective(self, objective, config): if pw_approx is None: # Didn't need approximated, nothing to do continue + obj.model().private_data().transformed_objectives[expr_type].add(obj) new_obj = Objective(expr=pw_approx, sense=obj.sense) trans_block.add_component( @@ -569,15 +577,14 @@ def _transform_objective(self, objective, config): obj.deactivate() - def _get_bounds_list(self, var_list, parent_component): + def _get_bounds_list(self, var_list, obj): bounds = [] for v in var_list: if None in v.bounds: - # ESJ TODO: Con is undefined--this is a bug! raise ValueError( "Cannot automatically approximate constraints with unbounded " - "variables. Var '%s' appearining in component '%s' is missing " - "at least one bound" % (con.name, v.name) + "variables. 
Var '%s' appearing in component '%s' is missing " + "at least one bound" % (v.name, obj.name) ) else: bounds.append(v.bounds) @@ -604,16 +611,14 @@ def _approximate_expression( approximate_quadratic ) if not needs_approximating: - return - - obj.model().private_data().transformed_constraints[expr_type].add(obj) + return None, expr_type # Additively decompose expr and work on the pieces pwl_func = 0 for k, subexpr in enumerate( _additively_decompose_expr(expr) if config.additively_decompose else (expr,) ): - # First check is this is a good idea + # First check if this is a good idea expr_vars = list(identify_variables(subexpr, include_fixed=False)) orig_values = ComponentMap((v, v.value) for v in expr_vars) @@ -652,7 +657,7 @@ def eval_expr(*args): for v, val in orig_values.items(): v.value = val - return pwl_func + return pwl_func, expr_type def get_src_component(self, cons): data = cons.parent_block().private_data().src_component @@ -675,7 +680,33 @@ def get_transformed_component(self, cons): ) def get_transformed_nonlinear_constraints(self, model): + """ + Given a model that has been transformed with contrib.piecewise.nonlinear_to_pwl, + return the list of general (not quadratic) nonlinear Constraints that were + approximated with PiecewiseLinearFunctions + """ return model.private_data().transformed_constraints[ExprType.GENERAL] def get_transformed_quadratic_constraints(self, model): + """ + Given a model that has been transformed with contrib.piecewise.nonlinear_to_pwl, + return the list of quadratic Constraints that were approximated with + PiecewiseLinearFunctions + """ return model.private_data().transformed_constraints[ExprType.QUADRATIC] + + def get_transformed_nonlinear_objectives(self, model): + """ + Given a model that has been transformed with contrib.piecewise.nonlinear_to_pwl, + return the list of general (not quadratic) nonlinear Constraints that were + approximated with PiecewiseLinearFunctions + """ + return model.private_data().transformed_objectives[ExprType.GENERAL] + + def get_transformed_quadratic_objectives(self, model): + """ + Given a model that has been transformed with contrib.piecewise.nonlinear_to_pwl, + return the list of quadratic Constraints that were approximated with + PiecewiseLinearFunctions + """ + return model.private_data().transformed_objectives[ExprType.QUADRATIC] From 1cc71bda35d1505d16a5b7c119326ecfeb956758 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Wed, 31 Jul 2024 15:21:20 -0600 Subject: [PATCH 13/46] Adding a lot of tests, including 3D --- .../piecewise/tests/test_nonlinear_to_pwl.py | 324 +++++++++++++++--- 1 file changed, 274 insertions(+), 50 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index a06c74b2a51..ef1a5a4e096 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -16,7 +16,9 @@ DomainPartitioningMethod, ) from pyomo.core.expr.compare import assertExpressionsStructurallyEqual -from pyomo.environ import ConcreteModel, Var, Constraint, TransformationFactory, log +from pyomo.environ import ( + ConcreteModel, Var, Constraint, TransformationFactory, log, Objective +) ## debug from pytest import set_trace @@ -116,6 +118,101 @@ def test_log_constraint_random_grid(self): x3 = 9.556428757689245 self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) + def test_do_not_transform_quadratic_constraint(self): + m = self.make_model() + m.quad = Constraint(expr=m.x ** 2 <= 9) + m.lin 
= Constraint(expr=m.x >= 2) + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + n_to_pwl.apply_to( + m, + num_points=3, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, + approximate_quadratic_constraints=False + ) + + # cons is transformed + self.assertFalse(m.cons.active) + + pwlf = list( + m.component_data_objects(PiecewiseLinearFunction, descend_into=True) + ) + self.assertEqual(len(pwlf), 1) + pwlf = pwlf[0] + + points = [(1.0009,), (5.5,), (9.9991,)] + (x1, x2, x3) = 1.0009, 5.5, 9.9991 + self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) + + # quad is not + self.assertTrue(m.quad.active) + # neither is the linear one + self.assertTrue(m.lin.active) + + def test_constraint_target(self): + m = self.make_model() + m.quad = Constraint(expr=m.x ** 2 <= 9) + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + n_to_pwl.apply_to( + m, + num_points=3, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, + targets=[m.cons] + ) + + # cons is transformed + self.assertFalse(m.cons.active) + + pwlf = list( + m.component_data_objects(PiecewiseLinearFunction, descend_into=True) + ) + self.assertEqual(len(pwlf), 1) + pwlf = pwlf[0] + + points = [(1.0009,), (5.5,), (9.9991,)] + (x1, x2, x3) = 1.0009, 5.5, 9.9991 + self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) + + # quad is not + self.assertTrue(m.quad.active) + + def test_crazy_target_error(self): + m = self.make_model() + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + with self.assertRaisesRegex( + ValueError, + "Target 'x' is not a Block, Constraint, or Objective. It " + "is of type '' and cannot " + "be transformed." + ): + n_to_pwl.apply_to( + m, + num_points=3, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, + targets=[m.x] + ) + + def test_cannot_approximate_constraints_with_unbounded_vars(self): + m = ConcreteModel() + m.x = Var() + m.quad = Constraint(expr=m.x ** 2 <= 9) + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + with self.assertRaisesRegex( + ValueError, + "Cannot automatically approximate constraints with unbounded " + "variables. 
Var 'x' appearing in component 'quad' is missing " + "at least one bound" + ): + n_to_pwl.apply_to( + m, + num_points=3, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, + ) + + # def test_log_constraint_lmt_uniform_sample(self): # m = self.make_model() @@ -143,63 +240,190 @@ def test_log_constraint_random_grid(self): # self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) -class TestNonlinearToPWLIntegration(unittest.TestCase): - def test_Ali_example(self): +class TestNonlinearToPWL_2D(unittest.TestCase): + def make_paraboloid_model(self): m = ConcreteModel() - m.flow_super_heated_vapor = Var() - m.flow_super_heated_vapor.fix(0.4586949988166174) - m.super_heated_vapor_temperature = Var(bounds=(31, 200), initialize=45) - m.evaporator_condensate_temperature = Var( - bounds=(29, 120.8291392028045), initialize=30 - ) - m.LMTD = Var(bounds=(0, 130.61608989795093), initialize=1) - m.evaporator_condensate_enthalpy = Var( - bounds=(-15836.847, -15510.210751855624), initialize=100 - ) - m.evaporator_condensate_vapor_enthalpy = Var( - bounds=(-13416.64, -13247.674383866839), initialize=100 + m.x1 = Var(bounds=(0, 3)) + m.x2 = Var(bounds=(1, 7)) + m.obj = Objective(expr=m.x1 ** 2 + m.x2 ** 2) + + return m + + def check_pw_linear_paraboloid(self, m, pwlf, x1, x2, y1, y2): + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + points = [ + (x1, y1), + (x1, y2), + (x2, y1), + (x2, y2), + ] + self.assertEqual(pwlf._points, points) + self.assertEqual(pwlf._simplices, [(0, 1, 3), (0, 2, 3)]) + self.assertEqual(len(pwlf._linear_functions), 2) + + # just check that the linear functions make sense--they intersect the + # paraboloid at the vertices of the simplices. + self.assertAlmostEqual(pwlf._linear_functions[0](x1, y1), x1 **2 + y1 ** 2) + self.assertAlmostEqual(pwlf._linear_functions[0](x1, y2), x1 **2 + y2 ** 2) + self.assertAlmostEqual(pwlf._linear_functions[0](x2, y2), x2 **2 + y2 ** 2) + + self.assertAlmostEqual(pwlf._linear_functions[1](x1, y1), x1 ** 2 + y1 ** 2) + self.assertAlmostEqual(pwlf._linear_functions[1](x2, y1), x2 ** 2 + y1 ** 2) + self.assertAlmostEqual(pwlf._linear_functions[1](x2, y2), x2 ** 2 + y2 ** 2) + + self.assertEqual(len(pwlf._expressions), 1) + new_obj = n_to_pwl.get_transformed_component(m.obj) + self.assertTrue(new_obj.active) + self.assertIs(new_obj.expr, pwlf._expressions[id(new_obj.expr.expr)]) + self.assertIs(n_to_pwl.get_src_component(new_obj), m.obj) + + quadratic = n_to_pwl.get_transformed_quadratic_constraints(m) + self.assertEqual(len(quadratic), 0) + nonlinear = n_to_pwl.get_transformed_nonlinear_constraints(m) + self.assertEqual(len(nonlinear), 0) + quadratic = n_to_pwl.get_transformed_quadratic_objectives(m) + self.assertEqual(len(quadratic), 1) + self.assertIn(m.obj, quadratic) + nonlinear = n_to_pwl.get_transformed_nonlinear_objectives(m) + self.assertEqual(len(nonlinear), 0) + + def test_paraboloid_objective_uniform_grid(self): + m = self.make_paraboloid_model() + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + n_to_pwl.apply_to( + m, num_points=2, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID ) - m.heat_transfer_coef = Var( - bounds=(1.9936854577372858, 5.995319594088982), initialize=0.1 + + # check obj is transformed + self.assertFalse(m.obj.active) + + pwlf = list( + m.component_data_objects(PiecewiseLinearFunction, descend_into=True) ) - m.evaporator_brine_temperature = Var( - bounds=(27, 118.82913920280366), initialize=35 + self.assertEqual(len(pwlf), 1) + pwlf = pwlf[0] + 
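        # A small sketch (not part of the transformation's API) of where the
        # breakpoints below come from: UNIFORM_GRID nudges each bound inward by
        # (ub - lb) * 1e-4 before calling np.linspace, so with num_points=2 the
        # per-variable grid is just the two nudged endpoints.
        def _nudged(lb, ub, rel=1e-4):
            nudge = (ub - lb) * rel
            return (lb + nudge, ub - nudge)

        # _nudged(0, 3) gives approximately (0.0003, 2.9997) -> x1, x2 below
        # _nudged(1, 7) gives approximately (1.0006, 6.9994) -> y1, y2 below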
+ x1 = 0.00030000000000000003 + x2 = 2.9997 + y1 = 1.0006 + y2 = 6.9994 + + self.check_pw_linear_paraboloid(m, pwlf, x1, x2, y1, y2) + + def test_objective_target(self): + m = self.make_paraboloid_model() + + m.some_other_nonlinear_constraint = Constraint(expr=m.x1 ** 3 + m.x2 <= 6) + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + n_to_pwl.apply_to( + m, num_points=2, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, + targets=[m.obj] ) - m.each_evaporator_area = Var() - - m.c = Constraint( - expr=m.each_evaporator_area - == ( - 1.873 - * m.flow_super_heated_vapor - * ( - m.super_heated_vapor_temperature - - m.evaporator_condensate_temperature - ) - / (100 * m.LMTD) - + m.flow_super_heated_vapor - * ( - m.evaporator_condensate_vapor_enthalpy - - m.evaporator_condensate_enthalpy - ) - / ( - m.heat_transfer_coef - * ( - m.evaporator_condensate_temperature - - m.evaporator_brine_temperature - ) - ) - ) + + + # check obj is transformed + self.assertFalse(m.obj.active) + + pwlf = list( + m.component_data_objects(PiecewiseLinearFunction, descend_into=True) ) + self.assertEqual(len(pwlf), 1) + pwlf = pwlf[0] + + x1 = 0.00030000000000000003 + x2 = 2.9997 + y1 = 1.0006 + y2 = 6.9994 + + self.check_pw_linear_paraboloid(m, pwlf, x1, x2, y1, y2) + + # and check that the constraint isn't transformed + self.assertTrue(m.some_other_nonlinear_constraint.active) + + def test_do_not_transform_quadratic_objective(self): + m = self.make_paraboloid_model() n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') n_to_pwl.apply_to( - m, - num_points=3, + m, num_points=2, domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, + approximate_quadratic_objectives=False ) + + # check obj is *not* transformed + self.assertTrue(m.obj.active) - m.pprint() - - from pyomo.environ import SolverFactory - SolverFactory('gurobi').solve(m, tee=True) + quadratic = n_to_pwl.get_transformed_quadratic_constraints(m) + self.assertEqual(len(quadratic), 0) + nonlinear = n_to_pwl.get_transformed_nonlinear_constraints(m) + self.assertEqual(len(nonlinear), 0) + quadratic = n_to_pwl.get_transformed_quadratic_objectives(m) + self.assertEqual(len(quadratic), 0) + nonlinear = n_to_pwl.get_transformed_nonlinear_objectives(m) + self.assertEqual(len(nonlinear), 0) + + +# class TestNonlinearToPWLIntegration(unittest.TestCase): +# def test_Ali_example(self): +# m = ConcreteModel() +# m.flow_super_heated_vapor = Var() +# m.flow_super_heated_vapor.fix(0.4586949988166174) +# m.super_heated_vapor_temperature = Var(bounds=(31, 200), initialize=45) +# m.evaporator_condensate_temperature = Var( +# bounds=(29, 120.8291392028045), initialize=30 +# ) +# m.LMTD = Var(bounds=(0, 130.61608989795093), initialize=1) +# m.evaporator_condensate_enthalpy = Var( +# bounds=(-15836.847, -15510.210751855624), initialize=100 +# ) +# m.evaporator_condensate_vapor_enthalpy = Var( +# bounds=(-13416.64, -13247.674383866839), initialize=100 +# ) +# m.heat_transfer_coef = Var( +# bounds=(1.9936854577372858, 5.995319594088982), initialize=0.1 +# ) +# m.evaporator_brine_temperature = Var( +# bounds=(27, 118.82913920280366), initialize=35 +# ) +# m.each_evaporator_area = Var() + +# m.c = Constraint( +# expr=m.each_evaporator_area +# == ( +# 1.873 +# * m.flow_super_heated_vapor +# * ( +# m.super_heated_vapor_temperature +# - m.evaporator_condensate_temperature +# ) +# / (100 * m.LMTD) +# + m.flow_super_heated_vapor +# * ( +# m.evaporator_condensate_vapor_enthalpy +# - m.evaporator_condensate_enthalpy +# ) 
+# / ( +# m.heat_transfer_coef +# * ( +# m.evaporator_condensate_temperature +# - m.evaporator_brine_temperature +# ) +# ) +# ) +# ) + +# n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') +# n_to_pwl.apply_to( +# m, +# num_points=3, +# domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, +# ) + +# m.pprint() + +# from pyomo.environ import SolverFactory +# SolverFactory('gurobi').solve(m, tee=True) From 2a6ad4603ff6e3baf611dc96ddc09f42c43f13ae Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Wed, 31 Jul 2024 15:22:15 -0600 Subject: [PATCH 14/46] Incorporating black's opinions --- .../piecewise/tests/test_nonlinear_to_pwl.py | 81 ++++++++++--------- .../piecewise/transform/nonlinear_to_pwl.py | 15 ++-- 2 files changed, 50 insertions(+), 46 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index ef1a5a4e096..c9edabd00bd 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -17,7 +17,12 @@ ) from pyomo.core.expr.compare import assertExpressionsStructurallyEqual from pyomo.environ import ( - ConcreteModel, Var, Constraint, TransformationFactory, log, Objective + ConcreteModel, + Var, + Constraint, + TransformationFactory, + log, + Objective, ) ## debug @@ -120,7 +125,7 @@ def test_log_constraint_random_grid(self): def test_do_not_transform_quadratic_constraint(self): m = self.make_model() - m.quad = Constraint(expr=m.x ** 2 <= 9) + m.quad = Constraint(expr=m.x**2 <= 9) m.lin = Constraint(expr=m.x >= 2) n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') @@ -128,7 +133,7 @@ def test_do_not_transform_quadratic_constraint(self): m, num_points=3, domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, - approximate_quadratic_constraints=False + approximate_quadratic_constraints=False, ) # cons is transformed @@ -151,14 +156,14 @@ def test_do_not_transform_quadratic_constraint(self): def test_constraint_target(self): m = self.make_model() - m.quad = Constraint(expr=m.x ** 2 <= 9) + m.quad = Constraint(expr=m.x**2 <= 9) n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') n_to_pwl.apply_to( m, num_points=3, domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, - targets=[m.cons] + targets=[m.cons], ) # cons is transformed @@ -179,32 +184,32 @@ def test_constraint_target(self): def test_crazy_target_error(self): m = self.make_model() - + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') with self.assertRaisesRegex( - ValueError, - "Target 'x' is not a Block, Constraint, or Objective. It " - "is of type '' and cannot " - "be transformed." + ValueError, + "Target 'x' is not a Block, Constraint, or Objective. It " + "is of type '' and cannot " + "be transformed.", ): n_to_pwl.apply_to( m, num_points=3, domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, - targets=[m.x] + targets=[m.x], ) def test_cannot_approximate_constraints_with_unbounded_vars(self): m = ConcreteModel() m.x = Var() - m.quad = Constraint(expr=m.x ** 2 <= 9) + m.quad = Constraint(expr=m.x**2 <= 9) n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') with self.assertRaisesRegex( - ValueError, - "Cannot automatically approximate constraints with unbounded " - "variables. 
Var 'x' appearing in component 'quad' is missing " - "at least one bound" + ValueError, + "Cannot automatically approximate constraints with unbounded " + "variables. Var 'x' appearing in component 'quad' is missing " + "at least one bound", ): n_to_pwl.apply_to( m, @@ -212,7 +217,6 @@ def test_cannot_approximate_constraints_with_unbounded_vars(self): domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, ) - # def test_log_constraint_lmt_uniform_sample(self): # m = self.make_model() @@ -245,31 +249,26 @@ def make_paraboloid_model(self): m = ConcreteModel() m.x1 = Var(bounds=(0, 3)) m.x2 = Var(bounds=(1, 7)) - m.obj = Objective(expr=m.x1 ** 2 + m.x2 ** 2) + m.obj = Objective(expr=m.x1**2 + m.x2**2) return m def check_pw_linear_paraboloid(self, m, pwlf, x1, x2, y1, y2): n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') - points = [ - (x1, y1), - (x1, y2), - (x2, y1), - (x2, y2), - ] + points = [(x1, y1), (x1, y2), (x2, y1), (x2, y2)] self.assertEqual(pwlf._points, points) self.assertEqual(pwlf._simplices, [(0, 1, 3), (0, 2, 3)]) self.assertEqual(len(pwlf._linear_functions), 2) # just check that the linear functions make sense--they intersect the # paraboloid at the vertices of the simplices. - self.assertAlmostEqual(pwlf._linear_functions[0](x1, y1), x1 **2 + y1 ** 2) - self.assertAlmostEqual(pwlf._linear_functions[0](x1, y2), x1 **2 + y2 ** 2) - self.assertAlmostEqual(pwlf._linear_functions[0](x2, y2), x2 **2 + y2 ** 2) + self.assertAlmostEqual(pwlf._linear_functions[0](x1, y1), x1**2 + y1**2) + self.assertAlmostEqual(pwlf._linear_functions[0](x1, y2), x1**2 + y2**2) + self.assertAlmostEqual(pwlf._linear_functions[0](x2, y2), x2**2 + y2**2) - self.assertAlmostEqual(pwlf._linear_functions[1](x1, y1), x1 ** 2 + y1 ** 2) - self.assertAlmostEqual(pwlf._linear_functions[1](x2, y1), x2 ** 2 + y1 ** 2) - self.assertAlmostEqual(pwlf._linear_functions[1](x2, y2), x2 ** 2 + y2 ** 2) + self.assertAlmostEqual(pwlf._linear_functions[1](x1, y1), x1**2 + y1**2) + self.assertAlmostEqual(pwlf._linear_functions[1](x2, y1), x2**2 + y1**2) + self.assertAlmostEqual(pwlf._linear_functions[1](x2, y2), x2**2 + y2**2) self.assertEqual(len(pwlf._expressions), 1) new_obj = n_to_pwl.get_transformed_component(m.obj) @@ -292,8 +291,9 @@ def test_paraboloid_objective_uniform_grid(self): n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') n_to_pwl.apply_to( - m, num_points=2, - domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID + m, + num_points=2, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, ) # check obj is transformed @@ -315,16 +315,16 @@ def test_paraboloid_objective_uniform_grid(self): def test_objective_target(self): m = self.make_paraboloid_model() - m.some_other_nonlinear_constraint = Constraint(expr=m.x1 ** 3 + m.x2 <= 6) + m.some_other_nonlinear_constraint = Constraint(expr=m.x1**3 + m.x2 <= 6) n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') n_to_pwl.apply_to( - m, num_points=2, + m, + num_points=2, domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, - targets=[m.obj] + targets=[m.obj], ) - # check obj is transformed self.assertFalse(m.obj.active) @@ -349,11 +349,12 @@ def test_do_not_transform_quadratic_objective(self): n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') n_to_pwl.apply_to( - m, num_points=2, + m, + num_points=2, domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, - approximate_quadratic_objectives=False + 
approximate_quadratic_objectives=False, ) - + # check obj is *not* transformed self.assertTrue(m.obj.active) @@ -365,7 +366,7 @@ def test_do_not_transform_quadratic_objective(self): self.assertEqual(len(quadratic), 0) nonlinear = n_to_pwl.get_transformed_nonlinear_objectives(m) self.assertEqual(len(nonlinear), 0) - + # class TestNonlinearToPWLIntegration(unittest.TestCase): # def test_Ali_example(self): diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index 0adca4c8d80..fd11c67a17b 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -72,8 +72,12 @@ class DomainPartitioningMethod(enum.IntEnum): class _NonlinearToPWLTransformationData(AutoSlots.Mixin): - __slots__ = ('transformed_component', 'src_component', 'transformed_constraints', - 'transformed_objectives') + __slots__ = ( + 'transformed_component', + 'src_component', + 'transformed_constraints', + 'transformed_objectives', + ) def __init__(self): self.transformed_component = ComponentMap() @@ -607,8 +611,7 @@ def _approximate_expression( self, expr, obj, trans_block, config, approximate_quadratic ): expr_type, needs_approximating = self._needs_approximating( - expr, - approximate_quadratic + expr, approximate_quadratic ) if not needs_approximating: return None, expr_type @@ -690,7 +693,7 @@ def get_transformed_nonlinear_constraints(self, model): def get_transformed_quadratic_constraints(self, model): """ Given a model that has been transformed with contrib.piecewise.nonlinear_to_pwl, - return the list of quadratic Constraints that were approximated with + return the list of quadratic Constraints that were approximated with PiecewiseLinearFunctions """ return model.private_data().transformed_constraints[ExprType.QUADRATIC] @@ -706,7 +709,7 @@ def get_transformed_nonlinear_objectives(self, model): def get_transformed_quadratic_objectives(self, model): """ Given a model that has been transformed with contrib.piecewise.nonlinear_to_pwl, - return the list of quadratic Constraints that were approximated with + return the list of quadratic Constraints that were approximated with PiecewiseLinearFunctions """ return model.private_data().transformed_objectives[ExprType.QUADRATIC] From e3d3fd63a7b0368704fc08fe06b1d96a68982451 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Wed, 31 Jul 2024 17:51:31 -0600 Subject: [PATCH 15/46] Fixing some typos --- pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index fd11c67a17b..f88c83af17f 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -136,7 +136,8 @@ def get_points_lmt(points, bounds, func, seed): ) regr.fit(x_list, y_list) - leaves, splits, ths = parse_linear_tree_regressor(regr, bounds) + # ESJ TODO: we actually only needs leaves from here... + leaves, splits, thresholds = parse_linear_tree_regressor(regr, bounds) # This was originally part of the LMT_Model_component and used to calculate # avg_leaves for the output data. TODO: get this back @@ -419,7 +420,7 @@ class NonlinearToPWL(Transformation): It is recommended to leave this False as long as no nonlinear constraint involves more than about 5-6 variables. 
For constraints with higher- dimmensional nonlinear functions, additive decomposition will improve - the scalability of the approximation (since paritioning the domain is + the scalability of the approximation (since partitioning the domain is subject to the curse of dimensionality).""", ), ) @@ -433,9 +434,9 @@ class NonlinearToPWL(Transformation): Specifies the maximum dimension function the transformation should attempt to approximate. If a nonlinear function dimension exceeds 'max_dimension' the transformation will log a warning and leave the - expression as-is. For functions with dimension significantly the default - (5), it is likely that this transformation will stall triangulating the - points in order to partition the function domain.""", + expression as-is. For functions with dimension significantly above the + default (5), it is likely that this transformation will stall + triangulating the points in order to partition the function domain.""", ), ) # TODO: Minimum dimension to additively decompose--(Only decompose if the From 1fe0f000415e14bd791d7592594605a702127e70 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Fri, 2 Aug 2024 14:17:01 -0600 Subject: [PATCH 16/46] Fixing a couple bugs with additive decomposition in nonlinear-to-pwl --- .../piecewise/transform/nonlinear_to_pwl.py | 33 ++++++++++++++++--- 1 file changed, 28 insertions(+), 5 deletions(-) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index f88c83af17f..4daf69e14ea 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -52,6 +52,7 @@ from pyomo.repn.quadratic import QuadraticRepnVisitor from pyomo.repn.util import ExprType + lineartree, lineartree_available = attempt_import('lineartree') sklearn_lm, sklearn_available = attempt_import('sklearn.linear_model') @@ -124,6 +125,7 @@ def get_points_lmt_uniform_sample(bounds, n, func, seed=42): def get_points_lmt(points, bounds, func, seed): x_list = np.array(points) y_list = [] + for point in points: y_list.append(func(*point)) # ESJ: Do we really need the sklearn dependency to get LinearRegression?? @@ -132,7 +134,9 @@ def get_points_lmt(points, bounds, func, seed): criterion='mse', max_bins=120, min_samples_leaf=4, - max_depth=5, + max_depth=max(4, int(np.log2(len(points) / 4))), # Want the tree to grow + # with increasing points + # but not get too large. ) regr.fit(x_list, y_list) @@ -439,6 +443,20 @@ class NonlinearToPWL(Transformation): triangulating the points in order to partition the function domain.""", ), ) + CONFIG.declare( + 'min_additive_decomposition_dimension', + ConfigValue( + default=1, + domain=PositiveInt, + description="The minimum dimension of functions that will be additively decomposed.", + doc=""" + Specifies the minimum dimension of a function that the transformation should + attempt to additively decompose. If a nonlinear function dimension exceeds + 'min_additive_decomposition_dimension' the transformation will additively decompose + If a the dimension of an expression is less than the "min_additive_decomposition_dimension" + then, it will not be additively decomposed""", + ), + ) # TODO: Minimum dimension to additively decompose--(Only decompose if the # dimension exceeds this.) @@ -634,11 +652,12 @@ def _approximate_expression( "'max_dimension' or additively separating the expression." 
% (obj.name, config.max_dimension) ) - pwl_func += subexpr + pwl_func = pwl_func + subexpr continue - elif not self._needs_approximating(expr, approximate_quadratic)[1]: - pwl_func += subexpr + elif not self._needs_approximating(subexpr, approximate_quadratic)[1]: + pwl_func = pwl_func + subexpr continue + # else we approximate subexpr def eval_expr(*args): for i, v in enumerate(expr_vars): @@ -655,7 +674,11 @@ def eval_expr(*args): trans_block, obj.getname(fully_qualified=False) ) trans_block.add_component(f"_pwle_{name}_{k}", pwlf) - pwl_func += pwlf(*expr_vars) + # NOTE: We are *not* using += because it will hit the NamedExpression + # implementation of iadd and dereference the ExpressionData holding + # the PiecewiseLinearExpression that we later transform my remapping + # it to a Var... + pwl_func = pwl_func + pwlf(*expr_vars) # restore var values for v, val in orig_values.items(): From 9ae91fbcc514aeeec2ffec88c545e2dbfc9384cb Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Fri, 2 Aug 2024 14:17:38 -0600 Subject: [PATCH 17/46] Adding partial test of additive decomposition --- .../piecewise/tests/test_nonlinear_to_pwl.py | 56 ++++++++++++++++++- 1 file changed, 54 insertions(+), 2 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index c9edabd00bd..5c352bb3052 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -15,7 +15,11 @@ NonlinearToPWL, DomainPartitioningMethod, ) -from pyomo.core.expr.compare import assertExpressionsStructurallyEqual +from pyomo.core.expr.compare import ( + assertExpressionsEqual, + assertExpressionsStructurallyEqual, +) +from pyomo.core.expr.numeric_expr import SumExpression from pyomo.environ import ( ConcreteModel, Var, @@ -23,6 +27,8 @@ TransformationFactory, log, Objective, + Reals, + SolverFactory, ) ## debug @@ -368,7 +374,53 @@ def test_do_not_transform_quadratic_objective(self): self.assertEqual(len(nonlinear), 0) -# class TestNonlinearToPWLIntegration(unittest.TestCase): +class TestNonlinearToPWLIntegration(unittest.TestCase): + def test_additively_decompose(self): + m = ConcreteModel() + m.x1 = Var(within=Reals, bounds=(0, 2), initialize=1.745) + m.x4 = Var(within=Reals, bounds=(0, 5), initialize=3.048) + m.x7 = Var(within=Reals, bounds=(0.9, 0.95), initialize=0.928) + m.obj = Objective(expr=-6.3 * m.x4 * m.x7 + 5.04 * m.x1) + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + n_to_pwl.apply_to( + m, + num_points=4, + domain_partitioning_method=DomainPartitioningMethod.LINEAR_MODEL_TREE_RANDOM, + additively_decompose=True, + ) + + self.assertFalse(m.obj.active) + new_obj = n_to_pwl.get_transformed_component(m.obj) + self.assertIs(n_to_pwl.get_src_component(new_obj), m.obj) + self.assertTrue(new_obj.active) + # two terms + self.assertIsInstance(new_obj.expr, SumExpression) + self.assertEqual(len(new_obj.expr.args), 2) + first = new_obj.expr.args[0] + pwlf = first.expr.pw_linear_function + all_pwlf = list( + m.component_data_objects(PiecewiseLinearFunction, descend_into=True) + ) + self.assertEqual(len(all_pwlf), 1) + # It is on the active tree. 
+ self.assertIs(pwlf, all_pwlf[0]) + + second = new_obj.expr.args[1] + assertExpressionsEqual(self, second, 5.04 * m.x1) + + objs = n_to_pwl.get_transformed_nonlinear_objectives(m) + self.assertEqual(len(objs), 0) + objs = n_to_pwl.get_transformed_quadratic_objectives(m) + self.assertEqual(len(objs), 1) + self.assertIn(m.obj, objs) + self.assertEqual(len(n_to_pwl.get_transformed_nonlinear_constraints(m)), 0) + self.assertEqual(len(n_to_pwl.get_transformed_quadratic_constraints(m)), 0) + + TransformationFactory('contrib.piecewise.outer_repn_gdp').apply_to(m) + TransformationFactory('gdp.bigm').apply_to(m) + SolverFactory('gurobi').solve(m) + + # def test_Ali_example(self): # m = ConcreteModel() # m.flow_super_heated_vapor = Var() From 8074fe083b4325acc5f1dbd9831f288755ccccf2 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 5 Aug 2024 11:53:11 -0600 Subject: [PATCH 18/46] NFC: Reformatting some docstrings --- .../piecewise/transform/nonlinear_to_pwl.py | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index 4daf69e14ea..d1002930a26 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -413,8 +413,8 @@ class NonlinearToPWL(Transformation): ConfigValue( default=False, domain=bool, - description="Whether or not to additively decompose constraints and " - "approximate the summands separately.", + description="Whether or not to additively decompose constraint expressions " + "and approximate the summands separately.", doc=""" If False, each nonlinear constraint expression will be approximated by exactly one piecewise-linear function. If True, constraints will be @@ -444,17 +444,19 @@ class NonlinearToPWL(Transformation): ), ) CONFIG.declare( - 'min_additive_decomposition_dimension', + 'min_dimension_to_additively_decompose', ConfigValue( default=1, domain=PositiveInt, - description="The minimum dimension of functions that will be additively decomposed.", + description="The minimum dimension of functions that will be additively " + "decomposed.", doc=""" - Specifies the minimum dimension of a function that the transformation should - attempt to additively decompose. If a nonlinear function dimension exceeds - 'min_additive_decomposition_dimension' the transformation will additively decompose - If a the dimension of an expression is less than the "min_additive_decomposition_dimension" - then, it will not be additively decomposed""", + Specifies the minimum dimension of a function that the transformation + should attempt to additively decompose. If a nonlinear function dimension + exceeds 'min_dimension_to_additively_decompose' the transformation will + additively decompose. 
If a the dimension of an expression is less than + the 'min_dimension_to_additively_decompose' then it will not be additively + decomposed""", ), ) # TODO: Minimum dimension to additively decompose--(Only decompose if the From b78efe540752b2f15778568836f437fcc70875fa Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 5 Aug 2024 13:36:40 -0600 Subject: [PATCH 19/46] Adding max depth argument to be used for the linear tree domain partitioning methods --- .../piecewise/transform/nonlinear_to_pwl.py | 81 +++++++++++-------- 1 file changed, 48 insertions(+), 33 deletions(-) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index d1002930a26..851c5a9e0f4 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -90,7 +90,7 @@ def __init__(self): Block.register_private_data_initializer(_NonlinearToPWLTransformationData) -def get_random_point_grid(bounds, n, func, seed=42): +def get_random_point_grid(bounds, n, func, config, seed=42): # Generate randomized grid of points linspaces = [] for lb, ub in bounds: @@ -99,7 +99,7 @@ def get_random_point_grid(bounds, n, func, seed=42): return list(itertools.product(*linspaces)) -def get_uniform_point_grid(bounds, n, func): +def get_uniform_point_grid(bounds, n, func, config): # Generate non-randomized grid of points linspaces = [] for lb, ub in bounds: @@ -112,31 +112,32 @@ def get_uniform_point_grid(bounds, n, func): return list(itertools.product(*linspaces)) -def get_points_lmt_random_sample(bounds, n, func, seed=42): +def get_points_lmt_random_sample(bounds, n, func, config, seed=42): points = get_random_point_grid(bounds, n, func, seed=seed) - return get_points_lmt(points, bounds, func, seed) + return get_points_lmt(points, bounds, func, config, seed) -def get_points_lmt_uniform_sample(bounds, n, func, seed=42): +def get_points_lmt_uniform_sample(bounds, n, func, config, seed=42): points = get_uniform_point_grid(bounds, n, func) - return get_points_lmt(points, bounds, func, seed) + return get_points_lmt(points, bounds, func, seed, config) -def get_points_lmt(points, bounds, func, seed): +def get_points_lmt(points, bounds, func, seed, config): x_list = np.array(points) y_list = [] for point in points: y_list.append(func(*point)) - # ESJ: Do we really need the sklearn dependency to get LinearRegression?? + max_depth = config.linear_tree_max_depth + if max_depth is None: + # Want the tree to grow with increasing points but not get too large. + max_depth = max(4, int(np.log2(len(points) / 4))) regr = lineartree.LinearTreeRegressor( sklearn_lm.LinearRegression(), criterion='mse', max_bins=120, min_samples_leaf=4, - max_depth=max(4, int(np.log2(len(points) / 4))), # Want the tree to grow - # with increasing points - # but not get too large. 
+ max_depth=max_depth ) regr.fit(x_list, y_list) @@ -162,19 +163,21 @@ def get_points_lmt(points, bounds, func, seed): } -def get_pwl_function_approximation(func, method, n, bounds): +def _get_pwl_function_approximation(func, config, bounds): """ Get a piecewise-linear approximation of a function, given: func: function to approximate - method: method to use for the approximation, member of DomainPartitioningMethod - n: parameter controlling fineness of the approximation based on the specified method - bounds: list of tuples giving upper and lower bounds for each of func's arguments + config: ConfigDict for transformation, specifying domain_partitioning_method, + num_points, and max_depth (if using linear trees) + bounds: list of tuples giving upper and lower bounds for each of func's + arguments """ - points = _partition_method_dispatcher[method](bounds, n, func) + method = config.domain_partitioning_method + n = config.num_points + points = _partition_method_dispatcher[method](bounds, n, func, config) - # DUCT TAPE WARNING: work around deficiency in PiecewiseLinearFunction - # constructor. TODO + # Don't confuse PiecewiseLinearFunction constructor... dim = len(points[0]) if dim == 1: points = [pt[0] for pt in points] @@ -188,10 +191,6 @@ def get_pwl_function_approximation(func, method, n, bounds): return PiecewiseLinearFunction(points=points, function=func) -# TODO: this is still horrible. Maybe I should put these back together into -# a wrapper class again, but better this time? - - # Given a leaves dict (as generated by parse_tree) and a list of tuples # representing variable bounds, generate the set of vertices separating each # subset of the domain @@ -288,9 +287,11 @@ def parse_linear_tree_regressor(linear_tree_regressor, bounds): # This doesn't catch all additively separable expressions--we really need a # walker (as does gdp.partition_disjuncts) -def _additively_decompose_expr(input_expr): - if input_expr.__class__ is not SumExpression: - # This isn't separable, so we just have the one expression +def _additively_decompose_expr(input_expr, min_dimension): + dimension = len(list(identify_variables(input_expr))) + if input_expr.__class__ is not SumExpression or dimension < min_dimension: + # This isn't separable or we don't want to separate it, so we just have + # the one expression return [input_expr] # else, it was a SumExpression, and we will break it into the summands return list(input_expr.args) @@ -459,10 +460,22 @@ class NonlinearToPWL(Transformation): decomposed""", ), ) - # TODO: Minimum dimension to additively decompose--(Only decompose if the - # dimension exceeds this.) - - # TODO: incorporate Bashar's linear tree changes. + CONFIG.declare( + 'linear_tree_max_depth', + ConfigValue( + default=None, + domain=PositiveInt, + description="Maximum depth for linear tree training, used if using a " + "domain partitioning method based on linear model trees.", + doc=""" + Only used if 'domain_partitioning_method' is LINEAR_MODEL_TREE_UNIFORM or + LINEAR_MODEL_TREE_RANDOM: Specifies the maximum depth of the linear model + trees trained to determine the points to be triangulated to form the + domain of the piecewise-linear approximations. If None (the default), + the max depth will be given as max(4, ln(num_points / 4)). 
+ """, + ), + ) def __init__(self): super(Transformation).__init__() @@ -640,7 +653,10 @@ def _approximate_expression( # Additively decompose expr and work on the pieces pwl_func = 0 for k, subexpr in enumerate( - _additively_decompose_expr(expr) if config.additively_decompose else (expr,) + _additively_decompose_expr( + expr, + config.min_dimension_to_additively_decompose) + if config.additively_decompose else (expr,) ): # First check if this is a good idea expr_vars = list(identify_variables(subexpr, include_fixed=False)) @@ -666,10 +682,9 @@ def eval_expr(*args): v.value = args[i] return value(subexpr) - pwlf = get_pwl_function_approximation( + pwlf = _get_pwl_function_approximation( eval_expr, - config.domain_partitioning_method, - config.num_points, + config, self._get_bounds_list(expr_vars, obj), ) name = unique_component_name( From 31a95bab3b3894bdcaa27c4a0191a1e6fe77b117 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 5 Aug 2024 14:38:31 -0600 Subject: [PATCH 20/46] Adding tests with absolute value for linear model tree partitioning --- .../piecewise/tests/test_nonlinear_to_pwl.py | 99 ++++++++++++++++++- .../piecewise/transform/nonlinear_to_pwl.py | 8 +- 2 files changed, 99 insertions(+), 8 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 5c352bb3052..b779543cc9b 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -9,6 +9,7 @@ # This software is distributed under the 3-clause BSD License. # ___________________________________________________________________________ +from pyomo.common.dependencies import attempt_import, scipy_available import pyomo.common.unittest as unittest from pyomo.contrib.piecewise import PiecewiseLinearFunction from pyomo.contrib.piecewise.transform.nonlinear_to_pwl import ( @@ -31,9 +32,15 @@ SolverFactory, ) -## debug -from pytest import set_trace +gurobi_available = ( + SolverFactory('gurobi').available(exception_flag=False) + and SolverFactory('gurobi').license_is_valid() +) +lineartree_available = attempt_import('lineartree')[1] +sklearn_available = attempt_import('sklearn.linear_model')[1] +## DEBUG +from pytest import set_trace class TestNonlinearToPWL_1D(unittest.TestCase): def make_model(self): @@ -374,8 +381,90 @@ def test_do_not_transform_quadratic_objective(self): self.assertEqual(len(nonlinear), 0) +@unittest.skipUnless(lineartree_available, "lineartree not available") +@unittest.skipUnless(sklearn_available, "sklearn not available") +class TestLinearTreeDomainPartitioning(unittest.TestCase): + def make_absolute_value_model(self): + m = ConcreteModel() + m.x = Var(bounds=(-10, 10)) + m.obj = Objective(expr=abs(m.x)) + + return m + + def test_linear_model_tree_uniform(self): + m = self.make_absolute_value_model() + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + n_to_pwl.apply_to( + m, + num_points=301, # sample a lot so we train a good tree + domain_partitioning_method=DomainPartitioningMethod.LINEAR_MODEL_TREE_UNIFORM, + linear_tree_max_depth=1, # force parsimony + ) + + transformed_obj = n_to_pwl.get_transformed_component(m.obj) + pwlf = transformed_obj.expr.expr.pw_linear_function + + self.assertEqual(len(pwlf._simplices), 2) + self.assertEqual(pwlf._simplices, [(0, 1), (1, 2)]) + self.assertEqual(pwlf._points, [(-10,), (-0.08402,), (10,)]) + self.assertEqual(len(pwlf._linear_functions), 2) + assertExpressionsEqual( + self, + 
pwlf._linear_functions[0](m.x), + - 1.0 * m.x + ) + assertExpressionsStructurallyEqual( + self, + pwlf._linear_functions[1](m.x), + # pretty close to m.x, but we're a bit off because we don't have 0 + # as a breakpoint. + 0.9833360108369479*m.x + 0.16663989163052034, + places=7 + ) + + def test_linear_model_tree_random(self): + m = self.make_absolute_value_model() + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + n_to_pwl.apply_to( + m, + num_points=300, # sample a lot so we train a good tree + domain_partitioning_method=DomainPartitioningMethod.LINEAR_MODEL_TREE_RANDOM, + linear_tree_max_depth=1, # force parsimony + ) + + transformed_obj = n_to_pwl.get_transformed_component(m.obj) + pwlf = transformed_obj.expr.expr.pw_linear_function + + self.assertEqual(len(pwlf._simplices), 2) + self.assertEqual(pwlf._simplices, [(0, 1), (1, 2)]) + self.assertEqual(pwlf._points, [(-10,), (-0.03638,), (10,)]) + self.assertEqual(len(pwlf._linear_functions), 2) + assertExpressionsEqual( + self, + pwlf._linear_functions[0](m.x), + - 1.0 * m.x + ) + assertExpressionsStructurallyEqual( + self, + pwlf._linear_functions[1](m.x), + # pretty close to m.x, but we're a bit off because we don't have 0 + # as a breakpoint. + 0.9927503741388829*m.x + 0.07249625861117256, + places=7 + ) + + class TestNonlinearToPWLIntegration(unittest.TestCase): - def test_additively_decompose(self): + @unittest.skipUnless(gurobi_available, "Gurobi is not available") + @unittest.skipUnless(scipy_available, "Scipy is not available") + def test_transform_and_solve_additively_decomposes_model(self): + # A bit of an integration test to make sure that we build additively + # decomposed pw-linear approximations in such a way that they are + # transformed to MILP and solved correctly. 
(Largely because we have to + # be careful to make sure that we don't ever directly insert + # PiecewiseLinearExpression objects into expressions and are instead + # using the ExpressionData that points to them (and will eventually be + # replaced in transformation)) m = ConcreteModel() m.x1 = Var(within=Reals, bounds=(0, 2), initialize=1.745) m.x4 = Var(within=Reals, bounds=(0, 5), initialize=3.048) @@ -385,7 +474,7 @@ def test_additively_decompose(self): n_to_pwl.apply_to( m, num_points=4, - domain_partitioning_method=DomainPartitioningMethod.LINEAR_MODEL_TREE_RANDOM, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, additively_decompose=True, ) @@ -420,6 +509,8 @@ def test_additively_decompose(self): TransformationFactory('gdp.bigm').apply_to(m) SolverFactory('gurobi').solve(m) + # actually test the answer or something + self.assertTrue(False) # def test_Ali_example(self): # m = ConcreteModel() diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index 851c5a9e0f4..59330a373eb 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -113,16 +113,16 @@ def get_uniform_point_grid(bounds, n, func, config): def get_points_lmt_random_sample(bounds, n, func, config, seed=42): - points = get_random_point_grid(bounds, n, func, seed=seed) + points = get_random_point_grid(bounds, n, func, config, seed=seed) return get_points_lmt(points, bounds, func, config, seed) def get_points_lmt_uniform_sample(bounds, n, func, config, seed=42): - points = get_uniform_point_grid(bounds, n, func) - return get_points_lmt(points, bounds, func, seed, config) + points = get_uniform_point_grid(bounds, n, func, config) + return get_points_lmt(points, bounds, func, config, seed) -def get_points_lmt(points, bounds, func, seed, config): +def get_points_lmt(points, bounds, func, config, seed): x_list = np.array(points) y_list = [] From c89ddbf454819acf4fef59d37696b858f09bf16c Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 5 Aug 2024 14:39:38 -0600 Subject: [PATCH 21/46] NFC: black --- .../piecewise/tests/test_nonlinear_to_pwl.py | 30 ++++++++----------- .../piecewise/transform/nonlinear_to_pwl.py | 13 ++++---- 2 files changed, 18 insertions(+), 25 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index b779543cc9b..7e8669e209a 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -42,6 +42,7 @@ ## DEBUG from pytest import set_trace + class TestNonlinearToPWL_1D(unittest.TestCase): def make_model(self): m = ConcreteModel() @@ -396,9 +397,9 @@ def test_linear_model_tree_uniform(self): n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') n_to_pwl.apply_to( m, - num_points=301, # sample a lot so we train a good tree + num_points=301, # sample a lot so we train a good tree domain_partitioning_method=DomainPartitioningMethod.LINEAR_MODEL_TREE_UNIFORM, - linear_tree_max_depth=1, # force parsimony + linear_tree_max_depth=1, # force parsimony ) transformed_obj = n_to_pwl.get_transformed_component(m.obj) @@ -408,18 +409,14 @@ def test_linear_model_tree_uniform(self): self.assertEqual(pwlf._simplices, [(0, 1), (1, 2)]) self.assertEqual(pwlf._points, [(-10,), (-0.08402,), (10,)]) self.assertEqual(len(pwlf._linear_functions), 2) - assertExpressionsEqual( - self, - 
pwlf._linear_functions[0](m.x), - - 1.0 * m.x - ) + assertExpressionsEqual(self, pwlf._linear_functions[0](m.x), -1.0 * m.x) assertExpressionsStructurallyEqual( self, pwlf._linear_functions[1](m.x), # pretty close to m.x, but we're a bit off because we don't have 0 # as a breakpoint. - 0.9833360108369479*m.x + 0.16663989163052034, - places=7 + 0.9833360108369479 * m.x + 0.16663989163052034, + places=7, ) def test_linear_model_tree_random(self): @@ -427,9 +424,9 @@ def test_linear_model_tree_random(self): n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') n_to_pwl.apply_to( m, - num_points=300, # sample a lot so we train a good tree + num_points=300, # sample a lot so we train a good tree domain_partitioning_method=DomainPartitioningMethod.LINEAR_MODEL_TREE_RANDOM, - linear_tree_max_depth=1, # force parsimony + linear_tree_max_depth=1, # force parsimony ) transformed_obj = n_to_pwl.get_transformed_component(m.obj) @@ -439,18 +436,14 @@ def test_linear_model_tree_random(self): self.assertEqual(pwlf._simplices, [(0, 1), (1, 2)]) self.assertEqual(pwlf._points, [(-10,), (-0.03638,), (10,)]) self.assertEqual(len(pwlf._linear_functions), 2) - assertExpressionsEqual( - self, - pwlf._linear_functions[0](m.x), - - 1.0 * m.x - ) + assertExpressionsEqual(self, pwlf._linear_functions[0](m.x), -1.0 * m.x) assertExpressionsStructurallyEqual( self, pwlf._linear_functions[1](m.x), # pretty close to m.x, but we're a bit off because we don't have 0 # as a breakpoint. - 0.9927503741388829*m.x + 0.07249625861117256, - places=7 + 0.9927503741388829 * m.x + 0.07249625861117256, + places=7, ) @@ -512,6 +505,7 @@ def test_transform_and_solve_additively_decomposes_model(self): # actually test the answer or something self.assertTrue(False) + # def test_Ali_example(self): # m = ConcreteModel() # m.flow_super_heated_vapor = Var() diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index 59330a373eb..4b549c0be8d 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -137,7 +137,7 @@ def get_points_lmt(points, bounds, func, config, seed): criterion='mse', max_bins=120, min_samples_leaf=4, - max_depth=max_depth + max_depth=max_depth, ) regr.fit(x_list, y_list) @@ -654,9 +654,10 @@ def _approximate_expression( pwl_func = 0 for k, subexpr in enumerate( _additively_decompose_expr( - expr, - config.min_dimension_to_additively_decompose) - if config.additively_decompose else (expr,) + expr, config.min_dimension_to_additively_decompose + ) + if config.additively_decompose + else (expr,) ): # First check if this is a good idea expr_vars = list(identify_variables(subexpr, include_fixed=False)) @@ -683,9 +684,7 @@ def eval_expr(*args): return value(subexpr) pwlf = _get_pwl_function_approximation( - eval_expr, - config, - self._get_bounds_list(expr_vars, obj), + eval_expr, config, self._get_bounds_list(expr_vars, obj) ) name = unique_component_name( trans_block, obj.getname(fully_qualified=False) From e4bfca0af063f78e9aa50e0c3f758200b98fddbd Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 5 Aug 2024 15:16:20 -0600 Subject: [PATCH 22/46] Finishing integration test, adding bigger linear model tree test --- .../piecewise/tests/test_nonlinear_to_pwl.py | 118 +++++++++++------- 1 file changed, 73 insertions(+), 45 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 
7e8669e209a..074ef64b12d 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -30,6 +30,8 @@ Objective, Reals, SolverFactory, + TerminationCondition, + value ) gurobi_available = ( @@ -231,32 +233,6 @@ def test_cannot_approximate_constraints_with_unbounded_vars(self): domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, ) - # def test_log_constraint_lmt_uniform_sample(self): - # m = self.make_model() - - # n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') - # n_to_pwl.apply_to( - # m, - # num_points=3, - # domain_partitioning_method=DomainPartitioningMethod.LINEAR_MODEL_TREE_UNIFORM, - # ) - - # # cons is transformed - # self.assertFalse(m.cons.active) - - # pwlf = list(m.component_data_objects(PiecewiseLinearFunction, - # descend_into=True)) - # self.assertEqual(len(pwlf), 1) - # pwlf = pwlf[0] - - # set_trace() - - # # TODO - # x1 = 4.370861069626263 - # x2 = 7.587945476302646 - # x3 = 9.556428757689245 - # self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) - class TestNonlinearToPWL_2D(unittest.TestCase): def make_paraboloid_model(self): @@ -446,6 +422,46 @@ def test_linear_model_tree_random(self): places=7, ) + def test_linear_model_tree_random_auto_depth_tree(self): + m = self.make_absolute_value_model() + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + n_to_pwl.apply_to( + m, + num_points=100, # sample a lot but not too many because this one is + # more prone to overfitting + domain_partitioning_method=DomainPartitioningMethod.LINEAR_MODEL_TREE_RANDOM, + ) + + transformed_obj = n_to_pwl.get_transformed_component(m.obj) + pwlf = transformed_obj.expr.expr.pw_linear_function + + print(pwlf._simplices) + print(pwlf._points) + for f in pwlf._linear_functions: + print(f(m.x)) + + # We end up with 8, which is just what happens, but it's not a terrible + # approximation + self.assertEqual(len(pwlf._simplices), 8) + self.assertEqual(pwlf._simplices, [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), + (5, 6), (6, 7), (7, 8)]) + self.assertEqual(pwlf._points, [(-10,), (-9.24119,), (-8.71428,), (-8.11135,), + (0.06048,), (0.70015,), (1.9285,), (2.15597,), + (10,)]) + self.assertEqual(len(pwlf._linear_functions), 8) + for i in range(3): + assertExpressionsEqual(self, pwlf._linear_functions[i](m.x), -1.0 * m.x) + assertExpressionsStructurallyEqual( + self, + pwlf._linear_functions[3](m.x), + # pretty close to - m.x, but we're a bit off because we don't have 0 + # as a breakpoint. 
+ -0.9851979299618323*m.x + 0.12006477080409184, + places=7, + ) + for i in range(4, 8): + assertExpressionsEqual(self, pwlf._linear_functions[i](m.x), m.x) + class TestNonlinearToPWLIntegration(unittest.TestCase): @unittest.skipUnless(gurobi_available, "Gurobi is not available") @@ -464,16 +480,16 @@ def test_transform_and_solve_additively_decomposes_model(self): m.x7 = Var(within=Reals, bounds=(0.9, 0.95), initialize=0.928) m.obj = Objective(expr=-6.3 * m.x4 * m.x7 + 5.04 * m.x1) n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') - n_to_pwl.apply_to( + xm = n_to_pwl.create_using( m, num_points=4, domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, additively_decompose=True, ) - self.assertFalse(m.obj.active) - new_obj = n_to_pwl.get_transformed_component(m.obj) - self.assertIs(n_to_pwl.get_src_component(new_obj), m.obj) + self.assertFalse(xm.obj.active) + new_obj = n_to_pwl.get_transformed_component(xm.obj) + self.assertIs(n_to_pwl.get_src_component(new_obj), xm.obj) self.assertTrue(new_obj.active) # two terms self.assertIsInstance(new_obj.expr, SumExpression) @@ -481,30 +497,42 @@ def test_transform_and_solve_additively_decomposes_model(self): first = new_obj.expr.args[0] pwlf = first.expr.pw_linear_function all_pwlf = list( - m.component_data_objects(PiecewiseLinearFunction, descend_into=True) + xm.component_data_objects(PiecewiseLinearFunction, descend_into=True) ) self.assertEqual(len(all_pwlf), 1) # It is on the active tree. self.assertIs(pwlf, all_pwlf[0]) second = new_obj.expr.args[1] - assertExpressionsEqual(self, second, 5.04 * m.x1) + assertExpressionsEqual(self, second, 5.04 * xm.x1) - objs = n_to_pwl.get_transformed_nonlinear_objectives(m) + objs = n_to_pwl.get_transformed_nonlinear_objectives(xm) self.assertEqual(len(objs), 0) - objs = n_to_pwl.get_transformed_quadratic_objectives(m) + objs = n_to_pwl.get_transformed_quadratic_objectives(xm) self.assertEqual(len(objs), 1) - self.assertIn(m.obj, objs) - self.assertEqual(len(n_to_pwl.get_transformed_nonlinear_constraints(m)), 0) - self.assertEqual(len(n_to_pwl.get_transformed_quadratic_constraints(m)), 0) - - TransformationFactory('contrib.piecewise.outer_repn_gdp').apply_to(m) - TransformationFactory('gdp.bigm').apply_to(m) - SolverFactory('gurobi').solve(m) - - # actually test the answer or something - self.assertTrue(False) - + self.assertIn(xm.obj, objs) + self.assertEqual(len(n_to_pwl.get_transformed_nonlinear_constraints(xm)), 0) + self.assertEqual(len(n_to_pwl.get_transformed_quadratic_constraints(xm)), 0) + + TransformationFactory('contrib.piecewise.outer_repn_gdp').apply_to(xm) + TransformationFactory('gdp.bigm').apply_to(xm) + opt = SolverFactory('gurobi') + results = opt.solve(xm) + self.assertEqual(results.solver.termination_condition, + TerminationCondition.optimal) + + # solve the original + opt.options['NonConvex'] = 2 + results = opt.solve(m) + self.assertEqual(results.solver.termination_condition, + TerminationCondition.optimal) + + # Not a bad approximation: + self.assertAlmostEqual(value(xm.obj), value(m.obj), places=2) + + self.assertAlmostEqual(value(xm.x4), value(m.x4), places=3) + self.assertAlmostEqual(value(xm.x7), value(m.x7), places=4) + self.assertAlmostEqual(value(xm.x1), value(m.x1), places=7) # def test_Ali_example(self): # m = ConcreteModel() From 96b9aeecf37a9d3f954dc2c08b2f37b61ca84fa4 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 5 Aug 2024 15:16:59 -0600 Subject: [PATCH 23/46] NFC: black --- .../piecewise/tests/test_nonlinear_to_pwl.py | 40 
+++++++++++++------ 1 file changed, 28 insertions(+), 12 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 074ef64b12d..2a3a9821173 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -31,7 +31,7 @@ Reals, SolverFactory, TerminationCondition, - value + value, ) gurobi_available = ( @@ -428,7 +428,7 @@ def test_linear_model_tree_random_auto_depth_tree(self): n_to_pwl.apply_to( m, num_points=100, # sample a lot but not too many because this one is - # more prone to overfitting + # more prone to overfitting domain_partitioning_method=DomainPartitioningMethod.LINEAR_MODEL_TREE_RANDOM, ) @@ -443,11 +443,24 @@ def test_linear_model_tree_random_auto_depth_tree(self): # We end up with 8, which is just what happens, but it's not a terrible # approximation self.assertEqual(len(pwlf._simplices), 8) - self.assertEqual(pwlf._simplices, [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), - (5, 6), (6, 7), (7, 8)]) - self.assertEqual(pwlf._points, [(-10,), (-9.24119,), (-8.71428,), (-8.11135,), - (0.06048,), (0.70015,), (1.9285,), (2.15597,), - (10,)]) + self.assertEqual( + pwlf._simplices, + [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7), (7, 8)], + ) + self.assertEqual( + pwlf._points, + [ + (-10,), + (-9.24119,), + (-8.71428,), + (-8.11135,), + (0.06048,), + (0.70015,), + (1.9285,), + (2.15597,), + (10,), + ], + ) self.assertEqual(len(pwlf._linear_functions), 8) for i in range(3): assertExpressionsEqual(self, pwlf._linear_functions[i](m.x), -1.0 * m.x) @@ -456,7 +469,7 @@ def test_linear_model_tree_random_auto_depth_tree(self): pwlf._linear_functions[3](m.x), # pretty close to - m.x, but we're a bit off because we don't have 0 # as a breakpoint. 
- -0.9851979299618323*m.x + 0.12006477080409184, + -0.9851979299618323 * m.x + 0.12006477080409184, places=7, ) for i in range(4, 8): @@ -518,14 +531,16 @@ def test_transform_and_solve_additively_decomposes_model(self): TransformationFactory('gdp.bigm').apply_to(xm) opt = SolverFactory('gurobi') results = opt.solve(xm) - self.assertEqual(results.solver.termination_condition, - TerminationCondition.optimal) + self.assertEqual( + results.solver.termination_condition, TerminationCondition.optimal + ) # solve the original opt.options['NonConvex'] = 2 results = opt.solve(m) - self.assertEqual(results.solver.termination_condition, - TerminationCondition.optimal) + self.assertEqual( + results.solver.termination_condition, TerminationCondition.optimal + ) # Not a bad approximation: self.assertAlmostEqual(value(xm.obj), value(m.obj), places=2) @@ -534,6 +549,7 @@ def test_transform_and_solve_additively_decomposes_model(self): self.assertAlmostEqual(value(xm.x7), value(m.x7), places=4) self.assertAlmostEqual(value(xm.x1), value(m.x1), places=7) + # def test_Ali_example(self): # m = ConcreteModel() # m.flow_super_heated_vapor = Var() From 76459a2ed5923a6ad83d48b6bbeeb0b1f82a0fc0 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 5 Aug 2024 15:34:46 -0600 Subject: [PATCH 24/46] nonlinear_to_pwl.py --- .../piecewise/tests/test_nonlinear_to_pwl.py | 102 +++++++----------- .../piecewise/transform/nonlinear_to_pwl.py | 2 +- 2 files changed, 39 insertions(+), 65 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 2a3a9821173..36b646c6420 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -16,6 +16,7 @@ NonlinearToPWL, DomainPartitioningMethod, ) +from pyomo.core.base.expression import _ExpressionData from pyomo.core.expr.compare import ( assertExpressionsEqual, assertExpressionsStructurallyEqual, @@ -41,9 +42,6 @@ lineartree_available = attempt_import('lineartree')[1] sklearn_available = attempt_import('sklearn.linear_model')[1] -## DEBUG -from pytest import set_trace - class TestNonlinearToPWL_1D(unittest.TestCase): def make_model(self): @@ -233,6 +231,43 @@ def test_cannot_approximate_constraints_with_unbounded_vars(self): domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, ) + def test_error_for_non_separable_exceeding_max_dimension(self): + m = ConcreteModel() + m.x = Var([0, 1, 2, 3, 4], bounds=(-4, 5)) + m.ick = Constraint(expr=m.x[0] ** (m.x[1] * m.x[2] * m.x[3] * m.x[4]) <= 8) + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + with self.assertRaisesRegex( + ValueError, + "Not approximating expression for component 'ick' as " + "it exceeds the maximum dimension of 4. Try increasing " + "'max_dimension' or additively separating the expression." 
+ ): + n_to_pwl.apply_to( + m, + num_points=3, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, + max_dimension=4 + ) + + def test_do_not_additively_decompose_below_min_dimension(self): + m = ConcreteModel() + m.x = Var([0, 1, 2, 3, 4], bounds=(-4, 5)) + m.c = Constraint(expr=m.x[0] * m.x[1] + m.x[3] <= 4) + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + n_to_pwl.apply_to( + m, + num_points=3, + additively_decompose=True, + min_dimension_to_additively_decompose=4, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, + ) + + transformed_c = n_to_pwl.get_transformed_component(m.c) + # This is only approximated by one pwlf: + self.assertIsInstance(transformed_c.body, _ExpressionData) + class TestNonlinearToPWL_2D(unittest.TestCase): def make_paraboloid_model(self): @@ -548,64 +583,3 @@ def test_transform_and_solve_additively_decomposes_model(self): self.assertAlmostEqual(value(xm.x4), value(m.x4), places=3) self.assertAlmostEqual(value(xm.x7), value(m.x7), places=4) self.assertAlmostEqual(value(xm.x1), value(m.x1), places=7) - - -# def test_Ali_example(self): -# m = ConcreteModel() -# m.flow_super_heated_vapor = Var() -# m.flow_super_heated_vapor.fix(0.4586949988166174) -# m.super_heated_vapor_temperature = Var(bounds=(31, 200), initialize=45) -# m.evaporator_condensate_temperature = Var( -# bounds=(29, 120.8291392028045), initialize=30 -# ) -# m.LMTD = Var(bounds=(0, 130.61608989795093), initialize=1) -# m.evaporator_condensate_enthalpy = Var( -# bounds=(-15836.847, -15510.210751855624), initialize=100 -# ) -# m.evaporator_condensate_vapor_enthalpy = Var( -# bounds=(-13416.64, -13247.674383866839), initialize=100 -# ) -# m.heat_transfer_coef = Var( -# bounds=(1.9936854577372858, 5.995319594088982), initialize=0.1 -# ) -# m.evaporator_brine_temperature = Var( -# bounds=(27, 118.82913920280366), initialize=35 -# ) -# m.each_evaporator_area = Var() - -# m.c = Constraint( -# expr=m.each_evaporator_area -# == ( -# 1.873 -# * m.flow_super_heated_vapor -# * ( -# m.super_heated_vapor_temperature -# - m.evaporator_condensate_temperature -# ) -# / (100 * m.LMTD) -# + m.flow_super_heated_vapor -# * ( -# m.evaporator_condensate_vapor_enthalpy -# - m.evaporator_condensate_enthalpy -# ) -# / ( -# m.heat_transfer_coef -# * ( -# m.evaporator_condensate_temperature -# - m.evaporator_brine_temperature -# ) -# ) -# ) -# ) - -# n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') -# n_to_pwl.apply_to( -# m, -# num_points=3, -# domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, -# ) - -# m.pprint() - -# from pyomo.environ import SolverFactory -# SolverFactory('gurobi').solve(m, tee=True) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index 4b549c0be8d..30797b5318e 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -665,7 +665,7 @@ def _approximate_expression( dim = len(expr_vars) if dim > config.max_dimension: - logger.warning( + raise ValueError( "Not approximating expression for component '%s' as " "it exceeds the maximum dimension of %s. Try increasing " "'max_dimension' or additively separating the expression." 
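
Taken together, the options introduced so far (grid and linear-model-tree domain
partitioning, additive decomposition with a minimum dimension, and the
max_dimension guard) compose roughly as in the following sketch. The toy model
and the particular option values are illustrative only and do not come from the
patches; the calls themselves mirror the test usage as of this point in the
series, and the reformulate-and-solve step follows the integration test (any
MILP solver can stand in for Gurobi).

    from pyomo.environ import (
        ConcreteModel,
        Constraint,
        Objective,
        SolverFactory,
        TransformationFactory,
        Var,
        log,
    )
    from pyomo.contrib.piecewise.transform.nonlinear_to_pwl import (
        DomainPartitioningMethod,
    )

    m = ConcreteModel()
    # Every variable appearing in an approximated expression must be bounded.
    m.x = Var(bounds=(1, 10))
    m.y = Var(bounds=(0.5, 3))
    m.c = Constraint(expr=log(m.x) + m.y**3 <= 4)
    m.obj = Objective(expr=m.x + m.y)

    n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl')
    n_to_pwl.apply_to(
        m,
        num_points=4,
        domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID,
        # Alternatively (requires the 'lineartree' and 'sklearn' packages):
        # domain_partitioning_method=DomainPartitioningMethod.LINEAR_MODEL_TREE_RANDOM,
        # linear_tree_max_depth=3,
        # Approximate log(x) and y**3 with separate 1-D piecewise-linear
        # functions instead of one 2-D function over (x, y).
        additively_decompose=True,
        min_dimension_to_additively_decompose=2,
        # Refuse (with an error) to approximate any single term involving more
        # than 5 variables.
        max_dimension=5,
    )

    # The original constraint is deactivated; its approximation can be looked up.
    pw_c = n_to_pwl.get_transformed_component(m.c)
    assert n_to_pwl.get_src_component(pw_c) is m.c

    # The approximated model still contains piecewise-linear expressions;
    # reformulate to a MILP before handing it to a solver.
    TransformationFactory('contrib.piecewise.outer_repn_gdp').apply_to(m)
    TransformationFactory('gdp.bigm').apply_to(m)
    SolverFactory('gurobi').solve(m)
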
From c61a7052d680a57d5e67e24500359fdcf9ce2163 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 5 Aug 2024 15:36:06 -0600 Subject: [PATCH 25/46] Black --- .../contrib/piecewise/tests/test_nonlinear_to_pwl.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 36b646c6420..f5f4ebc8009 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -238,23 +238,23 @@ def test_error_for_non_separable_exceeding_max_dimension(self): n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') with self.assertRaisesRegex( - ValueError, - "Not approximating expression for component 'ick' as " - "it exceeds the maximum dimension of 4. Try increasing " - "'max_dimension' or additively separating the expression." + ValueError, + "Not approximating expression for component 'ick' as " + "it exceeds the maximum dimension of 4. Try increasing " + "'max_dimension' or additively separating the expression.", ): n_to_pwl.apply_to( m, num_points=3, domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, - max_dimension=4 + max_dimension=4, ) def test_do_not_additively_decompose_below_min_dimension(self): m = ConcreteModel() m.x = Var([0, 1, 2, 3, 4], bounds=(-4, 5)) m.c = Constraint(expr=m.x[0] * m.x[1] + m.x[3] <= 4) - + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') n_to_pwl.apply_to( m, From 4145590e1b80aeed282e8ab757b348bdb416a6f8 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 5 Aug 2024 15:56:21 -0600 Subject: [PATCH 26/46] Making a lot of helper functions private --- .../piecewise/transform/nonlinear_to_pwl.py | 46 ++++++++++--------- 1 file changed, 24 insertions(+), 22 deletions(-) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index 30797b5318e..8b73ec00678 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -90,7 +90,7 @@ def __init__(self): Block.register_private_data_initializer(_NonlinearToPWLTransformationData) -def get_random_point_grid(bounds, n, func, config, seed=42): +def _get_random_point_grid(bounds, n, func, config, seed=42): # Generate randomized grid of points linspaces = [] for lb, ub in bounds: @@ -99,7 +99,7 @@ def get_random_point_grid(bounds, n, func, config, seed=42): return list(itertools.product(*linspaces)) -def get_uniform_point_grid(bounds, n, func, config): +def _get_uniform_point_grid(bounds, n, func, config): # Generate non-randomized grid of points linspaces = [] for lb, ub in bounds: @@ -112,17 +112,17 @@ def get_uniform_point_grid(bounds, n, func, config): return list(itertools.product(*linspaces)) -def get_points_lmt_random_sample(bounds, n, func, config, seed=42): - points = get_random_point_grid(bounds, n, func, config, seed=seed) - return get_points_lmt(points, bounds, func, config, seed) +def _get_points_lmt_random_sample(bounds, n, func, config, seed=42): + points = _get_random_point_grid(bounds, n, func, config, seed=seed) + return _get_points_lmt(points, bounds, func, config, seed) -def get_points_lmt_uniform_sample(bounds, n, func, config, seed=42): - points = get_uniform_point_grid(bounds, n, func, config) - return get_points_lmt(points, bounds, func, config, seed) +def _get_points_lmt_uniform_sample(bounds, n, func, config, seed=42): + 
points = _get_uniform_point_grid(bounds, n, func, config) + return _get_points_lmt(points, bounds, func, config, seed) -def get_points_lmt(points, bounds, func, config, seed): +def _get_points_lmt(points, bounds, func, config, seed): x_list = np.array(points) y_list = [] @@ -142,24 +142,24 @@ def get_points_lmt(points, bounds, func, config, seed): regr.fit(x_list, y_list) # ESJ TODO: we actually only needs leaves from here... - leaves, splits, thresholds = parse_linear_tree_regressor(regr, bounds) + leaves, splits, thresholds = _parse_linear_tree_regressor(regr, bounds) # This was originally part of the LMT_Model_component and used to calculate # avg_leaves for the output data. TODO: get this back # self.total_leaves += len(leaves) # bound_point_list = lmt.generate_bound(leaves) - bound_point_list = generate_bound_points(leaves, bounds) + bound_point_list = _generate_bound_points(leaves, bounds) # duct tape to fix possible issues from unknown bugs. TODO should this go # here? return bound_point_list _partition_method_dispatcher = { - DomainPartitioningMethod.RANDOM_GRID: get_random_point_grid, - DomainPartitioningMethod.UNIFORM_GRID: get_uniform_point_grid, - DomainPartitioningMethod.LINEAR_MODEL_TREE_UNIFORM: get_points_lmt_uniform_sample, - DomainPartitioningMethod.LINEAR_MODEL_TREE_RANDOM: get_points_lmt_random_sample, + DomainPartitioningMethod.RANDOM_GRID: _get_random_point_grid, + DomainPartitioningMethod.UNIFORM_GRID: _get_uniform_point_grid, + DomainPartitioningMethod.LINEAR_MODEL_TREE_UNIFORM: _get_points_lmt_uniform_sample, + DomainPartitioningMethod.LINEAR_MODEL_TREE_RANDOM: _get_points_lmt_random_sample, } @@ -194,7 +194,7 @@ def _get_pwl_function_approximation(func, config, bounds): # Given a leaves dict (as generated by parse_tree) and a list of tuples # representing variable bounds, generate the set of vertices separating each # subset of the domain -def generate_bound_points(leaves, bounds): +def _generate_bound_points(leaves, bounds): bound_points = [] for leaf in leaves.values(): lower_corner_list = [] @@ -226,7 +226,7 @@ def generate_bound_points(leaves, bounds): # Parse a LinearTreeRegressor and identify features such as bounds, slope, and # intercept for leaves. Return some dicts. 
-def parse_linear_tree_regressor(linear_tree_regressor, bounds): +def _parse_linear_tree_regressor(linear_tree_regressor, bounds): leaves = linear_tree_regressor.summary(only_leaves=True) splits = linear_tree_regressor.summary() @@ -248,12 +248,14 @@ def parse_linear_tree_regressor(linear_tree_regressor, bounds): node['left_leaves'].append(left_child_node) else: # traverse its left node by calling function to find all the # leaves from its left node - node['left_leaves'] = find_leaves(splits, leaves, splits[left_child_node]) + node['left_leaves'] = _find_leaves(splits, leaves, splits[left_child_node]) if right_child_node in leaves: # if right child is a leaf node node['right_leaves'].append(right_child_node) else: # traverse its right node by calling function to find all the # leaves from its right node - node['right_leaves'] = find_leaves(splits, leaves, splits[right_child_node]) + node['right_leaves'] = _find_leaves( + splits, leaves, splits[right_child_node] + ) # For each feature in each leaf, initialize lower and upper bounds to None for th in features: @@ -267,7 +269,7 @@ def parse_linear_tree_regressor(linear_tree_regressor, bounds): for leaf in splits[split]['right_leaves']: leaves[leaf]['bounds'][var][0] = splits[split]['th'] - leaves_new = reassign_none_bounds(leaves, bounds) + leaves_new = _reassign_none_bounds(leaves, bounds) splitting_thresholds = {} for split in splits: var = splits[split]['col'] @@ -299,7 +301,7 @@ def _additively_decompose_expr(input_expr, min_dimension): # Populate the "None" bounds with the bounding box bounds for a leaves-dict-tree # amalgamation. -def reassign_none_bounds(leaves, input_bounds): +def _reassign_none_bounds(leaves, input_bounds): L = np.array(list(leaves.keys())) features = np.arange(0, len(leaves[L[0]]['slope'])) @@ -312,7 +314,7 @@ def reassign_none_bounds(leaves, input_bounds): return leaves -def find_leaves(splits, leaves, input_node): +def _find_leaves(splits, leaves, input_node): root_node = input_node leaves_list = [] queue = [root_node] From 9e56e1dfa63c9e4248dc82079a1ae9a8d21af97b Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 5 Aug 2024 15:58:10 -0600 Subject: [PATCH 27/46] NFC: removing some comments --- pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index 8b73ec00678..8e5dd9d8219 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -106,7 +106,6 @@ def _get_uniform_point_grid(bounds, n, func, config): # Issues happen when exactly using the boundary nudge = (ub - lb) * 1e-4 linspaces.append( - # np.linspace(b[0], b[1], n) np.linspace(lb + nudge, ub - nudge, n) ) return list(itertools.product(*linspaces)) @@ -141,17 +140,9 @@ def _get_points_lmt(points, bounds, func, config, seed): ) regr.fit(x_list, y_list) - # ESJ TODO: we actually only needs leaves from here... leaves, splits, thresholds = _parse_linear_tree_regressor(regr, bounds) - # This was originally part of the LMT_Model_component and used to calculate - # avg_leaves for the output data. TODO: get this back - # self.total_leaves += len(leaves) - - # bound_point_list = lmt.generate_bound(leaves) bound_point_list = _generate_bound_points(leaves, bounds) - # duct tape to fix possible issues from unknown bugs. TODO should this go - # here? 
return bound_point_list @@ -203,7 +194,6 @@ def _generate_bound_points(leaves, bounds): lower_corner_list.append(var_bound[0]) upper_corner_list.append(var_bound[1]) - # Duct tape to fix issues from unknown bugs for pt in [lower_corner_list, upper_corner_list]: for i in range(len(pt)): # clamp within bounds range From 738a8f8fba874cac313a662927e95d752a3102a9 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 5 Aug 2024 15:58:38 -0600 Subject: [PATCH 28/46] Apparently black has an opinion about that --- pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index 8e5dd9d8219..5755286eabe 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -105,9 +105,7 @@ def _get_uniform_point_grid(bounds, n, func, config): for lb, ub in bounds: # Issues happen when exactly using the boundary nudge = (ub - lb) * 1e-4 - linspaces.append( - np.linspace(lb + nudge, ub - nudge, n) - ) + linspaces.append(np.linspace(lb + nudge, ub - nudge, n)) return list(itertools.product(*linspaces)) From 92b8ba1beefdf03db846df191927e21f759912e1 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 5 Aug 2024 16:04:19 -0600 Subject: [PATCH 29/46] DomainPartitioningMethod imports with the module --- pyomo/contrib/piecewise/__init__.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pyomo/contrib/piecewise/__init__.py b/pyomo/contrib/piecewise/__init__.py index 5fc75dcd091..1bdb7a70676 100644 --- a/pyomo/contrib/piecewise/__init__.py +++ b/pyomo/contrib/piecewise/__init__.py @@ -33,7 +33,10 @@ from pyomo.contrib.piecewise.transform.convex_combination import ( ConvexCombinationTransformation, ) -from pyomo.contrib.piecewise.transform.nonlinear_to_pwl import NonlinearToPWL +from pyomo.contrib.piecewise.transform.nonlinear_to_pwl import ( + DomainPartitioningMethod, + NonlinearToPWL, +) from pyomo.contrib.piecewise.transform.nested_inner_repn import ( NestedInnerRepresentationGDPTransformation, ) From 953a39dd0cd354f4d129d84ab3f385d47adcb897 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Tue, 6 Aug 2024 09:41:48 -0600 Subject: [PATCH 30/46] Skipping tests when numpy not available --- pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index f5f4ebc8009..ed3dfd2129f 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -88,6 +88,7 @@ def check_pw_linear_log_x(self, m, pwlf, x1, x2, x3): self.assertEqual(len(nonlinear), 1) self.assertIn(m.cons, nonlinear) + @skipUnless(numpy_available, "Numpy is not available") def test_log_constraint_uniform_grid(self): m = self.make_model() @@ -111,6 +112,7 @@ def test_log_constraint_uniform_grid(self): (x1, x2, x3) = 1.0009, 5.5, 9.9991 self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) + @skipUnless(numpy_available, "Numpy is not available") def test_log_constraint_random_grid(self): m = self.make_model() @@ -137,6 +139,7 @@ def test_log_constraint_random_grid(self): x3 = 9.556428757689245 self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) + @skipUnless(numpy_available, "Numpy is not available") def test_do_not_transform_quadratic_constraint(self): m = self.make_model() 
m.quad = Constraint(expr=m.x**2 <= 9) @@ -168,6 +171,7 @@ def test_do_not_transform_quadratic_constraint(self): # neither is the linear one self.assertTrue(m.lin.active) + @skipUnless(numpy_available, "Numpy is not available") def test_constraint_target(self): m = self.make_model() m.quad = Constraint(expr=m.x**2 <= 9) @@ -250,6 +254,7 @@ def test_error_for_non_separable_exceeding_max_dimension(self): max_dimension=4, ) + @skipUnless(numpy_available, "Numpy is not available") def test_do_not_additively_decompose_below_min_dimension(self): m = ConcreteModel() m.x = Var([0, 1, 2, 3, 4], bounds=(-4, 5)) @@ -311,6 +316,7 @@ def check_pw_linear_paraboloid(self, m, pwlf, x1, x2, y1, y2): nonlinear = n_to_pwl.get_transformed_nonlinear_objectives(m) self.assertEqual(len(nonlinear), 0) + @skipUnless(numpy_available, "Numpy is not available") def test_paraboloid_objective_uniform_grid(self): m = self.make_paraboloid_model() @@ -337,6 +343,7 @@ def test_paraboloid_objective_uniform_grid(self): self.check_pw_linear_paraboloid(m, pwlf, x1, x2, y1, y2) + @skipUnless(numpy_available, "Numpy is not available") def test_objective_target(self): m = self.make_paraboloid_model() From 68b15a5bafc65b224d6661704f9f9fe6b64e22cf Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Tue, 6 Aug 2024 09:43:23 -0600 Subject: [PATCH 31/46] HA, she can code... Fixing dumb typos in previous commit --- .../piecewise/tests/test_nonlinear_to_pwl.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index ed3dfd2129f..fec3245edd3 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -9,7 +9,7 @@ # This software is distributed under the 3-clause BSD License. 
# ___________________________________________________________________________ -from pyomo.common.dependencies import attempt_import, scipy_available +from pyomo.common.dependencies import attempt_import, scipy_available, numpy_available import pyomo.common.unittest as unittest from pyomo.contrib.piecewise import PiecewiseLinearFunction from pyomo.contrib.piecewise.transform.nonlinear_to_pwl import ( @@ -88,7 +88,7 @@ def check_pw_linear_log_x(self, m, pwlf, x1, x2, x3): self.assertEqual(len(nonlinear), 1) self.assertIn(m.cons, nonlinear) - @skipUnless(numpy_available, "Numpy is not available") + @unittest.skipUnless(numpy_available, "Numpy is not available") def test_log_constraint_uniform_grid(self): m = self.make_model() @@ -112,7 +112,7 @@ def test_log_constraint_uniform_grid(self): (x1, x2, x3) = 1.0009, 5.5, 9.9991 self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) - @skipUnless(numpy_available, "Numpy is not available") + @unittest.skipUnless(numpy_available, "Numpy is not available") def test_log_constraint_random_grid(self): m = self.make_model() @@ -139,7 +139,7 @@ def test_log_constraint_random_grid(self): x3 = 9.556428757689245 self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) - @skipUnless(numpy_available, "Numpy is not available") + @unittest.skipUnless(numpy_available, "Numpy is not available") def test_do_not_transform_quadratic_constraint(self): m = self.make_model() m.quad = Constraint(expr=m.x**2 <= 9) @@ -171,7 +171,7 @@ def test_do_not_transform_quadratic_constraint(self): # neither is the linear one self.assertTrue(m.lin.active) - @skipUnless(numpy_available, "Numpy is not available") + @unittest.skipUnless(numpy_available, "Numpy is not available") def test_constraint_target(self): m = self.make_model() m.quad = Constraint(expr=m.x**2 <= 9) @@ -254,7 +254,7 @@ def test_error_for_non_separable_exceeding_max_dimension(self): max_dimension=4, ) - @skipUnless(numpy_available, "Numpy is not available") + @unittest.skipUnless(numpy_available, "Numpy is not available") def test_do_not_additively_decompose_below_min_dimension(self): m = ConcreteModel() m.x = Var([0, 1, 2, 3, 4], bounds=(-4, 5)) @@ -316,7 +316,7 @@ def check_pw_linear_paraboloid(self, m, pwlf, x1, x2, y1, y2): nonlinear = n_to_pwl.get_transformed_nonlinear_objectives(m) self.assertEqual(len(nonlinear), 0) - @skipUnless(numpy_available, "Numpy is not available") + @unittest.skipUnless(numpy_available, "Numpy is not available") def test_paraboloid_objective_uniform_grid(self): m = self.make_paraboloid_model() @@ -343,7 +343,7 @@ def test_paraboloid_objective_uniform_grid(self): self.check_pw_linear_paraboloid(m, pwlf, x1, x2, y1, y2) - @skipUnless(numpy_available, "Numpy is not available") + @unittest.skipUnless(numpy_available, "Numpy is not available") def test_objective_target(self): m = self.make_paraboloid_model() From dabafbf4a08ab5f72157dd1488b39c81a5a0b0c1 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Tue, 6 Aug 2024 09:46:06 -0600 Subject: [PATCH 32/46] Skipping 3 more tests if scipy is not available --- pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index fec3245edd3..67d0bb9f098 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -255,6 +255,7 @@ def test_error_for_non_separable_exceeding_max_dimension(self): ) @unittest.skipUnless(numpy_available, 
"Numpy is not available") + @unittest.skipUnless(scipy_available, "Scipy is not available") def test_do_not_additively_decompose_below_min_dimension(self): m = ConcreteModel() m.x = Var([0, 1, 2, 3, 4], bounds=(-4, 5)) @@ -317,6 +318,7 @@ def check_pw_linear_paraboloid(self, m, pwlf, x1, x2, y1, y2): self.assertEqual(len(nonlinear), 0) @unittest.skipUnless(numpy_available, "Numpy is not available") + @unittest.skipUnless(scipy_available, "Scipy is not available") def test_paraboloid_objective_uniform_grid(self): m = self.make_paraboloid_model() @@ -344,6 +346,7 @@ def test_paraboloid_objective_uniform_grid(self): self.check_pw_linear_paraboloid(m, pwlf, x1, x2, y1, y2) @unittest.skipUnless(numpy_available, "Numpy is not available") + @unittest.skipUnless(scipy_available, "Scipy is not available") def test_objective_target(self): m = self.make_paraboloid_model() From ead6f9385062f059318c90562dd8a3604a0bf8f1 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 12 Aug 2024 09:48:21 -0600 Subject: [PATCH 33/46] Adding a (currently failing) test for cloning a transformed model --- .../piecewise/tests/test_nonlinear_to_pwl.py | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 67d0bb9f098..81a1e186234 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -112,6 +112,33 @@ def test_log_constraint_uniform_grid(self): (x1, x2, x3) = 1.0009, 5.5, 9.9991 self.check_pw_linear_log_x(m, pwlf, x1, x2, x3) + @unittest.skipUnless(numpy_available, "Numpy is not available") + def test_clone_transformed_model(self): + m = self.make_model() + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + n_to_pwl.apply_to( + m, + num_points=3, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, + ) + + twin = m.clone() + + # cons is transformed + self.assertFalse(twin.cons.active) + + pwlf = list( + twin.component_data_objects(PiecewiseLinearFunction, descend_into=True) + ) + self.assertEqual(len(pwlf), 1) + pwlf = pwlf[0] + + points = [(1.0009,), (5.5,), (9.9991,)] + (x1, x2, x3) = 1.0009, 5.5, 9.9991 + + self.check_pw_linear_log_x(twin, pwlf, x1, x2, x3) + @unittest.skipUnless(numpy_available, "Numpy is not available") def test_log_constraint_random_grid(self): m = self.make_model() From 7bd2118ef42a33795cbbfd269cfdea6bd25e1ec5 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 12 Aug 2024 20:32:09 -0600 Subject: [PATCH 34/46] Fixing two bugs with cloning transformed PiecewiseLinearFunctions --- .../piecewise/piecewise_linear_function.py | 16 +++++++++++++--- .../piecewise/tests/test_nonlinear_to_pwl.py | 4 ++-- .../piecewise/transform/nonlinear_to_pwl.py | 10 +++++----- .../transform/piecewise_to_mip_visitor.py | 2 +- 4 files changed, 21 insertions(+), 11 deletions(-) diff --git a/pyomo/contrib/piecewise/piecewise_linear_function.py b/pyomo/contrib/piecewise/piecewise_linear_function.py index e92edacc756..cdd977bbda1 100644 --- a/pyomo/contrib/piecewise/piecewise_linear_function.py +++ b/pyomo/contrib/piecewise/piecewise_linear_function.py @@ -43,6 +43,10 @@ def __init__(self, component=None): BlockData.__init__(self, component) with self._declare_reserved_components(): + # map of PiecewiseLinearExpression objects to integer indices in + # self._expressions + self._expression_ids = ComponentMap() + # index is monotonically increasing integer self._expressions = 
Expression(NonNegativeIntegers) self._transformed_exprs = ComponentMap() self._simplices = None @@ -63,8 +67,9 @@ def __call__(self, *args): return self._evaluate(*args) else: expr = PiecewiseLinearExpression(args, self) - idx = id(expr) + idx = len(self._expressions) self._expressions[idx] = expr + self._expression_ids[expr] = idx return self._expressions[idx] def _evaluate(self, *args): @@ -134,7 +139,12 @@ def map_transformation_var(self, pw_expr, v): Records on the PiecewiseLinearFunction object that the transformed form of the PiecewiseLinearExpression object pw_expr is the Var v. """ - self._transformed_exprs[self._expressions[id(pw_expr)]] = v + if pw_expr not in self._expression_ids: + raise DeveloperError( + "ID of PiecewiseLinearExpression '%s' not in the _expression_ids " + "dictionary of PiecewiseLinearFunction '%s'" % (pw_expr, self) + ) + self._transformed_exprs[self._expressions[self._expression_ids[pw_expr]]] = v def get_transformation_var(self, pw_expr): """ @@ -159,7 +169,7 @@ def __call__(self, x): class _multivariate_linear_functor(AutoSlots.Mixin): - __slots__ = 'normal' + __slots__ = ('normal',) def __init__(self, normal): self.normal = normal diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 81a1e186234..2530fda8fc2 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -77,7 +77,7 @@ def check_pw_linear_log_x(self, m, pwlf, x1, x2, x3): self.assertEqual(len(pwlf._expressions), 1) new_cons = n_to_pwl.get_transformed_component(m.cons) self.assertTrue(new_cons.active) - self.assertIs(new_cons.body, pwlf._expressions[id(new_cons.body.expr)]) + self.assertIs(new_cons.body, pwlf._expressions[pwlf._expression_ids[new_cons.body.expr]]) self.assertIsNone(new_cons.ub) self.assertEqual(new_cons.lb, 0.35) self.assertIs(n_to_pwl.get_src_component(new_cons), m.cons) @@ -331,7 +331,7 @@ def check_pw_linear_paraboloid(self, m, pwlf, x1, x2, y1, y2): self.assertEqual(len(pwlf._expressions), 1) new_obj = n_to_pwl.get_transformed_component(m.obj) self.assertTrue(new_obj.active) - self.assertIs(new_obj.expr, pwlf._expressions[id(new_obj.expr.expr)]) + self.assertIs(new_obj.expr, pwlf._expressions[pwlf._expression_ids[new_obj.expr.expr]]) self.assertIs(n_to_pwl.get_src_component(new_obj), m.obj) quadratic = n_to_pwl.get_transformed_quadratic_constraints(m) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index 5755286eabe..c4d7c801ba2 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -641,7 +641,7 @@ def _approximate_expression( return None, expr_type # Additively decompose expr and work on the pieces - pwl_func = 0 + pwl_summands = [] for k, subexpr in enumerate( _additively_decompose_expr( expr, config.min_dimension_to_additively_decompose @@ -661,10 +661,10 @@ def _approximate_expression( "'max_dimension' or additively separating the expression." 
% (obj.name, config.max_dimension) ) - pwl_func = pwl_func + subexpr + pwl_summands.append(subexpr) continue elif not self._needs_approximating(subexpr, approximate_quadratic)[1]: - pwl_func = pwl_func + subexpr + pwl_summands.append(subexpr) continue # else we approximate subexpr @@ -684,13 +684,13 @@ def eval_expr(*args): # implementation of iadd and dereference the ExpressionData holding # the PiecewiseLinearExpression that we later transform my remapping # it to a Var... - pwl_func = pwl_func + pwlf(*expr_vars) + pwl_summands.append(pwlf(*expr_vars)) # restore var values for v, val in orig_values.items(): v.value = val - return pwl_func, expr_type + return sum(pwl_summands), expr_type def get_src_component(self, cons): data = cons.parent_block().private_data().src_component diff --git a/pyomo/contrib/piecewise/transform/piecewise_to_mip_visitor.py b/pyomo/contrib/piecewise/transform/piecewise_to_mip_visitor.py index fae95a564bf..d40bbd8bb34 100644 --- a/pyomo/contrib/piecewise/transform/piecewise_to_mip_visitor.py +++ b/pyomo/contrib/piecewise/transform/piecewise_to_mip_visitor.py @@ -50,7 +50,7 @@ def exitNode(self, node, data): substitute_var = self.transform_pw_linear_expression( node, parent, self.transBlock ) - parent._expressions[id(node)] = substitute_var + parent._expressions[parent._expression_ids[node]] = substitute_var return node finalizeResult = None From 208c9d6ef2e6b464bbad45981e4ed954b42f9892 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 12 Aug 2024 20:32:34 -0600 Subject: [PATCH 35/46] Black --- pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 2530fda8fc2..2f13532b63f 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -77,7 +77,9 @@ def check_pw_linear_log_x(self, m, pwlf, x1, x2, x3): self.assertEqual(len(pwlf._expressions), 1) new_cons = n_to_pwl.get_transformed_component(m.cons) self.assertTrue(new_cons.active) - self.assertIs(new_cons.body, pwlf._expressions[pwlf._expression_ids[new_cons.body.expr]]) + self.assertIs( + new_cons.body, pwlf._expressions[pwlf._expression_ids[new_cons.body.expr]] + ) self.assertIsNone(new_cons.ub) self.assertEqual(new_cons.lb, 0.35) self.assertIs(n_to_pwl.get_src_component(new_cons), m.cons) @@ -331,7 +333,9 @@ def check_pw_linear_paraboloid(self, m, pwlf, x1, x2, y1, y2): self.assertEqual(len(pwlf._expressions), 1) new_obj = n_to_pwl.get_transformed_component(m.obj) self.assertTrue(new_obj.active) - self.assertIs(new_obj.expr, pwlf._expressions[pwlf._expression_ids[new_obj.expr.expr]]) + self.assertIs( + new_obj.expr, pwlf._expressions[pwlf._expression_ids[new_obj.expr.expr]] + ) self.assertIs(n_to_pwl.get_src_component(new_obj), m.obj) quadratic = n_to_pwl.get_transformed_quadratic_constraints(m) From defc953b3a120cfbed1d44356e7a37381664ad12 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 12 Aug 2024 20:36:14 -0600 Subject: [PATCH 36/46] Adding a test for cloning a multivariate piecewise linear function too --- .../piecewise/tests/test_nonlinear_to_pwl.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 2f13532b63f..a42846c2802 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ 
b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -376,6 +376,36 @@ def test_paraboloid_objective_uniform_grid(self): self.check_pw_linear_paraboloid(m, pwlf, x1, x2, y1, y2) + @unittest.skipUnless(numpy_available, "Numpy is not available") + @unittest.skipUnless(scipy_available, "Scipy is not available") + def test_multivariate_clone(self): + m = self.make_paraboloid_model() + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + n_to_pwl.apply_to( + m, + num_points=2, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, + ) + + twin = m.clone() + + # check obj is transformed + self.assertFalse(twin.obj.active) + + pwlf = list( + twin.component_data_objects(PiecewiseLinearFunction, descend_into=True) + ) + self.assertEqual(len(pwlf), 1) + pwlf = pwlf[0] + + x1 = 0.00030000000000000003 + x2 = 2.9997 + y1 = 1.0006 + y2 = 6.9994 + + self.check_pw_linear_paraboloid(twin, pwlf, x1, x2, y1, y2) + @unittest.skipUnless(numpy_available, "Numpy is not available") @unittest.skipUnless(scipy_available, "Scipy is not available") def test_objective_target(self): From 46e5bfb16b8ffc86c19be2314b46233e64c6f0d1 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Tue, 13 Aug 2024 09:52:36 -0600 Subject: [PATCH 37/46] Adding linear-tree to optional dependencies --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 6d28e4d184b..4e2b8bd6042 100644 --- a/setup.py +++ b/setup.py @@ -262,6 +262,7 @@ def __ne__(self, other): 'optional': [ 'dill', # No direct use, but improves lambda pickle 'ipython', # contrib.viewer + 'linear-tree', # contrib.piecewise # Note: matplotlib 3.6.1 has bug #24127, which breaks # seaborn's histplot (triggering parmest failures) # Note: minimum version from community_detection use of From fc5ebc86cf940caf0579577a870f34866f64d0a2 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Tue, 13 Aug 2024 09:58:56 -0600 Subject: [PATCH 38/46] Black being black --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 4e2b8bd6042..b1f4a60c4c6 100644 --- a/setup.py +++ b/setup.py @@ -262,7 +262,7 @@ def __ne__(self, other): 'optional': [ 'dill', # No direct use, but improves lambda pickle 'ipython', # contrib.viewer - 'linear-tree', # contrib.piecewise + 'linear-tree', # contrib.piecewise # Note: matplotlib 3.6.1 has bug #24127, which breaks # seaborn's histplot (triggering parmest failures) # Note: minimum version from community_detection use of From 844b09acad540ee914f866b662a2959ddfac46a7 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Tue, 13 Aug 2024 10:16:27 -0600 Subject: [PATCH 39/46] Adding (failing) test for sampling discrete domains --- .../piecewise/tests/test_nonlinear_to_pwl.py | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index a42846c2802..1428fa4a810 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -9,7 +9,11 @@ # This software is distributed under the 3-clause BSD License. 
# ___________________________________________________________________________ +from io import StringIO +import logging + from pyomo.common.dependencies import attempt_import, scipy_available, numpy_available +from pyomo.common.log import LoggingIntercept import pyomo.common.unittest as unittest from pyomo.contrib.piecewise import PiecewiseLinearFunction from pyomo.contrib.piecewise.transform.nonlinear_to_pwl import ( @@ -23,9 +27,11 @@ ) from pyomo.core.expr.numeric_expr import SumExpression from pyomo.environ import ( + Binary, ConcreteModel, Var, Constraint, + Integers, TransformationFactory, log, Objective, @@ -303,6 +309,24 @@ def test_do_not_additively_decompose_below_min_dimension(self): # This is only approximated by one pwlf: self.assertIsInstance(transformed_c.body, _ExpressionData) + @unittest.skipUnless(numpy_available, "Numpy is not available") + def test_uniform_sampling_discrete_vars(self): + m = ConcreteModel() + m.x = Var(['rocky', 'bullwinkle'], domain=Binary) + m.y = Var(domain=Integers, bounds=(0, 5)) + m.c = Constraint(expr=m.x['rocky'] * m.x['bullwinkle'] + m.y <= 4) + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + output = StringIO() + with LoggingIntercept(output, 'pyomo.contrib.piecewise', logging.WARNING): + n_to_pwl.apply_to( + m, + num_points=3, + additively_decompose=False, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, + ) + self.assertEqual(output.getvalue().strip()) + class TestNonlinearToPWL_2D(unittest.TestCase): def make_paraboloid_model(self): From 11326eb3fb73c43514c8be73b45f7a923dd46a23 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Tue, 13 Aug 2024 10:23:21 -0600 Subject: [PATCH 40/46] Only use linear-tree for pypi, it appears to not be on conda --- .github/workflows/test_pr_and_main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test_pr_and_main.yml b/.github/workflows/test_pr_and_main.yml index cc9760cbe5d..765e50826d0 100644 --- a/.github/workflows/test_pr_and_main.yml +++ b/.github/workflows/test_pr_and_main.yml @@ -29,7 +29,7 @@ defaults: env: PYTHONWARNINGS: ignore::UserWarning PYTHON_CORE_PKGS: wheel - PYPI_ONLY: z3-solver + PYPI_ONLY: z3-solver linear-tree PYPY_EXCLUDE: scipy numdifftools seaborn statsmodels CACHE_VER: v221013.1 NEOS_EMAIL: tests@pyomo.org From 3c233e13a2a20db4b5fece30792fe3d82b536498 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Tue, 13 Aug 2024 12:09:42 -0600 Subject: [PATCH 41/46] Whoops, actually intercepting the logging stream I want to make sure is empty... 
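For context, 'pyomo.core' is the logger Pyomo's variable machinery warns on
when a value outside a variable's domain is assigned, so that is the stream
that has to stay quiet here. A minimal sketch of the idea, assuming the usual
warn-rather-than-raise behavior for off-domain assignments (illustration only,
not part of this patch):

    from io import StringIO
    import logging

    from pyomo.common.log import LoggingIntercept
    from pyomo.environ import ConcreteModel, Integers, Var

    m = ConcreteModel()
    m.y = Var(domain=Integers, bounds=(0, 5))

    out = StringIO()
    with LoggingIntercept(out, 'pyomo.core', logging.WARNING):
        # off-domain value: expected to emit a warning on 'pyomo.core'
        m.y.set_value(2.5)
    # out.getvalue() is non-empty if the warning was emitted; the
    # transformation tests assert the opposite (an empty stream)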
--- pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 1428fa4a810..76246688cdf 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -318,14 +318,14 @@ def test_uniform_sampling_discrete_vars(self): n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') output = StringIO() - with LoggingIntercept(output, 'pyomo.contrib.piecewise', logging.WARNING): + with LoggingIntercept(output, 'pyomo.core', logging.WARNING): n_to_pwl.apply_to( m, num_points=3, additively_decompose=False, domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, ) - self.assertEqual(output.getvalue().strip()) + self.assertEqual(output.getvalue().strip(), "") class TestNonlinearToPWL_2D(unittest.TestCase): From e890a254bae81b0f9fb0a67a0d0b1d72c97e3ee0 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Tue, 13 Aug 2024 17:44:11 -0600 Subject: [PATCH 42/46] Not sampling outside of discrete domains in uniform_grid and random_grid --- .../piecewise/tests/test_nonlinear_to_pwl.py | 78 +++++++++++++++++++ .../piecewise/transform/nonlinear_to_pwl.py | 40 ++++++---- 2 files changed, 103 insertions(+), 15 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 76246688cdf..07a0f090cd6 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -325,8 +325,86 @@ def test_uniform_sampling_discrete_vars(self): additively_decompose=False, domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, ) + # No warnings (this is to check that we aren't emitting a bunch of + # warnings about setting variables outside of their domains) self.assertEqual(output.getvalue().strip(), "") + transformed_c = n_to_pwl.get_transformed_component(m.c) + pwlf = transformed_c.body.expr.pw_linear_function + + # should sample 0, 1 for th m.x's + # should sample 0, 2, 5 for m.y (because of half to even rounding (*sigh*)) + points = set(pwlf._points) + self.assertEqual(len(points), 12) + for x in [0, 1]: + for y in [0, 1]: + for z in [0, 2, 5]: + self.assertIn((x, y, z), points) + + @unittest.skipUnless(numpy_available, "Numpy is not available") + def test_uniform_sampling_discrete_vars(self): + m = ConcreteModel() + m.x = Var(['rocky', 'bullwinkle'], domain=Binary) + m.y = Var(domain=Integers, bounds=(0, 5)) + m.c = Constraint(expr=m.x['rocky'] * m.x['bullwinkle'] + m.y <= 4) + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + output = StringIO() + with LoggingIntercept(output, 'pyomo.core', logging.WARNING): + n_to_pwl.apply_to( + m, + num_points=3, + additively_decompose=False, + domain_partitioning_method=DomainPartitioningMethod.UNIFORM_GRID, + ) + # No warnings (this is to check that we aren't emitting a bunch of + # warnings about setting variables outside of their domains) + self.assertEqual(output.getvalue().strip(), "") + + transformed_c = n_to_pwl.get_transformed_component(m.c) + pwlf = transformed_c.body.expr.pw_linear_function + + # should sample 0, 1 for th m.x's + # should sample 0, 2, 5 for m.y (because of half to even rounding (*sigh*)) + points = set(pwlf._points) + self.assertEqual(len(points), 12) + for x in [0, 1]: + for y in [0, 1]: + for z in [0, 2, 5]: + 
self.assertIn((x, y, z), points) + + @unittest.skipUnless(numpy_available, "Numpy is not available") + def test_random_sampling_discrete_vars(self): + m = ConcreteModel() + m.x = Var(['rocky', 'bullwinkle'], domain=Binary) + m.y = Var(domain=Integers, bounds=(0, 5)) + m.c = Constraint(expr=m.x['rocky'] * m.x['bullwinkle'] + m.y <= 4) + + n_to_pwl = TransformationFactory('contrib.piecewise.nonlinear_to_pwl') + output = StringIO() + with LoggingIntercept(output, 'pyomo.core', logging.WARNING): + n_to_pwl.apply_to( + m, + num_points=3, + additively_decompose=False, + domain_partitioning_method=DomainPartitioningMethod.RANDOM_GRID, + ) + # No warnings (this is to check that we aren't emitting a bunch of + # warnings about setting variables outside of their domains) + self.assertEqual(output.getvalue().strip(), "") + + transformed_c = n_to_pwl.get_transformed_component(m.c) + pwlf = transformed_c.body.expr.pw_linear_function + + # should sample 0, 1 for th m.x's + # Happen to get 0, 1, 5 for m.y + points = set(pwlf._points) + self.assertEqual(len(points), 12) + for x in [0, 1]: + for y in [0, 1]: + for z in [0, 1, 5]: + self.assertIn((x, y, z), points) + class TestNonlinearToPWL_2D(unittest.TestCase): def make_paraboloid_model(self): diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index c4d7c801ba2..03f006b66bc 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -93,19 +93,29 @@ def __init__(self): def _get_random_point_grid(bounds, n, func, config, seed=42): # Generate randomized grid of points linspaces = [] - for lb, ub in bounds: - np.random.seed(seed) - linspaces.append(np.random.uniform(lb, ub, n)) + np.random.seed(seed) + for (lb, ub), is_integer in bounds: + if not is_integer: + linspaces.append(np.random.uniform(lb, ub, n)) + else: + size = min(n, ub - lb + 1) + linspaces.append(np.random.choice(range(lb, ub + 1), size=size, + replace=False)) return list(itertools.product(*linspaces)) def _get_uniform_point_grid(bounds, n, func, config): # Generate non-randomized grid of points linspaces = [] - for lb, ub in bounds: - # Issues happen when exactly using the boundary - nudge = (ub - lb) * 1e-4 - linspaces.append(np.linspace(lb + nudge, ub - nudge, n)) + for (lb, ub), is_integer in bounds: + if not is_integer: + # Issues happen when exactly using the boundary + nudge = (ub - lb) * 1e-4 + linspaces.append(np.linspace(lb + nudge, ub - nudge, n)) + else: + size = min(n, ub - lb + 1) + pts = np.linspace(lb, ub, size) + linspaces.append(np.array([round(i) for i in pts])) return list(itertools.product(*linspaces)) @@ -159,8 +169,8 @@ def _get_pwl_function_approximation(func, config, bounds): func: function to approximate config: ConfigDict for transformation, specifying domain_partitioning_method, num_points, and max_depth (if using linear trees) - bounds: list of tuples giving upper and lower bounds for each of func's - arguments + bounds: list of tuples giving upper and lower bounds and a boolean indicating + if the variable's domain is discrete or not, for each of func's arguments """ method = config.domain_partitioning_method n = config.num_points @@ -195,8 +205,8 @@ def _generate_bound_points(leaves, bounds): for pt in [lower_corner_list, upper_corner_list]: for i in range(len(pt)): # clamp within bounds range - pt[i] = max(pt[i], bounds[i][0]) - pt[i] = min(pt[i], bounds[i][1]) + pt[i] = max(pt[i], bounds[i][0][0]) + pt[i] = min(pt[i], 
bounds[i][0][1]) if tuple(lower_corner_list) not in bound_points: bound_points.append(tuple(lower_corner_list)) @@ -206,7 +216,7 @@ def _generate_bound_points(leaves, bounds): # This process should have gotten every interior bound point. However, all # but two of the corners of the overall bounding box should have been # missed. Let's fix that now. - for outer_corner in itertools.product(*bounds): + for outer_corner in itertools.product(*[b[0] for b in bounds]): if outer_corner not in bound_points: bound_points.append(outer_corner) return bound_points @@ -296,9 +306,9 @@ def _reassign_none_bounds(leaves, input_bounds): for l in L: for f in features: if leaves[l]['bounds'][f][0] == None: - leaves[l]['bounds'][f][0] = input_bounds[f][0] + leaves[l]['bounds'][f][0] = input_bounds[f][0][0] if leaves[l]['bounds'][f][1] == None: - leaves[l]['bounds'][f][1] = input_bounds[f][1] + leaves[l]['bounds'][f][1] = input_bounds[f][0][1] return leaves @@ -615,7 +625,7 @@ def _get_bounds_list(self, var_list, obj): "at least one bound" % (v.name, obj.name) ) else: - bounds.append(v.bounds) + bounds.append((v.bounds, v.is_integer())) return bounds def _needs_approximating(self, expr, approximate_quadratic): From 10b902603511dba62e93d03286304dab170cd541 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Tue, 13 Aug 2024 17:44:33 -0600 Subject: [PATCH 43/46] black --- pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py | 2 +- pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index 07a0f090cd6..bc8d7a40027 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -371,7 +371,7 @@ def test_uniform_sampling_discrete_vars(self): for x in [0, 1]: for y in [0, 1]: for z in [0, 2, 5]: - self.assertIn((x, y, z), points) + self.assertIn((x, y, z), points) @unittest.skipUnless(numpy_available, "Numpy is not available") def test_random_sampling_discrete_vars(self): diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index 03f006b66bc..a35231dd890 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -99,8 +99,9 @@ def _get_random_point_grid(bounds, n, func, config, seed=42): linspaces.append(np.random.uniform(lb, ub, n)) else: size = min(n, ub - lb + 1) - linspaces.append(np.random.choice(range(lb, ub + 1), size=size, - replace=False)) + linspaces.append( + np.random.choice(range(lb, ub + 1), size=size, replace=False) + ) return list(itertools.product(*linspaces)) @@ -169,7 +170,7 @@ def _get_pwl_function_approximation(func, config, bounds): func: function to approximate config: ConfigDict for transformation, specifying domain_partitioning_method, num_points, and max_depth (if using linear trees) - bounds: list of tuples giving upper and lower bounds and a boolean indicating + bounds: list of tuples giving upper and lower bounds and a boolean indicating if the variable's domain is discrete or not, for each of func's arguments """ method = config.domain_partitioning_method From 86d4fffc2413d3109df74f0b40c69f3c37d5529d Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Wed, 14 Aug 2024 16:48:56 -0600 Subject: [PATCH 44/46] not triggering scipy install on pypy --- .github/workflows/test_branches.yml | 4 ++-- 
.github/workflows/test_pr_and_main.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test_branches.yml b/.github/workflows/test_branches.yml index 03894a1cb20..a4f2f8128e9 100644 --- a/.github/workflows/test_branches.yml +++ b/.github/workflows/test_branches.yml @@ -21,8 +21,8 @@ defaults: env: PYTHONWARNINGS: ignore::UserWarning PYTHON_CORE_PKGS: wheel - PYPI_ONLY: z3-solver - PYPY_EXCLUDE: scipy numdifftools seaborn statsmodels + PYPI_ONLY: z3-solver linear-tree + PYPY_EXCLUDE: scipy numdifftools seaborn statsmodels linear-tree CACHE_VER: v221013.1 NEOS_EMAIL: tests@pyomo.org SRC_REF: ${{ github.head_ref || github.ref }} diff --git a/.github/workflows/test_pr_and_main.yml b/.github/workflows/test_pr_and_main.yml index 765e50826d0..2ca7e166fd8 100644 --- a/.github/workflows/test_pr_and_main.yml +++ b/.github/workflows/test_pr_and_main.yml @@ -30,7 +30,7 @@ env: PYTHONWARNINGS: ignore::UserWarning PYTHON_CORE_PKGS: wheel PYPI_ONLY: z3-solver linear-tree - PYPY_EXCLUDE: scipy numdifftools seaborn statsmodels + PYPY_EXCLUDE: scipy numdifftools seaborn statsmodels linear-tree CACHE_VER: v221013.1 NEOS_EMAIL: tests@pyomo.org SRC_REF: ${{ github.head_ref || github.ref }} From 5bda911a3f43739b2dc933fd350820521ce916d8 Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Thu, 15 Aug 2024 13:41:31 -0600 Subject: [PATCH 45/46] Skipping two more tests when scipy not available--this really should make the pypy tests pass --- pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py index bc8d7a40027..b937e09ce8b 100644 --- a/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/tests/test_nonlinear_to_pwl.py @@ -342,6 +342,7 @@ def test_uniform_sampling_discrete_vars(self): self.assertIn((x, y, z), points) @unittest.skipUnless(numpy_available, "Numpy is not available") + @unittest.skipUnless(scipy_available, "Scipy is not available") def test_uniform_sampling_discrete_vars(self): m = ConcreteModel() m.x = Var(['rocky', 'bullwinkle'], domain=Binary) @@ -374,6 +375,7 @@ def test_uniform_sampling_discrete_vars(self): self.assertIn((x, y, z), points) @unittest.skipUnless(numpy_available, "Numpy is not available") + @unittest.skipUnless(scipy_available, "Scipy is not available") def test_random_sampling_discrete_vars(self): m = ConcreteModel() m.x = Var(['rocky', 'bullwinkle'], domain=Binary) From 804ad0480d91105234246fac50fb02a98c7df54f Mon Sep 17 00:00:00 2001 From: Emma Johnson Date: Mon, 19 Aug 2024 09:28:06 -0600 Subject: [PATCH 46/46] Moving quadratic repn visitor to instance attribute on nonlinear to pwl transformation --- pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py index a35231dd890..588fa8298f6 100644 --- a/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py +++ b/pyomo/contrib/piecewise/transform/nonlinear_to_pwl.py @@ -66,12 +66,6 @@ class DomainPartitioningMethod(enum.IntEnum): LINEAR_MODEL_TREE_RANDOM = 4 -# This should be safe to use many times; declare it globally -_quadratic_repn_visitor = QuadraticRepnVisitor( - subexpression_cache={}, var_map={}, var_order={}, sorter=None -) - - class _NonlinearToPWLTransformationData(AutoSlots.Mixin): __slots__ = ( 
'transformed_component', @@ -502,6 +496,9 @@ def __init__(self): } self._transformation_blocks = {} self._transformation_block_set = ComponentSet() + self._quadratic_repn_visitor = QuadraticRepnVisitor( + subexpression_cache={}, var_map={}, var_order={}, sorter=None + ) def _apply_to(self, instance, **kwds): try: @@ -630,7 +627,7 @@ def _get_bounds_list(self, var_list, obj): return bounds def _needs_approximating(self, expr, approximate_quadratic): - repn = _quadratic_repn_visitor.walk_expression(expr) + repn = self._quadratic_repn_visitor.walk_expression(expr) if repn.nonlinear is None: if repn.quadratic is None: # Linear constraint. Always skip.