Fix a bug in gdp.bigm transformation for nested GDPs #3213

Merged (13 commits) on Apr 3, 2024
1 change: 1 addition & 0 deletions pyomo/contrib/gdpopt/tests/test_LBB.py
@@ -59,6 +59,7 @@ def test_infeasible_GDP(self):
self.assertIsNone(m.d.disjuncts[0].indicator_var.value)
self.assertIsNone(m.d.disjuncts[1].indicator_var.value)

@unittest.skipUnless(z3_available, "Z3 SAT solver is not available")
def test_infeasible_GDP_check_sat(self):
"""Test for infeasible GDP with check_sat option True."""
m = ConcreteModel()
16 changes: 15 additions & 1 deletion pyomo/contrib/gdpopt/tests/test_gdpopt.py
@@ -22,6 +22,7 @@
from pyomo.common.collections import Bunch
from pyomo.common.config import ConfigDict, ConfigValue
from pyomo.common.fileutils import import_file, PYOMO_ROOT_DIR
from pyomo.contrib.appsi.base import Solver
Review comment (Member): Why was this import added?

Reply (Contributor Author): Oh, good catch. Because of a long and stupid adventure trying to get the appsi test to skip properly when Gurobi is installed but has no license. But we actually don't need this import or the one below.

from pyomo.contrib.appsi.solvers.gurobi import Gurobi
from pyomo.contrib.gdpopt.create_oa_subproblems import (
add_util_block,
@@ -767,6 +768,9 @@ def test_time_limit(self):
results.solver.termination_condition, TerminationCondition.maxTimeLimit
)

@unittest.skipUnless(
license_available, "No BARON license--8PP logical problem exceeds demo size"
)
def test_LOA_8PP_logical_default_init(self):
"""Test logic-based outer approximation with 8PP."""
exfile = import_file(join(exdir, 'eight_process', 'eight_proc_logical.py'))
@@ -870,6 +874,9 @@ def test_LOA_8PP_maxBinary(self):
)
ct.check_8PP_solution(self, eight_process, results)

@unittest.skipUnless(
license_available, "No BARON license--8PP logical problem exceeds demo size"
)
def test_LOA_8PP_logical_maxBinary(self):
"""Test logic-based OA with max_binary initialization."""
exfile = import_file(join(exdir, 'eight_process', 'eight_proc_logical.py'))
@@ -1050,7 +1057,11 @@ def assert_correct_disjuncts_active(

self.assertTrue(fabs(value(eight_process.profit.expr) - 68) <= 1e-2)

@unittest.skipUnless(Gurobi().available(), "APPSI Gurobi solver is not available")
@unittest.skipUnless(
SolverFactory('appsi_gurobi').available(exception_flag=False)
and SolverFactory('appsi_gurobi').license_is_valid(),
"Legacy APPSI Gurobi solver is not available",
)
def test_auto_persistent_solver(self):
exfile = import_file(join(exdir, 'eight_process', 'eight_proc_model.py'))
m = exfile.build_eight_process_flowsheet()
@@ -1126,6 +1137,9 @@ def test_RIC_8PP_default_init(self):
)
ct.check_8PP_solution(self, eight_process, results)

@unittest.skipUnless(
license_available, "No BARON license--8PP logical problem exceeds demo size"
)
def test_RIC_8PP_logical_default_init(self):
"""Test logic-based outer approximation with 8PP."""
exfile = import_file(join(exdir, 'eight_process', 'eight_proc_logical.py'))
9 changes: 8 additions & 1 deletion pyomo/contrib/gdpopt/util.py
@@ -553,6 +553,13 @@ def _add_bigm_constraint_to_transformed_model(m, constraint, block):
# making a Reference to the ComponentData so that it will look like an
# indexed component for now. If I redesign bigm at some point, then this
# could be prettier.
bigm._transform_constraint(Reference(constraint), parent_disjunct, None, [], [])
bigm._transform_constraint(
Reference(constraint),
parent_disjunct,
None,
[],
[],
1 - parent_disjunct.binary_indicator_var,
)
# Now get rid of it because this is a class attribute!
del bigm._config
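
A side note on the Reference trick mentioned in the comment above: wrapping a scalar constraint (or a single ConstraintData) in Reference() makes it behave like an indexed component, which is the shape that bigm._transform_constraint expects to iterate over. A minimal sketch using only plain Pyomo (the model and names below are made up for illustration and are not part of this PR):

from pyomo.environ import ConcreteModel, Var, Constraint, Reference

m = ConcreteModel()
m.x = Var()
m.c = Constraint(expr=m.x <= 1)

# Reference() wraps the scalar constraint so it can be iterated over and
# indexed like an indexed Constraint.
ref = Reference(m.c)
for idx in ref:
    print(idx, ref[idx].upper)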
44 changes: 29 additions & 15 deletions pyomo/gdp/plugins/bigm.py
@@ -213,21 +213,15 @@ def _apply_to_impl(self, instance, **kwds):
bigM = self._config.bigM
for t in preprocessed_targets:
if t.ctype is Disjunction:
self._transform_disjunctionData(
t,
t.index(),
bigM,
parent_disjunct=gdp_tree.parent(t),
root_disjunct=gdp_tree.root_disjunct(t),
)
self._transform_disjunctionData(t, t.index(), bigM, gdp_tree)

# issue warnings about anything that was in the bigM args dict that we
# didn't use
_warn_for_unused_bigM_args(bigM, self.used_args, logger)

def _transform_disjunctionData(
self, obj, index, bigM, parent_disjunct=None, root_disjunct=None
):
def _transform_disjunctionData(self, obj, index, bigM, gdp_tree):
parent_disjunct = gdp_tree.parent(obj)
root_disjunct = gdp_tree.root_disjunct(obj)
(transBlock, xorConstraint) = self._setup_transform_disjunctionData(
obj, root_disjunct
)
@@ -236,7 +230,7 @@ def _transform_disjunctionData(
or_expr = 0
for disjunct in obj.disjuncts:
or_expr += disjunct.binary_indicator_var
self._transform_disjunct(disjunct, bigM, transBlock)
self._transform_disjunct(disjunct, bigM, transBlock, gdp_tree)

if obj.xor:
xorConstraint[index] = or_expr == 1
@@ -249,7 +243,7 @@ def _transform_disjunctionData(
# and deactivate for the writers
obj.deactivate()

def _transform_disjunct(self, obj, bigM, transBlock):
def _transform_disjunct(self, obj, bigM, transBlock, gdp_tree):
# We're not using the preprocessed list here, so this could be
# inactive. We've already done the error checking in preprocessing, so
# we just skip it here.
@@ -261,6 +255,12 @@ def _transform_disjunct(self, obj, bigM, transBlock):

relaxationBlock = self._get_disjunct_transformation_block(obj, transBlock)

indicator_expression = 0
node = obj
while node is not None:
indicator_expression += 1 - node.binary_indicator_var
node = gdp_tree.parent_disjunct(node)

# This is crazy, but if the disjunction has been previously
# relaxed, the disjunct *could* be deactivated. This is a big
# deal for Hull, as it uses the component_objects /
@@ -270,13 +270,21 @@ def _transform_disjunct(self, obj, bigM, transBlock):
# comparing the two relaxations.
#
# Transform each component within this disjunct
self._transform_block_components(obj, obj, bigM, arg_list, suffix_list)
self._transform_block_components(
obj, obj, bigM, arg_list, suffix_list, indicator_expression
)

# deactivate disjunct to keep the writers happy
obj._deactivate_without_fixing_indicator()

def _transform_constraint(
self, obj, disjunct, bigMargs, arg_list, disjunct_suffix_list
self,
obj,
disjunct,
bigMargs,
arg_list,
disjunct_suffix_list,
indicator_expression,
):
# add constraint to the transformation block, we'll transform it there.
transBlock = disjunct._transformation_block()
@@ -348,7 +356,13 @@ def _transform_constraint(
bigm_src[c] = (lower, upper)

self._add_constraint_expressions(
c, i, M, disjunct.binary_indicator_var, newConstraint, constraint_map
c,
i,
M,
disjunct.binary_indicator_var,
newConstraint,
constraint_map,
indicator_expression=indicator_expression,
)

# deactivate because we relaxed
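
To make the effect of the new indicator_expression concrete, here is a minimal sketch of a nested GDP (the model and names are hypothetical, not taken from this PR) and what gdp.bigm is expected to produce for the inner constraint after this change:

from pyomo.environ import ConcreteModel, Var, Constraint, TransformationFactory
from pyomo.gdp import Disjunct, Disjunction

m = ConcreteModel()
m.x = Var(bounds=(0, 10))

# Outer disjunct that contains a nested (inner) disjunction
m.outer1 = Disjunct()
m.outer1.inner1 = Disjunct()
m.outer1.inner1.c = Constraint(expr=m.x >= 5)
m.outer1.inner2 = Disjunct()
m.outer1.inner2.c = Constraint(expr=m.x <= 2)
m.outer1.inner = Disjunction(expr=[m.outer1.inner1, m.outer1.inner2])

m.outer2 = Disjunct()
m.outer2.c = Constraint(expr=m.x == 0)
m.outer = Disjunction(expr=[m.outer1, m.outer2])

TransformationFactory('gdp.bigm').apply_to(m)

# With this change, the big-M term for m.outer1.inner1.c is built from
#   (1 - Y_inner1) + (1 - Y_outer1)
# (conceptual names for the two binary indicator variables), so the
# constraint is relaxed whenever any disjunct on the path to the root is
# not selected, not only when inner1 itself is not selected.

That summed term is exactly what the while loop over gdp_tree.parent_disjunct in _transform_disjunct accumulates above.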
15 changes: 12 additions & 3 deletions pyomo/gdp/plugins/bigm_mixin.py
@@ -232,7 +232,14 @@ def _estimate_M(self, expr, constraint):
return tuple(M)

def _add_constraint_expressions(
self, c, i, M, indicator_var, newConstraint, constraint_map
self,
c,
i,
M,
indicator_var,
newConstraint,
constraint_map,
indicator_expression=None,
):
# Since we are both combining components from multiple blocks and using
# local names, we need to make sure that the first index for
@@ -244,14 +251,16 @@ def _add_constraint_expressions(
# over the constraint indices, but I don't think it matters a lot.)
unique = len(newConstraint)
name = c.local_name + "_%s" % unique
if indicator_expression is None:
indicator_expression = 1 - indicator_var

if c.lower is not None:
if M[0] is None:
raise GDP_Error(
"Cannot relax disjunctive constraint '%s' "
"because M is not defined." % name
)
M_expr = M[0] * (1 - indicator_var)
M_expr = M[0] * indicator_expression
newConstraint.add((name, i, 'lb'), c.lower <= c.body - M_expr)
constraint_map.transformed_constraints[c].append(
newConstraint[name, i, 'lb']
@@ -263,7 +272,7 @@ def _add_constraint_expressions(
"Cannot relax disjunctive constraint '%s' "
"because M is not defined." % name
)
M_expr = M[1] * (1 - indicator_var)
M_expr = M[1] * indicator_expression
newConstraint.add((name, i, 'ub'), c.body - M_expr <= c.upper)
constraint_map.transformed_constraints[c].append(
newConstraint[name, i, 'ub']
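
For reference, a standalone sketch of the two expression forms this helper can now build; every name here is a stand-in chosen for illustration, and the big-M values are assumed rather than computed:

from pyomo.environ import ConcreteModel, Var, Binary

m = ConcreteModel()
m.x = Var()
m.y_inner = Var(domain=Binary)  # stand-in for the disjunct's binary_indicator_var
m.y_outer = Var(domain=Binary)  # stand-in for an ancestor disjunct's indicator

M_lb, M_ub = -100, 100          # assumed big-M values
body, lb, ub = m.x, 0, 5

# Nested case: the caller passes the summed indicator expression.
indicator_expression = (1 - m.y_inner) + (1 - m.y_outer)
# Non-nested fallback (indicator_expression is None): 1 - m.y_inner

lb_relaxed = lb <= body - M_lb * indicator_expression
ub_relaxed = body - M_ub * indicator_expression <= ub

When every indicator on the path is 1 the extra term vanishes and the original bounds are enforced; otherwise the big-M terms slacken both sides.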