diff --git a/docs/src/manual/models.md b/docs/src/manual/models.md index f601222d843..7bff7f3c096 100644 --- a/docs/src/manual/models.md +++ b/docs/src/manual/models.md @@ -234,16 +234,12 @@ CachingOptimizer state: EMPTY_OPTIMIZER Solver name: GLPK julia> b = backend(model) -MOIU.CachingOptimizer{MOI.AbstractOptimizer, MOIU.UniversalFallback{MOIU.Model{Float64}}} +MOIU.CachingOptimizer{GLPK.Optimizer, MOIU.UniversalFallback{MOIU.Model{Float64}}} in state EMPTY_OPTIMIZER in mode AUTOMATIC with model cache MOIU.UniversalFallback{MOIU.Model{Float64}} fallback for MOIU.Model{Float64} -with optimizer MOIB.LazyBridgeOptimizer{GLPK.Optimizer} - with 0 variable bridges - with 0 constraint bridges - with 0 objective bridges - with inner model A GLPK model +with optimizer A GLPK model ``` The backend is a `MOIU.CachingOptimizer` in the state `EMPTY_OPTIMIZER` and mode @@ -280,17 +276,9 @@ It has two parts: 2. An optimizer, which is used to solve the problem ```jldoctest models_backends julia> b.optimizer - MOIB.LazyBridgeOptimizer{GLPK.Optimizer} - with 0 variable bridges - with 0 constraint bridges - with 0 objective bridges - with inner model A GLPK model + A GLPK model ``` -!!! info - The [LazyBridgeOptimizer](@ref) section explains what a - `LazyBridgeOptimizer` is. - The `CachingOptimizer` has logic to decide when to copy the problem from the cache to the optimizer, and when it can efficiently update the optimizer in-place. @@ -317,25 +305,10 @@ A `CachingOptimizer` has two modes of operation: an operation in the incorrect state results in an error. By default [`Model`](@ref) will create a `CachingOptimizer` in `AUTOMATIC` mode. -Use the `caching_mode` keyword to create a model in `MANUAL` mode: -```jldoctest -julia> Model(GLPK.Optimizer; caching_mode = MOI.Utilities.MANUAL) -A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: MANUAL -CachingOptimizer state: EMPTY_OPTIMIZER -Solver name: GLPK -``` - -!!! tip - Only use `MANUAL` mode if you have a very good reason. If you want to reduce - the overhead between JuMP and the underlying solver, consider - [Direct mode](@ref) instead. ### LazyBridgeOptimizer -The second layer that JuMP applies automatically is a `LazyBridgeOptimizer`. A +The second layer that JuMP may apply is a `LazyBridgeOptimizer`. A `LazyBridgeOptimizer` is an MOI layer that attempts to transform constraints added by the user into constraints supported by the solver. This may involve adding new variables and constraints to the optimizer. The transformations are @@ -345,9 +318,10 @@ A common example of a bridge is one that splits an interval constrait like `@constraint(model, 1 <= x + y <= 2)` into two constraints, `@constraint(model, x + y >= 1)` and `@constraint(model, x + y <= 2)`. -Use the `bridge_constraints=false` keyword to remove the bridging layer: +The `LazyBridgeOptimizer` is added only if necessary. 
However, you can use the +`force_bridge_formulation = true` keyword to add the bridging layer by default: ```jldoctest -julia> model = Model(GLPK.Optimizer; bridge_constraints = false) +julia> model = Model(GLPK.Optimizer; force_bridge_formulation = true) A JuMP Model Feasibility problem with: Variables: 0 @@ -356,19 +330,18 @@ CachingOptimizer state: EMPTY_OPTIMIZER Solver name: GLPK julia> backend(model) -MOIU.CachingOptimizer{MOI.AbstractOptimizer, MOIU.UniversalFallback{MOIU.Model{Float64}}} +MOIU.CachingOptimizer{MOIB.LazyBridgeOptimizer{GLPK.Optimizer}, MOIU.UniversalFallback{MOIU.Model{Float64}}} in state EMPTY_OPTIMIZER in mode AUTOMATIC with model cache MOIU.UniversalFallback{MOIU.Model{Float64}} fallback for MOIU.Model{Float64} -with optimizer A GLPK model +with optimizer MOIB.LazyBridgeOptimizer{GLPK.Optimizer} + with 0 variable bridges + with 0 constraint bridges + with 0 objective bridges + with inner model A GLPK model ``` -!!! tip - Only disable bridges if you have a very good reason. If you want to reduce - the overhead between JuMP and the underlying solver, consider - [Direct mode](@ref) instead. - ## Direct mode Using a `CachingOptimizer` results in an additional copy of the model being diff --git a/docs/src/reference/models.md b/docs/src/reference/models.md index d39323532d2..c68e5afa71e 100644 --- a/docs/src/reference/models.md +++ b/docs/src/reference/models.md @@ -75,7 +75,6 @@ MOIU.attach_optimizer(::JuMP.Model) ## Bridge tools ```@docs -bridge_constraints print_bridge_graph ``` diff --git a/docs/src/tutorials/Getting started/getting_started_with_JuMP.jl b/docs/src/tutorials/Getting started/getting_started_with_JuMP.jl index 1ebf8bdec3d..5e2737b64d1 100644 --- a/docs/src/tutorials/Getting started/getting_started_with_JuMP.jl +++ b/docs/src/tutorials/Getting started/getting_started_with_JuMP.jl @@ -72,6 +72,7 @@ using JuMP using GLPK model = Model(GLPK.Optimizer) +set_silent(model) @variable(model, x >= 0) @variable(model, 0 <= y <= 3) @objective(model, Min, 12x + 20y) @@ -108,6 +109,10 @@ using GLPK model = Model(GLPK.Optimizer) +# Turn off printing from GLPK: + +set_silent(model) + # Variables are modeled using [`@variable`](@ref): @variable(model, x >= 0) diff --git a/docs/src/tutorials/Getting started/performance_tips.jl b/docs/src/tutorials/Getting started/performance_tips.jl index b45af0d1137..191942442b0 100644 --- a/docs/src/tutorials/Getting started/performance_tips.jl +++ b/docs/src/tutorials/Getting started/performance_tips.jl @@ -34,7 +34,7 @@ using GLPK # hide # Similar to the infamous [time-to-first-plot](https://discourse.julialang.org/t/roadmap-for-a-faster-time-to-first-plot/22956) # plotting problem, JuMP suffers from time-to-first-solve latency. This latency -# occurs because the first time you call JuMP code in each session, Julia needs +# occurs because the first time you call JuMP code in each session, Julia needs # to compile a lot of code specific to your problem. This issue is actively being # worked on, but there are a few things you can do to improve things. @@ -48,15 +48,6 @@ using GLPK # hide # every time you run the script. Instead, use one of the [suggested workflows](https://docs.julialang.org/en/v1/manual/workflow-tips/) # from the Julia documentation. -# ### Disable bridges if none are being used - -# At present, the majority of the latency problems are caused by JuMP's bridging -# mechanism. 
If you only use constraints that are natively supported by the -# solver, you can disable bridges by passing `bridge_constraints = false` to -# [`Model`](@ref). - -model = Model(GLPK.Optimizer; bridge_constraints = false) - # ### Use PackageCompiler # As a final option, consider using [PackageCompiler.jl](https://julialang.github.io/PackageCompiler.jl/dev/) diff --git a/docs/src/tutorials/Getting started/solvers_and_solutions.jl b/docs/src/tutorials/Getting started/solvers_and_solutions.jl index 940cfe121ee..5718b40b499 100644 --- a/docs/src/tutorials/Getting started/solvers_and_solutions.jl +++ b/docs/src/tutorials/Getting started/solvers_and_solutions.jl @@ -53,8 +53,8 @@ # ## Constructing a model -# JuMP models can be created in three different modes: `AUTOMATIC`, `MANUAL` and -# `DIRECT`. We'll use the following LP to illustrate them. +# JuMP models can be created in a number of ways. We'll use the following LP to +# illustrate them. # ```math # \begin{aligned} @@ -67,55 +67,35 @@ using JuMP using GLPK -# ### `AUTOMATIC` Mode - -# #### With Optimizer +# ### With Optimizer # This is the easiest method to use a solver in JuMP. In order to do so, we # simply set the solver inside the Model constructor. -model_auto = Model(GLPK.Optimizer) -@variable(model_auto, 0 <= x <= 1) -@variable(model_auto, 0 <= y <= 1) -@constraint(model_auto, x + y <= 1) -@objective(model_auto, Max, x + 2y) -optimize!(model_auto) -objective_value(model_auto) +model_1 = Model(GLPK.Optimizer) +set_silent(model_1) +@variable(model_1, 0 <= x <= 1) +@variable(model_1, 0 <= y <= 1) +@constraint(model_1, x + y <= 1) +@objective(model_1, Max, x + 2y) +optimize!(model_1) +objective_value(model_1) -# #### No Optimizer (at first) +# ### No Optimizer (at first) # It is also possible to create a JuMP model with no optimizer attached. After # the model object is initialized empty and all its variables, constraints and # objective are set, then we can attach the solver at `optimize!` time. -model_auto_no = Model() -@variable(model_auto_no, 0 <= x <= 1) -@variable(model_auto_no, 0 <= y <= 1) -@constraint(model_auto_no, x + y <= 1) -@objective(model_auto_no, Max, x + 2y) -set_optimizer(model_auto_no, GLPK.Optimizer) -optimize!(model_auto_no) -objective_value(model_auto_no) - -# Note that we can also enforce the automatic mode by passing -# `caching_mode = MOIU.AUTOMATIC` in the Model function call. - -# ### `MANUAL` Mode - -# This mode is similar to the `AUTOMATIC` mode, but there are less protections -# from the user getting errors from the solver API. On the other side, nothing -# happens silently, which might give the user more control. It requires -# attaching the solver before the solve step using the `MOIU.attach_optimizer()` -# function. - -model_manual = Model(GLPK.Optimizer, caching_mode = MOIU.MANUAL) -@variable(model_manual, 0 <= x <= 1) -@variable(model_manual, 0 <= y <= 1) -@constraint(model_manual, x + y <= 1) -@objective(model_manual, Max, x + 2y) -MOIU.attach_optimizer(model_manual) -optimize!(model_manual) -objective_value(model_manual) +model = Model() +@variable(model, 0 <= x <= 1) +@variable(model, 0 <= y <= 1) +@constraint(model, x + y <= 1) +@objective(model, Max, x + 2y) +set_optimizer(model, GLPK.Optimizer) +set_silent(model) +optimize!(model) +objective_value(model) # ### `DIRECT` Mode @@ -124,13 +104,14 @@ objective_value(model_manual) # we do not set a optimizer, we set a backend which is more generic and is able # to hold data and not only solving a model. 
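+
+# The example below constructs such a model. Note that `direct_model` is
+# passed an optimizer *instance*, `GLPK.Optimizer()`, rather than the factory
+# `GLPK.Optimizer` that we passed to `Model` above. As a quick sketch of the
+# difference (the exact printed types may vary between versions), the backend
+# of a direct-mode model is the optimizer itself instead of a caching wrapper:
+#
+# ```julia
+# cached = Model(GLPK.Optimizer)
+# typeof(backend(cached))    # MOIU.CachingOptimizer{...}
+#
+# direct = direct_model(GLPK.Optimizer())
+# typeof(backend(direct))    # GLPK.Optimizer
+# ```
+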
-model_direct = direct_model(GLPK.Optimizer()) -@variable(model_direct, 0 <= x <= 1) -@variable(model_direct, 0 <= y <= 1) -@constraint(model_direct, x + y <= 1) -@objective(model_direct, Max, x + 2y) -optimize!(model_direct) -objective_value(model_direct) +model = direct_model(GLPK.Optimizer()) +set_silent(model) +@variable(model, 0 <= x <= 1) +@variable(model, 0 <= y <= 1) +@constraint(model, x + y <= 1) +@objective(model, Max, x + 2y) +optimize!(model) +objective_value(model) # ### Solver Options @@ -143,15 +124,15 @@ using GLPK # To turn off printing (i.e. silence the solver), -model = Model(optimizer_with_attributes(GLPK.Optimizer, "msg_lev" => 0)); +Model(optimizer_with_attributes(GLPK.Optimizer, "msg_lev" => 0)); # To increase the maximum number of simplex iterations: -model = Model(optimizer_with_attributes(GLPK.Optimizer, "it_lim" => 10_000)); +Model(optimizer_with_attributes(GLPK.Optimizer, "it_lim" => 10_000)); # To set the solution timeout limit (in milliseconds): -model = Model(optimizer_with_attributes(GLPK.Optimizer, "tm_lim" => 5_000)); +Model(optimizer_with_attributes(GLPK.Optimizer, "tm_lim" => 5_000)); # ## How to querying the solution @@ -167,7 +148,7 @@ model = Model(optimizer_with_attributes(GLPK.Optimizer, "tm_lim" => 5_000)); # Termination statuses are meant to explain the reason why the optimizer stopped # executing in the most recent call to `optimize!`. -termination_status(model_auto) +termination_status(model_1) # You can view the different termination status codes by referring to the docs # or though checking the possible types using the below command. @@ -180,11 +161,11 @@ display(typeof(MOI.OPTIMAL)) # the model. It's possible that no result is available to be queried. We shall # discuss more on the dual status and solutions in the Duality tutorial. -primal_status(model_auto) +primal_status(model_1) #- -dual_status(model_auto) +dual_status(model_1) # As we saw before, the result (solution) status codes can be viewed directly # from Julia. @@ -204,19 +185,19 @@ value(y) #- -objective_value(model_auto) +objective_value(model_1) # Since it is possible that no solution is available to be queried from the # model, calls to [`value`](@ref) may throw errors. Hence, it is recommended to # check for the presence of solutions. 
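+
+# One convenient check is `has_values`, which returns `true` only when a
+# primal solution is available to query. A minimal sketch of the pattern
+# (assuming `model` has already been optimized and has a variable `x`):
+#
+# ```julia
+# if has_values(model)
+#     println(value(x))
+# else
+#     println("No primal solution available.")
+# end
+# ```
+#
+# The example below builds an infeasible model to show what happens when no
+# solution exists.
+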
-model_no_solution = Model(GLPK.Optimizer) -@variable(model_no_solution, 0 <= x <= 1) -@variable(model_no_solution, 0 <= y <= 1) -@constraint(model_no_solution, x + y >= 3) -@objective(model_no_solution, Max, x + 2y) - -optimize!(model_no_solution) +model = Model(GLPK.Optimizer) +@variable(model, 0 <= x <= 1) +@variable(model, 0 <= y <= 1) +@constraint(model, x + y >= 3) +@objective(model, Max, x + 2y) +set_silent(model) +optimize!(model) try #hide if termination_status(model_no_solution) == MOI.OPTIMAL diff --git a/docs/src/tutorials/Getting started/variables_constraints_objective.jl b/docs/src/tutorials/Getting started/variables_constraints_objective.jl index 3c52ec2790c..77e9dd98de8 100644 --- a/docs/src/tutorials/Getting started/variables_constraints_objective.jl +++ b/docs/src/tutorials/Getting started/variables_constraints_objective.jl @@ -223,7 +223,7 @@ model = Model(GLPK.Optimizer) @variable(model, y >= 0) set_objective_sense(model, MOI.MIN_SENSE) set_objective_function(model, x + y) - +set_silent(model) optimize!(model) #- @@ -272,7 +272,7 @@ c = [1; 3; 5; 2] @variable(vector_model, x[1:4] >= 0) @constraint(vector_model, A * x .== b) @objective(vector_model, Min, c' * x) - +set_silent(vector_model) optimize!(vector_model) #- diff --git a/docs/src/tutorials/Mixed-integer linear programs/callbacks.jl b/docs/src/tutorials/Mixed-integer linear programs/callbacks.jl index 7e52535f046..961dda32e6b 100644 --- a/docs/src/tutorials/Mixed-integer linear programs/callbacks.jl +++ b/docs/src/tutorials/Mixed-integer linear programs/callbacks.jl @@ -18,6 +18,7 @@ import Test #src function example_lazy_constraint() model = Model(GLPK.Optimizer) + set_silent(model) @variable(model, 0 <= x <= 2.5, Int) @variable(model, 0 <= y <= 2.5, Int) @objective(model, Max, y) @@ -68,6 +69,7 @@ function example_user_cut_constraint() N = 30 item_weights, item_values = rand(N), rand(N) model = Model(GLPK.Optimizer) + set_silent(model) @variable(model, x[1:N], Bin) @constraint(model, sum(item_weights[i] * x[i] for i in 1:N) <= 10) @objective(model, Max, sum(item_values[i] * x[i] for i in 1:N)) @@ -106,6 +108,7 @@ function example_heuristic_solution() N = 30 item_weights, item_values = rand(N), rand(N) model = Model(GLPK.Optimizer) + set_silent(model) @variable(model, x[1:N], Bin) @constraint(model, sum(item_weights[i] * x[i] for i in 1:N) <= 10) @objective(model, Max, sum(item_values[i] * x[i] for i in 1:N)) @@ -137,6 +140,7 @@ example_heuristic_solution() function example_solver_dependent_callback() model = Model(GLPK.Optimizer) + set_silent(model) @variable(model, 0 <= x <= 2.5, Int) @variable(model, 0 <= y <= 2.5, Int) @objective(model, Max, y) diff --git a/src/JuMP.jl b/src/JuMP.jl index 51cb4d7a879..0966853386b 100644 --- a/src/JuMP.jl +++ b/src/JuMP.jl @@ -236,59 +236,73 @@ mutable struct Model <: AbstractModel end """ - Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.AUTOMATIC) + Model(optimizer_factory = nothing; force_bridge_formulation::Bool = false) -Return a new JuMP model without any optimizer; the model is stored the model in -a cache. The mode of the `CachingOptimizer` storing this cache is -`caching_mode`. Use [`set_optimizer`](@ref) to set the optimizer before -calling [`optimize!`](@ref). -""" -function Model(; - caching_mode::MOIU.CachingOptimizerMode = MOIU.AUTOMATIC, - solver = nothing, -) - if solver !== nothing - error( - "The solver= keyword is no longer available in JuMP 0.19 and " * - "later. 
See the JuMP documentation " * - "(https://jump.dev/JuMP.jl/latest/) for latest syntax.", - ) - end - universal_fallback = MOIU.UniversalFallback(MOIU.Model{Float64}()) - caching_opt = MOIU.CachingOptimizer(universal_fallback, caching_mode) - return direct_model(caching_opt) -end +Construct a JuMP model with a `MOI.Utilities.CachingOptimizer` backend. -""" - Model(optimizer_factory; - caching_mode::MOIU.CachingOptimizerMode=MOIU.AUTOMATIC, - bridge_constraints::Bool=true) +See [`set_optimizer`](@ref) for a description of the arguments. + +## Examples -Return a new JuMP model with the provided optimizer and bridge settings. This -function is equivalent to: +Create a model with no backing optimizer. Call [`set_optimizer`](@ref) later to +add an optimizer prior to calling [`optimize!`](@ref). ```julia - model = Model() - set_optimizer(model, optimizer_factory, - bridge_constraints=bridge_constraints) - return model +model = Model() ``` -See [`set_optimizer`](@ref) for the description of the `optimizer_factory` and -`bridge_constraints` arguments. -## Examples +Pass a `.Optimizer` object from a supported package: +```julia +model = Model(GLPK.Optimizer) +``` -The following creates a model with the optimizer set to `Ipopt`: +Use [`optimizer_with_attributes`](@ref) to initialize the model with the +provided attributes: ```julia -model = Model(Ipopt.Optimizer) +model = Model( + optimizer_with_attributes(GLPK.Optimizer, "loglevel" => 0), +) +``` + +Create an anonymous function to pass positional arguments to the optimizer: +```julia +model = Model() do + AmplNLWriter.Optimizer(Bonmin_jll.amplexe) +end +``` + +Pass `force_bridge_formulation = true` to intialize the model with bridges. +Normally, bridges will be added only if necessary. Adding them here can have +performance benefits if you know that your model will use the bridges. +```julia +model = Model(SCS.Optimizer; force_bridge_formulation = true) ``` """ -function Model(optimizer_factory; bridge_constraints::Bool = true, kwargs...) - model = Model(; kwargs...) - set_optimizer( - model, - optimizer_factory, - bridge_constraints = bridge_constraints, +function Model( + optimizer_factory = nothing; + force_bridge_formulation::Bool = false, + bridge_constraints::Union{Nothing,Bool} = nothing, +) + if bridge_constraints !== nothing + @warn( + "`bridge_constraints` is deprecated. Use " * + "`force_bridge_formulation` instead.", + ) + force_bridge_formulation = bridge_constraints + end + + model = direct_model( + MOI.Utilities.CachingOptimizer( + MOIU.UniversalFallback(MOIU.Model{Float64}()), + MOI.Utilities.AUTOMATIC, + ), ) + if optimizer_factory !== nothing + set_optimizer( + model, + optimizer_factory; + force_bridge_formulation = force_bridge_formulation, + ) + end return model end @@ -461,6 +475,53 @@ end unsafe_backend(model::MOIB.LazyBridgeOptimizer) = unsafe_backend(model.model) unsafe_backend(model::MOI.ModelLike) = model +_needs_bridges(::MOI.ModelLike) = true +_needs_bridges(::MOI.Bridges.LazyBridgeOptimizer) = false +_needs_bridges(::Nothing) = false + +function _add_bridges_if_needed( + model::Model, + backend::MOI.Utilities.CachingOptimizer, +) + if !_needs_bridges(backend.optimizer) + return false + end + # We might have detected that we need bridges part-way through copying the + # optimizer. Therefore, it might not be empty. 
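+    # Emptying it is safe: `backend.model_cache` still holds the full problem,
+    # and the (bridged) optimizer is re-populated from that cache when it is
+    # next attached.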
+ if !MOI.is_empty(backend.optimizer) + MOI.empty!(backend.optimizer) + end + new_optimizer = if MOI.supports_incremental_interface(backend.optimizer) + backend.optimizer + else + MOI.Utilities.CachingOptimizer( + MOI.Utilities.UniversalFallback(MOI.Utilities.Model{Float64}()), + backend.optimizer, + ) + end + # We don't have to worry about adding `model.bridge_types` here, because + # calling `add_bridge` will force a `LazyBridgeOptimizer` backend. + model.moi_backend = MOI.Utilities.CachingOptimizer( + backend.model_cache, + MOI.Bridges.full_bridge_optimizer(new_optimizer, Float64), + ) + MOIU.reset_optimizer(model) + return true +end + +_add_bridges_if_needed(::Model, ::MOI.ModelLike) = false + +""" + _add_bridges_if_needed(model::Model) + +Add a `MOI.Bridges.LazyBridgeOptimizer` to the backend of `model` if one does +not already exist. Returns `true` if a new `MOI.Bridges.LazyBridgeOptimizer` is +added, and `false` otherwise. +""" +function _add_bridges_if_needed(model::Model) + return _add_bridges_if_needed(model, backend(model)) +end + """ moi_mode(model::MOI.ModelLike) @@ -487,16 +548,6 @@ function mode(model::Model) return moi_mode(backend(model)) end -""" - moi_bridge_constraints(model::MOI.ModelLike) - -Return `true` if `model` will bridge constraints. -""" -moi_bridge_constraints(model::MOI.ModelLike) = false -function moi_bridge_constraints(model::MOIU.CachingOptimizer) - return model.optimizer isa MOI.Bridges.LazyBridgeOptimizer -end - # Internal function. function _try_get_solver_name(model_like) try @@ -529,37 +580,17 @@ function solver_name(model::Model) end end -""" - bridge_constraints(model::Model) - -When in direct mode, return `false`. -When in manual or automatic mode, return a `Bool` indicating whether the -optimizer is set and unsupported constraints are automatically bridged -to equivalent supported constraints when an appropriate transformation is -available. -""" -function bridge_constraints(model::Model) - # The type of `backend(model)` is not type-stable, so we use a function - # barrier (`moi_bridge_constraints`) to improve performance. 
- return moi_bridge_constraints(backend(model)) -end +# No optimizer is attached, the bridge will be added when one is attached +_moi_add_bridge(::Nothing, ::Type{<:MOI.Bridges.AbstractBridge}) = nothing -function _moi_add_bridge( - model::Nothing, - BridgeType::Type{<:MOI.Bridges.AbstractBridge}, -) - # No optimizer is attached, the bridge will be added when one is attached - return -end -function _moi_add_bridge( - model::MOI.ModelLike, - BridgeType::Type{<:MOI.Bridges.AbstractBridge}, -) +function _moi_add_bridge(::MOI.ModelLike, ::Type{<:MOI.Bridges.AbstractBridge}) return error( - "Cannot add bridge if `bridge_constraints` was set to `false` in the", - " `Model` constructor.", + "`In order to add a bridge, you must pass " * + "`force_bridge_formulation = true` to `Model`, i.e., " * + "`Model(optimizer; force_bridge_formulation = true)`.", ) end + function _moi_add_bridge( bridge_opt::MOI.Bridges.LazyBridgeOptimizer, BridgeType::Type{<:MOI.Bridges.AbstractBridge}, @@ -567,6 +598,7 @@ function _moi_add_bridge( MOI.Bridges.add_bridge(bridge_opt, BridgeType{Float64}) return end + function _moi_add_bridge( caching_opt::MOIU.CachingOptimizer, BridgeType::Type{<:MOI.Bridges.AbstractBridge}, @@ -576,8 +608,10 @@ function _moi_add_bridge( end """ - add_bridge(model::Model, - BridgeType::Type{<:MOI.Bridges.AbstractBridge}) + add_bridge( + model::Model, + BridgeType::Type{<:MOI.Bridges.AbstractBridge}, + ) Add `BridgeType` to the list of bridges that can be used to transform unsupported constraints into an equivalent formulation using only constraints @@ -588,6 +622,8 @@ function add_bridge( BridgeType::Type{<:MOI.Bridges.AbstractBridge}, ) push!(model.bridge_types, BridgeType) + # Make sure we force a `LazyBridgeOptimizer` backend! + _add_bridges_if_needed(model) # The type of `backend(model)` is not type-stable, so we use a function # barrier (`_moi_add_bridge`) to improve performance. _moi_add_bridge(JuMP.backend(model), BridgeType) @@ -635,8 +671,9 @@ end function _moi_print_bridge_graph(::IO, ::MOI.ModelLike) return error( - "Cannot print bridge graph if `bridge_constraints` was set to " * - "`false` in the `Model` constructor.", + "`In order to print the bridge graph, you must pass " * + "`force_bridge_formulation = true` to `Model`, i.e., " * + "`Model(optimizer; force_bridge_formulation = true)`.", ) end diff --git a/src/constraints.jl b/src/constraints.jl index 7564defba18..d35d7f6882c 100644 --- a/src/constraints.jl +++ b/src/constraints.jl @@ -524,21 +524,36 @@ function check_belongs_to_model(con::VectorConstraint, model) end function moi_add_constraint( - model::MOI.ModelLike, + model::JuMP.Model, f::F, s::S, ) where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet} - if !MOI.supports_constraint(model, F, S) - error( - "Constraints of type $(F)-in-$(S) are not supported by the " * - "solver.\n\nIf you expected the solver to support your problem, " * - "you may have an error in our formulation. Otherwise, consider " * - "using a different solver.\n\nThe list of available solvers, " * - "along with the problem types they support, is available at " * - "https://jump.dev/JuMP.jl/stable/installation/#Supported-solvers.", - ) + return moi_add_constraint(model, backend(model), f, s) +end + +function moi_add_constraint( + model::JuMP.Model, + moi_backend::MOI.ModelLike, + f::F, + s::S, +) where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet} + if MOI.supports_constraint(moi_backend, F, S) + # Our backend supports the constraint. Go ahead and add it. 
+ return MOI.add_constraint(moi_backend, f, s) + elseif _add_bridges_if_needed(model) + # In here, we added some bridges. Call again with the new backend. + return moi_add_constraint(model, f, s) end - return MOI.add_constraint(model, f, s) + # Our backend doesn't support the constraint, and even after we added + # bridges it still didn't! + return error( + "Constraints of type $(F)-in-$(S) are not supported by the " * + "solver.\n\nIf you expected the solver to support your problem, " * + "you may have an error in our formulation. Otherwise, consider " * + "using a different solver.\n\nThe list of available solvers, " * + "along with the problem types they support, is available at " * + "https://jump.dev/JuMP.jl/stable/installation/#Supported-solvers.", + ) end """ @@ -556,7 +571,7 @@ function add_constraint( check_belongs_to_model(con, model) func, set = moi_function(con), moi_set(con) cindex = moi_add_constraint( - backend(model), + model, func, set, )::MOI.ConstraintIndex{typeof(func),typeof(set)} diff --git a/src/copy.jl b/src/copy.jl index a2ed2b05bf9..6fa92ca2332 100644 --- a/src/copy.jl +++ b/src/copy.jl @@ -131,8 +131,7 @@ function copy_model( "able to copy the constructed model.", ) end - caching_mode = backend(model).mode - new_model = Model(caching_mode = caching_mode) + new_model = Model() # At JuMP's level, filter_constraints should work with JuMP.ConstraintRef, # whereas MOI.copy_to's filter_constraints works with MOI.ConstraintIndex. diff --git a/src/objective.jl b/src/objective.jl index 4f2232c5849..e49f77e14d4 100644 --- a/src/objective.jl +++ b/src/objective.jl @@ -90,22 +90,25 @@ functions; the recommended way to set the objective is with the """ function set_objective_function end -function set_objective_function(model::Model, func::MOI.AbstractScalarFunction) - attr = MOI.ObjectiveFunction{typeof(func)}() - if !MOI.supports(backend(model), attr) - error( - "The solver does not support an objective function of type ", - typeof(func), - ".", - ) - end - MOI.set(model, attr, func) - # Nonlinear objectives override regular objectives, so if there was a - # nonlinear objective set, we must clear it. - if model.nlp_data !== nothing - model.nlp_data.nlobj = nothing +function set_objective_function( + model::Model, + f::F, +) where {F<:MOI.AbstractScalarFunction} + attr = MOI.ObjectiveFunction{F}() + if MOI.supports(backend(model), attr) + MOI.set(model, attr, f) + # Nonlinear objectives override regular objectives, so if there was a + # nonlinear objective set, we must clear it. 
+ if model.nlp_data !== nothing + model.nlp_data.nlobj = nothing + end + return + elseif _add_bridges_if_needed(model) + return set_objective_function(model, f) end - return + return error( + "The solver does not support an objective function of type $F.", + ) end function set_objective_function(model::Model, func::AbstractJuMPScalar) @@ -114,20 +117,15 @@ function set_objective_function(model::Model, func::AbstractJuMPScalar) end function set_objective_function(model::Model, func::Real) - return set_objective_function( - model, - MOI.ScalarAffineFunction( - MOI.ScalarAffineTerm{Float64}[], - Float64(func), - ), - ) + f = MOI.ScalarAffineFunction(MOI.ScalarAffineTerm{Float64}[], Float64(func)) + return set_objective_function(model, f) end function set_objective_function(model::AbstractModel, ::MutableArithmetics.Zero) return set_objective_function(model, 0.0) end -function set_objective_function(model::AbstractModel, func) +function set_objective_function(::AbstractModel, func) return error("The objective function `$(func)` is not supported by JuMP.") end diff --git a/src/optimizer_interface.jl b/src/optimizer_interface.jl index 01d797e81a0..61c974eff59 100644 --- a/src/optimizer_interface.jl +++ b/src/optimizer_interface.jl @@ -73,45 +73,64 @@ function MOIU.attach_optimizer(model::Model) end """ - set_optimizer(model::Model, optimizer_factory; - bridge_constraints::Bool=true) - + set_optimizer( + model::Model, + optimizer_factory; + force_bridge_formulation::Bool = length(model.bridge_types) > 0, + ) Creates an empty `MathOptInterface.AbstractOptimizer` instance by calling -`optimizer_factory()` and sets it as the optimizer of `model`. Specifically, -`optimizer_factory` must be callable with zero arguments and return an empty -`MathOptInterface.AbstractOptimizer`. +`MOI.instantiate(optimizer_factory)` and sets it as the optimizer of `model`. -If `bridge_constraints` is true, constraints that are not supported by the -optimizer are automatically bridged to equivalent supported constraints when -an appropriate transformation is defined in the `MathOptInterface.Bridges` -module or is defined in another module and is explicitly added. +If `force_bridge_formulation = true`, add a `MOI.Bridges.LazyBridgeOptimizer` +layer around the constructed optimizer. -See [`set_optimizer_attributes`](@ref) and [`set_optimizer_attribute`](@ref) for setting -solver-specific parameters of the optimizer. +See [`set_optimizer_attributes`](@ref) and [`set_optimizer_attribute`](@ref) for +setting solver-specific parameters of the optimizer. ## Examples + ```julia model = Model() + set_optimizer(model, GLPK.Optimizer) + +set_optimizer(model, () -> Gurobi.Optimizer(); force_bridge_formulation = true) + +factory = optimizer_with_attributes(Gurobi.Optimizer, "OutputFlag" => 0) +set_optimizer(model, factory) ``` """ function set_optimizer( model::Model, optimizer_constructor; - bridge_constraints::Bool = true, + # By default, add bridges only if the user has manually added bridges. + force_bridge_formulation::Bool = length(model.bridge_types) > 0, + bridge_constraints::Union{Nothing,Bool} = nothing, ) + if bridge_constraints !== nothing + @warn( + "`bridge_constraints` is deprecated. 
Use " * + "`force_bridge_formulation` instead.", + ) + force_bridge_formulation = bridge_constraints + end error_if_direct_mode(model, :set_optimizer) - if bridge_constraints - optimizer = - MOI.instantiate(optimizer_constructor, with_bridge_type = Float64) + optimizer = if force_bridge_formulation + opt = MOI.instantiate(optimizer_constructor; with_bridge_type = Float64) + # Make sure to add the bridges in `model.bridge_types`! These may have + # been added when no optimizer was present. for bridge_type in model.bridge_types - _moi_add_bridge(optimizer, bridge_type) + _moi_add_bridge(opt, bridge_type) end + opt else - optimizer = MOI.instantiate(optimizer_constructor) + MOI.instantiate(optimizer_constructor) end - return MOIU.reset_optimizer(model, optimizer) + model.moi_backend = + MOI.Utilities.CachingOptimizer(backend(model).model_cache, optimizer) + MOIU.reset_optimizer(model) + return end # Deprecation for JuMP v0.18 -> JuMP v0.19 transition @@ -186,6 +205,16 @@ function optimize!( "The solver does not support nonlinear problems " * "(i.e., NLobjective and NLconstraint).", ) + elseif err isa MOI.UnsupportedConstraint + if _add_bridges_if_needed(model) + optimize!( + model; + ignore_optimize_hook = ignore_optimize_hook, + kwargs..., + ) + else + rethrow(err) + end else rethrow(err) end diff --git a/src/variables.jl b/src/variables.jl index daf349691b4..7a6e44a493e 100644 --- a/src/variables.jl +++ b/src/variables.jl @@ -504,17 +504,18 @@ function set_lower_bound(v::VariableRef, lower::Number) "`delete_lower_bound`.", ) end - return _moi_set_lower_bound(backend(owner_model(v)), v, lower) + model = owner_model(v) + return _moi_set_lower_bound(model, backend(model), v, lower) end -function _moi_set_lower_bound(moi_backend, v::VariableRef, lower::Number) +function _moi_set_lower_bound(model, moi_backend, v::VariableRef, lower::Number) new_set = MOI.GreaterThan(convert(Float64, lower)) if _moi_has_lower_bound(moi_backend, v) cindex = _lower_bound_index(v) MOI.set(moi_backend, MOI.ConstraintSet(), cindex, new_set) else @assert !_moi_is_fixed(moi_backend, v) - moi_add_constraint(moi_backend, index(v), new_set) + moi_add_constraint(model, index(v), new_set) end return end @@ -609,17 +610,18 @@ function set_upper_bound(v::VariableRef, upper::Number) "`delete_upper_bound`.", ) end - return _moi_set_upper_bound(backend(owner_model(v)), v, upper) + model = owner_model(v) + return _moi_set_upper_bound(model, backend(model), v, upper) end -function _moi_set_upper_bound(moi_backend, v::VariableRef, upper::Number) +function _moi_set_upper_bound(model, moi_backend, v::VariableRef, upper::Number) new_set = MOI.LessThan(convert(Float64, upper)) if _moi_has_upper_bound(moi_backend, v) cindex = _upper_bound_index(v) MOI.set(moi_backend, MOI.ConstraintSet(), cindex, new_set) else @assert !_moi_is_fixed(moi_backend, v) - moi_add_constraint(moi_backend, index(v), new_set) + moi_add_constraint(model, index(v), new_set) end return end @@ -715,10 +717,12 @@ function fix(variable::VariableRef, value::Number; force::Bool = false) if !isfinite(value) error("Unable to fix variable to $(value)") end - return _moi_fix(backend(owner_model(variable)), variable, value, force) + model = owner_model(variable) + return _moi_fix(model, backend(model), variable, value, force) end function _moi_fix( + model, moi_backend, variable::VariableRef, value::Number, @@ -746,7 +750,7 @@ function _moi_fix( MOI.delete(moi_backend, _lower_bound_index(variable)) end end - moi_add_constraint(moi_backend, index(variable), new_set) + 
moi_add_constraint(model, index(variable), new_set) end return end @@ -825,10 +829,11 @@ Add an integrality constraint on the variable `variable_ref`. See also [`IntegerRef`](@ref), [`is_integer`](@ref), [`unset_integer`](@ref). """ function set_integer(variable_ref::VariableRef) - return _moi_set_integer(backend(owner_model(variable_ref)), variable_ref) + model = owner_model(variable_ref) + return _moi_set_integer(model, backend(model), variable_ref) end -function _moi_set_integer(moi_backend, variable_ref::VariableRef) +function _moi_set_integer(model, moi_backend, variable_ref::VariableRef) if _moi_is_integer(moi_backend, variable_ref) return elseif _moi_is_binary(moi_backend, variable_ref) @@ -837,7 +842,7 @@ function _moi_set_integer(moi_backend, variable_ref::VariableRef) "is already binary.", ) end - moi_add_constraint(moi_backend, index(variable_ref), MOI.Integer()) + moi_add_constraint(model, index(variable_ref), MOI.Integer()) return end @@ -898,10 +903,11 @@ Add a constraint on the variable `v` that it must take values in the set See also [`BinaryRef`](@ref), [`is_binary`](@ref), [`unset_binary`](@ref). """ function set_binary(variable_ref::VariableRef) - return _moi_set_binary(backend(owner_model(variable_ref)), variable_ref) + model = owner_model(variable_ref) + return _moi_set_binary(model, backend(model), variable_ref) end -function _moi_set_binary(moi_backend, variable_ref) +function _moi_set_binary(model, moi_backend, variable_ref) if _moi_is_binary(moi_backend, variable_ref) return elseif _moi_is_integer(moi_backend, variable_ref) @@ -910,7 +916,7 @@ function _moi_set_binary(moi_backend, variable_ref) "is already integer.", ) end - moi_add_constraint(moi_backend, index(variable_ref), MOI.ZeroOne()) + moi_add_constraint(model, index(variable_ref), MOI.ZeroOne()) return end @@ -1020,13 +1026,18 @@ Add a variable `v` to `Model m` and sets its name. function add_variable end function add_variable(model::Model, v::ScalarVariable, name::String = "") - return _moi_add_variable(backend(model), model, v, name) + return _moi_add_variable(model, backend(model), v, name) end -function _moi_add_variable(moi_backend, model, v::ScalarVariable, name::String) +function _moi_add_variable( + model::Model, + moi_backend::MOI.ModelLike, + v::ScalarVariable, + name::String, +) index = MOI.add_variable(moi_backend) var_ref = VariableRef(model, index) - _moi_constrain_variable(moi_backend, index, v.info) + _moi_constrain_variable(model, moi_backend, index, v.info) if !isempty(name) && MOI.supports(moi_backend, MOI.VariableName(), MOI.VariableIndex) set_name(var_ref, name) @@ -1034,35 +1045,36 @@ function _moi_add_variable(moi_backend, model, v::ScalarVariable, name::String) return var_ref end -function _moi_constrain_variable(moi_backend::MOI.ModelLike, index, info) +function _moi_constrain_variable( + model::Model, + moi_backend::MOI.ModelLike, + index, + info, +) # We don't call the _moi* versions (e.g., _moi_set_lower_bound) because they # have extra checks that are not necessary for newly created variables. 
if info.has_lb moi_add_constraint( - moi_backend, + model, index, MOI.GreaterThan{Float64}(info.lower_bound), ) end if info.has_ub moi_add_constraint( - moi_backend, + model, index, MOI.LessThan{Float64}(info.upper_bound), ) end if info.has_fix - moi_add_constraint( - moi_backend, - index, - MOI.EqualTo{Float64}(info.fixed_value), - ) + moi_add_constraint(model, index, MOI.EqualTo{Float64}(info.fixed_value)) end if info.binary - moi_add_constraint(moi_backend, index, MOI.ZeroOne()) + moi_add_constraint(model, index, MOI.ZeroOne()) end if info.integer - moi_add_constraint(moi_backend, index, MOI.Integer()) + moi_add_constraint(model, index, MOI.Integer()) end if info.has_start MOI.set( @@ -1072,6 +1084,7 @@ function _moi_constrain_variable(moi_backend::MOI.ModelLike, index, info) Float64(info.start), ) end + return end """ @@ -1105,6 +1118,7 @@ function add_variable( name::String, ) var_index = _moi_add_constrained_variable( + model, backend(model), variable.scalar_variable, variable.set, @@ -1114,17 +1128,34 @@ function add_variable( end function _moi_add_constrained_variable( + model::Model, moi_backend::MOI.ModelLike, scalar_variable::ScalarVariable, - set::MOI.AbstractScalarSet, + set::S, name::String, -) - var_index, con_index = MOI.add_constrained_variable(moi_backend, set) - _moi_constrain_variable(moi_backend, var_index, scalar_variable.info) - if !isempty(name) - MOI.set(moi_backend, MOI.VariableName(), var_index, name) +) where {S<:MOI.AbstractScalarSet} + if MOI.supports_add_constrained_variable(moi_backend, S) + var_index, _ = MOI.add_constrained_variable(moi_backend, set) + _moi_constrain_variable( + model, + moi_backend, + var_index, + scalar_variable.info, + ) + if !isempty(name) + MOI.set(moi_backend, MOI.VariableName(), var_index, name) + end + return var_index + elseif _add_bridges_if_needed(model) + return _moi_add_constrained_variable( + model, + backend(model), + scalar_variable, + set, + name, + ) end - return var_index + return error("Model does not support constrained variable in $(set).") end """ @@ -1168,6 +1199,7 @@ function add_variable( names, ) var_indices = _moi_add_constrained_variables( + model, backend(model), variable.scalar_variables, variable.set, @@ -1178,18 +1210,37 @@ function add_variable( end function _moi_add_constrained_variables( + ::Model, moi_backend::MOI.ModelLike, - scalar_variables::Vector{<:ScalarVariable}, - set::MOI.AbstractVectorSet, - names::Vector{String}, + set::MOI.Reals, ) - if set isa MOI.Reals - var_indices = MOI.add_variables(moi_backend, MOI.dimension(set)) - else - var_indices, con_index = MOI.add_constrained_variables(moi_backend, set) + return MOI.add_variables(moi_backend, MOI.dimension(set)) +end + +function _moi_add_constrained_variables( + model::Model, + moi_backend::MOI.ModelLike, + set::S, +) where {S<:MOI.AbstractVectorSet} + if MOI.supports_add_constrained_variables(moi_backend, S) + var_indices, _ = MOI.add_constrained_variables(moi_backend, set) + return var_indices + elseif _add_bridges_if_needed(model) + return _moi_add_constrained_variables(model, backend(model), set) end + return error("Model does not support constrained variables in $(set).") +end + +function _moi_add_constrained_variables( + model::Model, + moi_backend::MOI.ModelLike, + scalar_variables::Vector{<:ScalarVariable}, + set::S, + names::Vector{String}, +) where {S<:MOI.AbstractVectorSet} + var_indices = _moi_add_constrained_variables(model, moi_backend, set) for (index, variable) in zip(var_indices, scalar_variables) - 
_moi_constrain_variable(moi_backend, index, variable.info) + _moi_constrain_variable(model, moi_backend, index, variable.info) end for (var_index, name) in zip(var_indices, names) if !isempty(name) diff --git a/test/generate_and_solve.jl b/test/generate_and_solve.jl index f892aa7e1aa..0ffb45ab23d 100644 --- a/test/generate_and_solve.jl +++ b/test/generate_and_solve.jl @@ -202,7 +202,6 @@ using JuMP MOIU.Model{Float64}(), eval_objective_value = false, ), - caching_mode = MOIU.AUTOMATIC, ) @variable(m, x == 1.0, Int) @variable(m, y, Bin) diff --git a/test/model.jl b/test/model.jl index 7cf91708c4b..1de46f4e100 100644 --- a/test/model.jl +++ b/test/model.jl @@ -203,11 +203,13 @@ end function test_bridges_automatic() # optimizer not supporting Interval - model = Model(() -> MOIU.MockOptimizer(SimpleLPModel{Float64}())) - @test JuMP.bridge_constraints(model) + model = Model( + () -> MOIU.MockOptimizer(SimpleLPModel{Float64}()); + force_bridge_formulation = true, + ) @test JuMP.backend(model) isa MOIU.CachingOptimizer @test JuMP.backend(model).optimizer isa MOI.Bridges.LazyBridgeOptimizer - @test JuMP.backend(model).optimizer.model isa MOIU.MockOptimizer + @test JuMP.unsafe_backend(model) isa MOIU.MockOptimizer @variable model x cref = @constraint model 0 <= x + 1 <= 1 @test cref isa JuMP.ConstraintRef{ @@ -220,34 +222,10 @@ function test_bridges_automatic() return JuMP.optimize!(model) end -function test_bridges_automatic_disabled() - # Automatic bridging disabled with `bridge_constraints` keyword - model = Model( - () -> MOIU.MockOptimizer(SimpleLPModel{Float64}()), - bridge_constraints = false, - ) - @test !JuMP.bridge_constraints(model) - @test JuMP.backend(model) isa MOIU.CachingOptimizer - @test !(JuMP.backend(model).optimizer isa MOI.Bridges.LazyBridgeOptimizer) - @variable model x - F = MOI.ScalarAffineFunction{Float64} - S = MOI.Interval{Float64} - err = ErrorException( - "Constraints of type $(F)-in-$(S) are not supported by the " * - "solver.\n\nIf you expected the solver to support your problem, " * - "you may have an error in our formulation. 
Otherwise, consider " * - "using a different solver.\n\nThe list of available solvers, " * - "along with the problem types they support, is available at " * - "https://jump.dev/JuMP.jl/stable/installation/#Supported-solvers.", - ) - @test_throws err @constraint model 0 <= x + 1 <= 1 -end - function test_bridges_direct() # No bridge automatically added in Direct mode optimizer = MOIU.MockOptimizer(SimpleLPModel{Float64}()) model = JuMP.direct_model(optimizer) - @test !JuMP.bridge_constraints(model) @variable model x F = MOI.ScalarAffineFunction{Float64} S = MOI.Interval{Float64} @@ -279,7 +257,7 @@ function mock_factory() end function test_bridges_add_before_con_model_optimizer() - model = Model(mock_factory) + model = Model(mock_factory; force_bridge_formulation = true) @variable(model, x) JuMP.add_bridge(model, NonnegativeBridge) c = @constraint(model, x in Nonnegative()) @@ -357,7 +335,7 @@ function test_bridges_add_bridgeable_con_set_optimizer() constraint = ScalarConstraint(x, Nonnegative()) bc = BridgeableConstraint(constraint, NonnegativeBridge) c = add_constraint(model, bc) - set_optimizer(model, mock_factory) + set_optimizer(model, mock_factory; force_bridge_formulation = true) JuMP.optimize!(model) @test 1.0 == @inferred JuMP.value(x) @test 1.0 == @inferred JuMP.value(c) @@ -365,19 +343,13 @@ function test_bridges_add_bridgeable_con_set_optimizer() end function test_bridge_graph_false() - model = Model(mock_factory, bridge_constraints = false) + model = Model(mock_factory) @variable(model, x) @test_throws( ErrorException( - "Cannot add bridge if `bridge_constraints` was set to `false` in " * - "the `Model` constructor.", - ), - add_bridge(model, NonnegativeBridge) - ) - @test_throws( - ErrorException( - "Cannot print bridge graph if `bridge_constraints` was set to " * - "`false` in the `Model` constructor.", + "`In order to print the bridge graph, you must pass " * + "`force_bridge_formulation = true` to `Model`, i.e., " * + "`Model(optimizer; force_bridge_formulation = true)`.", ), print_bridge_graph(model) ) @@ -485,7 +457,7 @@ end function dummy_optimizer_hook(::JuMP.AbstractModel) end function copy_model_style_mode(use_copy_model, caching_mode, filter_mode) - model = Model(caching_mode = caching_mode) + model = Model() model.optimize_hook = dummy_optimizer_hook data = DummyExtensionData(model) model.ext[:dummy] = data @@ -572,12 +544,6 @@ end function test_copy_model_base_auto() return copy_model_style_mode(false, MOIU.AUTOMATIC, false) end -function test_copy_model_jump_manual() - return copy_model_style_mode(true, MOIU.MANUAL, false) -end -function test_copy_model_base_manual() - return copy_model_style_mode(false, MOIU.MANUAL, false) -end function test_copy_direct_mode() mock = MOIU.MockOptimizer(MOIU.Model{Float64}()) @@ -781,7 +747,7 @@ function test_copy_conflict() ) JuMP.optimize!(model) - mockoptimizer = JuMP.backend(model).optimizer.model + mockoptimizer = JuMP.unsafe_backend(model) MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.INFEASIBLE) MOI.set(mockoptimizer, MOI.ConflictStatus(), MOI.CONFLICT_FOUND) MOI.set( @@ -813,6 +779,25 @@ function test_copy_conflict() @test "cref[1]" == @inferred JuMP.name(cref_1_new) end +function test_bridge_formulation() + optimizer = () -> MOIU.MockOptimizer(MOIU.Model{Float64}()) + model = @test_logs (:warn,) Model(optimizer, bridge_constraints = true) + @test isa( + backend(model), + MOI.Utilities.CachingOptimizer{<:MOI.Bridges.LazyBridgeOptimizer}, + ) + return +end + +function test_bridge_constraints() + model = @test_logs 
(:warn,) Model(mock_factory, bridge_constraints = true) + @test isa( + backend(model), + MOI.Utilities.CachingOptimizer{<:MOI.Bridges.LazyBridgeOptimizer}, + ) + return +end + function runtests() for name in names(@__MODULE__; all = true) if !startswith("$(name)", "test_")