Update to new MOI nonlinear interface (#1052)
* Update to new MOI nonlinear interface

* Fix

* Fixes

* Fixes

* Fix tests
blegat authored Dec 12, 2023
1 parent 2970e1b commit 75ff19c
Showing 2 changed files with 79 additions and 49 deletions.
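For context: this commit moves the wrapper off the legacy `MOI.NLPBlock` workflow, in which the caller hands over a pre-built `MOI.NLPBlockData`, and onto the `MOI.Nonlinear` submodule, in which the wrapper collects `ScalarNonlinearFunction` objectives and constraints itself and builds the evaluator at solve time. A minimal sketch of the new workflow, with a hypothetical variable and expression:

import MathOptInterface as MOI

nlp_model = MOI.Nonlinear.Model()
x = MOI.VariableIndex(1)
f = MOI.ScalarNonlinearFunction(:^, Any[x, 2])  # the expression x^2
MOI.Nonlinear.set_objective(nlp_model, f)
MOI.Nonlinear.add_constraint(nlp_model, f, MOI.LessThan(1.0))

# At solve time the wrapper turns this into the same NLPBlockData that the
# old interface received up front (see `MOI.optimize!` below):
evaluator = MOI.Nonlinear.Evaluator(
    nlp_model, MOI.Nonlinear.SparseReverseMode(), [x],
)
nlp_data = MOI.NLPBlockData(evaluator)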
100 changes: 64 additions & 36 deletions src/MOI_wrapper.jl
@@ -1,11 +1,10 @@
import MathOptInterface
const MOI = MathOptInterface
import MathOptInterface as MOI

mutable struct Optimizer{T} <: MOI.AbstractOptimizer
# Problem data.
variables::MOI.Utilities.VariablesContainer{T}
starting_values::Vector{Union{Nothing,T}}
nlp_data::Union{MOI.NLPBlockData,Nothing}
nlp_model::Union{MOI.Nonlinear.Model,Nothing}
sense::MOI.OptimizationSense

# Parameters.
@@ -33,7 +32,9 @@ Optimizer() = Optimizer{Float64}()

MOI.supports(::Optimizer, ::MOI.NLPBlock) = true

MOI.supports(::Optimizer, ::MOI.ObjectiveSense) = true
function MOI.supports(::Optimizer, ::Union{MOI.ObjectiveSense,MOI.ObjectiveFunction})
return true
end
MOI.supports(::Optimizer, ::MOI.Silent) = true
function MOI.supports(::Optimizer, p::MOI.RawOptimizerAttribute)
return p.name == "method" || hasfield(Options, Symbol(p.name))
@@ -44,6 +45,7 @@ function MOI.supports(::Optimizer, ::MOI.VariablePrimalStart, ::Type{MOI.Variabl
end

const BOUNDS{T} = Union{MOI.LessThan{T},MOI.GreaterThan{T},MOI.EqualTo{T},MOI.Interval{T}}
const _SETS{T} = Union{MOI.GreaterThan{T},MOI.LessThan{T},MOI.EqualTo{T}}

function MOI.supports_constraint(
::Optimizer{T},
@@ -53,6 +55,14 @@ function MOI.supports_constraint(
return true
end

function MOI.supports_constraint(
::Optimizer{T},
::Type{MOI.ScalarNonlinearFunction},
::Type{<:_SETS{T}},
) where {T}
return true
end

MOI.supports_incremental_interface(::Optimizer) = true

function MOI.copy_to(model::Optimizer, src::MOI.ModelLike)
@@ -65,6 +75,14 @@ function MOI.set(model::Optimizer, ::MOI.ObjectiveSense, sense::MOI.Optimization
model.sense = sense
return
end
function MOI.set(model::Optimizer, ::MOI.ObjectiveFunction{F}, func::F) where {F}
nl = convert(MOI.ScalarNonlinearFunction, func)
if isnothing(model.nlp_model)
model.nlp_model = MOI.Nonlinear.Model()
end
MOI.Nonlinear.set_objective(model.nlp_model, nl)
return nothing
end

function MOI.set(model::Optimizer, ::MOI.Silent, value::Bool)
model.silent = value
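The new `MOI.ObjectiveFunction` setter funnels every supported objective type through `convert(MOI.ScalarNonlinearFunction, func)`, so affine and quadratic objectives share the nonlinear code path. A hedged caller-side sketch (the wrapper lives in Optim.jl, so `Optim.Optimizer` is assumed):

import MathOptInterface as MOI
import Optim

model = Optim.Optimizer()
x = MOI.add_variable(model)
aff = MOI.ScalarAffineFunction([MOI.ScalarAffineTerm(2.0, x)], 1.0)  # 2x + 1
MOI.set(model, MOI.ObjectiveFunction{typeof(aff)}(), aff)
# Stored internally as a ScalarNonlinearFunction in model.nlp_model.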
@@ -112,7 +130,7 @@ MOI.get(model::Optimizer, ::MOI.SolveTimeSec) = time_run(model.results)
function MOI.empty!(model::Optimizer)
MOI.empty!(model.variables)
empty!(model.starting_values)
model.nlp_data = nothing
model.nlp_model = nothing
model.sense = MOI.FEASIBILITY_SENSE
model.results = nothing
return
@@ -121,12 +139,12 @@ end
function MOI.is_empty(model::Optimizer)
return MOI.is_empty(model.variables) &&
isempty(model.starting_values) &&
model.nlp_data === nothing &&
isnothing(model.nlp_model) &&
model.sense == MOI.FEASIBILITY_SENSE
end

function MOI.add_variable(model::Optimizer{T}) where {T}
push!(model.starting_values, NaN)
push!(model.starting_values, nothing)
return MOI.add_variable(model.variables)
end
function MOI.is_valid(model::Optimizer, index::Union{MOI.VariableIndex,MOI.ConstraintIndex})
@@ -137,6 +155,18 @@ function MOI.add_constraint(model::Optimizer{T}, vi::MOI.VariableIndex, set::BOU
return MOI.add_constraint(model.variables, vi, set)
end

function MOI.add_constraint(
model::Optimizer{T},
f::MOI.ScalarNonlinearFunction,
s::_SETS{T},
) where {T}
if model.nlp_model === nothing
model.nlp_model = MOI.Nonlinear.Model()
end
index = MOI.Nonlinear.add_constraint(model.nlp_model, f, s)
return MOI.ConstraintIndex{typeof(f),typeof(s)}(index.value)
end

function starting_value(optimizer::Optimizer{T}, i) where {T}
if optimizer.starting_values[i] !== nothing
return optimizer.starting_values[i]
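With `MOI.add_constraint` now accepting `MOI.ScalarNonlinearFunction` directly, nonlinear constraints no longer have to arrive bundled in an NLPBlock. A minimal sketch under the same assumptions as above (hypothetical expression):

import MathOptInterface as MOI
import Optim

model = Optim.Optimizer()
x = MOI.add_variable(model)
g = MOI.ScalarNonlinearFunction(:sin, Any[x])  # the constraint function sin(x)
ci = MOI.add_constraint(model, g, MOI.LessThan(0.5))
# The first nonlinear constraint lazily creates model.nlp_model.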
@@ -157,11 +187,6 @@ function MOI.set(
return
end

function MOI.set(model::Optimizer, ::MOI.NLPBlock, nlp_data::MOI.NLPBlockData)
model.nlp_data = nlp_data
return
end

function requested_features(::ZerothOrderOptimizer, has_constraints)
return Symbol[]
end
@@ -198,40 +223,47 @@ function sym_sparse_to_dense!(A, I::Vector, nzval)
end

function MOI.optimize!(model::Optimizer{T}) where {T}
num_variables = length(model.starting_values)
backend = MOI.Nonlinear.SparseReverseMode()
vars = MOI.get(model.variables, MOI.ListOfVariableIndices())
evaluator = MOI.Nonlinear.Evaluator(model.nlp_model, backend, vars)
nlp_data = MOI.NLPBlockData(evaluator)

# load parameters
if model.nlp_data === nothing || !model.nlp_data.has_objective
error("An objective should be provided to Optim with `@NLobjective`.")
if isnothing(model.nlp_model)
error("An objective should be provided to Optim with `@objective`.")
end
objective_scale = model.sense == MOI.MAX_SENSE ? -one(T) : one(T)
zero_μ = zeros(T, length(model.nlp_data.constraint_bounds))
zero_μ = zeros(T, length(nlp_data.constraint_bounds))
function f(x)
return objective_scale * MOI.eval_objective(model.nlp_data.evaluator, x)
return objective_scale * MOI.eval_objective(evaluator, x)
end
function g!(G, x)
fill!(G, zero(T))
MOI.eval_objective_gradient(model.nlp_data.evaluator, G, x)
MOI.eval_objective_gradient(evaluator, G, x)
if model.sense == MOI.MAX_SENSE
rmul!(G, objective_scale)
end
return G
end
function h!(H, x)
fill!(H, zero(T))
MOI.eval_hessian_lagrangian(model.nlp_data.evaluator, H_nzval, x, objective_scale, zero_μ)
MOI.eval_hessian_lagrangian(evaluator, H_nzval, x, objective_scale, zero_μ)
sym_sparse_to_dense!(H, hessian_structure, H_nzval)
return H
end

method = model.method
nl_constrained = !isempty(model.nlp_data.constraint_bounds)
features = MOI.features_available(model.nlp_data.evaluator)
nl_constrained = !isempty(nlp_data.constraint_bounds)
features = MOI.features_available(evaluator)
has_bounds = any(vi -> isfinite(model.variables.lower[vi.value]) || isfinite(model.variables.upper[vi.value]), vars)
if method === nothing
if nl_constrained
method = IPNewton()
elseif :Grad in features
if :Hess in features
# FIXME `fallback_method(f, g!, h!)` returns `Newton`, but `Newton` does not
# support variable bounds. On the other hand, `fallback_method(f, g!)` returns
# `LBFGS`, which works with bounds (wrapped in `Fminbox`) when `has_bounds` is true.
if :Hess in features && !has_bounds
method = fallback_method(f, g!, h!)
else
method = fallback_method(f, g!)
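The FIXME above refers to Optim's internal `fallback_method` helper; the following illustration of the resulting choice relies on internal names, so treat it as an assumption about the current Optim.jl source rather than public API:

import Optim

f(x) = sum(abs2, x)
g!(G, x) = (G .= 2 .* x)
h!(H, x) = (H .= [2.0 0.0; 0.0 2.0])  # Hessian of f for two variables

Optim.fallback_method(f, g!, h!)  # Newton(); not usable with variable bounds
Optim.fallback_method(f, g!)      # LBFGS(); works with bounds via Fminbox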
@@ -241,20 +273,15 @@ function MOI.optimize!(model::Optimizer{T}) where {T}
end
end
used_features = requested_features(method, nl_constrained)
MOI.initialize(model.nlp_data.evaluator, used_features)
MOI.initialize(evaluator, used_features)

if :Hess in used_features
hessian_structure = MOI.hessian_lagrangian_structure(model.nlp_data.evaluator)
hessian_structure = MOI.hessian_lagrangian_structure(evaluator)
H_nzval = zeros(T, length(hessian_structure))
end

initial_x = starting_value.(model, eachindex(model.starting_values))
options = copy(model.options)
has_bounds = any(i -> isfinite(model.variables.lower[i]) || isfinite(model.variables.upper[i]), eachindex(model.starting_values))
if has_bounds
lower = [info.lower_bound for info in model.variable_info]
upper = [info.upper_bound for info in model.variable_info]
end
if !nl_constrained && has_bounds && !(method isa IPNewton)
options = Options(; options...)
model.results = optimize(f, g!, model.variables.lower, model.variables.upper, initial_x, Fminbox(method), options; inplace = true)
@@ -264,26 +291,26 @@ function MOI.optimize!(model::Optimizer{T}) where {T}
options = Options(; options...)
if nl_constrained || has_bounds
if nl_constrained
lc = [b.lower for b in model.nlp_data.constraint_bounds]
uc = [b.upper for b in model.nlp_data.constraint_bounds]
c!(c, x) = MOI.eval_constraint(model.nlp_data.evaluator, c, x)
lc = [b.lower for b in nlp_data.constraint_bounds]
uc = [b.upper for b in nlp_data.constraint_bounds]
c!(c, x) = MOI.eval_constraint(evaluator, c, x)
if !(:Jac in features)
error("Nonlinear constraints should be differentiable to be used with Optim.")
end
if !(:Hess in features)
error("Nonlinear constraints should be twice differentiable to be used with Optim.")
end
jacobian_structure = MOI.jacobian_structure(model.nlp_data.evaluator)
jacobian_structure = MOI.jacobian_structure(evaluator)
J_nzval = zeros(T, length(jacobian_structure))
function jacobian!(J, x)
fill!(J, zero(T))
MOI.eval_constraint_jacobian(model.nlp_data.evaluator, J_nzval, x)
MOI.eval_constraint_jacobian(evaluator, J_nzval, x)
sparse_to_dense!(J, jacobian_structure, J_nzval)
return J
end
function con_hessian!(H, x, λ)
fill!(H, zero(T))
MOI.eval_hessian_lagrangian(model.nlp_data.evaluator, H_nzval, x, zero(T), λ)
MOI.eval_hessian_lagrangian(evaluator, H_nzval, x, zero(T), λ)
sym_sparse_to_dense!(H, hessian_structure, H_nzval)
return H
end
@@ -293,7 +320,8 @@ function MOI.optimize!(model::Optimizer{T}) where {T}
)
else
@assert has_bounds
c = TwiceDifferentiableConstraints(model.variables.lower, model.variables.upper)
c = TwiceDifferentiableConstraints(
model.variables.lower, model.variables.upper)
end
model.results = optimize(d, c, initial_x, method, options)
else
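Taken together, the rewritten `MOI.optimize!` lets a model built purely from `ScalarNonlinearFunction`s run end to end. A hedged sketch that minimizes (x - 1)^2, assuming default options and method selection:

import MathOptInterface as MOI
import Optim

model = Optim.Optimizer()
x = MOI.add_variable(model)
MOI.set(model, MOI.VariablePrimalStart(), x, 0.0)
MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE)
obj = MOI.ScalarNonlinearFunction(
    :^, Any[MOI.ScalarNonlinearFunction(:-, Any[x, 1.0]), 2],
)
MOI.set(model, MOI.ObjectiveFunction{MOI.ScalarNonlinearFunction}(), obj)
MOI.optimize!(model)
MOI.get(model, MOI.VariablePrimal(), x)  # ≈ 1.0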
28 changes: 15 additions & 13 deletions test/MOI_wrapper.jl
@@ -44,27 +44,29 @@ function test_MOI_Test()
MOI.ObjectiveBound,
MOI.DualObjectiveValue,
MOI.SolverVersion,
MOI.ConstraintDual,
],
),
exclude = String[
# No objective
"test_attribute_SolveTimeSec",
"test_attribute_RawStatusString",
"test_nonlinear_without_objective",
# FIXME INVALID_MODEL should be returned
"test_nonlinear_invalid",
# FIXME The Hessian callback for constraints is called with
# `λ = [-Inf, 0.0]`, and then we get `NaN`, ...
"hs071",
# There are nonlinear constraints, so we need `IPNewton`, but `IPNewton` needs a Hessian.
"test_nonlinear_hs071_no_hessian",
# FIXME There is no Hessian here, but there is a Hessian-vector product; can `IPNewton` work with that?
"test_nonlinear_hs071_hessian_vector_product",
# FIXME needs https://github.com/jump-dev/MathOptInterface.jl/pull/1625
"test_nonlinear_hs071_NLPBlockDual",
# - CachingOptimizer does not throw if optimizer not attached
"test_model_copy_to_UnsupportedAttribute",
"test_model_copy_to_UnsupportedConstraint",
"expression_hs071",
# Terminates with `OTHER_ERROR`
"test_objective_ObjectiveFunction_duplicate_terms",
"test_objective_ObjectiveFunction_constant",
"test_objective_ObjectiveFunction_VariableIndex",
"test_objective_FEASIBILITY_SENSE_clears_objective",
"test_nonlinear_expression_hs109",
"test_objective_qp_ObjectiveFunction_zero_ofdiag",
"test_objective_qp_ObjectiveFunction_edge_cases",
"test_solve_TerminationStatus_DUAL_INFEASIBLE",
"test_solve_result_index",
"test_modification_transform_singlevariable_lessthan",
"test_modification_delete_variables_in_a_batch",
"test_modification_delete_variable_with_single_variable_obj",
],
)
return
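For reference, exclusion lists like the one above are passed to `MOI.Test.runtests`; a minimal sketch of the pattern, with illustrative (not actual) configuration values:

import MathOptInterface as MOI
import Optim

optimizer = MOI.instantiate(Optim.Optimizer; with_bridge_type = Float64)
config = MOI.Test.Config(; exclude = Any[MOI.ConstraintDual, MOI.DualObjectiveValue])
MOI.Test.runtests(optimizer, config; exclude = ["test_nonlinear_invalid"])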
