From 772d30c95071c7dc709f505ae63422f2455c594f Mon Sep 17 00:00:00 2001 From: odow Date: Thu, 14 Jul 2022 16:30:38 +1200 Subject: [PATCH 1/7] WIP --- src/MOI_wrapper.jl | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/MOI_wrapper.jl b/src/MOI_wrapper.jl index 3657626..7701c46 100644 --- a/src/MOI_wrapper.jl +++ b/src/MOI_wrapper.jl @@ -25,6 +25,7 @@ mutable struct Optimizer <: MOI.AbstractOptimizer variable_lower_start::Vector{Union{Nothing,Float64}} variable_upper_start::Vector{Union{Nothing,Float64}} nlp_data::MOI.NLPBlockData + nlp_data_needs_initialize::Bool sense::MOI.OptimizationSense objective::Union{ Nothing, @@ -81,6 +82,7 @@ mutable struct Optimizer <: MOI.AbstractOptimizer Union{Nothing,Float64}[], Union{Nothing,Float64}[], MOI.NLPBlockData([], _EmptyNLPEvaluator(), false), + true, MOI.FEASIBILITY_SENSE, nothing, _ConstraintInfo{ @@ -693,6 +695,7 @@ MOI.supports(::Optimizer, ::MOI.NLPBlock) = true function MOI.set(model::Optimizer, ::MOI.NLPBlock, nlp_data::MOI.NLPBlockData) model.nlp_data = nlp_data + model.nlp_data_needs_initialize = true return end @@ -1095,7 +1098,10 @@ function MOI.optimize!(model::Optimizer) if num_nlp_constraints > 0 push!(init_feat, :Jac) end - MOI.initialize(model.nlp_data.evaluator, init_feat) + if model.nlp_data_needs_initialize + MOI.initialize(model.nlp_data.evaluator, init_feat) + model.nlp_data_needs_initialize = false + end jacobian_sparsity = _jacobian_structure(model) hessian_sparsity = Tuple{Int,Int}[] if has_hessian From 92dcf801361090b6953d39dd7816a32d6812c79e Mon Sep 17 00:00:00 2001 From: odow Date: Fri, 15 Jul 2022 15:46:36 +1200 Subject: [PATCH 2/7] Refactor LQ components --- src/MOI_wrapper.jl | 902 ++++++++------------------------------------- src/utils.jl | 496 +++++++++++++++++++++++++ 2 files changed, 656 insertions(+), 742 deletions(-) create mode 100644 src/utils.jl diff --git a/src/MOI_wrapper.jl b/src/MOI_wrapper.jl index 7701c46..6865243 100644 --- a/src/MOI_wrapper.jl +++ b/src/MOI_wrapper.jl @@ -3,13 +3,7 @@ # Use of this source code is governed by an MIT-style license that can be found # in the LICENSE.md file or at https://opensource.org/licenses/MIT. 
-mutable struct _ConstraintInfo{F,S} - func::F - set::S - dual_start::Union{Nothing,Float64} -end - -_ConstraintInfo(func, set) = _ConstraintInfo(func, set, nothing) +include("utils.jl") """ Optimizer() @@ -20,56 +14,22 @@ mutable struct Optimizer <: MOI.AbstractOptimizer inner::Union{Nothing,IpoptProblem} name::String invalid_model::Bool + silent::Bool + options::Dict{String,Any} + solve_time::Float64 + sense::MOI.OptimizationSense + variables::MOI.Utilities.VariablesContainer{Float64} variable_primal_start::Vector{Union{Nothing,Float64}} - variable_lower_start::Vector{Union{Nothing,Float64}} - variable_upper_start::Vector{Union{Nothing,Float64}} + mult_x_L::Vector{Union{Nothing,Float64}} + mult_x_U::Vector{Union{Nothing,Float64}} + nlp_data::MOI.NLPBlockData nlp_data_needs_initialize::Bool - sense::MOI.OptimizationSense - objective::Union{ - Nothing, - MOI.VariableIndex, - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, - } - linear_le_constraints::Vector{ - _ConstraintInfo{ - MOI.ScalarAffineFunction{Float64}, - MOI.LessThan{Float64}, - }, - } - linear_ge_constraints::Vector{ - _ConstraintInfo{ - MOI.ScalarAffineFunction{Float64}, - MOI.GreaterThan{Float64}, - }, - } - linear_eq_constraints::Vector{ - _ConstraintInfo{MOI.ScalarAffineFunction{Float64},MOI.EqualTo{Float64}}, - } - quadratic_le_constraints::Vector{ - _ConstraintInfo{ - MOI.ScalarQuadraticFunction{Float64}, - MOI.LessThan{Float64}, - }, - } - quadratic_ge_constraints::Vector{ - _ConstraintInfo{ - MOI.ScalarQuadraticFunction{Float64}, - MOI.GreaterThan{Float64}, - }, - } - quadratic_eq_constraints::Vector{ - _ConstraintInfo{ - MOI.ScalarQuadraticFunction{Float64}, - MOI.EqualTo{Float64}, - }, - } nlp_dual_start::Union{Nothing,Vector{Float64}} - silent::Bool - options::Dict{String,Any} - solve_time::Float64 + + linear::LinearQuadraticConstraintBlock{Float64} + callback::Union{Nothing,Function} function Optimizer() @@ -77,47 +37,31 @@ mutable struct Optimizer <: MOI.AbstractOptimizer nothing, "", false, + false, + Dict{String,Any}(), + NaN, + MOI.FEASIBILITY_SENSE, MOI.Utilities.VariablesContainer{Float64}(), Union{Nothing,Float64}[], Union{Nothing,Float64}[], Union{Nothing,Float64}[], MOI.NLPBlockData([], _EmptyNLPEvaluator(), false), true, - MOI.FEASIBILITY_SENSE, - nothing, - _ConstraintInfo{ - MOI.ScalarAffineFunction{Float64}, - MOI.LessThan{Float64}, - }[], - _ConstraintInfo{ - MOI.ScalarAffineFunction{Float64}, - MOI.GreaterThan{Float64}, - }[], - _ConstraintInfo{ - MOI.ScalarAffineFunction{Float64}, - MOI.EqualTo{Float64}, - }[], - _ConstraintInfo{ - MOI.ScalarQuadraticFunction{Float64}, - MOI.LessThan{Float64}, - }[], - _ConstraintInfo{ - MOI.ScalarQuadraticFunction{Float64}, - MOI.GreaterThan{Float64}, - }[], - _ConstraintInfo{ - MOI.ScalarQuadraticFunction{Float64}, - MOI.EqualTo{Float64}, - }[], nothing, - false, - Dict{String,Any}(), - NaN, + LinearQuadraticConstraintBlock{Float64}(), nothing, ) end end +const _SETS = + Union{MOI.GreaterThan{Float64},MOI.LessThan{Float64},MOI.EqualTo{Float64}} + +const _FUNCTIONS = Union{ + MOI.ScalarAffineFunction{Float64}, + MOI.ScalarQuadraticFunction{Float64}, +} + MOI.get(::Optimizer, ::MOI.SolverVersion) = "3.14.4" ### _EmptyNLPEvaluator @@ -135,36 +79,26 @@ MOI.eval_hessian_lagrangian(::_EmptyNLPEvaluator, H, x, σ, μ) = nothing function MOI.empty!(model::Optimizer) model.inner = nothing model.invalid_model = false + model.sense = MOI.FEASIBILITY_SENSE MOI.empty!(model.variables) empty!(model.variable_primal_start) - 
empty!(model.variable_lower_start) - empty!(model.variable_upper_start) + empty!(model.mult_x_L) + empty!(model.mult_x_U) model.nlp_data = MOI.NLPBlockData([], _EmptyNLPEvaluator(), false) - model.sense = MOI.FEASIBILITY_SENSE - model.objective = nothing - empty!(model.linear_le_constraints) - empty!(model.linear_ge_constraints) - empty!(model.linear_eq_constraints) - empty!(model.quadratic_le_constraints) - empty!(model.quadratic_ge_constraints) - empty!(model.quadratic_eq_constraints) + model.nlp_data_needs_initialize = true model.nlp_dual_start = nothing + model.linear = LinearQuadraticConstraintBlock{Float64}() + model.callback = nothing return end function MOI.is_empty(model::Optimizer) return MOI.is_empty(model.variables) && isempty(model.variable_primal_start) && - isempty(model.variable_lower_start) && - isempty(model.variable_upper_start) && + isempty(model.mult_x_L) && + isempty(model.mult_x_U) && model.nlp_data.evaluator isa _EmptyNLPEvaluator && - model.sense == MOI.FEASIBILITY_SENSE && - isempty(model.linear_le_constraints) && - isempty(model.linear_ge_constraints) && - isempty(model.linear_eq_constraints) && - isempty(model.quadratic_le_constraints) && - isempty(model.quadratic_ge_constraints) && - isempty(model.quadratic_eq_constraints) + model.sense == MOI.FEASIBILITY_SENSE end MOI.supports_incremental_interface(::Optimizer) = true @@ -177,42 +111,18 @@ MOI.get(::Optimizer, ::MOI.SolverName) = "Ipopt" function MOI.supports_constraint( ::Optimizer, - ::Type{ - <:Union{ - MOI.VariableIndex, - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, - }, - }, - ::Type{ - <:Union{ - MOI.LessThan{Float64}, - MOI.GreaterThan{Float64}, - MOI.EqualTo{Float64}, - }, - }, + ::Type{<:Union{MOI.VariableIndex,_FUNCTIONS}}, + ::Type{<:_SETS}, ) return true end -function MOI.get(model::Optimizer, ::MOI.ListOfConstraintTypesPresent) - ret = MOI.get(model.variables, MOI.ListOfConstraintTypesPresent()) - constraints = Set{Tuple{Type,Type}}() - for F in ( - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, - ) - for S in ( - MOI.LessThan{Float64}, - MOI.GreaterThan{Float64}, - MOI.EqualTo{Float64}, - ) - if !isempty(_constraints(model, F, S)) - push!(constraints, (F, S)) - end - end - end - return append!(ret, collect(constraints)) +### MOI.ListOfConstraintTypesPresent + +function MOI.get(model::Optimizer, attr::MOI.ListOfConstraintTypesPresent) + ret = MOI.get(model.variables, attr) + append!(MOI.get(model.linear, attr)) + return ret end ### MOI.Name @@ -282,8 +192,8 @@ column(x::MOI.VariableIndex) = x.value function MOI.add_variable(model::Optimizer) push!(model.variable_primal_start, nothing) - push!(model.variable_lower_start, nothing) - push!(model.variable_upper_start, nothing) + push!(model.mult_x_L, nothing) + push!(model.mult_x_U, nothing) return MOI.add_variable(model.variables) end @@ -300,38 +210,30 @@ end function MOI.is_valid( model::Optimizer, - ci::MOI.ConstraintIndex{MOI.VariableIndex,S}, -) where {S<:Union{MOI.LessThan,MOI.GreaterThan,MOI.EqualTo}} + ci::MOI.ConstraintIndex{MOI.VariableIndex,<:_SETS}, +) return MOI.is_valid(model.variables, ci) end function MOI.get( model::Optimizer, attr::Union{ - MOI.NumberOfConstraints{MOI.VariableIndex,S}, - MOI.ListOfConstraintIndices{MOI.VariableIndex,S}, + MOI.NumberOfConstraints{MOI.VariableIndex,<:_SETS}, + MOI.ListOfConstraintIndices{MOI.VariableIndex,<:_SETS}, }, -) where {S<:Union{MOI.LessThan,MOI.GreaterThan,MOI.EqualTo}} +) return MOI.get(model.variables, attr) end function MOI.get( 
model::Optimizer, attr::Union{MOI.ConstraintFunction,MOI.ConstraintSet}, - c::MOI.ConstraintIndex{MOI.VariableIndex,S}, -) where {S<:Union{MOI.LessThan,MOI.GreaterThan,MOI.EqualTo}} + c::MOI.ConstraintIndex{MOI.VariableIndex,<:_SETS}, +) return MOI.get(model.variables, attr, c) end -function MOI.add_constraint( - model::Optimizer, - x::MOI.VariableIndex, - set::Union{ - MOI.LessThan{Float64}, - MOI.GreaterThan{Float64}, - MOI.EqualTo{Float64}, - }, -) +function MOI.add_constraint(model::Optimizer, x::MOI.VariableIndex, set::_SETS) return MOI.add_constraint(model.variables, x, set) end @@ -340,15 +242,15 @@ function MOI.set( ::MOI.ConstraintSet, ci::MOI.ConstraintIndex{MOI.VariableIndex,S}, set::S, -) where {S<:Union{MOI.LessThan,MOI.GreaterThan,MOI.EqualTo}} +) where {S<:_SETS} MOI.set(model.variables, MOI.ConstraintSet(), ci, set) return end function MOI.delete( model::Optimizer, - ci::MOI.ConstraintIndex{MOI.VariableIndex,S}, -) where {S<:Union{MOI.LessThan,MOI.GreaterThan,MOI.EqualTo}} + ci::MOI.ConstraintIndex{MOI.VariableIndex,<:_SETS}, +) MOI.delete(model.variables, ci) return end @@ -357,210 +259,53 @@ end function MOI.is_valid( model::Optimizer, - ci::MOI.ConstraintIndex{F,S}, -) where { - F<:Union{ - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, - }, - S<:Union{MOI.LessThan,MOI.GreaterThan,MOI.EqualTo}, -} - return 1 <= ci.value <= length(_constraints(model, F, S)) -end - -function _constraints( - model::Optimizer, - ::Type{MOI.ScalarAffineFunction{Float64}}, - ::Type{MOI.LessThan{Float64}}, -) - return model.linear_le_constraints -end - -function _constraints( - model::Optimizer, - ::Type{MOI.ScalarAffineFunction{Float64}}, - ::Type{MOI.GreaterThan{Float64}}, -) - return model.linear_ge_constraints -end - -function _constraints( - model::Optimizer, - ::Type{MOI.ScalarAffineFunction{Float64}}, - ::Type{MOI.EqualTo{Float64}}, -) - return model.linear_eq_constraints -end - -function _constraints( - model::Optimizer, - ::Type{MOI.ScalarQuadraticFunction{Float64}}, - ::Type{MOI.LessThan{Float64}}, -) - return model.quadratic_le_constraints -end - -function _constraints( - model::Optimizer, - ::Type{MOI.ScalarQuadraticFunction{Float64}}, - ::Type{MOI.GreaterThan{Float64}}, -) - return model.quadratic_ge_constraints -end - -function _constraints( - model::Optimizer, - ::Type{MOI.ScalarQuadraticFunction{Float64}}, - ::Type{MOI.EqualTo{Float64}}, + ci::MOI.ConstraintIndex{<:_FUNCTIONS,<:_SETS}, ) - return model.quadratic_eq_constraints -end - -function _check_inbounds(model::Optimizer, var::MOI.VariableIndex) - MOI.throw_if_not_valid(model, var) - return + return MOI.is_valid(model.linear, ci) end -function _check_inbounds(model::Optimizer, aff::MOI.ScalarAffineFunction) - for term in aff.terms - MOI.throw_if_not_valid(model, term.variable) - end - return -end - -function _check_inbounds(model::Optimizer, quad::MOI.ScalarQuadraticFunction) - for term in quad.affine_terms - MOI.throw_if_not_valid(model, term.variable) - end - for term in quad.quadratic_terms - MOI.throw_if_not_valid(model, term.variable_1) - MOI.throw_if_not_valid(model, term.variable_2) - end - return -end - -function MOI.add_constraint( - model::Optimizer, - func::F, - set::S, -) where { - F<:Union{ - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, - }, - S<:MOI.AbstractScalarSet, -} - _check_inbounds(model, func) - constraints = _constraints(model, F, S) - push!(constraints, _ConstraintInfo(func, set)) - return MOI.ConstraintIndex{F,S}(length(constraints)) -end - 
-function MOI.get( - model::Optimizer, - ::MOI.NumberOfConstraints{F,S}, -) where { - F<:Union{ - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, - }, - S, -} - return length(_constraints(model, F, S)) +function MOI.add_constraint(model::Optimizer, func::_FUNCTIONS, set::_SETS) + return MOI.add_constraint(model.linear, func, set) end function MOI.get( model::Optimizer, - ::MOI.ListOfConstraintIndices{F,S}, -) where { - F<:Union{ - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, - }, - S, -} - return MOI.ConstraintIndex{F,S}[ - MOI.ConstraintIndex{F,S}(i) for - i in eachindex(_constraints(model, F, S)) - ] + attr::Union{MOI.NumberOfConstraints{F,S},MOI.ListOfConstraintIndices{F,S}}, +) where {F<:_FUNCTIONS,S<:_SETS} + return MOI.get(model.linear, attr) end function MOI.get( model::Optimizer, - ::MOI.ConstraintFunction, - c::MOI.ConstraintIndex{F,S}, -) where { - F<:Union{ - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, + attr::Union{ + MOI.ConstraintFunction, + MOI.ConstraintSet, + MOI.ConstraintDualStart, }, - S, -} - return _constraints(model, F, S)[c.value].func -end - -function MOI.get( - model::Optimizer, - ::MOI.ConstraintSet, c::MOI.ConstraintIndex{F,S}, -) where { - F<:Union{ - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, - }, - S, -} - return _constraints(model, F, S)[c.value].set +) where {F<:_FUNCTIONS,S<:_SETS} + return MOI.get(model.linear, attr, c) end function MOI.supports( ::Optimizer, ::MOI.ConstraintDualStart, ::Type{MOI.ConstraintIndex{F,S}}, -) where { - F<:Union{ - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, - }, - S, -} +) where {F<:_FUNCTIONS,S<:_SETS} return true end function MOI.set( model::Optimizer, - ::MOI.ConstraintDualStart, + attr::MOI.ConstraintDualStart, ci::MOI.ConstraintIndex{F,S}, value::Union{Real,Nothing}, -) where { - F<:Union{ - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, - }, - S, -} +) where {F<:_FUNCTIONS,S<:_SETS} MOI.throw_if_not_valid(model, ci) - constraints = _constraints(model, F, S) - constraints[ci.value].dual_start = value + MOI.set(model.linear, attr, ci, value) return end -function MOI.get( - model::Optimizer, - ::MOI.ConstraintDualStart, - ci::MOI.ConstraintIndex{F,S}, -) where { - F<:Union{ - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, - }, - S, -} - MOI.throw_if_not_valid(model, ci) - constraints = _constraints(model, F, S) - return constraints[ci.value].dual_start -end - ### MOI.VariablePrimalStart function MOI.supports( @@ -593,12 +338,7 @@ end function MOI.supports( ::Optimizer, ::MOI.ConstraintDualStart, - ::Type{ - MOI.ConstraintIndex{ - MOI.VariableIndex, - <:Union{MOI.GreaterThan,MOI.LessThan,MOI.EqualTo}, - }, - }, + ::Type{MOI.ConstraintIndex{MOI.VariableIndex,<:_SETS}}, ) return true end @@ -610,7 +350,7 @@ function MOI.set( value::Union{Real,Nothing}, ) MOI.throw_if_not_valid(model, ci) - model.variable_lower_start[ci.value] = value + model.mult_x_L[ci.value] = value return end @@ -620,7 +360,7 @@ function MOI.get( ci::MOI.ConstraintIndex{MOI.VariableIndex,MOI.GreaterThan{Float64}}, ) MOI.throw_if_not_valid(model, ci) - return model.variable_lower_start[ci.value] + return model.mult_x_L[ci.value] end function MOI.set( @@ -630,7 +370,7 @@ function MOI.set( value::Union{Real,Nothing}, ) MOI.throw_if_not_valid(model, ci) - model.variable_upper_start[ci.value] = value + model.mult_x_U[ci.value] = value return end @@ -640,7 
+380,7 @@ function MOI.get( ci::MOI.ConstraintIndex{MOI.VariableIndex,MOI.LessThan{Float64}}, ) MOI.throw_if_not_valid(model, ci) - return model.variable_upper_start[ci.value] + return model.mult_x_U[ci.value] end function MOI.set( @@ -651,14 +391,14 @@ function MOI.set( ) MOI.throw_if_not_valid(model, ci) if value === nothing - model.variable_lower_start[ci.value] = nothing - model.variable_upper_start[ci.value] = nothing + model.mult_x_L[ci.value] = nothing + model.mult_x_U[ci.value] = nothing elseif value >= 0.0 - model.variable_lower_start[ci.value] = value - model.variable_upper_start[ci.value] = 0.0 + model.mult_x_L[ci.value] = value + model.mult_x_U[ci.value] = 0.0 else - model.variable_lower_start[ci.value] = 0.0 - model.variable_upper_start[ci.value] = value + model.mult_x_L[ci.value] = 0.0 + model.mult_x_U[ci.value] = value end return end @@ -669,8 +409,8 @@ function MOI.get( ci::MOI.ConstraintIndex{MOI.VariableIndex,MOI.EqualTo{Float64}}, ) MOI.throw_if_not_valid(model, ci) - l = model.variable_lower_start[ci.value] - u = model.variable_upper_start[ci.value] + l = model.mult_x_L[ci.value] + u = model.mult_x_U[ci.value] return (l === u === nothing) ? nothing : (l + u) end @@ -716,379 +456,104 @@ MOI.get(model::Optimizer, ::MOI.ObjectiveSense) = model.sense ### ObjectiveFunction -MOI.get(model::Optimizer, ::MOI.ObjectiveFunctionType) = typeof(model.objective) - -function MOI.get(model::Optimizer, ::MOI.ObjectiveFunction{F}) where {F} - return convert(F, model.objective)::F +function MOI.get( + model::Optimizer, + attr::Union{MOI.ObjectiveFunctionType,MOI.ObjectiveFunction}, +) + return MOI.get(model.linear, attr) end function MOI.supports( ::Optimizer, - ::MOI.ObjectiveFunction{ - <:Union{ - MOI.VariableIndex, - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, - }, - }, + ::MOI.ObjectiveFunction{<:Union{MOI.VariableIndex,<:_FUNCTIONS}}, ) return true end function MOI.set( model::Optimizer, - ::MOI.ObjectiveFunction{F}, + attr::MOI.ObjectiveFunction{F}, func::F, -) where { - F<:Union{ - MOI.VariableIndex, - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, - }, -} - _check_inbounds(model, func) - model.objective = func +) where {F<:Union{MOI.VariableIndex,<:_FUNCTIONS}} + MOI.set(model.linear, attr, func) return end -### Ipopt callback functions -### In setting up the data for Ipopt, we order the constraints as follows: -### - linear_le_constraints -### - linear_ge_constraints -### - linear_eq_constraints -### - quadratic_le_constraints -### - quadratic_ge_constraints -### - quadratic_eq_constraints -### - nonlinear constraints from nlp_data - -const _CONSTRAINT_ORDERING = ( - :linear_le_constraints, - :linear_ge_constraints, - :linear_eq_constraints, - :quadratic_le_constraints, - :quadratic_ge_constraints, - :quadratic_eq_constraints, -) - -function _offset( - ::Optimizer, - ::Type{<:MOI.ScalarAffineFunction}, - ::Type{<:MOI.LessThan}, -) - return 0 -end - -function _offset( - model::Optimizer, - ::Type{<:MOI.ScalarAffineFunction}, - ::Type{<:MOI.GreaterThan}, -) - return length(model.linear_le_constraints) -end - -function _offset( - model::Optimizer, - F::Type{<:MOI.ScalarAffineFunction}, - ::Type{<:MOI.EqualTo}, -) - return _offset(model, F, MOI.GreaterThan{Float64}) + - length(model.linear_ge_constraints) -end - -function _offset( - model::Optimizer, - ::Type{<:MOI.ScalarQuadraticFunction}, - ::Type{<:MOI.LessThan}, -) - x = _offset(model, MOI.ScalarAffineFunction{Float64}, MOI.EqualTo{Float64}) - return x + 
length(model.linear_eq_constraints) -end - -function _offset( - model::Optimizer, - F::Type{<:MOI.ScalarQuadraticFunction}, - ::Type{<:MOI.GreaterThan}, -) - return _offset(model, F, MOI.LessThan{Float64}) + - length(model.quadratic_le_constraints) -end - -function _offset( - model::Optimizer, - F::Type{<:MOI.ScalarQuadraticFunction}, - ::Type{<:MOI.EqualTo}, -) - return _offset(model, F, MOI.GreaterThan{Float64}) + - length(model.quadratic_ge_constraints) -end - -function _nlp_constraint_offset(model::Optimizer) - x = _offset( - model, - MOI.ScalarQuadraticFunction{Float64}, - MOI.EqualTo{Float64}, - ) - return x + length(model.quadratic_eq_constraints) -end - -_eval_function(::Nothing, ::Any) = 0.0 - -_eval_function(f, x) = MOI.Utilities.eval_variables(xi -> x[xi.value], f) - ### Eval_F_CB -function _eval_objective(model::Optimizer, x) - if model.nlp_data.has_objective +function MOI.eval_objective(model::Optimizer, x) + # TODO(odow): FEASIBILITY_SENSE could produce confusing solver output if + # a nonzero objective is set. + if model.sense == MOI.FEASIBILITY_SENSE + return 0.0 + elseif model.nlp_data.has_objective return MOI.eval_objective(model.nlp_data.evaluator, x) end - return _eval_function(model.objective, x) + return MOI.eval_objective(model.linear, x) end ### Eval_Grad_F_CB -_fill_gradient(::Any, ::Any, ::Nothing) = nothing - -function _fill_gradient(grad, ::Vector, f::MOI.VariableIndex) - grad[f.value] = 1.0 - return -end - -function _fill_gradient(grad, ::Vector, f::MOI.ScalarAffineFunction{Float64}) - for term in f.terms - grad[term.variable.value] += term.coefficient - end - return -end - -function _fill_gradient( - grad, - x::Vector, - quad::MOI.ScalarQuadraticFunction{Float64}, -) - for term in quad.affine_terms - grad[term.variable.value] += term.coefficient - end - for term in quad.quadratic_terms - row_idx = term.variable_1 - col_idx = term.variable_2 - if row_idx == col_idx - grad[row_idx.value] += term.coefficient * x[row_idx.value] - else - grad[row_idx.value] += term.coefficient * x[col_idx.value] - grad[col_idx.value] += term.coefficient * x[row_idx.value] - end - end - return -end - -function _eval_objective_gradient(model::Optimizer, grad, x) - if model.nlp_data.has_objective +function MOI.eval_objective_gradient(model::Optimizer, grad, x) + if model.sense == MOI.FEASIBILITY_SENSE + grad .= zero(eltype(grad)) + elseif model.nlp_data.has_objective MOI.eval_objective_gradient(model.nlp_data.evaluator, grad, x) else - fill!(grad, 0.0) - _fill_gradient(grad, x, model.objective) + MOI.eval_objective_gradient(model.linear, grad, x) end return end ### Eval_G_CB -function _eval_constraint(model::Optimizer, g, x) - row = 1 - for key in _CONSTRAINT_ORDERING - for info in getfield(model, key) - g[row] = _eval_function(info.func, x) - row += 1 - end - end - nlp_g = view(g, row:length(g)) - MOI.eval_constraint(model.nlp_data.evaluator, nlp_g, x) +function MOI.eval_constraint(model::Optimizer, g, x) + MOI.eval_constraint(model.linear, g, x) + g_nlp = view(g, (length(model.linear)+1):length(g)) + MOI.eval_constraint(model.nlp_data.evaluator, g_nlp, x) return end ### Eval_Jac_G_CB -function _append_to_jacobian_sparsity(J, f::MOI.ScalarAffineFunction, row) - for term in f.terms - push!(J, (row, term.variable.value)) - end - return -end - -function _append_to_jacobian_sparsity(J, f::MOI.ScalarQuadraticFunction, row) - for term in f.affine_terms - push!(J, (row, term.variable.value)) - end - for term in f.quadratic_terms - row_idx = term.variable_1 - col_idx = term.variable_2 - if 
row_idx == col_idx - push!(J, (row, row_idx.value)) - else - push!(J, (row, row_idx.value)) - push!(J, (row, col_idx.value)) - end - end - return -end - -function _jacobian_structure(model::Optimizer) - J = Tuple{Int64,Int64}[] - row = 1 - for key in _CONSTRAINT_ORDERING - for info in getfield(model, key) - _append_to_jacobian_sparsity(J, info.func, row) - row += 1 - end - end +function MOI.jacobian_structure(model::Optimizer) + J = MOI.jacobian_structure(model.linear) + offset = length(model.linear) if length(model.nlp_data.constraint_bounds) > 0 - for (nlp_row, col) in MOI.jacobian_structure(model.nlp_data.evaluator) - push!(J, (nlp_row + row - 1, col)) + for (row, col) in MOI.jacobian_structure(model.nlp_data.evaluator) + push!(J, (row + offset, col)) end end return J end -function _fill_constraint_jacobian( - values, - offset, - ::Vector, - f::MOI.ScalarAffineFunction, -) - num_coefficients = length(f.terms) - for i in 1:num_coefficients - values[offset+i] = f.terms[i].coefficient - end - return num_coefficients -end - -function _fill_constraint_jacobian( - values, - offset, - x, - f::MOI.ScalarQuadraticFunction, -) - nterms = 0 - for term in f.affine_terms - nterms += 1 - values[offset+nterms] = term.coefficient - end - for term in f.quadratic_terms - row_idx = term.variable_1 - col_idx = term.variable_2 - if row_idx == col_idx - nterms += 1 - values[offset+nterms] = term.coefficient * x[col_idx.value] - else - # Note that the order matches the Jacobian sparsity pattern. - nterms += 2 - values[offset+nterms-1] = term.coefficient * x[col_idx.value] - values[offset+nterms] = term.coefficient * x[row_idx.value] - end - end - return nterms -end - -function _eval_constraint_jacobian(model::Optimizer, values, x) - offset = 0 - for key in _CONSTRAINT_ORDERING - for info in getfield(model, key) - offset += _fill_constraint_jacobian(values, offset, x, info.func) - end - end - nlp_values = view(values, (1+offset):length(values)) +function MOI.eval_constraint_jacobian(model::Optimizer, values, x) + offset = MOI.eval_constraint_jacobian(model.linear, values, x) + nlp_values = view(values, (offset+1):length(values)) MOI.eval_constraint_jacobian(model.nlp_data.evaluator, nlp_values, x) return end ### Eval_H_CB -_append_to_hessian_sparsity(::Any, ::Any) = nothing - -function _append_to_hessian_sparsity(H, f::MOI.ScalarQuadraticFunction) - for term in f.quadratic_terms - push!(H, (term.variable_1.value, term.variable_2.value)) - end - return -end - -function _append_hessian_lagrangian_structure(H, model::Optimizer) - if !model.nlp_data.has_objective - _append_to_hessian_sparsity(H, model.objective) - end - for info in model.quadratic_le_constraints - _append_to_hessian_sparsity(H, info.func) - end - for info in model.quadratic_ge_constraints - _append_to_hessian_sparsity(H, info.func) - end - for info in model.quadratic_eq_constraints - _append_to_hessian_sparsity(H, info.func) - end +function MOI.hessian_lagrangian_structure(model::Optimizer) + H = MOI.hessian_lagrangian_structure(model.linear) append!(H, MOI.hessian_lagrangian_structure(model.nlp_data.evaluator)) - return -end - -_fill_hessian_lagrangian(::Any, ::Any, ::Any, ::Any) = 0 - -function _fill_hessian_lagrangian(H, offset, λ, f::MOI.ScalarQuadraticFunction) - for term in f.quadratic_terms - H[offset+1] = λ * term.coefficient - offset += 1 - end - return length(f.quadratic_terms) -end - -function _eval_hessian_lagrangian( - ::Type{S}, - model::Optimizer, - H, - μ, - offset, -) where {S} - F = MOI.ScalarQuadraticFunction{Float64} - 
offset_start = _offset(model, F, S) - for (i, info) in enumerate(_constraints(model, F, S)) - offset += - _fill_hessian_lagrangian(H, offset, μ[offset_start+i], info.func) - end - return offset + return H end -function _eval_hessian_lagrangian(model::Optimizer, H, x, σ, μ) - offset = 0 - if !model.nlp_data.has_objective - offset += _fill_hessian_lagrangian(H, 0, σ, model.objective) - end - # Handles any quadratic constraints that are present. The order matters. - offset = - _eval_hessian_lagrangian(MOI.LessThan{Float64}, model, H, μ, offset) - offset = - _eval_hessian_lagrangian(MOI.GreaterThan{Float64}, model, H, μ, offset) - offset = _eval_hessian_lagrangian(MOI.EqualTo{Float64}, model, H, μ, offset) - # Handles the Hessian in the nonlinear block - MOI.eval_hessian_lagrangian( - model.nlp_data.evaluator, - view(H, 1+offset:length(H)), - x, - σ, - view(μ, 1+_nlp_constraint_offset(model):length(μ)), - ) +function MOI.eval_hessian_lagrangian(model::Optimizer, H, x, σ, μ) + offset = MOI.eval_hessian_lagrangian(model.linear, H, x, σ, μ) + H_nlp = view(H, (offset+1):length(H)) + μ_nlp = view(μ, (length(model.linear)+1):length(μ)) + MOI.eval_hessian_lagrangian(model.nlp_data.evaluator, H_nlp, x, σ, μ_nlp) return end ### MOI.optimize! -_bounds(s::MOI.LessThan) = (-Inf, s.upper) -_bounds(s::MOI.GreaterThan) = (s.lower, Inf) -_bounds(s::MOI.EqualTo) = (s.value, s.value) - function MOI.optimize!(model::Optimizer) - # TODO: Reuse model.inner for incremental solves if possible. - num_quadratic_constraints = - length(model.quadratic_le_constraints) + - length(model.quadratic_ge_constraints) + - length(model.quadratic_eq_constraints) + num_quadratic_constraints = length(model.linear.hessian_structure) > 0 num_nlp_constraints = length(model.nlp_data.constraint_bounds) has_hessian = :Hess in MOI.features_available(model.nlp_data.evaluator) init_feat = [:Grad] @@ -1102,35 +567,23 @@ function MOI.optimize!(model::Optimizer) MOI.initialize(model.nlp_data.evaluator, init_feat) model.nlp_data_needs_initialize = false end - jacobian_sparsity = _jacobian_structure(model) - hessian_sparsity = Tuple{Int,Int}[] - if has_hessian - _append_hessian_lagrangian_structure(hessian_sparsity, model) - end - function eval_f_cb(x) - # TODO(odow): FEASIBILITY_SENSE could produce confusing solver output if - # a nonzero objective is set. 
- if model.sense == MOI.FEASIBILITY_SENSE - return 0.0 - end - return _eval_objective(model, x) - end - function eval_grad_f_cb(x, grad_f) - if model.sense == MOI.FEASIBILITY_SENSE - grad_f .= zero(eltype(grad_f)) - else - _eval_objective_gradient(model, grad_f, x) - end - return + jacobian_sparsity = MOI.jacobian_structure(model) + hessian_sparsity = if has_hessian + MOI.hessian_lagrangian_structure(model) + else + Tuple{Int,Int}[] end - eval_g_cb(x, g) = _eval_constraint(model, g, x) + + eval_f_cb(x) = MOI.eval_objective(model, x) + eval_grad_f_cb(x, grad_f) = MOI.eval_objective_gradient(model, grad_f, x) + eval_g_cb(x, g) = MOI.eval_constraint(model, g, x) function eval_jac_g_cb(x, rows, cols, values) if values === nothing for i in 1:length(jacobian_sparsity) rows[i], cols[i] = jacobian_sparsity[i] end else - _eval_constraint_jacobian(model, values, x) + MOI.eval_constraint_jacobian(model, values, x) end return end @@ -1140,18 +593,11 @@ function MOI.optimize!(model::Optimizer) rows[i], cols[i] = hessian_sparsity[i] end else - _eval_hessian_lagrangian(model, values, x, obj_factor, lambda) + MOI.eval_hessian_lagrangian(model, values, x, obj_factor, lambda) end return end - g_L, g_U = Float64[], Float64[] - for key in _CONSTRAINT_ORDERING - for info in getfield(model, key) - l, u = _bounds(info.set) - push!(g_L, l) - push!(g_U, u) - end - end + g_L, g_U = copy(model.linear.g_L), copy(model.linear.g_U) for bound in model.nlp_data.constraint_bounds push!(g_L, bound.lower) push!(g_U, bound.upper) @@ -1222,43 +668,34 @@ function MOI.optimize!(model::Optimizer) end # Initialize the starting point, projecting variables from 0 onto their # bounds if VariablePrimalStart is not provided. - for (i, v) in enumerate(model.variable_primal_start) - if v !== nothing - model.inner.x[i] = v + for i in 1:length(model.variable_primal_start) + model.inner.x[i] = if model.variable_primal_start[i] !== nothing + model.variable_primal_start[i] else - model.inner.x[i] = max(0.0, model.variables.lower[i]) - model.inner.x[i] = min(model.inner.x[i], model.variables.upper[i]) + clamp(0.0, model.variables.lower[i], model.variables.upper[i]) end end - # Initialize the dual start to 0.0 if NLPBlockDualStart is not provided. if model.nlp_dual_start === nothing + # Initialize the dual start to 0.0 if NLPBlockDualStart is not provided. model.nlp_dual_start = zeros(Float64, num_nlp_constraints) end - # ConstraintDualStart - row = 1 - for key in _CONSTRAINT_ORDERING - for info in getfield(model, key) - model.inner.mult_g[row] = _dual_start(model, info.dual_start, -1) - row += 1 - end + row = 0 + for start in model.linear.mult_g + row += 1 + model.inner.mult_g[row] = _dual_start(model, start, -1) end - for dual_start in model.nlp_dual_start - model.inner.mult_g[row] = _dual_start(model, dual_start, -1) + for start in model.nlp_dual_start row += 1 + model.inner.mult_g[row] = _dual_start(model, start, -1) end - # ConstraintDualStart for variable bounds for i in 1:length(model.inner.n) - model.inner.mult_x_L[i] = - _dual_start(model, model.variable_lower_start[i]) - model.inner.mult_x_U[i] = - _dual_start(model, model.variable_upper_start[i], -1) + model.inner.mult_x_L[i] = _dual_start(model, model.mult_x_L[i]) + model.inner.mult_x_U[i] = _dual_start(model, model.mult_x_U[i], -1) end - # Initialize CallbackFunction. if model.callback !== nothing SetIntermediateCallback(model.inner, model.callback) end IpoptSolve(model.inner) - # Store SolveTimeSec. 
model.solve_time = time() - start_time return end @@ -1425,30 +862,17 @@ end function MOI.get( model::Optimizer, attr::MOI.ConstraintPrimal, - ci::MOI.ConstraintIndex{F,S}, -) where { - F<:Union{ - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, - }, - S, -} + ci::MOI.ConstraintIndex{<:_FUNCTIONS,<:_SETS}, +) MOI.check_result_index_bounds(model, attr) MOI.throw_if_not_valid(model, ci) - return model.inner.g[_offset(model, F, S)+ci.value] + return model.inner.g[ci.value] end function MOI.get( model::Optimizer, attr::MOI.ConstraintPrimal, - ci::MOI.ConstraintIndex{ - MOI.VariableIndex, - <:Union{ - MOI.LessThan{Float64}, - MOI.GreaterThan{Float64}, - MOI.EqualTo{Float64}, - }, - }, + ci::MOI.ConstraintIndex{MOI.VariableIndex,<:_SETS}, ) MOI.check_result_index_bounds(model, attr) MOI.throw_if_not_valid(model, ci) @@ -1462,18 +886,12 @@ _dual_multiplier(model::Optimizer) = model.sense == MOI.MIN_SENSE ? 1.0 : -1.0 function MOI.get( model::Optimizer, attr::MOI.ConstraintDual, - ci::MOI.ConstraintIndex{F,S}, -) where { - F<:Union{ - MOI.ScalarAffineFunction{Float64}, - MOI.ScalarQuadraticFunction{Float64}, - }, - S, -} + ci::MOI.ConstraintIndex{<:_FUNCTIONS,<:_SETS}, +) MOI.check_result_index_bounds(model, attr) MOI.throw_if_not_valid(model, ci) s = -_dual_multiplier(model) - return s * model.inner.mult_g[_offset(model, F, S)+ci.value] + return s * model.inner.mult_g[ci.value] end function MOI.get( @@ -1514,7 +932,7 @@ end function MOI.get(model::Optimizer, attr::MOI.NLPBlockDual) MOI.check_result_index_bounds(model, attr) s = -_dual_multiplier(model) - return s .* model.inner.mult_g[(1+_nlp_constraint_offset(model)):end] + return s .* model.inner.mult_g[(length(model.linear)+1):end] end ### Ipopt.CallbackFunction diff --git a/src/utils.jl b/src/utils.jl new file mode 100644 index 0000000..d418db7 --- /dev/null +++ b/src/utils.jl @@ -0,0 +1,496 @@ +# Copyright (c) 2013: Iain Dunning, Miles Lubin, and contributors +# +# Use of this source code is governed by an MIT-style license that can be found +# in the LICENSE.md file or at https://opensource.org/licenses/MIT. 
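# A quick sketch of how the storage below is intended to be used (assumptions:
# MathOptInterface is loaded as `MOI`, and the values shown follow from the
# definitions later in this file). Each affine or quadratic row is stored in
# flat arrays (the `*_row_ends` vectors record where each row finishes in the
# structure and coefficient vectors), and the constraint constant is folded
# into `g_L`/`g_U` when the constraint is added, so the Ipopt callbacks can
# fill `g`, the Jacobian, and the Hessian with plain loops:
#
#     block = LinearQuadraticConstraintBlock{Float64}()
#     x, y = MOI.VariableIndex(1), MOI.VariableIndex(2)
#     f = MOI.ScalarAffineFunction(
#         [MOI.ScalarAffineTerm(2.0, x), MOI.ScalarAffineTerm(3.0, y)],
#         4.0,  # the constant; folded into the bounds below
#     )
#     MOI.add_constraint(block, f, MOI.LessThan(10.0))
#     block.g_L[1], block.g_U[1]     # (-Inf, 6.0), because 10.0 - 4.0 = 6.0
#     MOI.jacobian_structure(block)  # [(1, 1), (1, 2)]
#     g = zeros(length(block))
#     MOI.eval_constraint(block, g, [1.0, 1.0])
#     g[1]                           # 5.0 == 2.0 * 1.0 + 3.0 * 1.0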
+ +@enum( + FunctionType, + kFunctionTypeVariableIndex, + kFunctionTypeScalarAffine, + kFunctionTypeScalarQuadratic, +) + +@enum(BoundType, kBoundTypeLessThan, kBoundTypeGreaterThan, kBoundTypeEqualTo,) + +mutable struct LinearQuadraticConstraintBlock{T} + objective_type::FunctionType + objective_constant::T + objective_linear_columns::Vector{Int} + objective_linear_coefficients::Vector{T} + objective_hessian_structure::Vector{Tuple{Int,Int}} + objective_hessian_coefficients::Vector{T} + + linear_row_ends::Vector{Int} + linear_jacobian_structure::Vector{Tuple{Int,Int}} + linear_coefficients::Vector{T} + + quadratic_row_ends::Vector{Int} + hessian_structure::Vector{Tuple{Int,Int}} + quadratic_coefficients::Vector{T} + + g_L::Vector{T} + g_U::Vector{T} + mult_g::Vector{Union{Nothing,T}} + function_type::Vector{FunctionType} + bound_type::Vector{BoundType} + + function LinearQuadraticConstraintBlock{T}() where {T} + return new( + # Objective coefficients + kFunctionTypeScalarAffine, + zero(T), + Int[], + T[], + Tuple{Int,Int}[], + T[], + # Linear constraints + Int[], + Tuple{Int,Int}[], + T[], + # Affine constraints + Int[], + Tuple{Int,Int}[], + T[], + # Bounds + T[], + T[], + T[], + FunctionType[], + BoundType[], + ) + end +end + +Base.length(block::LinearQuadraticConstraintBlock) = length(block.bound_type) + +function _set_objective( + block::LinearQuadraticConstraintBlock{T}, + f::MOI.VariableIndex, +) where {T} + push!(block.objective_linear_columns, f.value) + push!(block.objective_linear_coefficients, one(T)) + return zero(T) +end + +function _set_objective( + block::LinearQuadraticConstraintBlock{T}, + f::MOI.ScalarAffineFunction{T}, +) where {T} + _set_objective(block, f.terms) + return f.constant +end + +function _set_objective( + block::LinearQuadraticConstraintBlock{T}, + f::MOI.ScalarQuadraticFunction{T}, +) where {T} + _set_objective(block, f.affine_terms) + for term in f.quadratic_terms + i, j = term.variable_1.value, term.variable_2.value + push!(block.objective_hessian_structure, (i, j)) + push!(block.objective_hessian_coefficients, term.coefficient) + end + return f.constant +end + +function _set_objective( + block::LinearQuadraticConstraintBlock{T}, + terms::Vector{MOI.ScalarAffineTerm{T}}, +) where {T} + for term in terms + push!(block.objective_linear_columns, term.variable.value) + push!(block.objective_linear_coefficients, term.coefficient) + end + return +end + +function MOI.set( + block::LinearQuadraticConstraintBlock{T}, + ::MOI.ObjectiveFunction{F}, + func::F, +) where { + T, + F<:Union{ + MOI.VariableIndex, + MOI.ScalarAffineFunction{T}, + MOI.ScalarQuadraticFunction{T}, + }, +} + empty!(block.objective_hessian_structure) + empty!(block.objective_hessian_coefficients) + empty!(block.objective_linear_columns) + empty!(block.objective_linear_coefficients) + block.objective_constant = _set_objective(block, func) + block.objective_type = _function_info(func) + return +end + +function MOI.get( + block::LinearQuadraticConstraintBlock{T}, + ::MOI.ObjectiveFunctionType, +) where {T} + return _function_type_to_set(T, block.objective_type) +end + +function MOI.get( + block::LinearQuadraticConstraintBlock{T}, + ::MOI.ObjectiveFunction{F}, +) where {T,F} + affine_terms = MOI.ScalarAffineTerm{T}[ + MOI.ScalarAffineTerm( + block.objective_linear_coefficients[i], + MOI.VariableIndex(x), + ) for (i, x) in enumerate(block.objective_linear_columns) + ] + quadratic_terms = MOI.ScalarQuadraticTerm{T}[] + for (i, coef) in enumerate(block.objective_hessian_coefficients) + r, c = 
block.objective_hessian_structure[i] + push!( + quadratic_terms, + MOI.ScalarQuadraticTerm( + coef, + MOI.VariableIndex(r), + MOI.VariableIndex(c), + ), + ) + end + obj = MOI.ScalarQuadraticFunction( + quadratic_terms, + affine_terms, + block.objective_constant, + ) + return convert(F, obj) +end + +function MOI.get( + block::LinearQuadraticConstraintBlock{T}, + ::MOI.ListOfConstraintTypesPresent, +) where {T} + constraints = Set{Tuple{Type,Type}}() + for i in 1:length(block) + F = _function_type_to_set(T, block.function_type[i]) + S = _bound_type_to_set(T, block.bound_type[i]) + push!(constraints, (F, S)) + end + return collect(constraints) +end + +function MOI.is_valid( + block::LinearQuadraticConstraintBlock{T}, + ci::MOI.ConstraintIndex{F,S}, +) where { + T, + F<:Union{MOI.ScalarAffineFunction{T},MOI.ScalarQuadraticFunction{T}}, + S<:Union{MOI.LessThan{T},MOI.GreaterThan{T},MOI.EqualTo{T}}, +} + return 1 <= ci.value <= length(block) +end + +function MOI.get( + block::LinearQuadraticConstraintBlock{T}, + ::MOI.ListOfConstraintIndices{F,S}, +) where { + T, + F<:Union{MOI.ScalarAffineFunction{T},MOI.ScalarQuadraticFunction{T}}, + S<:Union{MOI.LessThan{T},MOI.GreaterThan{T},MOI.EqualTo{T}}, +} + ret = MOI.ConstraintIndex{F,S}[] + for i in 1:length(block) + if _bound_type_to_set(T, block.bound_type[i]) != S + continue + elseif _function_type_to_set(T, block.function_type[i]) != F + continue + end + push!(ret, MOI.ConstraintIndex{F,S}(i)) + end + return ret +end + +function MOI.get( + block::LinearQuadraticConstraintBlock{T}, + ::MOI.NumberOfConstraints{F,S}, +) where { + T, + F<:Union{MOI.ScalarAffineFunction{T},MOI.ScalarQuadraticFunction{T}}, + S<:Union{MOI.LessThan{T},MOI.GreaterThan{T},MOI.EqualTo{T}}, +} + return length(MOI.get(block, MOI.ListOfConstraintIndices{F,S}())) +end + +function _bound_type_to_set(::Type{T}, k::BoundType) where {T} + if k == kBoundTypeEqualTo + return MOI.EqualTo{T} + elseif k == kBoundTypeLessThan + return MOI.LessThan{T} + else + @assert k == kBoundTypeGreaterThan + return MOI.GreaterThan{T} + end +end + +function _function_type_to_set(::Type{T}, k::FunctionType) where {T} + if k == kFunctionTypeVariableIndex + return MOI.VariableIndex + elseif k == kFunctionTypeScalarAffine + return MOI.ScalarAffineFunction{T} + else + @assert k == kFunctionTypeScalarQuadratic + return MOI.ScalarQuadraticFunction{T} + end +end + +_function_info(::MOI.VariableIndex) = kFunctionTypeVariableIndex +_function_info(::MOI.ScalarAffineFunction) = kFunctionTypeScalarAffine +_function_info(::MOI.ScalarQuadraticFunction) = kFunctionTypeScalarQuadratic + +_set_info(s::MOI.LessThan) = kBoundTypeLessThan, -Inf, s.upper +_set_info(s::MOI.GreaterThan) = kBoundTypeGreaterThan, s.lower, Inf +_set_info(s::MOI.EqualTo) = kBoundTypeEqualTo, s.value, s.value +_set_info(s::MOI.Interval) = kBoundTypeInterval, s.lower, s.upper + +function _add_function( + block::LinearQuadraticConstraintBlock{T}, + f::MOI.ScalarAffineFunction{T}, +) where {T} + _add_function(block, f.terms) + push!(block.quadratic_row_ends, length(block.quadratic_coefficients)) + return kFunctionTypeScalarAffine, f.constant +end + +function _add_function( + block::LinearQuadraticConstraintBlock{T}, + f::MOI.ScalarQuadraticFunction{T}, +) where {T} + _add_function(block, f.affine_terms) + for term in f.quadratic_terms + i, j = term.variable_1.value, term.variable_2.value + push!(block.hessian_structure, (i, j)) + push!(block.quadratic_coefficients, term.coefficient) + end + push!(block.quadratic_row_ends, 
length(block.quadratic_coefficients)) + return kFunctionTypeScalarQuadratic, f.constant +end + +function _add_function( + block::LinearQuadraticConstraintBlock{T}, + terms::Vector{MOI.ScalarAffineTerm{T}}, +) where {T} + row = length(block) + 1 + for term in terms + push!(block.linear_jacobian_structure, (row, term.variable.value)) + push!(block.linear_coefficients, term.coefficient) + end + push!(block.linear_row_ends, length(block.linear_jacobian_structure)) + return +end + +function MOI.add_constraint( + block::LinearQuadraticConstraintBlock{T}, + f::Union{MOI.ScalarAffineFunction{T},MOI.ScalarQuadraticFunction{T}}, + set::Union{MOI.LessThan{T},MOI.GreaterThan{T},MOI.EqualTo{T}}, +) where {T} + function_type, constant = _add_function(block, f) + bound_type, l, u = _set_info(set) + push!(block.g_L, l - constant) + push!(block.g_U, u - constant) + push!(block.mult_g, nothing) + push!(block.bound_type, bound_type) + push!(block.function_type, function_type) + return MOI.ConstraintIndex{typeof(f),typeof(set)}(length(block.bound_type)) +end + +function MOI.get( + block::LinearQuadraticConstraintBlock{T}, + ::MOI.ConstraintFunction, + c::MOI.ConstraintIndex{F,S}, +) where {T,F,S} + offset = row == 1 ? 1 : block.linear_row_ends[row-1] + affine_terms = MOI.ScalarAffineTerm{T}[ + MOI.ScalarAffineTerm( + block.linear_coefficients[i], + MOI.VariableIndex(block.linear_jacobian_structure[i][2]), + ) for i in offset:block.linear_row_ends[row] + ] + quadratic_terms = MOI.ScalarQuadraticTerm{T}[] + offset = row == 1 ? 1 : block.linear_row_ends[row-1] + for i in offset:block.quadratic_row_ends[row] + r, c = block.hessian_structure[i] + push!( + quadratic_terms, + MOI.ScalarQuadraticTerm( + block.quadratic_coefficients[i], + MOI.VariableIndex(r), + MOI.VariableIndex(c), + ), + ) + end + if length(quadratic_terms) == 0 + return MOI.ScalarAffineFunction(affine_terms, zero(T)) + end + return MOI.ScalarQuadraticFunction(quadratic_terms, affine_terms, zero(T)) +end + +function MOI.get( + block::LinearQuadraticConstraintBlock{T}, + ::MOI.ConstraintSet, + c::MOI.ConstraintIndex{F,S}, +) where {T,F,S} + row = c.value + if block.bound_type[row] == kBoundTypeEqualTo + return MOI.EqualTo(block.g_L[row]) + elseif block.bound_type[row] == kBoundTypeLessThan + return MOI.LessThan(block.g_U[row]) + else + @assert block.bound_type[row] == kBoundTypeGreaterThan + return MOI.GreaterThan(block.g_L[row]) + end +end + +function MOI.get( + block::LinearQuadraticConstraintBlock{T}, + ::MOI.ConstraintDualStart, + c::MOI.ConstraintIndex{F,S}, +) where {T,F,S} + return block.mult_g[c.value] +end + +function MOI.set( + block::LinearQuadraticConstraintBlock{T}, + ::MOI.ConstraintDualStart, + c::MOI.ConstraintIndex{F,S}, + value, +) where {T,F,S} + block.mult_g[c.value] = value + return +end + +function MOI.eval_objective( + block::LinearQuadraticConstraintBlock{T}, + x::AbstractVector{T}, +) where {T} + y = block.objective_constant + for (i, c) in enumerate(block.objective_linear_columns) + y += block.objective_linear_coefficients[i] * x[c] + end + for (i, (r, c)) in enumerate(block.objective_hessian_structure) + if r == c + y += block.objective_hessian_coefficients[i] * x[r] * x[c] / 2 + else + y += block.objective_hessian_coefficients[i] * x[r] * x[c] + end + end + return y +end + +function MOI.eval_objective_gradient( + block::LinearQuadraticConstraintBlock{T}, + g::AbstractVector{T}, + x::AbstractVector{T}, +) where {T} + g .= zero(T) + for (i, c) in enumerate(block.objective_linear_columns) + g[c] += 
block.objective_linear_coefficients[i] + end + for (i, (r, c)) in enumerate(block.objective_hessian_structure) + g[r] += block.objective_hessian_coefficients[i] * x[c] + if r != c + g[c] += block.objective_hessian_coefficients[i] * x[r] + end + end + return +end + +function MOI.eval_constraint( + block::LinearQuadraticConstraintBlock{T}, + g::AbstractVector{T}, + x::AbstractVector{T}, +) where {T} + for i in 1:length(g) + g[i] = zero(T) + end + for (i, (r, c)) in enumerate(block.linear_jacobian_structure) + g[r] += block.linear_coefficients[i] * x[c] + end + i = 0 + for row in 1:length(block.quadratic_row_ends) + while i < block.quadratic_row_ends[row] + i += 1 + r, c = block.hessian_structure[i] + if r == c + g[row] += block.quadratic_coefficients[i] * x[r] * x[c] / 2 + else + g[row] += block.quadratic_coefficients[i] * x[r] * x[c] + end + end + end + return +end + +function MOI.jacobian_structure(block::LinearQuadraticConstraintBlock) + J = copy(block.linear_jacobian_structure) + i = 0 + for row in 1:length(block.quadratic_row_ends) + while i < block.quadratic_row_ends[row] + i += 1 + r, c = block.hessian_structure[i] + push!(J, (row, r)) + if r != c + push!(J, (row, c)) + end + end + end + return J +end + +function MOI.eval_constraint_jacobian( + block::LinearQuadraticConstraintBlock{T}, + J::AbstractVector{T}, + x::AbstractVector{T}, +) where {T} + nterms = 0 + for coef in block.linear_coefficients + nterms += 1 + J[nterms] = coef + end + i = 0 + for row in 1:length(block.quadratic_row_ends) + while i < block.quadratic_row_ends[row] + i += 1 + r, c = block.hessian_structure[i] + nterms += 1 + J[nterms] = block.quadratic_coefficients[i] * x[c] + if r != c + nterms += 1 + J[nterms] = block.quadratic_coefficients[i] * x[r] + end + end + end + return nterms +end + +function MOI.hessian_lagrangian_structure(block::LinearQuadraticConstraintBlock) + return vcat(block.objective_hessian_structure, block.hessian_structure) +end + +function MOI.eval_hessian_lagrangian( + block::LinearQuadraticConstraintBlock{T}, + H::AbstractVector{T}, + ::AbstractVector{T}, + σ::T, + μ::AbstractVector{T}, +) where {T} + nterms = 0 + for c in block.objective_hessian_coefficients + nterms += 1 + H[nterms] = σ * c + end + i = 0 + for row in 1:length(block.quadratic_row_ends) + while i < block.quadratic_row_ends[row] + i += 1 + nterms += 1 + H[nterms] = μ[row] * block.quadratic_coefficients[i] + end + end + return nterms +end From eacc8ba943640dcc1825e924173a4b85e21dc9d9 Mon Sep 17 00:00:00 2001 From: odow Date: Fri, 15 Jul 2022 16:36:06 +1200 Subject: [PATCH 3/7] Add needs_initialize --- src/MOI_wrapper.jl | 31 +++++++++++++++++++++++-------- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/src/MOI_wrapper.jl b/src/MOI_wrapper.jl index 6865243..e987f81 100644 --- a/src/MOI_wrapper.jl +++ b/src/MOI_wrapper.jl @@ -194,6 +194,7 @@ function MOI.add_variable(model::Optimizer) push!(model.variable_primal_start, nothing) push!(model.mult_x_L, nothing) push!(model.mult_x_U, nothing) + model.nlp_data_needs_initialize = true return MOI.add_variable(model.variables) end @@ -234,6 +235,7 @@ function MOI.get( end function MOI.add_constraint(model::Optimizer, x::MOI.VariableIndex, set::_SETS) + model.nlp_data_needs_initialize = true return MOI.add_constraint(model.variables, x, set) end @@ -243,6 +245,7 @@ function MOI.set( ci::MOI.ConstraintIndex{MOI.VariableIndex,S}, set::S, ) where {S<:_SETS} + model.nlp_data_needs_initialize = true MOI.set(model.variables, MOI.ConstraintSet(), ci, set) return end @@ -251,6 
+254,7 @@ function MOI.delete( model::Optimizer, ci::MOI.ConstraintIndex{MOI.VariableIndex,<:_SETS}, ) + model.nlp_data_needs_initialize = true MOI.delete(model.variables, ci) return end @@ -265,6 +269,7 @@ function MOI.is_valid( end function MOI.add_constraint(model::Optimizer, func::_FUNCTIONS, set::_SETS) + model.nlp_data_needs_initialize = true return MOI.add_constraint(model.linear, func, set) end @@ -449,6 +454,7 @@ function MOI.set( sense::MOI.OptimizationSense, ) model.sense = sense + model.nlp_data_needs_initialize = true return end @@ -476,6 +482,7 @@ function MOI.set( func::F, ) where {F<:Union{MOI.VariableIndex,<:_FUNCTIONS}} MOI.set(model.linear, attr, func) + model.nlp_data_needs_initialize = true return end @@ -552,7 +559,7 @@ end ### MOI.optimize! -function MOI.optimize!(model::Optimizer) +function _setup_model(model::Optimizer) num_quadratic_constraints = length(model.linear.hessian_structure) > 0 num_nlp_constraints = length(model.nlp_data.constraint_bounds) has_hessian = :Hess in MOI.features_available(model.nlp_data.evaluator) @@ -563,17 +570,13 @@ function MOI.optimize!(model::Optimizer) if num_nlp_constraints > 0 push!(init_feat, :Jac) end - if model.nlp_data_needs_initialize - MOI.initialize(model.nlp_data.evaluator, init_feat) - model.nlp_data_needs_initialize = false - end + MOI.initialize(model.nlp_data.evaluator, init_feat) jacobian_sparsity = MOI.jacobian_structure(model) hessian_sparsity = if has_hessian MOI.hessian_lagrangian_structure(model) else Tuple{Int,Int}[] end - eval_f_cb(x) = MOI.eval_objective(model, x) eval_grad_f_cb(x, grad_f) = MOI.eval_objective_gradient(model, grad_f, x) eval_g_cb(x, g) = MOI.eval_constraint(model, g, x) @@ -602,7 +605,6 @@ function MOI.optimize!(model::Optimizer) push!(g_L, bound.lower) push!(g_U, bound.upper) end - start_time = time() if length(model.variables.lower) == 0 # Don't attempt to create a problem because Ipopt will error. model.invalid_model = true @@ -652,6 +654,18 @@ function MOI.optimize!(model::Optimizer) AddIpoptStrOption(model.inner, "hessian_constant", "yes") end end + model.nlp_data_needs_initialize = false + return +end + +function MOI.optimize!(model::Optimizer) + start_time = time() + if model.nlp_data_needs_initialize + _setup_model(model) + end + if model.invalid_model + return + end if model.silent AddIpoptIntOption(model.inner, "print_level", 0) end @@ -677,7 +691,8 @@ function MOI.optimize!(model::Optimizer) end if model.nlp_dual_start === nothing # Initialize the dual start to 0.0 if NLPBlockDualStart is not provided. 
- model.nlp_dual_start = zeros(Float64, num_nlp_constraints) + model.nlp_dual_start = + zeros(Float64, length(model.nlp_data.constraint_bounds)) end row = 0 for start in model.linear.mult_g From 1ece8720fab47e960fe5fce2eac799e7629e4a72 Mon Sep 17 00:00:00 2001 From: odow Date: Mon, 18 Jul 2022 11:04:50 +1200 Subject: [PATCH 4/7] Remove needs_initialize --- src/MOI_wrapper.jl | 66 +++++++++------------- src/utils.jl | 136 ++++++++++++++++++++++----------------------- 2 files changed, 92 insertions(+), 110 deletions(-) diff --git a/src/MOI_wrapper.jl b/src/MOI_wrapper.jl index e987f81..6588719 100644 --- a/src/MOI_wrapper.jl +++ b/src/MOI_wrapper.jl @@ -25,10 +25,9 @@ mutable struct Optimizer <: MOI.AbstractOptimizer mult_x_U::Vector{Union{Nothing,Float64}} nlp_data::MOI.NLPBlockData - nlp_data_needs_initialize::Bool nlp_dual_start::Union{Nothing,Vector{Float64}} - linear::LinearQuadraticConstraintBlock{Float64} + qp_data::QPBlockData{Float64} callback::Union{Nothing,Function} @@ -46,9 +45,8 @@ mutable struct Optimizer <: MOI.AbstractOptimizer Union{Nothing,Float64}[], Union{Nothing,Float64}[], MOI.NLPBlockData([], _EmptyNLPEvaluator(), false), - true, nothing, - LinearQuadraticConstraintBlock{Float64}(), + QPBlockData{Float64}(), nothing, ) end @@ -85,9 +83,8 @@ function MOI.empty!(model::Optimizer) empty!(model.mult_x_L) empty!(model.mult_x_U) model.nlp_data = MOI.NLPBlockData([], _EmptyNLPEvaluator(), false) - model.nlp_data_needs_initialize = true model.nlp_dual_start = nothing - model.linear = LinearQuadraticConstraintBlock{Float64}() + model.qp_data = QPBlockData{Float64}() model.callback = nothing return end @@ -121,7 +118,7 @@ end function MOI.get(model::Optimizer, attr::MOI.ListOfConstraintTypesPresent) ret = MOI.get(model.variables, attr) - append!(MOI.get(model.linear, attr)) + append!(MOI.get(model.qp_data, attr)) return ret end @@ -194,7 +191,6 @@ function MOI.add_variable(model::Optimizer) push!(model.variable_primal_start, nothing) push!(model.mult_x_L, nothing) push!(model.mult_x_U, nothing) - model.nlp_data_needs_initialize = true return MOI.add_variable(model.variables) end @@ -235,7 +231,6 @@ function MOI.get( end function MOI.add_constraint(model::Optimizer, x::MOI.VariableIndex, set::_SETS) - model.nlp_data_needs_initialize = true return MOI.add_constraint(model.variables, x, set) end @@ -245,7 +240,6 @@ function MOI.set( ci::MOI.ConstraintIndex{MOI.VariableIndex,S}, set::S, ) where {S<:_SETS} - model.nlp_data_needs_initialize = true MOI.set(model.variables, MOI.ConstraintSet(), ci, set) return end @@ -254,7 +248,6 @@ function MOI.delete( model::Optimizer, ci::MOI.ConstraintIndex{MOI.VariableIndex,<:_SETS}, ) - model.nlp_data_needs_initialize = true MOI.delete(model.variables, ci) return end @@ -265,19 +258,18 @@ function MOI.is_valid( model::Optimizer, ci::MOI.ConstraintIndex{<:_FUNCTIONS,<:_SETS}, ) - return MOI.is_valid(model.linear, ci) + return MOI.is_valid(model.qp_data, ci) end function MOI.add_constraint(model::Optimizer, func::_FUNCTIONS, set::_SETS) - model.nlp_data_needs_initialize = true - return MOI.add_constraint(model.linear, func, set) + return MOI.add_constraint(model.qp_data, func, set) end function MOI.get( model::Optimizer, attr::Union{MOI.NumberOfConstraints{F,S},MOI.ListOfConstraintIndices{F,S}}, ) where {F<:_FUNCTIONS,S<:_SETS} - return MOI.get(model.linear, attr) + return MOI.get(model.qp_data, attr) end function MOI.get( @@ -289,7 +281,7 @@ function MOI.get( }, c::MOI.ConstraintIndex{F,S}, ) where {F<:_FUNCTIONS,S<:_SETS} - return 
MOI.get(model.linear, attr, c) + return MOI.get(model.qp_data, attr, c) end function MOI.supports( @@ -307,7 +299,7 @@ function MOI.set( value::Union{Real,Nothing}, ) where {F<:_FUNCTIONS,S<:_SETS} MOI.throw_if_not_valid(model, ci) - MOI.set(model.linear, attr, ci, value) + MOI.set(model.qp_data, attr, ci, value) return end @@ -440,7 +432,6 @@ MOI.supports(::Optimizer, ::MOI.NLPBlock) = true function MOI.set(model::Optimizer, ::MOI.NLPBlock, nlp_data::MOI.NLPBlockData) model.nlp_data = nlp_data - model.nlp_data_needs_initialize = true return end @@ -454,7 +445,6 @@ function MOI.set( sense::MOI.OptimizationSense, ) model.sense = sense - model.nlp_data_needs_initialize = true return end @@ -466,7 +456,7 @@ function MOI.get( model::Optimizer, attr::Union{MOI.ObjectiveFunctionType,MOI.ObjectiveFunction}, ) - return MOI.get(model.linear, attr) + return MOI.get(model.qp_data, attr) end function MOI.supports( @@ -481,8 +471,7 @@ function MOI.set( attr::MOI.ObjectiveFunction{F}, func::F, ) where {F<:Union{MOI.VariableIndex,<:_FUNCTIONS}} - MOI.set(model.linear, attr, func) - model.nlp_data_needs_initialize = true + MOI.set(model.qp_data, attr, func) return end @@ -496,7 +485,7 @@ function MOI.eval_objective(model::Optimizer, x) elseif model.nlp_data.has_objective return MOI.eval_objective(model.nlp_data.evaluator, x) end - return MOI.eval_objective(model.linear, x) + return MOI.eval_objective(model.qp_data, x) end ### Eval_Grad_F_CB @@ -507,7 +496,7 @@ function MOI.eval_objective_gradient(model::Optimizer, grad, x) elseif model.nlp_data.has_objective MOI.eval_objective_gradient(model.nlp_data.evaluator, grad, x) else - MOI.eval_objective_gradient(model.linear, grad, x) + MOI.eval_objective_gradient(model.qp_data, grad, x) end return end @@ -515,8 +504,8 @@ end ### Eval_G_CB function MOI.eval_constraint(model::Optimizer, g, x) - MOI.eval_constraint(model.linear, g, x) - g_nlp = view(g, (length(model.linear)+1):length(g)) + MOI.eval_constraint(model.qp_data, g, x) + g_nlp = view(g, (length(model.qp_data)+1):length(g)) MOI.eval_constraint(model.nlp_data.evaluator, g_nlp, x) return end @@ -524,8 +513,8 @@ end ### Eval_Jac_G_CB function MOI.jacobian_structure(model::Optimizer) - J = MOI.jacobian_structure(model.linear) - offset = length(model.linear) + J = MOI.jacobian_structure(model.qp_data) + offset = length(model.qp_data) if length(model.nlp_data.constraint_bounds) > 0 for (row, col) in MOI.jacobian_structure(model.nlp_data.evaluator) push!(J, (row + offset, col)) @@ -535,7 +524,7 @@ function MOI.jacobian_structure(model::Optimizer) end function MOI.eval_constraint_jacobian(model::Optimizer, values, x) - offset = MOI.eval_constraint_jacobian(model.linear, values, x) + offset = MOI.eval_constraint_jacobian(model.qp_data, values, x) nlp_values = view(values, (offset+1):length(values)) MOI.eval_constraint_jacobian(model.nlp_data.evaluator, nlp_values, x) return @@ -544,15 +533,15 @@ end ### Eval_H_CB function MOI.hessian_lagrangian_structure(model::Optimizer) - H = MOI.hessian_lagrangian_structure(model.linear) + H = MOI.hessian_lagrangian_structure(model.qp_data) append!(H, MOI.hessian_lagrangian_structure(model.nlp_data.evaluator)) return H end function MOI.eval_hessian_lagrangian(model::Optimizer, H, x, σ, μ) - offset = MOI.eval_hessian_lagrangian(model.linear, H, x, σ, μ) + offset = MOI.eval_hessian_lagrangian(model.qp_data, H, x, σ, μ) H_nlp = view(H, (offset+1):length(H)) - μ_nlp = view(μ, (length(model.linear)+1):length(μ)) + μ_nlp = view(μ, (length(model.qp_data)+1):length(μ)) 
MOI.eval_hessian_lagrangian(model.nlp_data.evaluator, H_nlp, x, σ, μ_nlp) return end @@ -560,7 +549,7 @@ end ### MOI.optimize! function _setup_model(model::Optimizer) - num_quadratic_constraints = length(model.linear.hessian_structure) > 0 + num_quadratic_constraints = length(model.qp_data.hessian_structure) > 0 num_nlp_constraints = length(model.nlp_data.constraint_bounds) has_hessian = :Hess in MOI.features_available(model.nlp_data.evaluator) init_feat = [:Grad] @@ -600,7 +589,7 @@ function _setup_model(model::Optimizer) end return end - g_L, g_U = copy(model.linear.g_L), copy(model.linear.g_U) + g_L, g_U = copy(model.qp_data.g_L), copy(model.qp_data.g_U) for bound in model.nlp_data.constraint_bounds push!(g_L, bound.lower) push!(g_U, bound.upper) @@ -654,15 +643,12 @@ function _setup_model(model::Optimizer) AddIpoptStrOption(model.inner, "hessian_constant", "yes") end end - model.nlp_data_needs_initialize = false return end function MOI.optimize!(model::Optimizer) start_time = time() - if model.nlp_data_needs_initialize - _setup_model(model) - end + _setup_model(model) if model.invalid_model return end @@ -695,7 +681,7 @@ function MOI.optimize!(model::Optimizer) zeros(Float64, length(model.nlp_data.constraint_bounds)) end row = 0 - for start in model.linear.mult_g + for start in model.qp_data.mult_g row += 1 model.inner.mult_g[row] = _dual_start(model, start, -1) end @@ -947,7 +933,7 @@ end function MOI.get(model::Optimizer, attr::MOI.NLPBlockDual) MOI.check_result_index_bounds(model, attr) s = -_dual_multiplier(model) - return s .* model.inner.mult_g[(length(model.linear)+1):end] + return s .* model.inner.mult_g[(length(model.qp_data)+1):end] end ### Ipopt.CallbackFunction diff --git a/src/utils.jl b/src/utils.jl index d418db7..b5488ec 100644 --- a/src/utils.jl +++ b/src/utils.jl @@ -4,16 +4,21 @@ # in the LICENSE.md file or at https://opensource.org/licenses/MIT. 
@enum( - FunctionType, - kFunctionTypeVariableIndex, - kFunctionTypeScalarAffine, - kFunctionTypeScalarQuadratic, + _FunctionType, + _kFunctionTypeVariableIndex, + _kFunctionTypeScalarAffine, + _kFunctionTypeScalarQuadratic, ) -@enum(BoundType, kBoundTypeLessThan, kBoundTypeGreaterThan, kBoundTypeEqualTo,) +@enum( + _BoundType, + _kBoundTypeLessThan, + _kBoundTypeGreaterThan, + _kBoundTypeEqualTo, +) -mutable struct LinearQuadraticConstraintBlock{T} - objective_type::FunctionType +mutable struct QPBlockData{T} + objective_type::_FunctionType objective_constant::T objective_linear_columns::Vector{Int} objective_linear_coefficients::Vector{T} @@ -31,13 +36,13 @@ mutable struct LinearQuadraticConstraintBlock{T} g_L::Vector{T} g_U::Vector{T} mult_g::Vector{Union{Nothing,T}} - function_type::Vector{FunctionType} - bound_type::Vector{BoundType} + function_type::Vector{_FunctionType} + bound_type::Vector{_BoundType} - function LinearQuadraticConstraintBlock{T}() where {T} + function QPBlockData{T}() where {T} return new( # Objective coefficients - kFunctionTypeScalarAffine, + _kFunctionTypeScalarAffine, zero(T), Int[], T[], @@ -54,26 +59,23 @@ mutable struct LinearQuadraticConstraintBlock{T} # Bounds T[], T[], - T[], - FunctionType[], - BoundType[], + Union{Nothing,T}[], + _FunctionType[], + _BoundType[], ) end end -Base.length(block::LinearQuadraticConstraintBlock) = length(block.bound_type) +Base.length(block::QPBlockData) = length(block.bound_type) -function _set_objective( - block::LinearQuadraticConstraintBlock{T}, - f::MOI.VariableIndex, -) where {T} +function _set_objective(block::QPBlockData{T}, f::MOI.VariableIndex) where {T} push!(block.objective_linear_columns, f.value) push!(block.objective_linear_coefficients, one(T)) return zero(T) end function _set_objective( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, f::MOI.ScalarAffineFunction{T}, ) where {T} _set_objective(block, f.terms) @@ -81,7 +83,7 @@ function _set_objective( end function _set_objective( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, f::MOI.ScalarQuadraticFunction{T}, ) where {T} _set_objective(block, f.affine_terms) @@ -94,7 +96,7 @@ function _set_objective( end function _set_objective( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, terms::Vector{MOI.ScalarAffineTerm{T}}, ) where {T} for term in terms @@ -105,7 +107,7 @@ function _set_objective( end function MOI.set( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, ::MOI.ObjectiveFunction{F}, func::F, ) where { @@ -125,17 +127,11 @@ function MOI.set( return end -function MOI.get( - block::LinearQuadraticConstraintBlock{T}, - ::MOI.ObjectiveFunctionType, -) where {T} +function MOI.get(block::QPBlockData{T}, ::MOI.ObjectiveFunctionType) where {T} return _function_type_to_set(T, block.objective_type) end -function MOI.get( - block::LinearQuadraticConstraintBlock{T}, - ::MOI.ObjectiveFunction{F}, -) where {T,F} +function MOI.get(block::QPBlockData{T}, ::MOI.ObjectiveFunction{F}) where {T,F} affine_terms = MOI.ScalarAffineTerm{T}[ MOI.ScalarAffineTerm( block.objective_linear_coefficients[i], @@ -163,7 +159,7 @@ function MOI.get( end function MOI.get( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, ::MOI.ListOfConstraintTypesPresent, ) where {T} constraints = Set{Tuple{Type,Type}}() @@ -176,7 +172,7 @@ function MOI.get( end function MOI.is_valid( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, ci::MOI.ConstraintIndex{F,S}, ) where { T, @@ -187,7 
+183,7 @@ function MOI.is_valid( end function MOI.get( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, ::MOI.ListOfConstraintIndices{F,S}, ) where { T, @@ -207,7 +203,7 @@ function MOI.get( end function MOI.get( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, ::MOI.NumberOfConstraints{F,S}, ) where { T, @@ -217,48 +213,48 @@ function MOI.get( return length(MOI.get(block, MOI.ListOfConstraintIndices{F,S}())) end -function _bound_type_to_set(::Type{T}, k::BoundType) where {T} - if k == kBoundTypeEqualTo +function _bound_type_to_set(::Type{T}, k::_BoundType) where {T} + if k == _kBoundTypeEqualTo return MOI.EqualTo{T} - elseif k == kBoundTypeLessThan + elseif k == _kBoundTypeLessThan return MOI.LessThan{T} else - @assert k == kBoundTypeGreaterThan + @assert k == _kBoundTypeGreaterThan return MOI.GreaterThan{T} end end -function _function_type_to_set(::Type{T}, k::FunctionType) where {T} - if k == kFunctionTypeVariableIndex +function _function_type_to_set(::Type{T}, k::_FunctionType) where {T} + if k == _kFunctionTypeVariableIndex return MOI.VariableIndex - elseif k == kFunctionTypeScalarAffine + elseif k == _kFunctionTypeScalarAffine return MOI.ScalarAffineFunction{T} else - @assert k == kFunctionTypeScalarQuadratic + @assert k == _kFunctionTypeScalarQuadratic return MOI.ScalarQuadraticFunction{T} end end -_function_info(::MOI.VariableIndex) = kFunctionTypeVariableIndex -_function_info(::MOI.ScalarAffineFunction) = kFunctionTypeScalarAffine -_function_info(::MOI.ScalarQuadraticFunction) = kFunctionTypeScalarQuadratic +_function_info(::MOI.VariableIndex) = _kFunctionTypeVariableIndex +_function_info(::MOI.ScalarAffineFunction) = _kFunctionTypeScalarAffine +_function_info(::MOI.ScalarQuadraticFunction) = _kFunctionTypeScalarQuadratic -_set_info(s::MOI.LessThan) = kBoundTypeLessThan, -Inf, s.upper -_set_info(s::MOI.GreaterThan) = kBoundTypeGreaterThan, s.lower, Inf -_set_info(s::MOI.EqualTo) = kBoundTypeEqualTo, s.value, s.value -_set_info(s::MOI.Interval) = kBoundTypeInterval, s.lower, s.upper +_set_info(s::MOI.LessThan) = _kBoundTypeLessThan, -Inf, s.upper +_set_info(s::MOI.GreaterThan) = _kBoundTypeGreaterThan, s.lower, Inf +_set_info(s::MOI.EqualTo) = _kBoundTypeEqualTo, s.value, s.value +_set_info(s::MOI.Interval) = _kBoundTypeInterval, s.lower, s.upper function _add_function( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, f::MOI.ScalarAffineFunction{T}, ) where {T} _add_function(block, f.terms) push!(block.quadratic_row_ends, length(block.quadratic_coefficients)) - return kFunctionTypeScalarAffine, f.constant + return _kFunctionTypeScalarAffine, f.constant end function _add_function( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, f::MOI.ScalarQuadraticFunction{T}, ) where {T} _add_function(block, f.affine_terms) @@ -268,11 +264,11 @@ function _add_function( push!(block.quadratic_coefficients, term.coefficient) end push!(block.quadratic_row_ends, length(block.quadratic_coefficients)) - return kFunctionTypeScalarQuadratic, f.constant + return _kFunctionTypeScalarQuadratic, f.constant end function _add_function( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, terms::Vector{MOI.ScalarAffineTerm{T}}, ) where {T} row = length(block) + 1 @@ -285,7 +281,7 @@ function _add_function( end function MOI.add_constraint( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, f::Union{MOI.ScalarAffineFunction{T},MOI.ScalarQuadraticFunction{T}}, 
set::Union{MOI.LessThan{T},MOI.GreaterThan{T},MOI.EqualTo{T}}, ) where {T} @@ -300,7 +296,7 @@ function MOI.add_constraint( end function MOI.get( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, ::MOI.ConstraintFunction, c::MOI.ConstraintIndex{F,S}, ) where {T,F,S} @@ -331,23 +327,23 @@ function MOI.get( end function MOI.get( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, ::MOI.ConstraintSet, c::MOI.ConstraintIndex{F,S}, ) where {T,F,S} row = c.value - if block.bound_type[row] == kBoundTypeEqualTo + if block.bound_type[row] == _kBoundTypeEqualTo return MOI.EqualTo(block.g_L[row]) - elseif block.bound_type[row] == kBoundTypeLessThan + elseif block.bound_type[row] == _kBoundTypeLessThan return MOI.LessThan(block.g_U[row]) else - @assert block.bound_type[row] == kBoundTypeGreaterThan + @assert block.bound_type[row] == _kBoundTypeGreaterThan return MOI.GreaterThan(block.g_L[row]) end end function MOI.get( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, ::MOI.ConstraintDualStart, c::MOI.ConstraintIndex{F,S}, ) where {T,F,S} @@ -355,7 +351,7 @@ function MOI.get( end function MOI.set( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, ::MOI.ConstraintDualStart, c::MOI.ConstraintIndex{F,S}, value, @@ -365,7 +361,7 @@ function MOI.set( end function MOI.eval_objective( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, x::AbstractVector{T}, ) where {T} y = block.objective_constant @@ -383,7 +379,7 @@ function MOI.eval_objective( end function MOI.eval_objective_gradient( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, g::AbstractVector{T}, x::AbstractVector{T}, ) where {T} @@ -401,7 +397,7 @@ function MOI.eval_objective_gradient( end function MOI.eval_constraint( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, g::AbstractVector{T}, x::AbstractVector{T}, ) where {T} @@ -426,7 +422,7 @@ function MOI.eval_constraint( return end -function MOI.jacobian_structure(block::LinearQuadraticConstraintBlock) +function MOI.jacobian_structure(block::QPBlockData) J = copy(block.linear_jacobian_structure) i = 0 for row in 1:length(block.quadratic_row_ends) @@ -443,7 +439,7 @@ function MOI.jacobian_structure(block::LinearQuadraticConstraintBlock) end function MOI.eval_constraint_jacobian( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, J::AbstractVector{T}, x::AbstractVector{T}, ) where {T} @@ -468,12 +464,12 @@ function MOI.eval_constraint_jacobian( return nterms end -function MOI.hessian_lagrangian_structure(block::LinearQuadraticConstraintBlock) +function MOI.hessian_lagrangian_structure(block::QPBlockData) return vcat(block.objective_hessian_structure, block.hessian_structure) end function MOI.eval_hessian_lagrangian( - block::LinearQuadraticConstraintBlock{T}, + block::QPBlockData{T}, H::AbstractVector{T}, ::AbstractVector{T}, σ::T, From 3117bcf0add49e16db0227bd1248c7502eb0ce17 Mon Sep 17 00:00:00 2001 From: odow Date: Mon, 18 Jul 2022 11:48:27 +1200 Subject: [PATCH 5/7] Improve stability of optimize! 
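This commit binds the `Union{Nothing,IpoptProblem}` field to a type-asserted
local once at the top of `MOI.optimize!`, so every later read and write in the
start-up loops sees a concrete type, and it replaces the manual `row += 1`
bookkeeping for constraint duals with `enumerate` plus an explicit offset. A
minimal sketch of the type-stability pattern, using hypothetical
`Problem`/`Model` types rather than the actual wrapper structs:

    # Assert the Union-typed field into a local once; subsequent accesses
    # are then on a concretely typed binding instead of a Union field.
    mutable struct Problem
        x::Vector{Float64}
    end

    mutable struct Model
        inner::Union{Nothing,Problem}
    end

    function solve!(model::Model)
        inner = model.inner::Problem   # errors early if setup never ran
        for i in 1:length(inner.x)     # no Union-splitting inside the loop
            inner.x[i] = 0.0
        end
        return inner
    end

    solve!(Model(Problem(zeros(3))))
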
--- src/MOI_wrapper.jl | 42 ++++++++++++++++++++---------------------- 1 file changed, 20 insertions(+), 22 deletions(-) diff --git a/src/MOI_wrapper.jl b/src/MOI_wrapper.jl index 6588719..9aef0e2 100644 --- a/src/MOI_wrapper.jl +++ b/src/MOI_wrapper.jl @@ -652,51 +652,49 @@ function MOI.optimize!(model::Optimizer) if model.invalid_model return end + inner = model.inner::IpoptProblem if model.silent - AddIpoptIntOption(model.inner, "print_level", 0) + AddIpoptIntOption(inner, "print_level", 0) end # Other misc options that over-ride the ones set above. for (name, value) in model.options if value isa String - AddIpoptStrOption(model.inner, name, value) + AddIpoptStrOption(inner, name, value) elseif value isa Integer - AddIpoptIntOption(model.inner, name, value) + AddIpoptIntOption(inner, name, value) else @assert value isa Float64 - AddIpoptNumOption(model.inner, name, value) + AddIpoptNumOption(inner, name, value) end end # Initialize the starting point, projecting variables from 0 onto their # bounds if VariablePrimalStart is not provided. for i in 1:length(model.variable_primal_start) - model.inner.x[i] = if model.variable_primal_start[i] !== nothing + inner.x[i] = if model.variable_primal_start[i] !== nothing model.variable_primal_start[i] else clamp(0.0, model.variables.lower[i], model.variables.upper[i]) end end - if model.nlp_dual_start === nothing - # Initialize the dual start to 0.0 if NLPBlockDualStart is not provided. - model.nlp_dual_start = - zeros(Float64, length(model.nlp_data.constraint_bounds)) - end - row = 0 - for start in model.qp_data.mult_g - row += 1 - model.inner.mult_g[row] = _dual_start(model, start, -1) + for (i, start) in enumerate(model.qp_data.mult_g) + inner.mult_g[i] = _dual_start(model, start, -1) end - for start in model.nlp_dual_start - row += 1 - model.inner.mult_g[row] = _dual_start(model, start, -1) + offset = length(model.qp_data.mult_g) + if model.nlp_dual_start === nothing + inner.mult_g[(offset+1):end] .= 0.0 + else + for (i, start) in enumerate(model.nlp_dual_start::Vector{Float64}) + inner.mult_g[offset+i] = _dual_start(model, start, -1) + end end - for i in 1:length(model.inner.n) - model.inner.mult_x_L[i] = _dual_start(model, model.mult_x_L[i]) - model.inner.mult_x_U[i] = _dual_start(model, model.mult_x_U[i], -1) + for i in 1:inner.n + inner.mult_x_L[i] = _dual_start(model, model.mult_x_L[i]) + inner.mult_x_U[i] = _dual_start(model, model.mult_x_U[i], -1) end if model.callback !== nothing - SetIntermediateCallback(model.inner, model.callback) + SetIntermediateCallback(inner, model.callback) end - IpoptSolve(model.inner) + IpoptSolve(inner) model.solve_time = time() - start_time return end From 5f3f5ac4e3fdc048a87dbe9726f242d3be5b9f99 Mon Sep 17 00:00:00 2001 From: odow Date: Mon, 18 Jul 2022 12:29:54 +1200 Subject: [PATCH 6/7] Fix getters and add test --- src/MOI_wrapper.jl | 2 +- src/utils.jl | 5 ++-- test/MOI_wrapper.jl | 60 +++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 64 insertions(+), 3 deletions(-) diff --git a/src/MOI_wrapper.jl b/src/MOI_wrapper.jl index 9aef0e2..fd238e6 100644 --- a/src/MOI_wrapper.jl +++ b/src/MOI_wrapper.jl @@ -118,7 +118,7 @@ end function MOI.get(model::Optimizer, attr::MOI.ListOfConstraintTypesPresent) ret = MOI.get(model.variables, attr) - append!(MOI.get(model.qp_data, attr)) + append!(ret, MOI.get(model.qp_data, attr)) return ret end diff --git a/src/utils.jl b/src/utils.jl index b5488ec..3393082 100644 --- a/src/utils.jl +++ b/src/utils.jl @@ -300,7 +300,8 @@ function MOI.get( 
::MOI.ConstraintFunction, c::MOI.ConstraintIndex{F,S}, ) where {T,F,S} - offset = row == 1 ? 1 : block.linear_row_ends[row-1] + row = c.value + offset = row == 1 ? 1 : (block.linear_row_ends[row-1] + 1) affine_terms = MOI.ScalarAffineTerm{T}[ MOI.ScalarAffineTerm( block.linear_coefficients[i], @@ -308,7 +309,7 @@ function MOI.get( ) for i in offset:block.linear_row_ends[row] ] quadratic_terms = MOI.ScalarQuadraticTerm{T}[] - offset = row == 1 ? 1 : block.linear_row_ends[row-1] + offset = row == 1 ? 1 : (block.quadratic_row_ends[row-1] + 1) for i in offset:block.quadratic_row_ends[row] r, c = block.hessian_structure[i] push!( diff --git a/test/MOI_wrapper.jl b/test/MOI_wrapper.jl index 1924d3a..e33d288 100644 --- a/test/MOI_wrapper.jl +++ b/test/MOI_wrapper.jl @@ -242,6 +242,66 @@ function test_empty_optimize() return end +""" + test_get_model() + +Test various getters for ConstraintFunction etc. We need this test because the +normal MOI ones require the solver to support VariableName and ConstraintName. +""" +function test_get_model() + model = MOI.Utilities.Model{Float64}() + MOI.Utilities.loadfromstring!( + model, + """ + variables: x, y, z + minobjective: 1.0 * x * x + 2.0 * y + 3.0 + x >= 1.0 + y <= 2.0 + z == 3.0 + 1.0 * x >= 1.0 + 2.0 * y <= 4.0 + 3.0 * z == 9.0 + 1.0 * x * x + x >= 1.0 + 2.0 * y * y + y <= 8.0 + 3.0 * z * z + z == 27.0 + """, + ) + ipopt = Ipopt.Optimizer() + index_map = MOI.copy_to(ipopt, model) + attr = MOI.ListOfConstraintTypesPresent() + @test sort(MOI.get(model, attr); by = string) == + sort(MOI.get(ipopt, attr); by = string) + for (F, S) in MOI.get(model, MOI.ListOfConstraintTypesPresent()) + cis = MOI.get(model, MOI.ListOfConstraintIndices{F,S}()) + @test length(cis) == 1 + f_model = MOI.get(model, MOI.ConstraintFunction(), cis[1]) + s_model = MOI.get(model, MOI.ConstraintSet(), cis[1]) + cis = MOI.get(ipopt, MOI.ListOfConstraintIndices{F,S}()) + @test length(cis) == 1 + f_ipopt = MOI.get(ipopt, MOI.ConstraintFunction(), cis[1]) + s_ipopt = MOI.get(ipopt, MOI.ConstraintSet(), cis[1]) + @test s_model == s_ipopt + if F == MOI.VariableIndex + @test index_map[f_model] == f_ipopt + else + @test ≈( + MOI.Utilities.substitute_variables(x -> index_map[x], f_model), + f_ipopt, + ) + end + end + F_model = MOI.get(model, MOI.ObjectiveFunctionType()) + F_ipopt = MOI.get(ipopt, MOI.ObjectiveFunctionType()) + @test F_model == F_ipopt + obj_model = MOI.get(model, MOI.ObjectiveFunction{F_model}()) + obj_ipopt = MOI.get(ipopt, MOI.ObjectiveFunction{F_ipopt}()) + @test ≈( + MOI.Utilities.substitute_variables(x -> index_map[x], obj_model), + obj_ipopt, + ) + return +end + end # module TestMOIWrapper TestMOIWrapper.runtests() From fdb557e4753d39fa59c043facbbafac2538d7642 Mon Sep 17 00:00:00 2001 From: odow Date: Mon, 18 Jul 2022 13:01:57 +1200 Subject: [PATCH 7/7] Improve coverage --- src/utils.jl | 1 - test/MOI_wrapper.jl | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/src/utils.jl b/src/utils.jl index 3393082..917f340 100644 --- a/src/utils.jl +++ b/src/utils.jl @@ -242,7 +242,6 @@ _function_info(::MOI.ScalarQuadraticFunction) = _kFunctionTypeScalarQuadratic _set_info(s::MOI.LessThan) = _kBoundTypeLessThan, -Inf, s.upper _set_info(s::MOI.GreaterThan) = _kBoundTypeGreaterThan, s.lower, Inf _set_info(s::MOI.EqualTo) = _kBoundTypeEqualTo, s.value, s.value -_set_info(s::MOI.Interval) = _kBoundTypeInterval, s.lower, s.upper function _add_function( block::QPBlockData{T}, diff --git a/test/MOI_wrapper.jl b/test/MOI_wrapper.jl index e33d288..27453b0 100644 --- 
a/test/MOI_wrapper.jl +++ b/test/MOI_wrapper.jl @@ -277,7 +277,7 @@ function test_get_model() f_model = MOI.get(model, MOI.ConstraintFunction(), cis[1]) s_model = MOI.get(model, MOI.ConstraintSet(), cis[1]) cis = MOI.get(ipopt, MOI.ListOfConstraintIndices{F,S}()) - @test length(cis) == 1 + @test length(cis) == MOI.get(ipopt, MOI.NumberOfConstraints{F,S}()) == 1 f_ipopt = MOI.get(ipopt, MOI.ConstraintFunction(), cis[1]) s_ipopt = MOI.get(ipopt, MOI.ConstraintSet(), cis[1]) @test s_model == s_ipopt
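
A note on the getter fix in patch 6: `QPBlockData` stores every constraint
row's coefficients in one flat vector, with `linear_row_ends[r]` (and
`quadratic_row_ends[r]`) recording the index of the last entry belonging to
row `r`, so a row starts one past the previous row's end. The standalone
sketch below (illustrative names, not the package code) shows the offset
arithmetic the corrected `ConstraintFunction` getter relies on:

    # CSR-style flat storage: rows [1.0, 2.0], [3.0], [4.0, 5.0] flattened.
    coefficients = [1.0, 2.0, 3.0, 4.0, 5.0]
    row_ends = [2, 3, 5]

    # Entries of row r are coefficients[offset:row_ends[r]]; the "+ 1" in the
    # offset is the adjustment introduced by the fix.
    function row_slice(coefficients, row_ends, r)
        offset = r == 1 ? 1 : row_ends[r-1] + 1
        return coefficients[offset:row_ends[r]]
    end

    @assert row_slice(coefficients, row_ends, 1) == [1.0, 2.0]
    @assert row_slice(coefficients, row_ends, 2) == [3.0]
    @assert row_slice(coefficients, row_ends, 3) == [4.0, 5.0]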