diff --git a/Project.toml b/Project.toml
index ff99b0ec9..2270c1ae2 100644
--- a/Project.toml
+++ b/Project.toml
@@ -7,6 +7,7 @@ Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
 FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b"
 LineSearches = "d3d80556-e9d4-5f37-9878-2ab0fcc64255"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
 NLSolversBase = "d41bc354-129a-5804-8e4c-c37616107c6c"
 NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3"
 Parameters = "d96e819e-fc66-5662-9728-84c9c7592b0a"
@@ -19,6 +20,7 @@ StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"
 Compat = "3.2.0, 3.3.0, 3.4.0, 3.5.0, 3.6.0"
 FillArrays = "0.6.2, 0.7, 0.8, 0.9, 0.10, 0.11"
 LineSearches = "7.0.1"
+MathOptInterface = "0.9.22"
 NLSolversBase = "7.8.0"
 NaNMath = "0.3.2"
 Parameters = "0.10, 0.11, 0.12"
diff --git a/README.md b/README.md
index 0feb61bc7..e6a6331c3 100644
--- a/README.md
+++ b/README.md
@@ -138,6 +138,24 @@ If you use `Optim.jl` in your work, please cite the following.
 }
 ```
 
+# Use with JuMP
+
+We can use Optim.jl with [JuMP.jl](https://github.com/jump-dev/JuMP.jl).
+
+This can be done using the `Optim.Optimizer` object. Here is how to create a JuMP
+model that uses Optim as the solver to minimize the Rosenbrock function.
+
+```julia
+using JuMP, Optim
+
+model = Model(Optim.Optimizer)
+set_optimizer_attribute(model, "method", BFGS())
+
+@variable(model, x[1:2])
+@NLobjective(model, Min, (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2)
+optimize!(model)
+```
+
 [docs-latest-img]: https://img.shields.io/badge/docs-latest-blue.svg
 [docs-latest-url]: https://julianlsolvers.github.io/Optim.jl/latest
diff --git a/src/MOI_wrapper.jl b/src/MOI_wrapper.jl
new file mode 100644
index 000000000..796dd4fb0
--- /dev/null
+++ b/src/MOI_wrapper.jl
@@ -0,0 +1,444 @@
+import MathOptInterface
+const MOI = MathOptInterface
+const MOIU = MathOptInterface.Utilities
+
+mutable struct VariableInfo{T}
+    lower_bound::T        # May be -Inf even if has_lower_bound == true
+    has_lower_bound::Bool # false implies lower_bound == typemin(T)
+    upper_bound::T        # May be Inf even if has_upper_bound == true
+    has_upper_bound::Bool # false implies upper_bound == typemax(T)
+    is_fixed::Bool        # Implies lower_bound == upper_bound and !has_lower_bound and !has_upper_bound.
+    start::Union{Nothing,T}
+end
+
+VariableInfo{T}() where {T} = VariableInfo(typemin(T), false, typemax(T), false, false, nothing)
+
+function starting_value(v::VariableInfo{T}) where {T}
+    if v.start !== nothing
+        return v.start
+    else
+        if v.has_lower_bound && v.has_upper_bound
+            if zero(T) <= v.lower_bound
+                return v.lower_bound
+            elseif v.upper_bound <= zero(T)
+                return v.upper_bound
+            else
+                return zero(T)
+            end
+        elseif v.has_lower_bound
+            return max(zero(T), v.lower_bound)
+        else
+            return min(zero(T), v.upper_bound)
+        end
+    end
+end
+
+const LE{T} = MOI.LessThan{T}
+const GE{T} = MOI.GreaterThan{T}
+const EQ{T} = MOI.EqualTo{T}
+
+mutable struct Optimizer{T} <: MOI.AbstractOptimizer
+    # Problem data.
+    variable_info::Vector{VariableInfo{T}}
+    nlp_data::Union{MOI.NLPBlockData,Nothing}
+    sense::MOI.OptimizationSense
+
+    # Parameters.
+    method::Union{AbstractOptimizer,Nothing}
+    silent::Bool
+    options::Dict{Symbol,Any}
+
+    # Solution attributes.
+    results::Union{Nothing,MultivariateOptimizationResults}
+end
+
+function Optimizer{T}() where {T}
+    return Optimizer{T}(
+        VariableInfo{T}[],
+        nothing,
+        MOI.FEASIBILITY_SENSE,
+        nothing,
+        false,
+        Dict{Symbol,Any}(),
+        nothing,
+    )
+end
+Optimizer() = Optimizer{Float64}()
+
+MOI.supports(::Optimizer, ::MOI.NLPBlock) = true
+
+MOI.supports(::Optimizer, ::MOI.ObjectiveSense) = true
+MOI.supports(::Optimizer, ::MOI.Silent) = true
+function MOI.supports(::Optimizer, p::MOI.RawParameter)
+    return p.name == "method" || hasfield(Options, Symbol(p.name))
+end
+
+function MOI.supports(::Optimizer, ::MOI.VariablePrimalStart, ::Type{MOI.VariableIndex})
+    return true
+end
+
+MOI.supports_constraint(::Optimizer{T}, ::Type{MOI.SingleVariable}, ::Type{LE{T}}) where {T} = true
+MOI.supports_constraint(::Optimizer{T}, ::Type{MOI.SingleVariable}, ::Type{GE{T}}) where {T} = true
+MOI.supports_constraint(::Optimizer{T}, ::Type{MOI.SingleVariable}, ::Type{EQ{T}}) where {T} = true
+
+MOIU.supports_default_copy_to(::Optimizer, copy_names::Bool) = !copy_names
+
+function MOI.copy_to(model::Optimizer, src::MOI.ModelLike; copy_names = false)
+    return MOIU.default_copy_to(model, src, copy_names)
+end
+
+MOI.get(::Optimizer, ::MOI.SolverName) = "Optim"
+
+function MOI.set(model::Optimizer, ::MOI.ObjectiveSense, sense::MOI.OptimizationSense)
+    model.sense = sense
+    return
+end
+
+function MOI.set(model::Optimizer, ::MOI.Silent, value::Bool)
+    model.silent = value
+    return
+end
+
+MOI.get(model::Optimizer, ::MOI.Silent) = model.silent
+
+const TIME_LIMIT = "time_limit"
+MOI.supports(::Optimizer, ::MOI.TimeLimitSec) = true
+function MOI.set(model::Optimizer, ::MOI.TimeLimitSec, value::Real)
+    MOI.set(model, MOI.RawParameter(TIME_LIMIT), Float64(value))
+end
+function MOI.set(model::Optimizer, attr::MOI.TimeLimitSec, ::Nothing)
+    delete!(model.options, Symbol(TIME_LIMIT))
+end
+function MOI.get(model::Optimizer, ::MOI.TimeLimitSec)
+    return get(model.options, Symbol(TIME_LIMIT), nothing)
+end
+
+function MOI.set(model::Optimizer, p::MOI.RawParameter, value)
+    if p.name == "method"
+        model.method = value
+    else
+        model.options[Symbol(p.name)] = value
+    end
+    return
+end
+
+function MOI.get(model::Optimizer, p::MOI.RawParameter)
+    p.name == "method" && return model.method
+    if haskey(model.options, Symbol(p.name))
+        return model.options[Symbol(p.name)]
+    end
+    error("RawParameter with name $(p.name) is not set.")
+end
+
+MOI.get(model::Optimizer, ::MOI.SolveTime) = model.results === nothing ? NaN : time_run(model.results)
+
+function MOI.empty!(model::Optimizer)
+    empty!(model.variable_info)
+    model.nlp_data = nothing
+    model.sense = MOI.FEASIBILITY_SENSE
+    model.results = nothing
+    return
+end
+
+function MOI.is_empty(model::Optimizer)
+    return isempty(model.variable_info) &&
+           model.nlp_data === nothing &&
+           model.sense == MOI.FEASIBILITY_SENSE
+end
+
+function MOI.add_variable(model::Optimizer{T}) where {T}
+    push!(model.variable_info, VariableInfo{T}())
+    return MOI.VariableIndex(length(model.variable_info))
+end
+function MOI.is_valid(model::Optimizer, vi::MOI.VariableIndex)
+    return vi.value in eachindex(model.variable_info)
+end
+function MOI.is_valid(model::Optimizer{T}, ci::MOI.ConstraintIndex{MOI.SingleVariable,LE{T}}) where {T}
+    vi = MOI.VariableIndex(ci.value)
+    return MOI.is_valid(model, vi) && has_upper_bound(model, vi)
+end
+function MOI.is_valid(model::Optimizer{T}, ci::MOI.ConstraintIndex{MOI.SingleVariable,GE{T}}) where {T}
+    vi = MOI.VariableIndex(ci.value)
+    return MOI.is_valid(model, vi) && has_lower_bound(model, vi)
+end
+function MOI.is_valid(model::Optimizer{T}, ci::MOI.ConstraintIndex{MOI.SingleVariable,EQ{T}}) where {T}
+    vi = MOI.VariableIndex(ci.value)
+    return MOI.is_valid(model, vi) && is_fixed(model, vi)
+end
+
+function has_upper_bound(model::Optimizer, vi::MOI.VariableIndex)
+    return model.variable_info[vi.value].has_upper_bound
+end
+
+function has_lower_bound(model::Optimizer, vi::MOI.VariableIndex)
+    return model.variable_info[vi.value].has_lower_bound
+end
+
+function is_fixed(model::Optimizer, vi::MOI.VariableIndex)
+    return model.variable_info[vi.value].is_fixed
+end
+
+function MOI.add_constraint(model::Optimizer{T}, v::MOI.SingleVariable, lt::LE{T}) where {T}
+    vi = v.variable
+    MOI.throw_if_not_valid(model, vi)
+    if isnan(lt.upper)
+        error("Invalid upper bound value $(lt.upper).")
+    end
+    if has_upper_bound(model, vi)
+        throw(MOI.UpperBoundAlreadySet{typeof(lt),typeof(lt)}(vi))
+    end
+    if is_fixed(model, vi)
+        throw(MOI.UpperBoundAlreadySet{EQ{T},typeof(lt)}(vi))
+    end
+    model.variable_info[vi.value].upper_bound = lt.upper
+    model.variable_info[vi.value].has_upper_bound = true
+    return MOI.ConstraintIndex{MOI.SingleVariable,LE{T}}(vi.value)
+end
+
+function MOI.add_constraint(model::Optimizer{T}, v::MOI.SingleVariable, gt::MOI.GreaterThan{T}) where {T}
+    vi = v.variable
+    MOI.throw_if_not_valid(model, vi)
+    if isnan(gt.lower)
+        error("Invalid lower bound value $(gt.lower).")
+    end
+    if has_lower_bound(model, vi)
+        throw(MOI.LowerBoundAlreadySet{typeof(gt),typeof(gt)}(vi))
+    end
+    if is_fixed(model, vi)
+        throw(MOI.LowerBoundAlreadySet{EQ{T},typeof(gt)}(vi))
+    end
+    model.variable_info[vi.value].lower_bound = gt.lower
+    model.variable_info[vi.value].has_lower_bound = true
+    return MOI.ConstraintIndex{MOI.SingleVariable,GE{T}}(vi.value)
+end
+
+function MOI.add_constraint(model::Optimizer{T}, v::MOI.SingleVariable, eq::EQ{T}) where {T}
+    vi = v.variable
+    MOI.throw_if_not_valid(model, vi)
+    if isnan(eq.value)
+        error("Invalid fixed value $(eq.value).")
+    end
+    if has_lower_bound(model, vi)
+        throw(MOI.LowerBoundAlreadySet{GE{T},typeof(eq)}(vi))
+    end
+    if has_upper_bound(model, vi)
+        throw(MOI.UpperBoundAlreadySet{LE{T},typeof(eq)}(vi))
+    end
+    if is_fixed(model, vi)
+        throw(MOI.LowerBoundAlreadySet{typeof(eq),typeof(eq)}(vi))
+    end
+    model.variable_info[vi.value].lower_bound = eq.value
+    model.variable_info[vi.value].upper_bound = eq.value
+    model.variable_info[vi.value].is_fixed = true
+    return MOI.ConstraintIndex{MOI.SingleVariable,EQ{T}}(vi.value)
+end
+
+function MOI.set(
+    model::Optimizer,
+    ::MOI.VariablePrimalStart,
+    vi::MOI.VariableIndex,
+    value::Union{Real,Nothing},
+)
+    MOI.throw_if_not_valid(model, vi)
+    model.variable_info[vi.value].start = value
+    return
+end
+
+function MOI.set(model::Optimizer, ::MOI.NLPBlock, nlp_data::MOI.NLPBlockData)
+    model.nlp_data = nlp_data
+    return
+end
+
+function requested_features(::ZerothOrderOptimizer, has_constraints)
+    return Symbol[]
+end
+function requested_features(::FirstOrderOptimizer, has_constraints)
+    features = [:Grad]
+    if has_constraints
+        push!(features, :Jac)
+    end
+    return features
+end
+function requested_features(::Union{IPNewton,SecondOrderOptimizer}, has_constraints)
+    features = [:Grad, :Hess]
+    if has_constraints
+        push!(features, :Jac)
+    end
+    return features
+end
+
+function sparse_to_dense!(A, I::Vector, nzval)
+    for k in eachindex(I)
+        i, j = I[k]
+        A[i, j] += nzval[k]
+    end
+    return A
+end
+
+function sym_sparse_to_dense!(A, I::Vector, nzval)
+    for k in eachindex(I)
+        i, j = I[k]
+        A[i, j] += nzval[k]
+        A[j, i] = A[i, j]
+    end
+    return A
+end
+
+function MOI.optimize!(model::Optimizer{T}) where {T}
+    num_variables = length(model.variable_info)
+
+    # Load the NLP block and build the objective callbacks.
+    if model.nlp_data === nothing || !model.nlp_data.has_objective
+        error("An objective should be provided to Optim with `@NLobjective`.")
+    end
+    objective_scale = model.sense == MOI.MAX_SENSE ? -one(T) : one(T)
+    zero_μ = zeros(T, length(model.nlp_data.constraint_bounds))
+    function f(x)
+        return objective_scale * MOI.eval_objective(model.nlp_data.evaluator, x)
+    end
+    function g!(G, x)
+        fill!(G, zero(T))
+        MOI.eval_objective_gradient(model.nlp_data.evaluator, G, x)
+        if model.sense == MOI.MAX_SENSE
+            rmul!(G, objective_scale)
+        end
+        return G
+    end
+    function h!(H, x)
+        fill!(H, zero(T))
+        MOI.eval_hessian_lagrangian(model.nlp_data.evaluator, H_nzval, x, objective_scale, zero_μ)
+        sym_sparse_to_dense!(H, hessian_structure, H_nzval)
+        return H
+    end
+
+    method = model.method
+    nl_constrained = !isempty(model.nlp_data.constraint_bounds)
+    features = MOI.features_available(model.nlp_data.evaluator)
+    if method === nothing
+        if nl_constrained
+            method = IPNewton()
+        elseif :Grad in features
+            if :Hess in features
+                method = fallback_method(f, g!, h!)
+            else
+                method = fallback_method(f, g!)
+            end
+        else
+            method = fallback_method(f)
+        end
+    end
+    MOI.initialize(model.nlp_data.evaluator, requested_features(method, nl_constrained))
+
+    if :Hess in features
+        hessian_structure = MOI.hessian_lagrangian_structure(model.nlp_data.evaluator)
+        H_nzval = zeros(T, length(hessian_structure))
+    end
+
+    initial_x = starting_value.(model.variable_info)
+    options = copy(model.options)
+    has_bounds = any(info -> info.is_fixed || info.has_upper_bound || info.has_lower_bound, model.variable_info)
+    if has_bounds
+        lower = [info.lower_bound for info in model.variable_info]
+        upper = [info.upper_bound for info in model.variable_info]
+    end
+    if !nl_constrained && has_bounds && !(method isa IPNewton)
+        options = Options(; options...)
+        model.results = optimize(f, g!, lower, upper, initial_x, Fminbox(method), options; inplace = true)
+    else
+        d = promote_objtype(method, initial_x, :finite, true, f, g!, h!)
+        add_default_opts!(options, method)
+        options = Options(; options...)
+        if nl_constrained || has_bounds
+            if nl_constrained
+                lc = [b.lower for b in model.nlp_data.constraint_bounds]
+                uc = [b.upper for b in model.nlp_data.constraint_bounds]
+                c!(c, x) = MOI.eval_constraint(model.nlp_data.evaluator, c, x)
+                if !(:Jac in features)
+                    error("Nonlinear constraints should be differentiable to be used with Optim.")
+                end
+                if !(:Hess in features)
+                    error("Nonlinear constraints should be twice differentiable to be used with Optim.")
+                end
+                jacobian_structure = MOI.jacobian_structure(model.nlp_data.evaluator)
+                J_nzval = zeros(T, length(jacobian_structure))
+                function jacobian!(J, x)
+                    fill!(J, zero(T))
+                    MOI.eval_constraint_jacobian(model.nlp_data.evaluator, J_nzval, x)
+                    sparse_to_dense!(J, jacobian_structure, J_nzval)
+                    return J
+                end
+                function con_hessian!(H, x, λ)
+                    fill!(H, zero(T))
+                    MOI.eval_hessian_lagrangian(model.nlp_data.evaluator, H_nzval, x, zero(T), λ)
+                    sym_sparse_to_dense!(H, hessian_structure, H_nzval)
+                    return H
+                end
+                if !has_bounds
+                    lower = fill(typemin(T), num_variables)
+                    upper = fill(typemax(T), num_variables)
+                end
+                c = TwiceDifferentiableConstraints(
+                    c!, jacobian!, con_hessian!,
+                    lower, upper, lc, uc,
+                )
+            else
+                @assert has_bounds
+                c = TwiceDifferentiableConstraints(lower, upper)
+            end
+            model.results = optimize(d, c, initial_x, method, options)
+        else
+            model.results = optimize(d, initial_x, method, options)
+        end
+    end
+    return
+end
+
+function MOI.get(model::Optimizer, ::MOI.TerminationStatus)
+    if model.results === nothing
+        return MOI.OPTIMIZE_NOT_CALLED
+    elseif converged(model.results)
+        return MOI.LOCALLY_SOLVED
+    else
+        return MOI.OTHER_ERROR
+    end
+end
+
+function MOI.get(model::Optimizer, ::MOI.RawStatusString)
+    return model.results === nothing ? "Optimize not called." : summary(model.results)
+end
+
+# A result is only available once `MOI.optimize!` has been called.
+function MOI.get(model::Optimizer, ::MOI.ResultCount)
+    return model.results === nothing ? 0 : 1
+end
+
+function MOI.get(model::Optimizer, attr::MOI.PrimalStatus)
+    if !(1 <= attr.N <= MOI.get(model, MOI.ResultCount()))
+        return MOI.NO_SOLUTION
+    end
+    if converged(model.results)
+        return MOI.FEASIBLE_POINT
+    else
+        return MOI.UNKNOWN_RESULT_STATUS
+    end
+end
+
+function MOI.get(model::Optimizer, attr::MOI.ObjectiveValue)
+    MOI.check_result_index_bounds(model, attr)
+    val = minimum(model.results)
+    if model.sense == MOI.MAX_SENSE
+        val = -val
+    end
+    return val
+end
+
+function MOI.get(model::Optimizer, attr::MOI.VariablePrimal, vi::MOI.VariableIndex)
+    MOI.check_result_index_bounds(model, attr)
+    MOI.throw_if_not_valid(model, vi)
+    return minimizer(model.results)[vi.value]
+end
+
+function MOI.get(
+    model::Optimizer,
+    attr::MOI.ConstraintPrimal,
+    ci::MOI.ConstraintIndex{MOI.SingleVariable,<:Union{LE,GE,EQ}},
+)
+    MOI.check_result_index_bounds(model, attr)
+    MOI.throw_if_not_valid(model, ci)
+    return minimizer(model.results)[ci.value]
+end
diff --git a/src/Optim.jl b/src/Optim.jl
index cde1b3e10..71c415963 100644
--- a/src/Optim.jl
+++ b/src/Optim.jl
@@ -215,4 +215,7 @@ include("multivariate/solvers/constrained/ipnewton/utilities/trace.jl")
 # Maximization convenience wrapper
 include("maximize.jl")
 
+# MathOptInterface wrapper
+include("MOI_wrapper.jl")
+
 end
diff --git a/test/MOI_wrapper.jl b/test/MOI_wrapper.jl
new file mode 100644
index 000000000..dd2165f81
--- /dev/null
+++ b/test/MOI_wrapper.jl
@@ -0,0 +1,68 @@
+module TestOptim
+
+import Optim
+using MathOptInterface
+using Test
+
+const MOI = MathOptInterface
+
+const OPTIMIZER_CONSTRUCTOR = MOI.OptimizerWithAttributes(
+    Optim.Optimizer{Float64},
+    MOI.Silent() => true,
+)
+const OPTIMIZER = MOI.instantiate(OPTIMIZER_CONSTRUCTOR)
+const CACHED = MOI.Utilities.CachingOptimizer(
+    MOI.Utilities.UniversalFallback(MOI.Utilities.Model{Float64}()),
+    OPTIMIZER,
+)
+
+const CONFIG = MOI.Test.TestConfig(
+    atol = 1e-6,
+    rtol = 1e-6,
+    duals = false,
+    query = false,
+    infeas_certificates = false,
+    optimal_status = MOI.LOCALLY_SOLVED,
+)
+
+function test_SolverName()
+    @test MOI.get(OPTIMIZER, MOI.SolverName()) == "Optim"
+end
+
+function test_supports_incremental_interface()
+    @test MOI.Utilities.supports_default_copy_to(OPTIMIZER, false)
+    @test !MOI.Utilities.supports_default_copy_to(OPTIMIZER, true)
+end
+
+function test_nlp()
+    MOI.Test.nlptest(CACHED, CONFIG, String[
+        # FIXME The hessian callback for constraints is called with
+        # `λ = [-Inf, 0.0]` and then we get `NaN`, ...
+        "hs071",
+        # There are nonlinear constraints so we need `IPNewton` but `IPNewton` needs a hessian.
+        "hs071_no_hessian", "feasibility_sense_with_objective_and_no_hessian",
+        # FIXME Here there is no hessian but there is a hessian-vector product, can `IPNewton` work with that?
+        "hs071_hessian_vector_product_test",
+        # No objective, would be fixed by https://github.com/jump-dev/MathOptInterface.jl/issues/1397
+        "feasibility_sense_with_no_objective_and_with_hessian",
+        "feasibility_sense_with_no_objective_and_no_hessian",
+        # Affine objective, would be fixed by https://github.com/jump-dev/MathOptInterface.jl/issues/1397
+        "nlp_objective_and_moi_objective",
+        #"feasibility_sense_with_no_objective_and_with_hessian",
+    ])
+end
+
+# This function runs all functions in this module starting with `test_`.
+function runtests()
+    for name in names(@__MODULE__; all = true)
+        if startswith("$(name)", "test_")
+            @testset "$(name)" begin
+                getfield(@__MODULE__, name)()
+            end
+        end
+    end
+end
+
+end # module TestOptim
+
+TestOptim.runtests()
diff --git a/test/runtests.jl b/test/runtests.jl
index 7e7ffbe51..8fe8db299 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -260,3 +260,7 @@ println("Literate examples")
 o = Optim.Options()
 @test occursin(" = ", sprint(show, o))
 end
+
+@testset "MOI wrapper" begin
+    include("MOI_wrapper.jl")
+end
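
---

Usage note (not part of the diff): the sketch below shows how the two solve paths in `MOI.optimize!` above could be exercised from JuMP, one hitting the `Fminbox` branch (variable bounds, no nonlinear constraints, a non-`IPNewton` method) and one hitting the constrained `IPNewton` branch. It has not been run here and assumes only the API introduced in this PR: `Optim.Optimizer`, the `"method"` raw parameter, and raw parameters named after `Optim.Options` fields such as `"iterations"`. The JuMP calls (`set_optimizer_attribute`, `@variable`, `@NLobjective`, `@NLconstraint`) are standard JuMP; exact results will depend on the solver settings.

```julia
using JuMP, Optim

# Bound-constrained Rosenbrock: no nonlinear constraints, a first-order method,
# and variable bounds, so the wrapper should take the Fminbox branch.
box = Model(Optim.Optimizer)
set_optimizer_attribute(box, "method", BFGS())
set_optimizer_attribute(box, "iterations", 1_000)  # forwarded to Optim.Options
@variable(box, -2.0 <= x[1:2] <= 2.0, start = 0.0)
@NLobjective(box, Min, (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2)
optimize!(box)
@show termination_status(box) objective_value(box) value.(x)

# Nonlinearly constrained variant: with an @NLconstraint present, the wrapper
# builds TwiceDifferentiableConstraints from the evaluator's Jacobian and
# Hessian callbacks and solves with IPNewton (here set explicitly).
con = Model(Optim.Optimizer)
set_optimizer_attribute(con, "method", Optim.IPNewton())
@variable(con, y[1:2], start = 0.5)  # strictly feasible starting point
@NLobjective(con, Min, (1.0 - y[1])^2 + 100.0 * (y[2] - y[1]^2)^2)
@NLconstraint(con, y[1]^2 + y[2]^2 <= 1.0)
optimize!(con)
@show termination_status(con) value.(y)
```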