Skip to content

Commit

Permalink
Implement local_optimizer for MOI interface (#173)
Browse files Browse the repository at this point in the history
  • Loading branch information
blegat authored Sep 7, 2021
1 parent 34f0122 commit 4a2460a
Show file tree
Hide file tree
Showing 4 changed files with 57 additions and 2 deletions.
16 changes: 16 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,22 @@ got 0.5443310477213124 at [0.3333333342139688,0.29629628951338166]
Note that the MathOptInterface interface sets slightly different convergence tolerances by default (these default values are given by the `NLopt.DEFAULT_OPTIONS` dictionary),
so the outputs from the two problems are not identical.

Some algorithms need a local optimizer. These are set with `local_optimizer`, e.g.,
```julia
model = Model(NLopt.Optimizer)
set_optimizer_attribute(model, "algorithm", :AUGLAG)
set_optimizer_attribute(model, "local_optimizer", :LD_LBFGS)
```
To parametrize the local optimizer, use the `NLopt.Opt` interface, e.g.,
```julia
model = Model(NLopt.Optimizer)
set_optimizer_attribute(model, "algorithm", :AUGLAG)
local_optimizer = NLopt.Opt(:LD_LBFGS, num_variables)
local_optimizer.xtol_rel = 1e-4
set_optimizer_attribute(model, "local_optimizer", local_optimizer)
```
where `num_variables` is the number of variables of the optimization problem.

## Reference

The main purpose of this section is to document the syntax and unique
Expand Down
10 changes: 10 additions & 0 deletions src/MOI_wrapper.jl
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,7 @@ const DEFAULT_OPTIONS = Dict{String, Any}(
"population" => 0,
"seed" => nothing,
"vector_storage" => 0,
"local_optimizer" => nothing,
)

function Optimizer()
Expand Down Expand Up @@ -776,6 +777,15 @@ function MOI.optimize!(model::Optimizer)
# TODO: Reuse model.inner for incremental solves if possible.
num_variables = length(model.variable_info)
model.inner = Opt(model.options["algorithm"], num_variables)
local_optimizer = model.options["local_optimizer"]
if local_optimizer !== nothing
if local_optimizer isa Symbol
local_optimizer = Opt(local_optimizer, num_variables)
else
local_optimizer = Opt(local_optimizer.algorithm, num_variables)
end
local_optimizer!(model.inner, local_optimizer)
end

# load parameters
stopval!(model.inner, model.options["stopval"])
Expand Down
32 changes: 30 additions & 2 deletions test/MOI_wrapper.jl
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,28 @@ end
@test !MOIU.supports_default_copy_to(optimizer, true)
end

@testset "Non-Linear tests" begin
# Instantiate `solver` and run the full MOI nonlinear (NLP) test suite on it.
function test_nlp(solver)
    opt = MOI.instantiate(solver)
    return MOIT.nlptest(opt, config)
end
@testset "Non-Linear tests" begin
    # Plain solver, then AUGLAG with the local optimizer given by Symbol.
    test_nlp(solver)
    test_nlp(MOI.OptimizerWithAttributes(
        NLopt.Optimizer,
        "algorithm" => :AUGLAG,
        "local_optimizer" => :LD_LBFGS,
    ))
    # NLP tests have different number of variables so we
    # cannot run through them all with the same `local_optimizer`.
    # Let's just do hs071.
    local_optimizer = Opt(:LD_LBFGS, 4)
    # Bug fix: this previously set `opt.xtol_rel`, but `opt` is undefined in
    # this scope (UndefVarError), so the tolerance was never applied. The
    # freshly constructed local optimizer is named `local_optimizer`.
    local_optimizer.xtol_rel = 1e-6
    MOIT.hs071_test(MOI.instantiate(MOI.OptimizerWithAttributes(
        NLopt.Optimizer,
        "algorithm" => :AUGLAG,
        "local_optimizer" => local_optimizer,
    )), config)
end

@testset "Testing getters" begin
MOI.Test.copytest(MOI.instantiate(solver, with_bridge_type=Float64), MOIU.Model{Float64}())
Expand Down Expand Up @@ -98,7 +117,16 @@ end
# NumberOfThreads not supported
"number_threads",
# Infeasibility and unboundedness not detected by NLopt
"solve_unbounded_model"
"solve_unbounded_model",
"solve_farkas_interval_lower",
"solve_farkas_lessthan",
"solve_farkas_equalto_lower",
"solve_farkas_equalto_upper",
"solve_farkas_variable_lessthan",
"solve_farkas_variable_lessthan_max",
"solve_farkas_greaterthan",
"solve_farkas_interval_upper",
"solve_farkas_lessthan",
]
MOIT.unittest(bridged, config, exclude)
end
1 change: 1 addition & 0 deletions test/runtests.jl
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
# Test driver: run each test file in sequence. Order matters only in that
# later files assume the package loads cleanly; each file is self-contained.
include("tutorial.jl")
include("fix133.jl")
include("MPB_wrapper.jl")
include("MOI_wrapper.jl")

0 comments on commit 4a2460a

Please sign in to comment.