Add is_solved_and_feasible (#3668)
odow authored Feb 14, 2024
1 parent 7181551 commit 86b0ec8
Showing 56 changed files with 461 additions and 93 deletions.
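
For context, a rough sketch of the semantics documented in the changed files below (this is not the actual JuMP implementation, and the `_sketch` suffix marks the name as illustrative only):

```julia
using JuMP

# Illustrative sketch: mirrors the documented behavior of
# is_solved_and_feasible -- a global or local optimum passes by default,
# only a global optimum passes when allow_local = false, and an optimal
# dual solution is also required when dual = true.
function is_solved_and_feasible_sketch(model; allow_local = true, dual = false)
    status = termination_status(model)
    ok = status == OPTIMAL || (allow_local && status == LOCALLY_SOLVED)
    ok &= primal_status(model) == FEASIBLE_POINT
    if dual
        ok &= dual_status(model) == FEASIBLE_POINT
    end
    return ok
end
```
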
12 changes: 12 additions & 0 deletions docs/src/background/algebraic_modeling_languages.md
@@ -138,6 +138,9 @@ julia> function algebraic_knapsack(c, w, b)
@objective(model, Max, sum(c[i] * x[i] for i = 1:n))
@constraint(model, sum(w[i] * x[i] for i = 1:n) <= b)
optimize!(model)
if termination_status(model) != OPTIMAL
error("Not solved correctly")
end
return value.(x)
end
algebraic_knapsack (generic function with 1 method)
@@ -179,6 +182,9 @@ julia> function nonalgebraic_knapsack(c, w, b)
con = build_constraint(error, lhs, MOI.LessThan(b))
add_constraint(model, con)
optimize!(model)
if termination_status(model) != OPTIMAL
error("Not solved correctly")
end
return value.(x)
end
nonalgebraic_knapsack (generic function with 1 method)
@@ -219,6 +225,9 @@ julia> function mathoptinterface_knapsack(optimizer, c, w, b)
MOI.LessThan(b),
)
MOI.optimize!(model)
if MOI.get(model, MOI.TerminationStatus()) != MOI.OPTIMAL
error("Not solved correctly")
end
return MOI.get.(model, MOI.VariablePrimal(), x)
end
mathoptinterface_knapsack (generic function with 1 method)
@@ -257,6 +266,9 @@ julia> function highs_knapsack(c, w, b)
w,
)
Highs_run(model)
if Highs_getModelStatus(model) != kHighsModelStatusOptimal
error("Not solved correctly")
end
x = fill(NaN, 2)
Highs_getSolution(model, x, C_NULL, C_NULL, C_NULL)
Highs_destroy(model)
124 changes: 114 additions & 10 deletions docs/src/manual/solutions.md
@@ -32,6 +32,34 @@ Subject to
y[b] ≤ 1
```

## Check if an optimal solution exists

Use [`is_solved_and_feasible`](@ref) to check if the solver found an optimal
solution:
```jldoctest solutions
julia> is_solved_and_feasible(model)
true
```

By default, [`is_solved_and_feasible`](@ref) returns `true` for both global and
local optima. Pass `allow_local = false` to check if the solver found a globally
optimal solution:
```jldoctest solutions
julia> is_solved_and_feasible(model; allow_local = false)
true
```

Pass `dual = true` to check if the solver found an optimal dual solution in
addition to an optimal primal solution:
```jldoctest solutions
julia> is_solved_and_feasible(model; dual = true)
true
```

If this function returns `false`, use functions such as [`solution_summary`](@ref),
[`termination_status`](@ref), [`primal_status`](@ref), and [`dual_status`](@ref),
described below, to understand what solution (if any) the solver found.
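
For example, a minimal sketch of inspecting the result when the check fails (this
is not a doctest; it assumes `model` has already been optimized):
```julia
if !is_solved_and_feasible(model)
    # A human-readable overview of the result.
    println(solution_summary(model))
    # Or query the individual statuses directly.
    @show termination_status(model)
    @show primal_status(model)
    @show dual_status(model)
end
```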

## Solutions summary

[`solution_summary`](@ref) can be used for checking the summary of the
@@ -267,25 +295,101 @@ And data, a 2-element Vector{Float64}:

## Recommended workflow

You should always check whether the solver found a solution before calling
solution functions like [`value`](@ref) or [`objective_value`](@ref).

A simple approach for small scripts and notebooks is to use
[`is_solved_and_feasible`](@ref):

```jldoctest solutions
julia> function solve_and_print_solution(model)
optimize!(model)
if !is_solved_and_feasible(model; dual = true)
error(
"""
The model was not solved correctly:
termination_status : $(termination_status(model))
primal_status : $(primal_status(model))
dual_status : $(dual_status(model))
raw_status : $(raw_status(model))
""",
)
end
println("Solution is optimal")
println(" objective value = ", objective_value(model))
println(" primal solution: x = ", value(x))
println(" dual solution: c1 = ", dual(c1))
return
end
solve_and_print_solution (generic function with 1 method)
julia> solve_and_print_solution(model)
Solution is optimal
objective value = -205.14285714285714
primal solution: x = 15.428571428571429
dual solution: c1 = 1.7142857142857142
```

For library code that must be robust to the full range of possible termination
and result statuses, use some variation of the following:
```jldoctest solutions
julia> function solve_and_print_solution(model)
status = termination_status(model)
if status in (OPTIMAL, LOCALLY_SOLVED)
println("Solution is optimal")
elseif status in (ALMOST_OPTIMAL, ALMOST_LOCALLY_SOLVED)
println("Solution is optimal to a relaxed tolerance")
elseif status == TIME_LIMIT
println(
"Solver stopped due to a time limit. If a solution is available, " *
"it may be suboptimal."
)
elseif status in (
ITERATION_LIMIT, NODE_LIMIT, SOLUTION_LIMIT, MEMORY_LIMIT,
OBJECTIVE_LIMIT, NORM_LIMIT, OTHER_LIMIT,
)
println(
"Solver stopped due to a limit. If a solution is available, it " *
"may be suboptimal."
)
elseif status in (INFEASIBLE, LOCALLY_INFEASIBLE)
println("The problem is primal infeasible")
elseif status == DUAL_INFEASIBLE
println(
"The problem is dual infeasible. If a primal feasible solution " *
"exists, the problem is unbounded. To check, set the objective " *
"to `@objective(model, Min, 0)` and re-solve. If the problem is " *
"feasible, the primal is unbounded. If the problem is " *
"infeasible, both the primal and dual are infeasible.",
)
elseif status == INFEASIBLE_OR_UNBOUNDED
println(
"The model is either infeasible or unbounded. Set the objective " *
"to `@objective(model, Min, 0)` and re-solve to disambiguate. If " *
"the problem was infeasible, it will still be infeasible. If the " *
"problem was unbounded, it will now have a finite optimal solution.",
)
else
error("The model was not solved correctly.")
println(
"The model was not solved correctly. The termination status is $status",
)
end
println(" objective value = ", objective_value(model))
if primal_status(model) == FEASIBLE_POINT
if primal_status(model) in (FEASIBLE_POINT, NEARLY_FEASIBLE_POINT)
println(" objective value = ", objective_value(model))
println(" primal solution: x = ", value(x))
elseif primal_status(model) == INFEASIBILITY_CERTIFICATE
println(" primal certificate: x = ", value(x))
end
if dual_status(model) in (FEASIBLE_POINT, NEARLY_FEASIBLE_POINT)
println(" dual solution: c1 = ", dual(c1))
elseif dual_status(model) == INFEASIBILITY_CERTIFICATE
println(" dual certificate: c1 = ", dual(c1))
end
return
end
solve_and_print_solution (generic function with 1 method)
julia> solve_and_print_solution(model)
Solution is optimal
objective value = -205.14285714285714
primal solution: x = 15.428571428571429
9 changes: 7 additions & 2 deletions docs/src/tutorials/algorithms/benders_decomposition.jl
@@ -164,7 +164,7 @@ function solve_subproblem(x)
con = @constraint(model, A_2 * y .<= b - A_1 * x)
@objective(model, Min, c_2' * y)
optimize!(model)
@assert is_solved_and_feasible(model; dual = true)
return (obj = objective_value(model), y = value.(y), π = dual.(con))
end

@@ -194,6 +194,7 @@ ABSOLUTE_OPTIMALITY_GAP = 1e-6
println("Iteration Lower Bound Upper Bound Gap")
for k in 1:MAXIMUM_ITERATIONS
optimize!(model)
@assert is_solved_and_feasible(model)
lower_bound = objective_value(model)
x_k = value.(x)
ret = solve_subproblem(x_k)
@@ -211,6 +212,7 @@ end
# Finally, we can obtain the optimal solution

optimize!(model)
@assert is_solved_and_feasible(model)
Test.@test value.(x) == [0.0, 1.0] #src
x_optimal = value.(x)

@@ -268,6 +270,7 @@ set_attribute(lazy_model, MOI.LazyConstraintCallback(), my_callback)
# Now when we optimize!, our callback is run:

optimize!(lazy_model)
@assert is_solved_and_feasible(lazy_model)

# For this model, the callback algorithm required more solves of the subproblem:

@@ -323,7 +326,7 @@ print(subproblem)
function solve_subproblem(model, x)
fix.(model[:x_copy], x)
optimize!(model)
@assert is_solved_and_feasible(model; dual = true)
return (
obj = objective_value(model),
y = value.(model[:y]),
@@ -341,6 +344,7 @@ end
println("Iteration Lower Bound Upper Bound Gap")
for k in 1:MAXIMUM_ITERATIONS
optimize!(model)
@assert is_solved_and_feasible(model)
lower_bound = objective_value(model)
x_k = value.(x)
ret = solve_subproblem(subproblem, x_k)
@@ -358,6 +362,7 @@ end
# Finally, we can obtain the optimal solution:

optimize!(model)
@assert is_solved_and_feasible(model)
Test.@test value.(x) == [0.0, 1.0] #src
x_optimal = value.(x)

@@ -235,6 +235,7 @@ set_silent(model)
@objective(model, Min, sum(x))
@constraint(model, demand[i in 1:I], patterns[i]' * x >= data.pieces[i].d)
optimize!(model)
@assert is_solved_and_feasible(model)
solution_summary(model)

# This solution requires 421 rolls. This solution is sub-optimal because the
@@ -252,6 +253,7 @@ solution_summary(model)

unset_integer.(x)
optimize!(model)
@assert is_solved_and_feasible(model; dual = true)
π_13 = dual(demand[13])

# Using the economic interpretation of the dual variable, we can say that a one
@@ -282,6 +284,7 @@ function solve_pricing(data::Data, π::Vector{Float64})
@constraint(model, sum(data.pieces[i].w * y[i] for i in 1:I) <= data.W)
@objective(model, Max, sum(π[i] * y[i] for i in 1:I))
optimize!(model)
@assert is_solved_and_feasible(model)
number_of_rolls_saved = objective_value(model)
if number_of_rolls_saved > 1 + 1e-8
## Benefit of pattern is more than the cost of a new roll plus some
@@ -312,6 +315,7 @@ solve_pricing(data, zeros(I))
while true
## Solve the linear relaxation
optimize!(model)
@assert is_solved_and_feasible(model; dual = true)
## Obtain a new dual vector
π = dual.(demand)
## Solve the pricing problem
@@ -362,6 +366,7 @@ sum(ceil.(Int, solution.rolls))

set_integer.(x)
optimize!(model)
@assert is_solved_and_feasible(model)
solution = DataFrames.DataFrame([
(pattern = p, rolls = value(x_p)) for (p, x_p) in enumerate(x)
])
3 changes: 3 additions & 0 deletions docs/src/tutorials/algorithms/parallelism.md
@@ -215,6 +215,7 @@ my_lock = Threads.ReentrantLock()
Threads.@threads for i in 1:10
set_lower_bound(x, i)
optimize!(model)
@assert is_solved_and_feasible(model)
Threads.lock(my_lock) do
push!(solutions, i => objective_value(model))
end
@@ -251,6 +252,7 @@ julia> Threads.@threads for i in 1:10
@objective(model, Min, x)
set_lower_bound(x, i)
optimize!(model)
@assert is_solved_and_feasible(model)
Threads.lock(my_lock) do
push!(solutions, i => objective_value(model))
end
@@ -295,6 +297,7 @@ julia> Distributed.@everywhere begin
@objective(model, Min, x)
set_lower_bound(x, i)
optimize!(model)
@assert is_solved_and_feasible(model)
return objective_value(model)
end
end
3 changes: 3 additions & 0 deletions docs/src/tutorials/algorithms/tsp_lazy_constraints.jl
@@ -199,6 +199,7 @@ subtour(x::AbstractMatrix{VariableRef}) = subtour(value.(x))

iterative_model = build_tsp_model(d, n)
optimize!(iterative_model)
@assert is_solved_and_feasible(iterative_model)
time_iterated = solve_time(iterative_model)
cycle = subtour(iterative_model[:x])
while 1 < length(cycle) < n
@@ -209,6 +210,7 @@ while 1 < length(cycle) < n
sum(iterative_model[:x][i, j] for (i, j) in S) <= length(cycle) - 1,
)
optimize!(iterative_model)
@assert is_solved_and_feasible(iterative_model)
global time_iterated += solve_time(iterative_model)
global cycle = subtour(iterative_model[:x])
end
@@ -262,6 +264,7 @@ set_attribute(
subtour_elimination_callback,
)
optimize!(lazy_model)
@assert is_solved_and_feasible(lazy_model)
objective_value(lazy_model)

# This finds the same optimal tour:
7 changes: 4 additions & 3 deletions docs/src/tutorials/applications/optimal_power_flow.jl
@@ -41,7 +41,7 @@ import DataFrames
import Ipopt
import LinearAlgebra
import SparseArrays
import Test

# ## Initial formulation

@@ -137,6 +137,7 @@ println("Objective value (basic lower bound) : $basic_lower_bound")

@constraint(model, sum(P_G) >= sum(P_Demand))
optimize!(model)
@assert is_solved_and_feasible(model)
better_lower_bound = round(objective_value(model); digits = 2)
println("Objective value (better lower bound): $better_lower_bound")

@@ -280,6 +281,7 @@ P_G = real(S_G)
# We're finally ready to solve our nonlinear AC-OPF problem:

optimize!(model)
@assert is_solved_and_feasible(model)
Test.@test isapprox(objective_value(model), 3087.84; atol = 1e-2) #src
solution_summary(model)

@@ -418,9 +420,8 @@ optimize!(model)

#-

Test.@test is_solved_and_feasible(model; allow_almost = true)
sdp_relaxation_lower_bound = round(objective_value(model); digits = 2)
Test.@test isapprox(sdp_relaxation_lower_bound, 2753.04; rtol = 1e-3) #src
println(
"Objective value (W & V relax. lower bound): $sdp_relaxation_lower_bound",