Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 16 additions & 1 deletion Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,28 @@ version = "0.5.1"

[deps]
IntervalArithmetic = "d1acc4aa-44c8-5952-acd4-ba5d80a2a253"
NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6"
SolverCore = "ff4d7338-4cf1-434d-91df-b86cb86fb843"

[weakdeps]
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
NLPModelsJuMP = "792afdf1-32c1-5681-94e0-d7bf7a5df49e"

[extensions]
IntervalOptimisationNLPModelsJuMPExt = ["NLPModelsJuMP", "MathOptInterface"]

[compat]
IntervalArithmetic = "0.22 - 0.23, 1"
MathOptInterface = "1"
NLPModels = "0.21"
NLPModelsJuMP = "0.13"
SolverCore = "0.3"
julia = "1.10"

[extras]
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
NLPModelsJuMP = "792afdf1-32c1-5681-94e0-d7bf7a5df49e"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[targets]
test = ["Test"]
test = ["JuMP", "NLPModelsJuMP", "Test"]
31 changes: 31 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,37 @@ julia> minimisers
[[-4.74512e-09, 4.41017e-09]_com, [-4.74512e-09, 4.41017e-09]_com]
```

#### JuMP

IntervalOptimisation can be used as a solver through [JuMP](https://github.com/jump-dev/JuMP.jl) via
[NLPModelsJuMP](https://github.com/JuliaSmoothOptimizers/NLPModelsJuMP.jl):

```julia
using JuMP, NLPModelsJuMP, IntervalOptimisation

model = Model(NLPModelsJuMP.Optimizer)
set_attribute(model, "solver", IntervalOptimiser)
set_attribute(model, "tol", 1e-5)

@variable(model, -10 <= x <= 10)
@objective(model, Min, (x - 3)^2 + 1)
optimize!(model)

julia> value(x)
3.0000152587890625

julia> objective_value(model)
1.0000000002328306
```

The interval enclosure of the global minimum and the minimizer boxes are stored in the
solver's `solver_specific` results (`:global_min_interval` and `:minimizers`); the raw
status of the underlying solver can also be queried through JuMP:

```julia
julia> get_attribute(model, RawStatusString()) # underlying solver status
"first-order stationary"
```

## References

- *Validated Numerics: A Short Introduction to Rigorous Computations*, W. Tucker, Princeton University Press (2010)
Expand Down
26 changes: 26 additions & 0 deletions ext/IntervalOptimisationNLPModelsJuMPExt.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
module IntervalOptimisationNLPModelsJuMPExt

using IntervalOptimisation
using NLPModelsJuMP: MathOptNLPModel
import MathOptInterface as MOI
import NLPModels

"""
    _make_objective(nlp::MathOptNLPModel, nvar)

Build an objective closure suitable for interval evaluation of a JuMP-backed model.

Nonlinear objectives are evaluated by walking the expression tree returned by
`MOI.objective_expr`, which is generic over the numeric type and therefore accepts
intervals. Linear and quadratic objectives go through `NLPModels.obj`, which already
works with interval arguments. For `nvar == 1` the returned closure takes a scalar
and wraps it in a one-element vector.
"""
function IntervalOptimisation._make_objective(nlp::MathOptNLPModel, nvar)
    evaluate = if nlp.obj.type == "NONLINEAR"
        tree = MOI.objective_expr(nlp.eval)
        x -> IntervalOptimisation._eval_moi_expr(tree, x)
    else
        # LINEAR / QUADRATIC: NLPModels.obj handles intervals directly.
        x -> NLPModels.obj(nlp, x)
    end
    return nvar == 1 ? (x -> evaluate([x])) : evaluate
end

end
2 changes: 1 addition & 1 deletion src/HeapedVectors.jl
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ function heaping(v, by)
end

function floatup!(ar, index, by)
par = convert(Int, floor(index/2))
par = div(index, 2)
if index <= 1
return ar
end
Expand Down
1 change: 1 addition & 0 deletions src/IntervalOptimisation.jl
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ using .HeapedVectors
using IntervalArithmetic

include("optimise.jl")
include("nlp.jl")


const minimize = minimise
Expand Down
156 changes: 156 additions & 0 deletions src/nlp.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,156 @@
import NLPModels
import SolverCore

export IntervalOptimiser

"""
IntervalOptimiser <: SolverCore.AbstractOptimizationSolver

A global optimization solver based on the Moore-Skelboe interval branch-and-bound algorithm.
Can be used with NLPModelsJuMP to solve JuMP models.

# Usage with JuMP and NLPModelsJuMP

```julia
using JuMP, NLPModelsJuMP, IntervalOptimisation

model = Model(NLPModelsJuMP.Optimizer)
set_attribute(model, "solver", IntervalOptimiser)
set_attribute(model, "tol", 1e-5)

@variable(model, -10 <= x <= 10)
@objective(model, Min, (x - 3)^2 + 1)
optimize!(model)
```

The solver stores the interval enclosure of the global minimum and the list of minimizer
boxes in the `solver_specific` fields `:global_min_interval` and `:minimizers`.
"""
mutable struct IntervalOptimiser <: SolverCore.AbstractOptimizationSolver
end

# SolverCore constructor API: build a solver instance for `nlp`. The solver keeps no
# state, so the model and any keyword arguments are accepted but unused.
IntervalOptimiser(nlp::NLPModels.AbstractNLPModel; kwargs...) = IntervalOptimiser()

"""
_eval_moi_expr(expr, x)

Recursively evaluate a Julia `Expr` returned by `MOI.objective_expr` using the
variable values in `x`. Works with any numeric type including intervals.
"""
function _eval_moi_expr(expr::Expr, x)
if expr.head == :ref && expr.args[1] == :x
# x[MOI.VariableIndex(i)] → x[i]
vi = expr.args[2]
return x[vi.value]
elseif expr.head == :call
op = expr.args[1]
evaluated_args = [_eval_moi_expr(a, x) for a in @view(expr.args[2:end])]
if op isa Symbol
return _call_op(Val(op), evaluated_args)
else
return op(evaluated_args...)
end
else
error("Unsupported expression head: $(expr.head)")
end
end

_eval_moi_expr(v::Number, _) = v

# Arithmetic
_call_op(::Val{:+}, args) = length(args) == 1 ? +args[1] : +(args...)
_call_op(::Val{:-}, args) = length(args) == 1 ? -args[1] : args[1] - args[2]
_call_op(::Val{:*}, args) = *(args...)
_call_op(::Val{:/}, args) = args[1] / args[2]
_call_op(::Val{:^}, args) = args[1] ^ args[2]
# Common math functions
_call_op(::Val{:log}, args) = log(args[1])
_call_op(::Val{:log2}, args) = log2(args[1])
_call_op(::Val{:log10}, args) = log10(args[1])
_call_op(::Val{:exp}, args) = exp(args[1])
_call_op(::Val{:sqrt}, args) = sqrt(args[1])
_call_op(::Val{:abs}, args) = abs(args[1])
_call_op(::Val{:sin}, args) = sin(args[1])
_call_op(::Val{:cos}, args) = cos(args[1])
_call_op(::Val{:tan}, args) = tan(args[1])
_call_op(::Val{:asin}, args) = asin(args[1])
_call_op(::Val{:acos}, args) = acos(args[1])
_call_op(::Val{:atan}, args) = atan(args...)
_call_op(::Val{:min}, args) = min(args...)
_call_op(::Val{:max}, args) = max(args...)
# Fallback: try to find the function in Base/Main
_call_op(::Val{op}, args) where {op} = getfield(Base, op)(args...)

"""
    SolverCore.solve!(solver::IntervalOptimiser, nlp, stats; tol = 1e-3, structure = HeapedVector)

Run the interval branch-and-bound on the box defined by the variable bounds of `nlp`,
writing the result into `stats`. Requires an unconstrained or box-constrained problem
with finite bounds on every variable. The interval enclosure of the optimum and the
optimizer boxes are stored in `stats.solver_specific` under `:global_min_interval`
and `:minimizers`.
"""
function SolverCore.solve!(
    solver::IntervalOptimiser,
    nlp::NLPModels.AbstractNLPModel,
    stats::SolverCore.GenericExecutionStats;
    tol::Real = 1e-3,
    structure = HeapedVector,
    kwargs...,
)
    t0 = time()

    # General (non-box) constraints are outside the scope of this solver.
    if nlp.meta.ncon > 0
        SolverCore.set_status!(stats, :exception)
        SolverCore.set_time!(stats, time() - t0)
        error("IntervalOptimiser only supports unconstrained or box-constrained problems")
    end

    n = nlp.meta.nvar
    lo = nlp.meta.lvar
    hi = nlp.meta.uvar

    # The search region must be a bounded box: every variable needs finite bounds.
    for i in 1:n
        if !isfinite(lo[i]) || !isfinite(hi[i])
            SolverCore.set_status!(stats, :exception)
            SolverCore.set_time!(stats, time() - t0)
            error("IntervalOptimiser requires finite bounds on all variables")
        end
    end

    # Interval search box and interval-evaluable objective.
    box = [interval(lo[i], hi[i]) for i in 1:n]
    f = _make_objective(nlp, n)
    region = n == 1 ? box[1] : box

    # Branch-and-bound in the requested direction.
    optimise = nlp.meta.minimize ? minimise : maximise
    enclosure, boxes = optimise(f, region; structure = structure, tol = tol)

    # Report the midpoint of the first optimizer box as the point solution.
    xsol = n == 1 ? [mid(boxes[1])] : mid.(boxes[1])

    SolverCore.set_status!(stats, :first_order)
    SolverCore.set_solution!(stats, xsol)
    SolverCore.set_objective!(stats, mid(enclosure))
    SolverCore.set_iter!(stats, length(boxes))
    SolverCore.set_time!(stats, time() - t0)
    SolverCore.set_solver_specific!(stats, :global_min_interval, enclosure)
    SolverCore.set_solver_specific!(stats, :minimizers, boxes)

    return stats
end

# Fallback for generic NLPModels: evaluate the objective through `NLPModels.obj`,
# which handles linear/quadratic objectives with interval arguments directly.
# (Nonlinear JuMP objectives are handled by the NLPModelsJuMP extension method.)
function _make_objective(nlp::NLPModels.AbstractNLPModel, nvar)
    # Single-variable problems receive a scalar from `minimise`; wrap it in a vector.
    return nvar == 1 ? (x -> NLPModels.obj(nlp, [x])) : (x -> NLPModels.obj(nlp, x))
end
6 changes: 6 additions & 0 deletions test/Project.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
[deps]
IntervalArithmetic = "d1acc4aa-44c8-5952-acd4-ba5d80a2a253"
IntervalOptimisation = "c7c68f13-a4a2-5b9a-b424-07d005f8d9d2"
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
NLPModelsJuMP = "792afdf1-32c1-5681-94e0-d7bf7a5df49e"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
69 changes: 69 additions & 0 deletions test/nlp.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
using IntervalOptimisation
using JuMP
using NLPModelsJuMP
using Test

@testset "NLPModels / JuMP integration" begin
@testset "1D quadratic minimization" begin
model = Model(NLPModelsJuMP.Optimizer)
set_attribute(model, "solver", IntervalOptimiser)
set_attribute(model, "tol", 1e-5)
set_optimizer_attribute(model, "silent", true)

@variable(model, -10 <= x <= 10)
@objective(model, Min, (x - 3)^2 + 1)
optimize!(model)

@test termination_status(model) == LOCALLY_SOLVED
@test value(x) ≈ 3.0 atol = 1e-3
@test objective_value(model) ≈ 1.0 atol = 1e-3
end

@testset "2D Rosenbrock-like" begin
model = Model(NLPModelsJuMP.Optimizer)
set_attribute(model, "solver", IntervalOptimiser)
set_attribute(model, "tol", 1e-3)
set_optimizer_attribute(model, "silent", true)

@variable(model, -5 <= x <= 5)
@variable(model, -5 <= y <= 5)
@objective(model, Min, (x - 1)^2 + (y - 2)^2)
optimize!(model)

@test termination_status(model) == LOCALLY_SOLVED
@test value(x) ≈ 1.0 atol = 1e-2
@test value(y) ≈ 2.0 atol = 1e-2
@test objective_value(model) ≈ 0.0 atol = 1e-2
end

@testset "Maximization" begin
model = Model(NLPModelsJuMP.Optimizer)
set_attribute(model, "solver", IntervalOptimiser)
set_attribute(model, "tol", 1e-5)
set_optimizer_attribute(model, "silent", true)

@variable(model, -2 <= x <= 2)
@objective(model, Max, -(x - 1)^2 + 5)
optimize!(model)

@test termination_status(model) == LOCALLY_SOLVED
@test value(x) ≈ 1.0 atol = 1e-3
@test objective_value(model) ≈ 5.0 atol = 1e-3
end

@testset "Nonlinear objective" begin
model = Model(NLPModelsJuMP.Optimizer)
set_attribute(model, "solver", IntervalOptimiser)
set_attribute(model, "tol", 1e-4)
set_optimizer_attribute(model, "silent", true)

@variable(model, 0.1 <= x <= 5)
@objective(model, Min, x - log(x))
optimize!(model)

# Minimum of x - log(x) is at x = 1, value = 1
@test termination_status(model) == LOCALLY_SOLVED
@test value(x) ≈ 1.0 atol = 1e-2
@test objective_value(model) ≈ 1.0 atol = 1e-2
end
end
3 changes: 2 additions & 1 deletion test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,5 @@ using Test

include("sorted_vector.jl")
include("heaped_vector.jl")
include("optimise.jl")
include("optimise.jl")
include("nlp.jl")