From bf0d632d64d83873fe2749a4ef863008328c1c6c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Wed, 15 Apr 2026 17:45:12 +0200 Subject: [PATCH 1/2] Add NLPModels and JuMP interfaces --- Project.toml | 17 ++- README.md | 31 ++++ ext/IntervalOptimisationNLPModelsJuMPExt.jl | 26 ++++ src/IntervalOptimisation.jl | 1 + src/nlp.jl | 156 ++++++++++++++++++++ test/Project.toml | 6 + test/nlp.jl | 69 +++++++++ test/runtests.jl | 3 +- 8 files changed, 307 insertions(+), 2 deletions(-) create mode 100644 ext/IntervalOptimisationNLPModelsJuMPExt.jl create mode 100644 src/nlp.jl create mode 100644 test/Project.toml create mode 100644 test/nlp.jl diff --git a/Project.toml b/Project.toml index 396b348..72ca6a2 100644 --- a/Project.toml +++ b/Project.toml @@ -4,13 +4,28 @@ version = "0.5.1" [deps] IntervalArithmetic = "d1acc4aa-44c8-5952-acd4-ba5d80a2a253" +NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6" +SolverCore = "ff4d7338-4cf1-434d-91df-b86cb86fb843" + +[weakdeps] +MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" +NLPModelsJuMP = "792afdf1-32c1-5681-94e0-d7bf7a5df49e" + +[extensions] +IntervalOptimisationNLPModelsJuMPExt = ["NLPModelsJuMP", "MathOptInterface"] [compat] IntervalArithmetic = "0.22 - 0.23, 1" +MathOptInterface = "1" +NLPModels = "0.21" +NLPModelsJuMP = "0.13" +SolverCore = "0.3" julia = "1.9" [extras] +JuMP = "4076af6c-e467-56ae-b986-b466b2749572" +NLPModelsJuMP = "792afdf1-32c1-5681-94e0-d7bf7a5df49e" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [targets] -test = ["Test"] +test = ["JuMP", "NLPModelsJuMP", "Test"] diff --git a/README.md b/README.md index 4b54a42..21fc10d 100644 --- a/README.md +++ b/README.md @@ -64,6 +64,37 @@ julia> minimisers [[-4.74512e-09, 4.41017e-09]_com, [-4.74512e-09, 4.41017e-09]_com] ``` +#### JuMP + +IntervalOptimisation can be used as a solver through [JuMP](https://github.com/jump-dev/JuMP.jl) via +[NLPModelsJuMP](https://github.com/JuliaSmoothOptimizers/NLPModelsJuMP.jl): + +```julia +using JuMP, 
NLPModelsJuMP, IntervalOptimisation + +model = Model(NLPModelsJuMP.Optimizer) +set_attribute(model, "solver", IntervalOptimiser) +set_attribute(model, "tol", 1e-5) + +@variable(model, -10 <= x <= 10) +@objective(model, Min, (x - 3)^2 + 1) +optimize!(model) + +julia> value(x) +3.0000152587890625 + +julia> objective_value(model) +1.0000000002328306 +``` + +The full interval enclosure of the global minimum and the minimizer boxes are available +through solver-specific attributes: + +```julia +julia> get_attribute(model, RawStatusString()) # underlying solver status +"first-order stationary" +``` + ## References - *Validated Numerics: A Short Introduction to Rigorous Computations*, W. Tucker, Princeton University Press (2010) diff --git a/ext/IntervalOptimisationNLPModelsJuMPExt.jl b/ext/IntervalOptimisationNLPModelsJuMPExt.jl new file mode 100644 index 0000000..69ec061 --- /dev/null +++ b/ext/IntervalOptimisationNLPModelsJuMPExt.jl @@ -0,0 +1,26 @@ +module IntervalOptimisationNLPModelsJuMPExt + +using IntervalOptimisation +using NLPModelsJuMP: MathOptNLPModel +import MathOptInterface as MOI +import NLPModels + +function IntervalOptimisation._make_objective(nlp::MathOptNLPModel, nvar) + if nlp.obj.type == "NONLINEAR" + obj_expr = MOI.objective_expr(nlp.eval) + if nvar == 1 + return x -> IntervalOptimisation._eval_moi_expr(obj_expr, [x]) + else + return x -> IntervalOptimisation._eval_moi_expr(obj_expr, x) + end + else + # For LINEAR/QUADRATIC, NLPModels.obj works directly with intervals + if nvar == 1 + return x -> NLPModels.obj(nlp, [x]) + else + return x -> NLPModels.obj(nlp, x) + end + end +end + +end diff --git a/src/IntervalOptimisation.jl b/src/IntervalOptimisation.jl index 4fb2e93..ef77d8c 100644 --- a/src/IntervalOptimisation.jl +++ b/src/IntervalOptimisation.jl @@ -16,6 +16,7 @@ using .HeapedVectors using IntervalArithmetic include("optimise.jl") +include("nlp.jl") const minimize = minimise diff --git a/src/nlp.jl b/src/nlp.jl new file mode 100644 index 
0000000..e3dfed2 --- /dev/null +++ b/src/nlp.jl @@ -0,0 +1,156 @@ +import NLPModels +import SolverCore + +export IntervalOptimiser + +""" + IntervalOptimiser <: SolverCore.AbstractOptimizationSolver + +A global optimization solver based on the Moore-Skelboe interval branch-and-bound algorithm. +Can be used with NLPModelsJuMP to solve JuMP models. + +# Usage with JuMP and NLPModelsJuMP + +```julia +using JuMP, NLPModelsJuMP, IntervalOptimisation + +model = Model(NLPModelsJuMP.Optimizer) +set_attribute(model, "solver", IntervalOptimiser) +set_attribute(model, "tol", 1e-5) + +@variable(model, -10 <= x <= 10) +@objective(model, Min, (x - 3)^2 + 1) +optimize!(model) +``` + +The solver stores the interval enclosure of the global minimum and the list of minimizer +boxes in the `solver_specific` fields `:global_min_interval` and `:minimizers`. +""" +mutable struct IntervalOptimiser <: SolverCore.AbstractOptimizationSolver +end + +function IntervalOptimiser(nlp::NLPModels.AbstractNLPModel; kwargs...) + return IntervalOptimiser() +end + +""" + _eval_moi_expr(expr, x) + +Recursively evaluate a Julia `Expr` returned by `MOI.objective_expr` using the +variable values in `x`. Works with any numeric type including intervals. +""" +function _eval_moi_expr(expr::Expr, x) + if expr.head == :ref && expr.args[1] == :x + # x[MOI.VariableIndex(i)] → x[i] + vi = expr.args[2] + return x[vi.value] + elseif expr.head == :call + op = expr.args[1] + evaluated_args = [_eval_moi_expr(a, x) for a in @view(expr.args[2:end])] + if op isa Symbol + return _call_op(Val(op), evaluated_args) + else + return op(evaluated_args...) + end + else + error("Unsupported expression head: $(expr.head)") + end +end + +_eval_moi_expr(v::Number, _) = v + +# Arithmetic +_call_op(::Val{:+}, args) = length(args) == 1 ? +args[1] : +(args...) +_call_op(::Val{:-}, args) = length(args) == 1 ? -args[1] : args[1] - args[2] +_call_op(::Val{:*}, args) = *(args...) 
+_call_op(::Val{:/}, args) = args[1] / args[2] +_call_op(::Val{:^}, args) = args[1] ^ args[2] +# Common math functions +_call_op(::Val{:log}, args) = log(args[1]) +_call_op(::Val{:log2}, args) = log2(args[1]) +_call_op(::Val{:log10}, args) = log10(args[1]) +_call_op(::Val{:exp}, args) = exp(args[1]) +_call_op(::Val{:sqrt}, args) = sqrt(args[1]) +_call_op(::Val{:abs}, args) = abs(args[1]) +_call_op(::Val{:sin}, args) = sin(args[1]) +_call_op(::Val{:cos}, args) = cos(args[1]) +_call_op(::Val{:tan}, args) = tan(args[1]) +_call_op(::Val{:asin}, args) = asin(args[1]) +_call_op(::Val{:acos}, args) = acos(args[1]) +_call_op(::Val{:atan}, args) = atan(args...) +_call_op(::Val{:min}, args) = min(args...) +_call_op(::Val{:max}, args) = max(args...) +# Fallback: try to find the function in Base/Main +_call_op(::Val{op}, args) where {op} = getfield(Base, op)(args...) + +function SolverCore.solve!( + solver::IntervalOptimiser, + nlp::NLPModels.AbstractNLPModel, + stats::SolverCore.GenericExecutionStats; + tol::Real = 1e-3, + structure = HeapedVector, + kwargs..., +) + start_time = time() + + # Only box-constrained or unconstrained problems are supported + if nlp.meta.ncon > 0 + SolverCore.set_status!(stats, :exception) + SolverCore.set_time!(stats, time() - start_time) + error("IntervalOptimiser only supports unconstrained or box-constrained problems") + end + + nvar = nlp.meta.nvar + lvar = nlp.meta.lvar + uvar = nlp.meta.uvar + + # Check that all variables have finite bounds + for i in 1:nvar + if !isfinite(lvar[i]) || !isfinite(uvar[i]) + SolverCore.set_status!(stats, :exception) + SolverCore.set_time!(stats, time() - start_time) + error("IntervalOptimiser requires finite bounds on all variables") + end + end + + # Build the interval search box + X = [interval(lvar[i], uvar[i]) for i in 1:nvar] + + # Create the objective function for interval evaluation + f = _make_objective(nlp, nvar) + + # Solve + dom = nvar == 1 ? 
X[1] : X + if nlp.meta.minimize + global_opt, optimizer_boxes = minimise(f, dom; structure = structure, tol = tol) + else + global_opt, optimizer_boxes = maximise(f, dom; structure = structure, tol = tol) + end + + # Extract solution (midpoint of the first minimizer box) + if nvar == 1 + solution = [mid(optimizer_boxes[1])] + else + solution = mid.(optimizer_boxes[1]) + end + objective = mid(global_opt) + + SolverCore.set_status!(stats, :first_order) + SolverCore.set_solution!(stats, solution) + SolverCore.set_objective!(stats, objective) + SolverCore.set_iter!(stats, length(optimizer_boxes)) + SolverCore.set_time!(stats, time() - start_time) + SolverCore.set_solver_specific!(stats, :global_min_interval, global_opt) + SolverCore.set_solver_specific!(stats, :minimizers, optimizer_boxes) + + return stats +end + +# Default: use NLPModels.obj directly (works for linear/quadratic objectives) +function _make_objective(nlp::NLPModels.AbstractNLPModel, nvar) + if nvar == 1 + return x -> NLPModels.obj(nlp, [x]) + else + return x -> NLPModels.obj(nlp, x) + end +end diff --git a/test/Project.toml b/test/Project.toml new file mode 100644 index 0000000..2cee810 --- /dev/null +++ b/test/Project.toml @@ -0,0 +1,6 @@ +[deps] +IntervalArithmetic = "d1acc4aa-44c8-5952-acd4-ba5d80a2a253" +IntervalOptimisation = "c7c68f13-a4a2-5b9a-b424-07d005f8d9d2" +JuMP = "4076af6c-e467-56ae-b986-b466b2749572" +NLPModelsJuMP = "792afdf1-32c1-5681-94e0-d7bf7a5df49e" +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" diff --git a/test/nlp.jl b/test/nlp.jl new file mode 100644 index 0000000..55be11f --- /dev/null +++ b/test/nlp.jl @@ -0,0 +1,69 @@ +using IntervalOptimisation +using JuMP +using NLPModelsJuMP +using Test + +@testset "NLPModels / JuMP integration" begin + @testset "1D quadratic minimization" begin + model = Model(NLPModelsJuMP.Optimizer) + set_attribute(model, "solver", IntervalOptimiser) + set_attribute(model, "tol", 1e-5) + set_optimizer_attribute(model, "silent", true) + + 
@variable(model, -10 <= x <= 10) + @objective(model, Min, (x - 3)^2 + 1) + optimize!(model) + + @test termination_status(model) == LOCALLY_SOLVED + @test value(x) ≈ 3.0 atol = 1e-3 + @test objective_value(model) ≈ 1.0 atol = 1e-3 + end + + @testset "2D Rosenbrock-like" begin + model = Model(NLPModelsJuMP.Optimizer) + set_attribute(model, "solver", IntervalOptimiser) + set_attribute(model, "tol", 1e-3) + set_optimizer_attribute(model, "silent", true) + + @variable(model, -5 <= x <= 5) + @variable(model, -5 <= y <= 5) + @objective(model, Min, (x - 1)^2 + (y - 2)^2) + optimize!(model) + + @test termination_status(model) == LOCALLY_SOLVED + @test value(x) ≈ 1.0 atol = 1e-2 + @test value(y) ≈ 2.0 atol = 1e-2 + @test objective_value(model) ≈ 0.0 atol = 1e-2 + end + + @testset "Maximization" begin + model = Model(NLPModelsJuMP.Optimizer) + set_attribute(model, "solver", IntervalOptimiser) + set_attribute(model, "tol", 1e-5) + set_optimizer_attribute(model, "silent", true) + + @variable(model, -2 <= x <= 2) + @objective(model, Max, -(x - 1)^2 + 5) + optimize!(model) + + @test termination_status(model) == LOCALLY_SOLVED + @test value(x) ≈ 1.0 atol = 1e-3 + @test objective_value(model) ≈ 5.0 atol = 1e-3 + end + + @testset "Nonlinear objective" begin + model = Model(NLPModelsJuMP.Optimizer) + set_attribute(model, "solver", IntervalOptimiser) + set_attribute(model, "tol", 1e-4) + set_optimizer_attribute(model, "silent", true) + + @variable(model, 0.1 <= x <= 5) + @objective(model, Min, x - log(x)) + optimize!(model) + + # Minimum of x - log(x) is at x = 1, value = 1 + @test termination_status(model) == LOCALLY_SOLVED + @test value(x) ≈ 1.0 atol = 1e-2 + @test objective_value(model) ≈ 1.0 atol = 1e-2 + end +end diff --git a/test/runtests.jl b/test/runtests.jl index e50374e..630078a 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -3,4 +3,5 @@ using Test include("sorted_vector.jl") include("heaped_vector.jl") -include("optimise.jl") \ No newline at end of file 
+include("optimise.jl") +include("nlp.jl") \ No newline at end of file From 4690886ee53fd2ee7e4791a63ef3893b0aea1cb6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Thu, 16 Apr 2026 10:24:28 +0200 Subject: [PATCH 2/2] Use integer division in HeapedVectors.floatup! --- src/HeapedVectors.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/HeapedVectors.jl b/src/HeapedVectors.jl index f01a1ea..e7bee3a 100644 --- a/src/HeapedVectors.jl +++ b/src/HeapedVectors.jl @@ -31,7 +31,7 @@ function heaping(v, by) end function floatup!(ar, index, by) - par = convert(Int, floor(index/2)) + par = div(index, 2) if index <= 1 return ar end