Skip to content

Commit bf0d632

Browse files
committed
Add NLPModels and JuMP interfaces
1 parent eb14c8f commit bf0d632

8 files changed

Lines changed: 307 additions & 2 deletions

File tree

Project.toml

Lines changed: 16 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,28 @@ version = "0.5.1"
[deps]
IntervalArithmetic = "d1acc4aa-44c8-5952-acd4-ba5d80a2a253"
NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6"
SolverCore = "ff4d7338-4cf1-434d-91df-b86cb86fb843"

[weakdeps]
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
NLPModelsJuMP = "792afdf1-32c1-5681-94e0-d7bf7a5df49e"

[extensions]
IntervalOptimisationNLPModelsJuMPExt = ["NLPModelsJuMP", "MathOptInterface"]

[compat]
IntervalArithmetic = "0.22 - 0.23, 1"
MathOptInterface = "1"
NLPModels = "0.21"
NLPModelsJuMP = "0.13"
SolverCore = "0.3"
julia = "1.9"

[extras]
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
NLPModelsJuMP = "792afdf1-32c1-5681-94e0-d7bf7a5df49e"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[targets]
test = ["JuMP", "NLPModelsJuMP", "Test"]

README.md

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,37 @@ julia> minimisers
6464
[[-4.74512e-09, 4.41017e-09]_com, [-4.74512e-09, 4.41017e-09]_com]
6565
```
6666

#### JuMP

IntervalOptimisation can be used as a solver through [JuMP](https://github.com/jump-dev/JuMP.jl) via
[NLPModelsJuMP](https://github.com/JuliaSmoothOptimizers/NLPModelsJuMP.jl):

```julia
using JuMP, NLPModelsJuMP, IntervalOptimisation

model = Model(NLPModelsJuMP.Optimizer)
set_attribute(model, "solver", IntervalOptimiser)
set_attribute(model, "tol", 1e-5)

@variable(model, -10 <= x <= 10)
@objective(model, Min, (x - 3)^2 + 1)
optimize!(model)

julia> value(x)
3.0000152587890625

julia> objective_value(model)
1.0000000002328306
```

The full interval enclosure of the global minimum and the minimizer boxes are available
through solver-specific attributes; the underlying solver status can be queried with:

```julia
julia> get_attribute(model, RawStatusString())  # underlying solver status
"first-order stationary"
```
6798
## References
6899

69100
- *Validated Numerics: A Short Introduction to Rigorous Computations*, W. Tucker, Princeton University Press (2010)
Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
module IntervalOptimisationNLPModelsJuMPExt

using IntervalOptimisation
using NLPModelsJuMP: MathOptNLPModel
import MathOptInterface as MOI
import NLPModels

"""
    IntervalOptimisation._make_objective(nlp::MathOptNLPModel, nvar)

Return a callable objective suitable for interval evaluation of `nlp`.

For `"NONLINEAR"` objectives the MOI expression graph is retrieved with
`MOI.objective_expr` and evaluated recursively via
`IntervalOptimisation._eval_moi_expr`, so interval arguments propagate through
every operation. Linear and quadratic objectives are evaluated through
`NLPModels.obj`, which accepts intervals directly.

When `nvar == 1` the returned function takes a scalar argument (wrapped into a
one-element vector internally), matching the scalar search domain used by
`minimise`/`maximise`.
"""
function IntervalOptimisation._make_objective(nlp::MathOptNLPModel, nvar)
    if nlp.obj.type == "NONLINEAR"
        obj_expr = MOI.objective_expr(nlp.eval)
        if nvar == 1
            return x -> IntervalOptimisation._eval_moi_expr(obj_expr, [x])
        else
            return x -> IntervalOptimisation._eval_moi_expr(obj_expr, x)
        end
    else
        # For LINEAR/QUADRATIC objectives, NLPModels.obj works directly with intervals.
        if nvar == 1
            return x -> NLPModels.obj(nlp, [x])
        else
            return x -> NLPModels.obj(nlp, x)
        end
    end
end

end # module IntervalOptimisationNLPModelsJuMPExt

src/IntervalOptimisation.jl

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ using .HeapedVectors
using IntervalArithmetic

include("optimise.jl")
# NLPModels/SolverCore interface (JuMP support lives in the package extension).
include("nlp.jl")

const minimize = minimise

src/nlp.jl

Lines changed: 156 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,156 @@
import NLPModels
import SolverCore

export IntervalOptimiser

"""
    IntervalOptimiser <: SolverCore.AbstractOptimizationSolver

A global optimization solver based on the Moore-Skelboe interval branch-and-bound
algorithm. Can be used with NLPModelsJuMP to solve JuMP models.

# Usage with JuMP and NLPModelsJuMP

```julia
using JuMP, NLPModelsJuMP, IntervalOptimisation

model = Model(NLPModelsJuMP.Optimizer)
set_attribute(model, "solver", IntervalOptimiser)
set_attribute(model, "tol", 1e-5)

@variable(model, -10 <= x <= 10)
@objective(model, Min, (x - 3)^2 + 1)
optimize!(model)
```

The solver stores the interval enclosure of the global minimum and the list of
minimizer boxes in the `solver_specific` fields `:global_min_interval` and
`:minimizers`.
"""
mutable struct IntervalOptimiser <: SolverCore.AbstractOptimizationSolver
end

# Constructor with the signature expected by the SolverCore/NLPModelsJuMP
# machinery; the solver itself is stateless, so the model and any keyword
# arguments are ignored.
function IntervalOptimiser(nlp::NLPModels.AbstractNLPModel; kwargs...)
    return IntervalOptimiser()
end
"""
    _eval_moi_expr(expr, x)

Recursively evaluate a Julia `Expr` returned by `MOI.objective_expr` using the
variable values in `x`. Works with any numeric type, including intervals.
"""
function _eval_moi_expr(expr::Expr, x)
    if expr.head == :ref && expr.args[1] == :x
        # Variable reference: `x[MOI.VariableIndex(i)]` → `x[i]`.
        vi = expr.args[2]
        return x[vi.value]
    elseif expr.head == :call
        op = expr.args[1]
        evaluated_args = [_eval_moi_expr(a, x) for a in @view(expr.args[2:end])]
        if op isa Symbol
            return _call_op(Val(op), evaluated_args)
        else
            # The operator may be an actual function object; call it directly.
            return op(evaluated_args...)
        end
    else
        error("Unsupported expression head: $(expr.head)")
    end
end

# Leaves of the expression tree are plain numeric constants.
_eval_moi_expr(v::Number, _) = v

# Arithmetic (unary `+`/`-` handled explicitly).
_call_op(::Val{:+}, args) = length(args) == 1 ? +args[1] : +(args...)
_call_op(::Val{:-}, args) = length(args) == 1 ? -args[1] : args[1] - args[2]
_call_op(::Val{:*}, args) = *(args...)
_call_op(::Val{:/}, args) = args[1] / args[2]
_call_op(::Val{:^}, args) = args[1]^args[2]
# Common math functions
_call_op(::Val{:log}, args) = log(args[1])
_call_op(::Val{:log2}, args) = log2(args[1])
_call_op(::Val{:log10}, args) = log10(args[1])
_call_op(::Val{:exp}, args) = exp(args[1])
_call_op(::Val{:sqrt}, args) = sqrt(args[1])
_call_op(::Val{:abs}, args) = abs(args[1])
_call_op(::Val{:sin}, args) = sin(args[1])
_call_op(::Val{:cos}, args) = cos(args[1])
_call_op(::Val{:tan}, args) = tan(args[1])
_call_op(::Val{:asin}, args) = asin(args[1])
_call_op(::Val{:acos}, args) = acos(args[1])
_call_op(::Val{:atan}, args) = atan(args...)   # 1- or 2-argument atan
_call_op(::Val{:min}, args) = min(args...)
_call_op(::Val{:max}, args) = max(args...)
# Fallback: try to find the function in Base (throws if it does not exist).
_call_op(::Val{op}, args) where {op} = getfield(Base, op)(args...)
"""
    SolverCore.solve!(solver::IntervalOptimiser, nlp, stats; tol = 1e-3, structure = HeapedVector)

Run the interval branch-and-bound algorithm on `nlp` and fill `stats`.

Only unconstrained or box-constrained problems with finite variable bounds are
supported. The point solution is the midpoint of the first minimizer box; the
full interval enclosure of the optimum and the list of minimizer boxes are
stored in `stats.solver_specific[:global_min_interval]` and
`stats.solver_specific[:minimizers]`.
"""
function SolverCore.solve!(
    solver::IntervalOptimiser,
    nlp::NLPModels.AbstractNLPModel,
    stats::SolverCore.GenericExecutionStats;
    tol::Real = 1e-3,
    structure = HeapedVector,
    kwargs...,
)
    start_time = time()

    # Only box-constrained or unconstrained problems are supported.
    if nlp.meta.ncon > 0
        SolverCore.set_status!(stats, :exception)
        SolverCore.set_time!(stats, time() - start_time)
        error("IntervalOptimiser only supports unconstrained or box-constrained problems")
    end

    nvar = nlp.meta.nvar
    lvar = nlp.meta.lvar
    uvar = nlp.meta.uvar

    # Branch-and-bound needs a bounded starting box, so every variable must
    # have finite lower and upper bounds.
    for i in 1:nvar
        if !isfinite(lvar[i]) || !isfinite(uvar[i])
            SolverCore.set_status!(stats, :exception)
            SolverCore.set_time!(stats, time() - start_time)
            error("IntervalOptimiser requires finite bounds on all variables")
        end
    end

    # Build the interval search box from the variable bounds.
    X = [interval(lvar[i], uvar[i]) for i in 1:nvar]

    # Create the objective function for interval evaluation (dispatches to the
    # NLPModelsJuMP extension method for MathOptNLPModel when loaded).
    f = _make_objective(nlp, nvar)

    # minimise/maximise take a scalar interval in 1D and a vector of intervals otherwise.
    dom = nvar == 1 ? X[1] : X
    if nlp.meta.minimize
        global_opt, optimizer_boxes = minimise(f, dom; structure = structure, tol = tol)
    else
        global_opt, optimizer_boxes = maximise(f, dom; structure = structure, tol = tol)
    end

    # Extract a point solution: the midpoint of the first minimizer box.
    if nvar == 1
        solution = [mid(optimizer_boxes[1])]
    else
        solution = mid.(optimizer_boxes[1])
    end
    objective = mid(global_opt)

    SolverCore.set_status!(stats, :first_order)
    SolverCore.set_solution!(stats, solution)
    SolverCore.set_objective!(stats, objective)
    # NOTE(review): the iteration count is reported as the number of remaining
    # minimizer boxes — confirm this is the intended metric.
    SolverCore.set_iter!(stats, length(optimizer_boxes))
    SolverCore.set_time!(stats, time() - start_time)
    SolverCore.set_solver_specific!(stats, :global_min_interval, global_opt)
    SolverCore.set_solver_specific!(stats, :minimizers, optimizer_boxes)

    return stats
end
# Default objective builder: evaluate through NLPModels.obj, which works for
# linear/quadratic objectives with interval arguments. Nonlinear JuMP models
# are handled by the specialized method in the NLPModelsJuMP extension.
function _make_objective(nlp::NLPModels.AbstractNLPModel, nvar)
    if nvar == 1
        # minimise/maximise pass a scalar interval in 1D; wrap it in a vector
        # because NLPModels.obj expects an AbstractVector.
        return x -> NLPModels.obj(nlp, [x])
    else
        return x -> NLPModels.obj(nlp, x)
    end
end

test/Project.toml

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
[deps]
IntervalArithmetic = "d1acc4aa-44c8-5952-acd4-ba5d80a2a253"
IntervalOptimisation = "c7c68f13-a4a2-5b9a-b424-07d005f8d9d2"
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
NLPModelsJuMP = "792afdf1-32c1-5681-94e0-d7bf7a5df49e"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

test/nlp.jl

Lines changed: 69 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,69 @@
using IntervalOptimisation
using JuMP
using NLPModelsJuMP
using Test

@testset "NLPModels / JuMP integration" begin
    @testset "1D quadratic minimization" begin
        model = Model(NLPModelsJuMP.Optimizer)
        set_attribute(model, "solver", IntervalOptimiser)
        set_attribute(model, "tol", 1e-5)
        set_optimizer_attribute(model, "silent", true)

        @variable(model, -10 <= x <= 10)
        @objective(model, Min, (x - 3)^2 + 1)
        optimize!(model)

        @test termination_status(model) == LOCALLY_SOLVED
        @test value(x) ≈ 3.0 atol = 1e-3
        @test objective_value(model) ≈ 1.0 atol = 1e-3
    end

    @testset "2D Rosenbrock-like" begin
        model = Model(NLPModelsJuMP.Optimizer)
        set_attribute(model, "solver", IntervalOptimiser)
        set_attribute(model, "tol", 1e-3)
        set_optimizer_attribute(model, "silent", true)

        @variable(model, -5 <= x <= 5)
        @variable(model, -5 <= y <= 5)
        @objective(model, Min, (x - 1)^2 + (y - 2)^2)
        optimize!(model)

        @test termination_status(model) == LOCALLY_SOLVED
        @test value(x) ≈ 1.0 atol = 1e-2
        @test value(y) ≈ 2.0 atol = 1e-2
        @test objective_value(model) ≈ 0.0 atol = 1e-2
    end

    @testset "Maximization" begin
        model = Model(NLPModelsJuMP.Optimizer)
        set_attribute(model, "solver", IntervalOptimiser)
        set_attribute(model, "tol", 1e-5)
        set_optimizer_attribute(model, "silent", true)

        @variable(model, -2 <= x <= 2)
        @objective(model, Max, -(x - 1)^2 + 5)
        optimize!(model)

        @test termination_status(model) == LOCALLY_SOLVED
        @test value(x) ≈ 1.0 atol = 1e-3
        @test objective_value(model) ≈ 5.0 atol = 1e-3
    end

    @testset "Nonlinear objective" begin
        model = Model(NLPModelsJuMP.Optimizer)
        set_attribute(model, "solver", IntervalOptimiser)
        set_attribute(model, "tol", 1e-4)
        set_optimizer_attribute(model, "silent", true)

        @variable(model, 0.1 <= x <= 5)
        @objective(model, Min, x - log(x))
        optimize!(model)

        # Minimum of x - log(x) is at x = 1, value = 1
        @test termination_status(model) == LOCALLY_SOLVED
        @test value(x) ≈ 1.0 atol = 1e-2
        @test objective_value(model) ≈ 1.0 atol = 1e-2
    end
end

test/runtests.jl

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,4 +3,5 @@ using Test
include("sorted_vector.jl")
include("heaped_vector.jl")
include("optimise.jl")
include("nlp.jl")

0 commit comments

Comments
 (0)