Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions src/ConicProgram/ConicProgram.jl
Original file line number Diff line number Diff line change
Expand Up @@ -451,14 +451,14 @@ function MOI.get(
end

# `ForwardObjectiveSensitivity` is not implemented by the conic backend.
# Throw `MOI.UnsupportedAttribute` (rather than a generic error) so callers
# can programmatically detect the missing capability and fall back.
function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity)
    return throw(
        MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()),
    )
end

# Setting `ReverseObjectiveSensitivity` is not implemented by the conic
# backend. Throw `MOI.UnsupportedAttribute` so generic fallbacks can catch
# it and dispatch to an alternative computation.
function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val)
    return throw(
        MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()),
    )
end

Expand Down
8 changes: 4 additions & 4 deletions src/QuadraticProgram/QuadraticProgram.jl
Original file line number Diff line number Diff line change
Expand Up @@ -502,14 +502,14 @@ function MOI.set(model::Model, ::LinearAlgebraSolver, linear_solver)
end

# `ForwardObjectiveSensitivity` is not implemented by the quadratic backend.
# Throw `MOI.UnsupportedAttribute` so callers can detect the missing
# capability and use the generic chain-rule fallback instead.
function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity)
    return throw(
        MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()),
    )
end

# Setting `ReverseObjectiveSensitivity` is not implemented by the quadratic
# backend. Throw `MOI.UnsupportedAttribute` so generic fallbacks can catch
# it and dispatch to an alternative computation.
function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val)
    return throw(
        MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()),
    )
end

Expand Down
18 changes: 18 additions & 0 deletions src/jump_wrapper.jl
Original file line number Diff line number Diff line change
Expand Up @@ -143,3 +143,21 @@ Get the value of a variable output sensitivity for forward mode.
function get_forward_variable(model::JuMP.Model, variable::JuMP.VariableRef)
    # Thin JuMP-level wrapper: query the MOI attribute for this variable.
    attr = ForwardVariablePrimal()
    return MOI.get(model, attr, variable)
end

"""
set_reverse_objective(model::JuMP.Model, value::Number)

Set the value of the objective input sensitivity for reverse mode.
"""
function set_reverse_objective(model::JuMP.Model, value::Number)
return MOI.set(model, ReverseObjectiveSensitivity(), value)
end

"""
get_forward_objective(model::JuMP.Model)

Get the value of the objective output sensitivity for forward mode.
"""
function get_forward_objective(model::JuMP.Model)
return MOI.get(model, ForwardObjectiveSensitivity())
end
92 changes: 90 additions & 2 deletions src/moi_wrapper.jl
Original file line number Diff line number Diff line change
Expand Up @@ -574,11 +574,58 @@ function reverse_differentiate!(model::Optimizer)
MOI.set(diff, ReverseConstraintDual(), model.index_map[vi], value)
end
if !iszero(model.input_cache.dobj)
MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj)
if !isempty(model.input_cache.dx)
error(
"Cannot compute the reverse differentiation with both solution sensitivities and objective sensitivities.",
)
end
try
MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj)
catch e
if e isa MOI.UnsupportedAttribute
_fallback_set_reverse_objective_sensitivity(
model,
model.input_cache.dobj,
)
else
rethrow(e)
end
end
end
return reverse_differentiate!(diff)
end

"""
    _fallback_set_reverse_objective_sensitivity(model::Optimizer, val)

Propagate a reverse-mode objective sensitivity seed `val` down to the
variable level when the differentiation backend does not support
`ReverseObjectiveSensitivity` natively.

By the chain rule, the seed placed on each variable `xᵢ` is
`(∂f/∂xᵢ) * val`, with the symbolic derivative evaluated at the current
primal solution. The result is stored via `ReverseVariablePrimal` on the
inner differentiable model.

Only objectives whose per-variable symbolic derivative is a constant or a
`MOI.ScalarAffineFunction{Float64}` (i.e. affine or quadratic objectives)
are supported; anything else raises an error.
"""
function _fallback_set_reverse_objective_sensitivity(model::Optimizer, val)
    diff = _diff(model)
    obj_type = MOI.get(model, MOI.ObjectiveFunctionType())
    obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}())
    for xi in MOI.Nonlinear.SymbolicAD.variables(obj_func)
        df_dx = MOI.Nonlinear.SymbolicAD.simplify!(
            MOI.Nonlinear.SymbolicAD.derivative(obj_func, xi),
        )
        if iszero(df_dx)
            continue
        end
        dd = 0.0
        if df_dx isa Number
            dd = df_dx * val
        elseif df_dx isa MOI.ScalarAffineFunction{Float64}
            # Evaluate the affine derivative at the current primal point.
            for term in df_dx.terms
                xj_val = MOI.get(model, MOI.VariablePrimal(), term.variable)
                dd += term.coefficient * xj_val * val
            end
            # Bug fix: include the constant part of the derivative (the
            # previous `dd += df_dx * val` attempted to add a function to a
            # Float64). This mirrors the forward-mode fallback, which adds
            # `df_dx.constant * dx_dp`.
            dd += df_dx.constant * val
        else
            # Derivatives beyond affine (i.e. genuinely nonlinear
            # objectives) are not handled by this fallback.
            error(
                "Cannot compute reverse objective sensitivity fallback: " *
                "unsupported derivative found.",
            )
        end
        MOI.set(diff, ReverseVariablePrimal(), model.index_map[xi], dd)
    end
    return
end

function _copy_forward_in_constraint(diff, index_map, con_map, constraints)
for (index, value) in constraints
MOI.set(
Expand Down Expand Up @@ -830,7 +877,48 @@ function MOI.get(
end

# Return the forward-mode objective sensitivity. If the active
# differentiation backend does not implement `ForwardObjectiveSensitivity`
# it throws `MOI.UnsupportedAttribute`, in which case we fall back to a
# chain-rule computation from the forward variable sensitivities.
function MOI.get(model::Optimizer, attr::ForwardObjectiveSensitivity)
    diff_model = _checked_diff(model, attr, :forward_differentiate!)
    val = 0.0
    try
        val = MOI.get(diff_model, attr)
    catch e
        if e isa MOI.UnsupportedAttribute
            # Backend lacks native support: compute Σᵢ (∂f/∂xᵢ) · dxᵢ/dp.
            val = _fallback_get_forward_objective_sensitivity(model)
        else
            rethrow(e)
        end
    end
    return val
end

"""
    _fallback_get_forward_objective_sensitivity(model::Optimizer)

Compute the forward-mode objective sensitivity by the chain rule,
`Σᵢ (∂f/∂xᵢ) · dxᵢ/dp`, for backends without native support for
`ForwardObjectiveSensitivity`.

Each symbolic derivative `∂f/∂xᵢ` must be a constant or a
`MOI.ScalarAffineFunction{Float64}` (affine or quadratic objectives);
otherwise an error is raised.
"""
function _fallback_get_forward_objective_sensitivity(model::Optimizer)
    total = 0.0
    obj_type = MOI.get(model, MOI.ObjectiveFunctionType())
    obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}())
    for xi in MOI.Nonlinear.SymbolicAD.variables(obj_func)
        grad = MOI.Nonlinear.SymbolicAD.simplify!(
            MOI.Nonlinear.SymbolicAD.derivative(obj_func, xi),
        )
        # Variables with an identically-zero derivative contribute nothing.
        iszero(grad) && continue
        dxi_dp = MOI.get(model, ForwardVariablePrimal(), xi)
        if grad isa Number
            total += grad * dxi_dp
        elseif grad isa MOI.ScalarAffineFunction{Float64}
            # Evaluate the affine derivative at the current primal point,
            # accumulating term-by-term in the same order as before.
            for term in grad.terms
                xj_val = MOI.get(model, MOI.VariablePrimal(), term.variable)
                total += term.coefficient * xj_val * dxi_dp
            end
            total += grad.constant * dxi_dp
        else
            error(
                "Cannot compute forward objective sensitivity fallback: " *
                "unsupported derivative found.",
            )
        end
    end
    return total
end

function MOI.supports(
Expand Down
141 changes: 141 additions & 0 deletions test/jump_wrapper.jl
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,147 @@ function runtests()
return
end

"""
    test_obj_simple()

Exercise the objective-sensitivity API on a minimal linear program:
`Min/Max 2*sign*x` subject to `x == 3*sign_p*p`, sweeping solver, objective
sign, parameter sign, optimization sense, and bridging.

Since the objective value is `f = 2*sign*x = 6*sign*sign_p*p`, we have
`df/dp = 6*sign*sign_p`, which the reverse and forward sensitivity checks
below both assert.
"""
function test_obj_simple()
    for (MODEL, SOLVER) in [
        (DiffOpt.diff_model, HiGHS.Optimizer),
        (DiffOpt.diff_model, SCS.Optimizer),
        (DiffOpt.diff_model, Ipopt.Optimizer),
    ],
        sign in [+1, -1],
        sign_p in [-1, +1],
        sense in [:Min, :Max],
        with_bridge_type in [Float64, nothing]

        # Skip the unbridged SCS combination — presumably SCS requires
        # bridges for this formulation; TODO confirm.
        if isnothing(with_bridge_type) && SOLVER === SCS.Optimizer
            continue
        end

        @testset "$(MODEL) with: $(SOLVER), bridge:$with_bridge_type, sign:$sign, sense: $sense, sign_p: $sign_p" begin
            model = MODEL(SOLVER; with_bridge_type)
            set_silent(model)

            p_val = 4.0
            @variable(model, x)
            @variable(model, p in Parameter(p_val))
            @constraint(model, con, x == 3 * sign_p * p)
            # Default to Min; overwrite with Max when requested (the
            # equality constraint pins x either way).
            @objective(model, Min, 2 * sign * x)
            if sense == :Max
                @objective(model, Max, 2 * sign * x)
            end
            optimize!(model)
            @test value(x) ≈ sign_p * 3 * p_val atol = ATOL rtol = RTOL

            # Reverse mode: seed the objective, expect the parameter
            # sensitivity direction_obj * df/dp.
            DiffOpt.empty_input_sensitivities!(model)
            direction_obj = 2.0
            DiffOpt.set_reverse_objective(model, direction_obj)
            DiffOpt.reverse_differentiate!(model)
            @test DiffOpt.get_reverse_parameter(model, p) ≈
                  sign_p * sign * 6 * direction_obj atol = ATOL rtol = RTOL

            # Forward mode: perturb the parameter, expect the objective
            # sensitivity direction_p * df/dp.
            DiffOpt.empty_input_sensitivities!(model)
            direction_p = 3.0
            DiffOpt.set_forward_parameter(model, p, direction_p)
            DiffOpt.forward_differentiate!(model)
            @test DiffOpt.get_forward_objective(model) ≈
                  sign_p * sign * 6 * direction_p atol = ATOL rtol = RTOL
        end
    end

    return
end

"""
    test_obj()

Exercise objective sensitivities across every differentiable-model
constructor and solver combination, with a parameterized constraint
`pc * x {>=,<=,==} 3 * p`.

At the solution `x = 3p/pc`, the objective is
`f = dir*obj_coef*sign*x = dir*obj_coef*sign*3p/pc`, so
`df/dp = dir*sign*obj_coef*3/pc` and `df/dpc = -dir*sign*obj_coef*3p/pc²`,
which the reverse- and forward-mode assertions below check.
"""
function test_obj()
    for (MODEL, SOLVER) in [
        (DiffOpt.diff_model, HiGHS.Optimizer),
        (DiffOpt.diff_model, SCS.Optimizer),
        (DiffOpt.diff_model, Ipopt.Optimizer),
        (DiffOpt.quadratic_diff_model, HiGHS.Optimizer),
        (DiffOpt.quadratic_diff_model, SCS.Optimizer),
        (DiffOpt.quadratic_diff_model, Ipopt.Optimizer),
        (DiffOpt.conic_diff_model, HiGHS.Optimizer),
        (DiffOpt.conic_diff_model, SCS.Optimizer),
        (DiffOpt.conic_diff_model, Ipopt.Optimizer),
        (DiffOpt.nonlinear_diff_model, HiGHS.Optimizer),
        (DiffOpt.nonlinear_diff_model, SCS.Optimizer),
        (DiffOpt.nonlinear_diff_model, Ipopt.Optimizer),
    ],
        ineq in [true, false],
        _min in [true, false],
        flip in [true, false],
        with_bridge_type in [Float64, nothing]

        # Skip the unbridged SCS combination — presumably SCS requires
        # bridges for this formulation; TODO confirm.
        if isnothing(with_bridge_type) && SOLVER === SCS.Optimizer
            continue
        end

        @testset "$(MODEL) with: $(SOLVER), $(ineq ? "ineqs" : "eqs"), $(_min ? "Min" : "Max"), $(flip ? "geq" : "leq") bridge:$with_bridge_type" begin
            model = MODEL(SOLVER; with_bridge_type)
            set_silent(model)

            p_val = 4.0
            pc_val = 2.0
            @variable(model, x)
            @variable(model, p in Parameter(p_val))
            @variable(model, pc in Parameter(pc_val))
            # Constraint direction is flipped together with the objective
            # sign below so the inequality is always active at x = 3p/pc.
            if ineq
                if !flip
                    cons = @constraint(model, con, pc * x >= 3 * p)
                else
                    cons = @constraint(model, con, pc * x <= 3 * p)
                end
            else
                cons = @constraint(model, con, pc * x == 3 * p)
            end

            # Re-solve and re-differentiate with two different objective
            # coefficients to exercise objective updates on a built model.
            for obj_coef in [2, 5]
                # `sign` tracks the constraint flip; `dir` tracks the sense,
                # keeping the optimum pressed against the constraint.
                sign = flip ? -1 : 1
                dir = _min ? 1 : -1
                if _min
                    @objective(model, Min, dir * obj_coef * x * sign)
                else
                    @objective(model, Max, dir * obj_coef * x * sign)
                end

                optimize!(model)
                @test value(x) ≈ 3 * p_val / pc_val atol = ATOL rtol = RTOL

                # Reverse mode: seed the objective, check both parameter
                # sensitivities (df/dp and df/dpc scaled by the seed).
                DiffOpt.empty_input_sensitivities!(model)
                direction_obj = 2.0
                DiffOpt.set_reverse_objective(model, direction_obj)
                DiffOpt.reverse_differentiate!(model)
                @test DiffOpt.get_reverse_parameter(model, p) ≈
                      dir * sign * obj_coef * direction_obj * 3 / pc_val atol =
                    ATOL rtol = RTOL
                @test DiffOpt.get_reverse_parameter(model, pc) ≈
                      - dir * sign * obj_coef * direction_obj * 3 * p_val /
                      (pc_val^2) atol = ATOL rtol = RTOL

                # Forward mode w.r.t. p: expect direction_p * df/dp.
                DiffOpt.empty_input_sensitivities!(model)
                direction_p = 3.0
                DiffOpt.set_forward_parameter(model, p, direction_p)
                DiffOpt.forward_differentiate!(model)
                @test DiffOpt.get_forward_objective(model) ≈
                      dir * sign * obj_coef * direction_p * 3 / pc_val atol =
                    ATOL rtol = RTOL

                # stop differentiating with respect to p
                DiffOpt.empty_input_sensitivities!(model)
                # differentiate w.r.t. pc
                direction_pc = 10.0
                DiffOpt.set_forward_parameter(model, pc, direction_pc)
                DiffOpt.forward_differentiate!(model)
                @test DiffOpt.get_forward_objective(model) ≈
                      - dir * sign * obj_coef * direction_pc * 3 * p_val /
                      pc_val^2 atol = ATOL rtol = RTOL
            end
        end
    end

    return
end

# TODO test quadratic obj

function test_jump_api()
for (MODEL, SOLVER) in [
(DiffOpt.diff_model, HiGHS.Optimizer),
Expand Down
Loading