
Objective Sensitivity #282


Open · wants to merge 14 commits into master
31 changes: 28 additions & 3 deletions src/NonLinearProgram/NonLinearProgram.jl
@@ -27,10 +27,12 @@ end
Base.@kwdef struct ForwCache
primal_Δs::Dict{MOI.VariableIndex,Float64} # Sensitivity for primal variables (indexed by VariableIndex)
dual_Δs::Vector{Float64} # Sensitivity for constraints and bounds (indexed by ConstraintIndex)
dual_p::Dict{MOI.ConstraintIndex,Float64} # Dual wrt parameters
end

Base.@kwdef struct ReverseCache
Δp::Vector{Float64} # Sensitivity for parameters
dual_p::Dict{MOI.ConstraintIndex,Float64} # Dual wrt parameters
end

# Define the form of the NLP
@@ -513,15 +515,21 @@ function DiffOpt.forward_differentiate!(model::Model; tol = 1e-6)
end

# Compute Jacobian
Δs = _compute_sensitivity(model; tol = tol)
Δs, df_dp = _compute_sensitivity(model; tol = tol)

# Extract primal and dual sensitivities
primal_Δs = Δs[1:length(model.cache.primal_vars), :] * Δp # Exclude slacks
dual_Δs = Δs[cache.index_duals, :] * Δp # Includes constraints and bounds

# Dual wrt parameters
varorder =
sort(collect(keys(form.var2ci)); by = x -> form.var2ci[x].value)
dual_p = [df_dp[form.var2param[var_idx].value] for var_idx in varorder]

model.forw_grad_cache = ForwCache(;
primal_Δs = Dict(model.cache.primal_vars .=> primal_Δs),
dual_Δs = dual_Δs,
dual_p = Dict([form.var2ci[var_idx] for var_idx in varorder] .=> dual_p),
)
end
return nothing
@@ -533,7 +541,7 @@ function DiffOpt.reverse_differentiate!(model::Model; tol = 1e-6)
form = model.model

# Compute Jacobian
Δs = _compute_sensitivity(model; tol = tol)
Δs, df_dp = _compute_sensitivity(model; tol = tol)
num_primal = length(cache.primal_vars)
# Fetch primal sensitivities
Δx = zeros(num_primal)
@@ -576,7 +584,12 @@ function DiffOpt.reverse_differentiate!(model::Model; tol = 1e-6)
sort(collect(keys(form.var2ci)); by = x -> form.var2ci[x].value)
Δp = [Δp[form.var2param[var_idx].value] for var_idx in varorder]

model.back_grad_cache = ReverseCache(; Δp = Δp)
# Dual wrt parameters
dual_p = [df_dp[form.var2param[var_idx].value] for var_idx in varorder]

model.back_grad_cache = ReverseCache(;
Δp = Δp,
dual_p = Dict([form.var2ci[var_idx] for var_idx in varorder] .=> dual_p),
)
end
return nothing
end
@@ -610,4 +623,16 @@ function MOI.get(
return MOI.Parameter{T}(model.back_grad_cache.Δp[ci.value])
end

function MOI.get(
model::Model,
::MOI.ConstraintDual,
ci::MOI.ConstraintIndex{MOI.VariableIndex,MOI.Parameter{T}},
) where {T}
if !isnothing(model.forw_grad_cache)
return model.forw_grad_cache.dual_p[ci]
else
return model.back_grad_cache.dual_p[ci]
end
end

end # module NonLinearProgram
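Taken together, these changes cache the sensitivity of the optimal objective with respect to each parameter and expose it as the parameter's `MOI.ConstraintDual`. A minimal end-to-end sketch of how that surfaces in JuMP (the toy model and names are illustrative, assuming the parameter-based DiffOpt API with `ForwardConstraintSet` and `ParameterRef`):

```julia
using JuMP, DiffOpt, Ipopt

model = Model(() -> DiffOpt.diff_optimizer(Ipopt.Optimizer))
@variable(model, p in MOI.Parameter(2.0))  # parameter, current value 2.0
@variable(model, x)
@constraint(model, con, x >= p)
@objective(model, Min, x^2)
optimize!(model)

# Seed a forward perturbation for p, then differentiate.
MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(p), MOI.Parameter(1.0))
DiffOpt.forward_differentiate!(model)

# New in this PR: the parameter's "dual" is df*/dp, the sensitivity of the
# optimal objective to the parameter. Here f*(p) = p^2, so df*/dp = 2p = 4.
dual(ParameterRef(p))
```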
16 changes: 14 additions & 2 deletions src/NonLinearProgram/nlp_utilities.jl
@@ -27,6 +27,13 @@ function _fill_off_diagonal(H::SparseMatrixCSC)
return ret
end

"""
_compute_gradient(model::Model)

Evaluate the gradient of the objective function at the current primal point `model.x`.
"""
function _compute_gradient(model::Model)
evaluator = model.cache.evaluator
grad = zeros(length(model.x))
MOI.eval_objective_gradient(evaluator, grad, model.x)
return grad
end

"""
_compute_optimal_hessian(evaluator::MOI.Nonlinear.Evaluator, rows::Vector{JuMP.ConstraintRef}, x::Vector{JuMP.VariableRef})

@@ -104,7 +111,7 @@ function _create_evaluator(form::Form)
backend,
MOI.VariableIndex.(1:form.num_variables),
)
MOI.initialize(evaluator, [:Hess, :Jac])
MOI.initialize(evaluator, [:Hess, :Jac, :Grad])
return evaluator
end

@@ -496,5 +503,10 @@ function _compute_sensitivity(model::Model; tol = 1e-6)
∂s[num_w+num_cons+1:num_w+num_cons+num_lower, :] *= _sense_multiplier
# Dual bounds upper
∂s[num_w+num_cons+num_lower+1:end, :] *= -_sense_multiplier
return ∂s

# Objective sensitivity with respect to parameters (chain rule):
# df/dp = (df/dx)' * (dx/dp)
primal_idx = [i.value for i in model.cache.primal_vars]
df_dx = _compute_gradient(model)[primal_idx]
df_dp = df_dx' * ∂s[1:num_vars, :]
return ∂s, df_dp
end
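The `df_dp` computed above is a plain chain-rule contraction: the objective gradient restricted to the primal variables times the primal block of the sensitivity matrix. A standalone numeric illustration with toy values (not DiffOpt internals):

```julia
# df*/dp = (df/dx)' * (dx*/dp), evaluated at the optimal point.
df_dx = [2.0, -1.0]          # objective gradient at x* (length num_vars)
dx_dp = [0.5 0.0; 1.0 0.25]  # primal sensitivities dx*/dp (num_vars × num_params)
df_dp = df_dx' * dx_dp       # 1 × num_params row of objective sensitivities
# df_dp == [0.0 -0.25]
```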
20 changes: 20 additions & 0 deletions src/moi_wrapper.jl
@@ -732,6 +732,26 @@ function MOI.get(
)
end

function MOI.get(
model::Optimizer,
attr::MOI.ConstraintDual,
ci::MOI.ConstraintIndex{MOI.VariableIndex,MOI.Parameter{T}},
) where {T}
# Prefer the forward cache; fall back to the reverse cache if unavailable.
try
return MOI.get(
_checked_diff(model, attr, :forward_differentiate!),
attr,
model.index_map[ci],
)
catch
return MOI.get(
_checked_diff(model, attr, :reverse_differentiate!),
attr,
model.index_map[ci],
)
end
end

function MOI.supports(
::Optimizer,
::ReverseVariablePrimal,
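This wrapper method tries the forward cache first and falls back to the reverse one, so the attribute is available after either differentiation pass. A hedged usage sketch (`model` and `ci`, the parameter's constraint index, are illustrative):

```julia
DiffOpt.reverse_differentiate!(model)  # or DiffOpt.forward_differentiate!(model)
λp = MOI.get(model, MOI.ConstraintDual(), ci)  # df*/dp for that parameter
```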
4 changes: 4 additions & 0 deletions test/nlp_program.jl
@@ -161,6 +161,8 @@ function test_analytical_simple(; P = 2) # Number of parameters
# Compute derivatives
DiffOpt.forward_differentiate!(m)

@test all(isapprox.(dual.(ParameterRef.(p)), dual.(con); atol = 1e-8))

# Test sensitivities
@test_throws ErrorException MOI.get(
m.moi_backend.optimizer.model.diff.model,
@@ -736,6 +738,8 @@ function test_ReverseConstraintDual()
# Compute derivatives
DiffOpt.reverse_differentiate!(m)

@test all(isapprox.(dual.(ParameterRef.(p)), dual.(con); atol = 1e-8))

# Test sensitivities ReverseConstraintSet
@test all(
isapprox(
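The new assertions check that parameter duals match the corresponding constraint duals. That equality is the envelope theorem at work when a parameter enters only the right-hand side of a single binding constraint. A standalone toy check (not this PR's test model):

```julia
# minimize x^2 subject to x >= p: for p > 0, x* = p and f*(p) = p^2.
# Stationarity 2x* - λ = 0 gives the constraint dual λ = 2p, and
# differentiating f*(p) = p^2 gives df*/dp = 2p, so the two coincide.
p = 1.5
λ = 2p        # constraint dual at the optimum
df_dp = 2p    # objective sensitivity to the parameter
@assert λ ≈ df_dp
```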