From a6ae41e2bc5aa40edcdbe073f07d5195ad6fcb78 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 2 Jun 2025 12:18:48 +0530 Subject: [PATCH 01/17] Create ode.md Docs for first draft of OptimizationODE.jl --- docs/src/optimization_packages/ode.md | 59 +++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 docs/src/optimization_packages/ode.md diff --git a/docs/src/optimization_packages/ode.md b/docs/src/optimization_packages/ode.md new file mode 100644 index 000000000..15e1746ed --- /dev/null +++ b/docs/src/optimization_packages/ode.md @@ -0,0 +1,59 @@ +# OptimizationODE.jl + +**OptimizationODE.jl** provides ODE-based optimization methods as a solver plugin for [SciML's Optimization.jl](https://github.com/SciML/Optimization.jl). It wraps various ODE solvers to perform gradient-based optimization using continuous-time dynamics. + +## Installation + +```julia +using Pkg +Pkg.add(url="OptimizationODE.jl") +``` + +## Usage + +```julia +using OptimizationODE, Optimization, ADTypes, SciMLBase + +function f(x, p) + return sum(abs2, x) +end + +function g!(g, x, p) + @. g = 2 * x +end + +x0 = [2.0, -3.0] +p = [] + +f_manual = OptimizationFunction(f, SciMLBase.NoAD(); grad = g!) +prob_manual = OptimizationProblem(f_manual, x0) + +opt = ODEGradientDescent(dt=0.01) +sol = solve(prob_manual, opt; maxiters=50_000) + +@show sol.u +@show sol.objective +``` + +## Available Optimizers + +* `ODEGradientDescent(dt=...)` — uses the explicit Euler method. +* `RKChebyshevDescent()` — uses the ROCK2 method. +* `RKAccelerated()` — uses the Tsit5 Runge-Kutta method. +* `HighOrderDescent()` — uses the Vern7 high-order Runge-Kutta method. + +## Interface Details + +All optimizers require gradient information (either via automatic differentiation or manually provided `grad!`). + +### Keyword Arguments + +* `dt` — time step size (only for `ODEGradientDescent`). +* `maxiters` — maximum number of ODE steps. +* `callback` — function to observe progress. +* `progress=true` — enables live progress display. + +## Development + +Please refer to the `runtests.jl` file for a complete set of tests that demonstrate how each optimizer is used. + From 99d06eff4e7f4718c38f093091974026da846a56 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 2 Jun 2025 16:41:14 +0530 Subject: [PATCH 02/17] Update docs/src/optimization_packages/ode.md Co-authored-by: Christopher Rackauckas --- docs/src/optimization_packages/ode.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/docs/src/optimization_packages/ode.md b/docs/src/optimization_packages/ode.md index 15e1746ed..152bb5e40 100644 --- a/docs/src/optimization_packages/ode.md +++ b/docs/src/optimization_packages/ode.md @@ -53,7 +53,3 @@ All optimizers require gradient information (either via automatic differentiatio * `callback` — function to observe progress. * `progress=true` — enables live progress display. -## Development - -Please refer to the `runtests.jl` file for a complete set of tests that demonstrate how each optimizer is used. - From b7e29271e6023ddb58877e3c77e022a7ee2ed88c Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 2 Jun 2025 16:49:29 +0530 Subject: [PATCH 03/17] Update ode.md Method descriptions for the solvers added. 
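The ode.md changes below also document the generic `ODEOptimizer(solver; dt=nothing)` constructor. As a reference for reviewers, a minimal sketch of that path, assuming any explicit OrdinaryDiffEq integrator can be wrapped (here `BS3()`, an arbitrary choice rather than one of the exported presets):

```julia
using OptimizationODE, OrdinaryDiffEq, Optimization, SciMLBase

f(x, p) = sum(abs2, x)
g!(G, x, p) = (G .= 2 .* x)

optf = OptimizationFunction(f, SciMLBase.NoAD(); grad = g!)
prob = OptimizationProblem(optf, [2.0, -3.0])

# Wrap an arbitrary explicit ODE solver in the generic constructor instead of
# using one of the packaged presets such as RKAccelerated().
opt = ODEOptimizer(BS3())
sol = solve(prob, opt; maxiters = 100_000)
```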
--- docs/src/optimization_packages/ode.md | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/docs/src/optimization_packages/ode.md b/docs/src/optimization_packages/ode.md index 152bb5e40..f80568483 100644 --- a/docs/src/optimization_packages/ode.md +++ b/docs/src/optimization_packages/ode.md @@ -37,14 +37,21 @@ sol = solve(prob_manual, opt; maxiters=50_000) ## Available Optimizers -* `ODEGradientDescent(dt=...)` — uses the explicit Euler method. -* `RKChebyshevDescent()` — uses the ROCK2 method. -* `RKAccelerated()` — uses the Tsit5 Runge-Kutta method. -* `HighOrderDescent()` — uses the Vern7 high-order Runge-Kutta method. +All provided optimizers are **gradient-based local optimizers** that solve optimization problems by integrating gradient-based ODEs to convergence: + +* `ODEGradientDescent(dt=...)` — performs basic gradient descent using the explicit Euler method. This is a simple and efficient method suitable for small-scale or well-conditioned problems. + +* `RKChebyshevDescent()` — uses the ROCK2 solver, a stabilized explicit Runge-Kutta method suitable for stiff problems. It allows larger step sizes while maintaining stability. + +* `RKAccelerated()` — leverages the Tsit5 method, a 5th-order Runge-Kutta solver that achieves faster convergence for smooth problems by improving integration accuracy. + +* `HighOrderDescent()` — applies Vern7, a high-order (7th-order) explicit Runge-Kutta method for even more accurate integration. This can be beneficial for problems requiring high precision. + +You can also define a custom optimizer using the generic `ODEOptimizer(solver; dt=nothing)` constructor by supplying any ODE solver supported by [OrdinaryDiffEq.jl](https://docs.sciml.ai/DiffEqDocs/stable/solvers/ode_solve/). ## Interface Details -All optimizers require gradient information (either via automatic differentiation or manually provided `grad!`). +All optimizers require gradient information (either via automatic differentiation or manually provided `grad!`). The optimization is performed by integrating the ODE defined by the negative gradient until a steady state is reached. ### Keyword Arguments From 36be3e80d4cc0bfb980dde0eb85d48ef4e06905f Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 23 Jun 2025 01:25:43 +0530 Subject: [PATCH 04/17] Update ode.md Updated docs. --- docs/src/optimization_packages/ode.md | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/docs/src/optimization_packages/ode.md b/docs/src/optimization_packages/ode.md index f80568483..d36ba8b84 100644 --- a/docs/src/optimization_packages/ode.md +++ b/docs/src/optimization_packages/ode.md @@ -35,7 +35,7 @@ sol = solve(prob_manual, opt; maxiters=50_000) @show sol.objective ``` -## Available Optimizers +## Local-gradient based Optimizers All provided optimizers are **gradient-based local optimizers** that solve optimization problems by integrating gradient-based ODEs to convergence: @@ -53,10 +53,3 @@ You can also define a custom optimizer using the generic `ODEOptimizer(solver; d All optimizers require gradient information (either via automatic differentiation or manually provided `grad!`). The optimization is performed by integrating the ODE defined by the negative gradient until a steady state is reached. -### Keyword Arguments - -* `dt` — time step size (only for `ODEGradientDescent`). -* `maxiters` — maximum number of ODE steps. -* `callback` — function to observe progress. 
-* `progress=true` — enables live progress display. - From 1cd41ca1b33d9d069b63c2de0fa2157238bde8eb Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 23 Jun 2025 01:30:28 +0530 Subject: [PATCH 05/17] Update ode.md updated docs --- docs/src/optimization_packages/ode.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/optimization_packages/ode.md b/docs/src/optimization_packages/ode.md index d36ba8b84..0144ac301 100644 --- a/docs/src/optimization_packages/ode.md +++ b/docs/src/optimization_packages/ode.md @@ -35,7 +35,7 @@ sol = solve(prob_manual, opt; maxiters=50_000) @show sol.objective ``` -## Local-gradient based Optimizers +## Local Gradient-based Optimizers All provided optimizers are **gradient-based local optimizers** that solve optimization problems by integrating gradient-based ODEs to convergence: From 73851942ed9447724d5d7dad4906aef530acef88 Mon Sep 17 00:00:00 2001 From: ParasPuneetSingh Date: Tue, 24 Jun 2025 02:01:08 +0530 Subject: [PATCH 06/17] DAE optimizers added to OptimizationODE --- lib/OptimizationODE/src/OptimizationODE.jl | 345 ++++++++++++++++++--- lib/OptimizationODE/test/runtests.jl | 300 ++++++++++++++++-- 2 files changed, 576 insertions(+), 69 deletions(-) diff --git a/lib/OptimizationODE/src/OptimizationODE.jl b/lib/OptimizationODE/src/OptimizationODE.jl index ffacdc20a..36a8b02d2 100644 --- a/lib/OptimizationODE/src/OptimizationODE.jl +++ b/lib/OptimizationODE/src/OptimizationODE.jl @@ -2,58 +2,189 @@ module OptimizationODE using Reexport @reexport using Optimization, SciMLBase -using OrdinaryDiffEq, SteadyStateDiffEq +using LinearAlgebra, ForwardDiff + +using NonlinearSolve +using OrdinaryDiffEq, DifferentialEquations, SteadyStateDiffEq, Sundials export ODEOptimizer, ODEGradientDescent, RKChebyshevDescent, RKAccelerated, HighOrderDescent +export DAEOptimizer, DAEMassMatrix, DAEIndexing + +struct ODEOptimizer{T} + solver::T +end + +ODEGradientDescent() = ODEOptimizer(Euler()) +RKChebyshevDescent() = ODEOptimizer(ROCK2()) +RKAccelerated() = ODEOptimizer(Tsit5()) +HighOrderDescent() = ODEOptimizer(Vern7()) -struct ODEOptimizer{T, T2} +struct DAEOptimizer{T} solver::T - dt::T2 end -ODEOptimizer(solver ; dt=nothing) = ODEOptimizer(solver, dt) -# Solver Constructors (users call these) -ODEGradientDescent(; dt) = ODEOptimizer(Euler(); dt) -RKChebyshevDescent() = ODEOptimizer(ROCK2()) -RKAccelerated() = ODEOptimizer(Tsit5()) -HighOrderDescent() = ODEOptimizer(Vern7()) +DAEMassMatrix() = DAEOptimizer(Rodas5()) +DAEIndexing() = DAEOptimizer(IDA()) -SciMLBase.requiresbounds(::ODEOptimizer) = false -SciMLBase.allowsbounds(::ODEOptimizer) = false -SciMLBase.allowscallback(::ODEOptimizer) = true +SciMLBase.requiresbounds(::ODEOptimizer) = false +SciMLBase.allowsbounds(::ODEOptimizer) = false +SciMLBase.allowscallback(::ODEOptimizer) = true SciMLBase.supports_opt_cache_interface(::ODEOptimizer) = true -SciMLBase.requiresgradient(::ODEOptimizer) = true -SciMLBase.requireshessian(::ODEOptimizer) = false -SciMLBase.requiresconsjac(::ODEOptimizer) = false -SciMLBase.requiresconshess(::ODEOptimizer) = false +SciMLBase.requiresgradient(::ODEOptimizer) = true +SciMLBase.requireshessian(::ODEOptimizer) = false +SciMLBase.requiresconsjac(::ODEOptimizer) = false +SciMLBase.requiresconshess(::ODEOptimizer) = false + + +SciMLBase.requiresbounds(::DAEOptimizer) = false +SciMLBase.allowsbounds(::DAEOptimizer) = false +SciMLBase.allowsconstraints(::DAEOptimizer) = true 
+SciMLBase.allowscallback(::DAEOptimizer) = true +SciMLBase.supports_opt_cache_interface(::DAEOptimizer) = true +SciMLBase.requiresgradient(::DAEOptimizer) = true +SciMLBase.requireshessian(::DAEOptimizer) = false +SciMLBase.requiresconsjac(::DAEOptimizer) = true +SciMLBase.requiresconshess(::DAEOptimizer) = false function SciMLBase.__init(prob::OptimizationProblem, opt::ODEOptimizer; - callback=Optimization.DEFAULT_CALLBACK, progress=false, + callback=Optimization.DEFAULT_CALLBACK, progress=false, dt=nothing, maxiters=nothing, kwargs...) - - return OptimizationCache(prob, opt; callback=callback, progress=progress, + return OptimizationCache(prob, opt; callback=callback, progress=progress, dt=dt, maxiters=maxiters, kwargs...) end -function SciMLBase.__solve( - cache::OptimizationCache{F,RC,LB,UB,LC,UC,S,O,D,P,C} - ) where {F,RC,LB,UB,LC,UC,S,O<:ODEOptimizer,D,P,C} +function SciMLBase.__init(prob::OptimizationProblem, opt::DAEOptimizer; + callback=Optimization.DEFAULT_CALLBACK, progress=false, dt=nothing, + maxiters=nothing, differential_vars=nothing, kwargs...) + return OptimizationCache(prob, opt; callback=callback, progress=progress, dt=dt, + maxiters=maxiters, differential_vars=differential_vars, kwargs...) +end + + +function solve_constrained_root(cache, u0, p) + n = length(u0) + cons_vals = cache.f.cons(u0, p) + m = length(cons_vals) + function resid!(res, u) + temp = similar(u) + f_mass!(temp, u, p, 0.0) + res .= temp + end + u0_ext = vcat(u0, zeros(m)) + prob_nl = NonlinearProblem(resid!, u0_ext, p) + sol_nl = solve(prob_nl, Newton(); tol = 1e-8, maxiters = 100000, + callback = cache.callback, progress = get(cache.solver_args, :progress, false)) + u_ext = sol_nl.u + return u_ext[1:n], sol_nl.retcode +end + + +function get_solver_type(opt::DAEOptimizer) + if opt.solver isa Union{Rodas5, RadauIIA5, ImplicitEuler, Trapezoid} + return :mass_matrix + else + return :indexing + end +end - dt = cache.opt.dt - maxit = get(cache.solver_args, :maxiters, 1000) +function handle_parameters(p) + if p isa SciMLBase.NullParameters + return Float64[] + else + return p + end +end + +function setup_progress_callback(cache, solve_kwargs) + if get(cache.solver_args, :progress, false) + condition = (u, t, integrator) -> true + affect! = (integrator) -> begin + u_opt = integrator.u isa AbstractArray ? integrator.u : integrator.u.u + cache.solver_args[:callback](u_opt, integrator.p, integrator.t) + end + cb = DiscreteCallback(condition, affect!) 
+ solve_kwargs[:callback] = cb + end + return solve_kwargs +end +function finite_difference_jacobian(f, x; ϵ = 1e-8) + n = length(x) + fx = f(x) + if fx === nothing + return zeros(eltype(x), 0, n) + elseif isa(fx, Number) + J = zeros(eltype(fx), 1, n) + for j in 1:n + xj = copy(x) + xj[j] += ϵ + diff = f(xj) + if diff === nothing + diffval = zero(eltype(fx)) + else + diffval = diff - fx + end + J[1, j] = diffval / ϵ + end + return J + else + m = length(fx) + J = zeros(eltype(fx), m, n) + for j in 1:n + xj = copy(x) + xj[j] += ϵ + fxj = f(xj) + if fxj === nothing + @inbounds for i in 1:m + J[i, j] = -fx[i] / ϵ + end + else + @inbounds for i in 1:m + J[i, j] = (fxj[i] - fx[i]) / ϵ + end + end + end + return J + end +end + +function SciMLBase.__solve( + cache::OptimizationCache{F,RC,LB,UB,LC,UC,S,O,D,P,C} + ) where {F,RC,LB,UB,LC,UC,S,O<:Union{ODEOptimizer,DAEOptimizer},D,P,C} + + dt = get(cache.solver_args, :dt, nothing) + maxit = get(cache.solver_args, :maxiters, nothing) + differential_vars = get(cache.solver_args, :differential_vars, nothing) u0 = copy(cache.u0) - p = cache.p + p = handle_parameters(cache.p) # Properly handle NullParameters + if cache.opt isa ODEOptimizer + return solve_ode(cache, dt, maxit, u0, p) + else + solver_method = get_solver_type(cache.opt) + if solver_method == :mass_matrix + return solve_dae_mass_matrix(cache, dt, maxit, u0, p) + else + return solve_dae_indexing(cache, dt, maxit, u0, p, differential_vars) + end + end +end + +function solve_ode(cache, dt, maxit, u0, p) if cache.f.grad === nothing error("ODEOptimizer requires a gradient. Please provide a function with `grad` defined.") end function f!(du, u, p, t) - cache.f.grad(du, u, p) - @. du = -du + grad_vec = similar(u) + if isempty(p) + cache.f.grad(grad_vec, u) + else + cache.f.grad(grad_vec, u, p) + end + @. du = -grad_vec return nothing end @@ -62,14 +193,11 @@ function SciMLBase.__solve( algorithm = DynamicSS(cache.opt.solver) cb = cache.callback - if cb != Optimization.DEFAULT_CALLBACK || get(cache.solver_args,:progress,false) === true - function condition(u, t, integrator) - true - end + if cb != Optimization.DEFAULT_CALLBACK || get(cache.solver_args,:progress,false) + function condition(u, t, integrator) true end function affect!(integrator) u_now = integrator.u - state = Optimization.OptimizationState(u=u_now, objective=cache.f(integrator.u, integrator.p)) - Optimization.callback_function(cb, state) + cache.callback(u_now, integrator.p, integrator.t) end cb_struct = DiscreteCallback(condition, affect!) callback = CallbackSet(cb_struct) @@ -86,16 +214,16 @@ function SciMLBase.__solve( end sol = solve(ss_prob, algorithm; solve_kwargs...) -has_destats = hasproperty(sol, :destats) -has_t = hasproperty(sol, :t) && !isempty(sol.t) + has_destats = hasproperty(sol, :destats) + has_t = hasproperty(sol, :t) && !isempty(sol.t) -stats = Optimization.OptimizationStats( - iterations = has_destats ? get(sol.destats, :iters, 10) : (has_t ? length(sol.t) - 1 : 10), - time = has_t ? sol.t[end] : 0.0, - fevals = has_destats ? get(sol.destats, :f_calls, 0) : 0, - gevals = has_destats ? get(sol.destats, :iters, 0) : 0, - hevals = 0 -) + stats = Optimization.OptimizationStats( + iterations = has_destats ? get(sol.destats, :iters, 10) : (has_t ? length(sol.t) - 1 : 10), + time = has_t ? sol.t[end] : 0.0, + fevals = has_destats ? get(sol.destats, :f_calls, 0) : 0, + gevals = has_destats ? 
get(sol.destats, :iters, 0) : 0, + hevals = 0 + ) SciMLBase.build_solution(cache, cache.opt, sol.u, cache.f(sol.u, p); retcode = ReturnCode.Success, @@ -103,4 +231,137 @@ stats = Optimization.OptimizationStats( ) end +function solve_dae_mass_matrix(cache, dt, maxit, u0, p) + if cache.f.cons === nothing + return solve_ode(cache, dt, maxit, u0, p) + end + x=u0 + cons_vals = cache.f.cons(x, p) + n = length(u0) + m = length(cons_vals) + u0_extended = vcat(u0, zeros(m)) + M = zeros(n + m, n + m) + M[1:n, 1:n] = I(n) + + function f_mass!(du, u, p_, t) + x = @view u[1:n] + λ = @view u[n+1:end] + grad_f = similar(x) + if cache.f.grad !== nothing + cache.f.grad(grad_f, x, p_) + else + grad_f .= ForwardDiff.gradient(z -> cache.f.f(z, p_), x) + end + J = Matrix{eltype(x)}(undef, m, n) + if cache.f.cons_j !== nothing + cache.f.cons_j(J, x) + else + J .= finite_difference_jacobian(z -> cache.f.cons(z, p_), x) + end + @. du[1:n] = -grad_f - (J' * λ) + consv = cache.f.cons(x, p_) + if consv === nothing + fill!(du[n+1:end], zero(eltype(x))) + else + if isa(consv, Number) + @assert m == 1 + du[n+1] = consv + else + @assert length(consv) == m + @. du[n+1:end] = consv + end + end + return nothing + end + + if m == 0 + optf = ODEFunction(f_mass!, mass_matrix = I(n)) + prob = ODEProblem(optf, u0, (0.0, 1.0), p) + return solve(prob, HighOrderDescent(); dt=dt, maxiters=maxit) + end + + ss_prob = SteadyStateProblem(ODEFunction(f_mass!, mass_matrix = M), u0_extended, p) + + solve_kwargs = setup_progress_callback(cache, Dict()) + if maxit !== nothing; solve_kwargs[:maxiters] = maxit; end + if dt !== nothing; solve_kwargs[:dt] = dt; end + + sol = solve(ss_prob, DynamicSS(cache.opt.solver); solve_kwargs...) + # if sol.retcode ≠ ReturnCode.Success + # # you may still accept Default or warn + # end + u_ext = sol.u + u_final = u_ext[1:n] + return SciMLBase.build_solution(cache, cache.opt, u_final, cache.f(u_final, p); + retcode = sol.retcode) end + + +function solve_dae_indexing(cache, dt, maxit, u0, p, differential_vars) + if cache.f.cons === nothing + return solve_ode(cache, dt, maxit, u0, p) + end + x=u0 + cons_vals = cache.f.cons(x, p) + n = length(u0) + m = length(cons_vals) + u0_ext = vcat(u0, zeros(m)) + du0_ext = zeros(n + m) + + if differential_vars === nothing + differential_vars = vcat(fill(true, n), fill(false, m)) + else + if length(differential_vars) == n + differential_vars = vcat(differential_vars, fill(false, m)) + elseif length(differential_vars) == n + m + # use as is + else + error("differential_vars length must be number of variables ($n) or extended size ($(n+m))") + end + end + + function dae_residual!(res, du, u, p_, t) + x = @view u[1:n] + λ = @view u[n+1:end] + du_x = @view du[1:n] + grad_f = similar(x) + cache.f.grad(grad_f, x, p_) + J = zeros(m, n) + if cache.f.cons_j !== nothing + cache.f.cons_j(J, x) + else + J .= finite_difference_jacobian(z -> cache.f.cons(z,p_), x) + end + @. res[1:n] = du_x + grad_f + J' * λ + consv = cache.f.cons(x, p_) + @. 
res[n+1:end] = consv + return nothing + end + + if m == 0 + optf = ODEFunction(dae_residual!, differential_vars = differential_vars) + prob = ODEProblem(optf, du0_ext, (0.0, 1.0), p) + return solve(prob, HighOrderDescent(); dt=dt, maxiters=maxit) + end + + tspan = (0.0, 10.0) + prob = DAEProblem(dae_residual!, du0_ext, u0_ext, tspan, p; + differential_vars = differential_vars) + + solve_kwargs = setup_progress_callback(cache, Dict()) + if maxit !== nothing; solve_kwargs[:maxiters] = maxit; end + if dt !== nothing; solve_kwargs[:dt] = dt; end + if hasfield(typeof(cache.opt.solver), :initializealg) + solve_kwargs[:initializealg] = BrownFullBasicInit() + end + + sol = solve(prob, cache.opt.solver; solve_kwargs...) + u_ext = sol.u + u_final = u_ext[end][1:n] + + return SciMLBase.build_solution(cache, cache.opt, u_final, cache.f(u_final, p); + retcode = sol.retcode) +end + + +end \ No newline at end of file diff --git a/lib/OptimizationODE/test/runtests.jl b/lib/OptimizationODE/test/runtests.jl index f3d8a18c0..11978911c 100644 --- a/lib/OptimizationODE/test/runtests.jl +++ b/lib/OptimizationODE/test/runtests.jl @@ -1,44 +1,290 @@ using Test -using OptimizationODE, SciMLBase, ADTypes +using OptimizationODE +using Optimization +using LinearAlgebra, ForwardDiff +using OrdinaryDiffEq, DifferentialEquations, SteadyStateDiffEq, Sundials -@testset "OptimizationODE Tests" begin +# Test helper functions +function rosenbrock(x, p,args...) + return (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2 +end + +function rosenbrock_grad!(grad, x, p,args...) + grad[1] = -2.0 * (p[1] - x[1]) - 4.0 * p[2] * (x[2] - x[1]^2) * x[1] + grad[2] = 2.0 * p[2] * (x[2] - x[1]^2) +end + +function quadratic(x, p,args...) + return (x[1] - p[1])^2 + (x[2] - p[2])^2 +end + +function quadratic_grad!(grad, x, p,args...) + grad[1] = 2.0 * (x[1] - p[1]) + grad[2] = 2.0 * (x[2] - p[2]) +end - function f(x, p) - return sum(abs2, x) +# Constrained optimization problem +function constrained_objective(x, p,args...) + return x[1]^2 + x[2]^2 +end + +function constrained_objective_grad!(grad, x, p,args...) + grad[1] = 2.0 * x[1] + grad[2] = 2.0 * x[2] +end + +function constraint_func(res, x, p,args...) + res[1] = x[1] + x[2] - 1.0 # x[1] + x[2] = 1 + return x[1] + x[2] - 1.0 +end + +function constraint_jac!(jac, x, p,args...) + jac[1, 1] = 1.0 + jac[1, 2] = -1.0 +end + +@testset "OptimizationODE.jl Tests" begin + + + @testset "Basic Unconstrained Optimization" begin + @testset "Quadratic Function - ODE Optimizers" begin + x0 = [2.0, 2.0] + p = [1.0, 1.0] # Minimum at (1, 1) + + optf = OptimizationFunction(quadratic, grad=quadratic_grad!) + prob = OptimizationProblem(optf, x0, p) + + optimizers = [ + ("ODEGradientDescent", ODEGradientDescent()), + ("RKChebyshevDescent", RKChebyshevDescent()), + ("RKAccelerated", RKAccelerated()), + ("HighOrderDescent", HighOrderDescent()) + ] + + for (name, opt) in optimizers + @testset "$name" begin + sol = solve(prob, opt, dt=0.001, maxiters=1000000) + @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default + @test isapprox(sol.u, p, atol=1e-1) + @test sol.objective < 1e-2 + end + end + end + + @testset "Rosenbrock Function - Selected Optimizers" begin + x0 = [1.5, 2.0] + p = [1.0, 100.0] # Classic Rosenbrock parameters + + optf = OptimizationFunction(rosenbrock, grad=rosenbrock_grad!) 
+ prob = OptimizationProblem(optf, x0, p) + + # Test with more robust optimizers for Rosenbrock + optimizers = [ + ("RKAccelerated", RKAccelerated()), + ("HighOrderDescent", HighOrderDescent()) + ] + + for (name, opt) in optimizers + @testset "$name" begin + sol = solve(prob, opt, dt=0.001, maxiters=1000000) + @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default + # Rosenbrock is harder, so we use looser tolerances + @test isapprox(sol.u[1], 1.0, atol=1e-1) + @test isapprox(sol.u[2], 1.0, atol=1e-1) + @test sol.objective < 1.0 + end + end + end end + + @testset "Constrained Optimization - DAE Optimizers" begin + @testset "Equality Constrained Optimization" begin + # Minimize f(x) = x₁² + x₂² + # Subject to x₁ - x₂ = 1 - function g!(g, x, p) - @. g = 2 * x + function constrained_objective(x, p,args...) + return x[1]^2 + x[2]^2 end - x0 = [2.0, -3.0] - p = [5.0] + function constrained_objective_grad!(g, x, p, args...) + g .= 2 .* x .* p[1] + return nothing + end - f_autodiff = OptimizationFunction(f, ADTypes.AutoForwardDiff()) - prob_auto = OptimizationProblem(f_autodiff, x0, p) + # Constraint: x₁ - x₂ - p[1] = 0 (p[1] = 1 → x₁ - x₂ = 1) + function constraint_func(x, p, args...) + return x[1] - x[2] - p[1] + end - for opt in (ODEGradientDescent(dt=0.01), RKChebyshevDescent(), RKAccelerated(), HighOrderDescent()) - sol = solve(prob_auto, opt; maxiters=50_000) - @test sol.u ≈ [0.0, 0.0] atol=1e-2 - @test sol.objective ≈ 0.0 atol=1e-2 - @test sol.retcode == ReturnCode.Success + function constraint_jac!(J, x,args...) + J[1, 1] = 1.0 + J[1, 2] = -1.0 + return nothing end - f_manual = OptimizationFunction(f, SciMLBase.NoAD(); grad=g!) - prob_manual = OptimizationProblem(f_manual, x0) + x0 = [1.0, 0.0] # reasonable initial guess + p = [1.0] # enforce x₁ - x₂ = 1 - for opt in (ODEGradientDescent(dt=0.01), RKChebyshevDescent(), RKAccelerated(), HighOrderDescent()) - sol = solve(prob_manual, opt; maxiters=50_000) - @test sol.u ≈ [0.0, 0.0] atol=1e-2 - @test sol.objective ≈ 0.0 atol=1e-2 - @test sol.retcode == ReturnCode.Success - end + optf = OptimizationFunction(constrained_objective; + grad = constrained_objective_grad!, + cons = constraint_func, + cons_j = constraint_jac!) 
- f_fail = OptimizationFunction(f, SciMLBase.NoAD()) - prob_fail = OptimizationProblem(f_fail, x0) + @testset "Equality Constrained - Mass Matrix Method" begin + prob = OptimizationProblem(optf, x0, p) + opt = DAEMassMatrix() + sol = solve(prob, opt; dt=0.01, maxiters=1_000_000) - for opt in (ODEGradientDescent(dt=0.001), RKChebyshevDescent(), RKAccelerated(), HighOrderDescent()) - @test_throws ErrorException solve(prob_fail, opt; maxiters=20_000) + @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default + @test isapprox(sol.u[1] - sol.u[2], 1.0; atol = 1e-2) + @test isapprox(sol.u, [0.5, -0.5]; atol = 1e-2) end + @testset "Equality Constrained - Index Method" begin + prob = OptimizationProblem(optf, x0, p) + opt = DAEIndexing() + differential_vars = [true, true, false] # x vars = differential, λ = algebraic + sol = solve(prob, opt; dt=0.01, maxiters=1_000_000, + differential_vars = differential_vars) + + @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default + @test isapprox(sol.u[1] - sol.u[2], 1.0; atol = 1e-2) + @test isapprox(sol.u, [0.5, -0.5]; atol = 1e-2) + end end + end + + @testset "Parameter Handling" begin + @testset "NullParameters Handling" begin + x0 = [0.0, 0.0] + p=Float64[] # No parameters provided + # Create a problem with NullParameters + optf = OptimizationFunction((x, p, args...) -> sum(x.^2), + grad=(grad, x, p, args...) -> (grad .= 2.0 .* x)) + prob = OptimizationProblem(optf, x0,p) # No parameters provided + + opt = ODEGradientDescent() + sol = solve(prob, opt, dt=0.01, maxiters=100000) + + @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default + @test isapprox(sol.u, [0.0, 0.0], atol=1e-2) + end + + @testset "Regular Parameters" begin + x0 = [0.5, 1.5] + p = [1.0, 1.0] + + optf = OptimizationFunction(quadratic, grad=quadratic_grad!) + prob = OptimizationProblem(optf, x0, p) + + opt = RKAccelerated() + sol = solve(prob, opt; dt=0.001, maxiters=1000000) + + @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default + @test isapprox(sol.u, p, atol=1e-1) + end + end + + @testset "Solver Options and Keywords" begin + @testset "Custom dt and maxiters" begin + x0 = [0.0, 0.0] + p = [1.0, 1.0] + + optf = OptimizationFunction(quadratic, grad=quadratic_grad!) + prob = OptimizationProblem(optf, x0, p) + + opt = RKAccelerated() + + # Test with custom dt + sol1 = solve(prob, opt; dt=0.001, maxiters=100000) + @test sol1.retcode == ReturnCode.Success || sol1.retcode == ReturnCode.Default + + # Test with smaller dt (should be more accurate) + sol2 = solve(prob, opt; dt=0.001, maxiters=100000) + @test sol2.retcode == ReturnCode.Success || sol2.retcode == ReturnCode.Default + @test sol2.objective <= sol1.objective # Should be at least as good + end + end + + @testset "Callback Functionality" begin + @testset "Progress Callback" begin + x0 = [0.0, 0.0] + p = [1.0, 1.0] + + callback_called = Ref(false) + callback_values = Vector{Vector{Float64}}() + + function test_callback(x, p, t) + return false + end + + optf = OptimizationFunction(quadratic; grad=quadratic_grad!) 
+ prob = OptimizationProblem(optf, x0, p) + + opt = RKAccelerated() + sol = solve(prob, opt, dt=0.1, maxiters=100000, callback=test_callback, progress=true) + + @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default + end + end + + @testset "Finite Difference Jacobian" begin + @testset "Jacobian Computation" begin + x = [1.0, 2.0] + f(x) = [x[1]^2 + x[2], x[1] * x[2]] + + J = OptimizationODE.finite_difference_jacobian(f, x) + + expected_J = [2.0 1.0; 2.0 1.0] + + @test isapprox(J, expected_J, atol=1e-6) + end + end + + @testset "Solver Type Detection" begin + @testset "Mass Matrix Solvers" begin + opt = DAEMassMatrix() + @test OptimizationODE.get_solver_type(opt) == :mass_matrix + end + + @testset "Index Method Solvers" begin + opt = DAEIndexing() + @test OptimizationODE.get_solver_type(opt) == :indexing + end + end + + @testset "Error Handling and Edge Cases" begin + @testset "Empty Constraints" begin + x0 = [1.5, 0.5] + p = Float64[] + + # Problem without constraints should fall back to ODE method + optf = OptimizationFunction(constrained_objective, + grad=constrained_objective_grad!) + prob = OptimizationProblem(optf, x0, p) + + opt = DAEMassMatrix() + sol = solve(prob, opt; dt=0.001, maxiters=50000) + + @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default + @test isapprox(sol.u, [0.0, 0.0], atol=1e-1) + end + + @testset "Single Variable Optimization" begin + x0 = [0.5] + p = [1.0] + + single_var_func(x, p,args...) = (x[1] - p[1])^2 + single_var_grad!(grad, x, p,args...) = (grad[1] = 2.0 * (x[1] - p[1])) + + optf = OptimizationFunction(single_var_func; grad=single_var_grad!) + prob = OptimizationProblem(optf, x0, p) + + opt = RKAccelerated() + sol = solve(prob, opt; dt=0.001, maxiters=10000) + + @test sol.retcode == ReturnCode.Success || sol.retcode == ReturnCode.Default + @test isapprox(sol.u[1], p[1], atol=1e-1) + end + end +end \ No newline at end of file From ad2f53fd0cbea01f8f6efac7f6d666545b1448f1 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Thu, 26 Jun 2025 01:55:40 +0530 Subject: [PATCH 07/17] Redundant functions removed from OptimizationODE.jl Removed redundant functions, used proper jacobian. 
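For context on "used proper jacobian": the diff below drops the hand-rolled `finite_difference_jacobian` helper in favour of the user-supplied `cons_j`, and the test suite switches to `ForwardDiff.jacobian` for the same purpose. A minimal sketch of that replacement, with a purely illustrative constraint function:

```julia
using ForwardDiff

# Illustrative vector-valued constraint residual, not part of the package API.
cons(x) = [x[1] + x[2] - 1.0, x[1] * x[2]]

x = [0.3, 0.7]
J = ForwardDiff.jacobian(cons, x)   # 2×2 Jacobian: [1.0 1.0; 0.7 0.3]
```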
--- lib/OptimizationODE/src/OptimizationODE.jl | 51 ++++------------------ 1 file changed, 9 insertions(+), 42 deletions(-) diff --git a/lib/OptimizationODE/src/OptimizationODE.jl b/lib/OptimizationODE/src/OptimizationODE.jl index 36a8b02d2..44fe60e49 100644 --- a/lib/OptimizationODE/src/OptimizationODE.jl +++ b/lib/OptimizationODE/src/OptimizationODE.jl @@ -63,24 +63,6 @@ function SciMLBase.__init(prob::OptimizationProblem, opt::DAEOptimizer; end -function solve_constrained_root(cache, u0, p) - n = length(u0) - cons_vals = cache.f.cons(u0, p) - m = length(cons_vals) - function resid!(res, u) - temp = similar(u) - f_mass!(temp, u, p, 0.0) - res .= temp - end - u0_ext = vcat(u0, zeros(m)) - prob_nl = NonlinearProblem(resid!, u0_ext, p) - sol_nl = solve(prob_nl, Newton(); tol = 1e-8, maxiters = 100000, - callback = cache.callback, progress = get(cache.solver_args, :progress, false)) - u_ext = sol_nl.u - return u_ext[1:n], sol_nl.retcode -end - - function get_solver_type(opt::DAEOptimizer) if opt.solver isa Union{Rodas5, RadauIIA5, ImplicitEuler, Trapezoid} return :mass_matrix @@ -89,6 +71,7 @@ function get_solver_type(opt::DAEOptimizer) end end + function handle_parameters(p) if p isa SciMLBase.NullParameters return Float64[] @@ -240,8 +223,8 @@ function solve_dae_mass_matrix(cache, dt, maxit, u0, p) n = length(u0) m = length(cons_vals) u0_extended = vcat(u0, zeros(m)) - M = zeros(n + m, n + m) - M[1:n, 1:n] = I(n) + M = Diagonal(ones(n + m)) + function f_mass!(du, u, p_, t) x = @view u[1:n] @@ -253,24 +236,11 @@ function solve_dae_mass_matrix(cache, dt, maxit, u0, p) grad_f .= ForwardDiff.gradient(z -> cache.f.f(z, p_), x) end J = Matrix{eltype(x)}(undef, m, n) - if cache.f.cons_j !== nothing - cache.f.cons_j(J, x) - else - J .= finite_difference_jacobian(z -> cache.f.cons(z, p_), x) - end + cache.f.cons_j !== nothing && cache.f.cons_j(J, x) + @. du[1:n] = -grad_f - (J' * λ) consv = cache.f.cons(x, p_) - if consv === nothing - fill!(du[n+1:end], zero(eltype(x))) - else - if isa(consv, Number) - @assert m == 1 - du[n+1] = consv - else - @assert length(consv) == m - @. du[n+1:end] = consv - end - end + @. du[n+1:end] = consv return nothing end @@ -327,11 +297,8 @@ function solve_dae_indexing(cache, dt, maxit, u0, p, differential_vars) grad_f = similar(x) cache.f.grad(grad_f, x, p_) J = zeros(m, n) - if cache.f.cons_j !== nothing - cache.f.cons_j(J, x) - else - J .= finite_difference_jacobian(z -> cache.f.cons(z,p_), x) - end + cache.f.cons_j !== nothing && cache.f.cons_j(J, x) + @. res[1:n] = du_x + grad_f + J' * λ consv = cache.f.cons(x, p_) @. res[n+1:end] = consv @@ -364,4 +331,4 @@ function solve_dae_indexing(cache, dt, maxit, u0, p, differential_vars) end -end \ No newline at end of file +end From 9a623d1d6b1556ee1391643ad77ccc67918cebb3 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Thu, 26 Jun 2025 23:02:45 +0530 Subject: [PATCH 08/17] Redundant functions removed from OptimizationODE.jl Code changes based on review. 
--- lib/OptimizationODE/src/OptimizationODE.jl | 41 +--------------------- 1 file changed, 1 insertion(+), 40 deletions(-) diff --git a/lib/OptimizationODE/src/OptimizationODE.jl b/lib/OptimizationODE/src/OptimizationODE.jl index 44fe60e49..f1598f0f5 100644 --- a/lib/OptimizationODE/src/OptimizationODE.jl +++ b/lib/OptimizationODE/src/OptimizationODE.jl @@ -93,45 +93,6 @@ function setup_progress_callback(cache, solve_kwargs) return solve_kwargs end -function finite_difference_jacobian(f, x; ϵ = 1e-8) - n = length(x) - fx = f(x) - if fx === nothing - return zeros(eltype(x), 0, n) - elseif isa(fx, Number) - J = zeros(eltype(fx), 1, n) - for j in 1:n - xj = copy(x) - xj[j] += ϵ - diff = f(xj) - if diff === nothing - diffval = zero(eltype(fx)) - else - diffval = diff - fx - end - J[1, j] = diffval / ϵ - end - return J - else - m = length(fx) - J = zeros(eltype(fx), m, n) - for j in 1:n - xj = copy(x) - xj[j] += ϵ - fxj = f(xj) - if fxj === nothing - @inbounds for i in 1:m - J[i, j] = -fx[i] / ϵ - end - else - @inbounds for i in 1:m - J[i, j] = (fxj[i] - fx[i]) / ϵ - end - end - end - return J - end -end function SciMLBase.__solve( cache::OptimizationCache{F,RC,LB,UB,LC,UC,S,O,D,P,C} @@ -331,4 +292,4 @@ function solve_dae_indexing(cache, dt, maxit, u0, p, differential_vars) end -end +end From b759ce71b5a8070a00167fbd23ba9562bb2bfc9e Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Thu, 26 Jun 2025 23:03:31 +0530 Subject: [PATCH 09/17] Jacobian fix runtests.jl Jacobian fix for tests. --- lib/OptimizationODE/test/runtests.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/OptimizationODE/test/runtests.jl b/lib/OptimizationODE/test/runtests.jl index 11978911c..5c4d414ff 100644 --- a/lib/OptimizationODE/test/runtests.jl +++ b/lib/OptimizationODE/test/runtests.jl @@ -233,7 +233,7 @@ end x = [1.0, 2.0] f(x) = [x[1]^2 + x[2], x[1] * x[2]] - J = OptimizationODE.finite_difference_jacobian(f, x) + J = ForwardDiff.jacobian(f, x) expected_J = [2.0 1.0; 2.0 1.0] @@ -287,4 +287,4 @@ end @test isapprox(sol.u[1], p[1], atol=1e-1) end end -end \ No newline at end of file +end From 47a9d6106628bad3b6775b77552971427fadfcaf Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 30 Jun 2025 00:33:57 +0530 Subject: [PATCH 10/17] Update OptimizationODE.jl Removed hard coded alg call. --- lib/OptimizationODE/src/OptimizationODE.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/OptimizationODE/src/OptimizationODE.jl b/lib/OptimizationODE/src/OptimizationODE.jl index f1598f0f5..030d0c675 100644 --- a/lib/OptimizationODE/src/OptimizationODE.jl +++ b/lib/OptimizationODE/src/OptimizationODE.jl @@ -206,9 +206,9 @@ function solve_dae_mass_matrix(cache, dt, maxit, u0, p) end if m == 0 - optf = ODEFunction(f_mass!, mass_matrix = I(n)) + optf = ODEFunction(f_mass!) prob = ODEProblem(optf, u0, (0.0, 1.0), p) - return solve(prob, HighOrderDescent(); dt=dt, maxiters=maxit) + return solve(prob, cache.opt.solver; dt=dt, maxiters=maxit) end ss_prob = SteadyStateProblem(ODEFunction(f_mass!, mass_matrix = M), u0_extended, p) From 1f358beb6592a6f05286efd9e2270d1173805af2 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 30 Jun 2025 01:04:35 +0530 Subject: [PATCH 11/17] Update Project.toml Linear Algebra dependency added. 
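For context, the LinearAlgebra dependency backs the identity/diagonal mass-matrix construction used in `solve_dae_mass_matrix` (`I(n)`, `Diagonal`). A standalone sketch of that construction, with illustrative sizes:

```julia
using LinearAlgebra

n, m = 2, 1              # number of variables and constraints (illustrative)
M = zeros(n + m, n + m)
M[1:n, 1:n] = I(n)       # identity block for the differential states x;
                         # the remaining zero rows correspond to the algebraic multipliers λ
```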
--- lib/OptimizationODE/Project.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/OptimizationODE/Project.toml b/lib/OptimizationODE/Project.toml index a4fee5663..98dbab0df 100644 --- a/lib/OptimizationODE/Project.toml +++ b/lib/OptimizationODE/Project.toml @@ -6,6 +6,7 @@ version = "0.1.0" [deps] ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba" +LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" OrdinaryDiffEq = "1dea7af3-3e70-54e6-95c3-0bf5283fa5ed" Reexport = "189a3867-3050-52da-a836-e630ba90ab69" SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462" From 2b03b367739cd29297403110a076960518ab3f5c Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 30 Jun 2025 01:47:58 +0530 Subject: [PATCH 12/17] Packages added to Project.toml added package dependencies --- lib/OptimizationODE/Project.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lib/OptimizationODE/Project.toml b/lib/OptimizationODE/Project.toml index 98dbab0df..9305ba131 100644 --- a/lib/OptimizationODE/Project.toml +++ b/lib/OptimizationODE/Project.toml @@ -11,6 +11,9 @@ OrdinaryDiffEq = "1dea7af3-3e70-54e6-95c3-0bf5283fa5ed" Reexport = "189a3867-3050-52da-a836-e630ba90ab69" SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462" SteadyStateDiffEq = "9672c7b4-1e72-59bd-8a11-6ac3964bc41f" +NonlinearSolve = "8913a72c-1f9b-4ce2-8d82-65094dcecaec" +Sundials = "c3572dad-4567-51f8-b174-8c6c989267f4" +DifferentialEquations = "0c46a032-eb83-5123-abaf-570d42b7fbaa" [compat] ForwardDiff = "1" From 36a85901461488b1c2eed912dfa4e303d1d368ad Mon Sep 17 00:00:00 2001 From: Christopher Rackauckas Date: Thu, 3 Jul 2025 15:38:49 +0000 Subject: [PATCH 13/17] Update lib/OptimizationODE/test/runtests.jl --- lib/OptimizationODE/test/runtests.jl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/OptimizationODE/test/runtests.jl b/lib/OptimizationODE/test/runtests.jl index 5c4d414ff..ad8b7f599 100644 --- a/lib/OptimizationODE/test/runtests.jl +++ b/lib/OptimizationODE/test/runtests.jl @@ -28,17 +28,17 @@ function constrained_objective(x, p,args...) return x[1]^2 + x[2]^2 end -function constrained_objective_grad!(grad, x, p,args...) +function constrained_objective_grad!(grad, x, p) grad[1] = 2.0 * x[1] grad[2] = 2.0 * x[2] end -function constraint_func(res, x, p,args...) +function constraint_func(res, x, p) res[1] = x[1] + x[2] - 1.0 # x[1] + x[2] = 1 return x[1] + x[2] - 1.0 end -function constraint_jac!(jac, x, p,args...) +function constraint_jac!(jac, x, p) jac[1, 1] = 1.0 jac[1, 2] = -1.0 end From 953fda2b360715a0a732dec99a3c94ff151f0c3d Mon Sep 17 00:00:00 2001 From: Christopher Rackauckas Date: Thu, 3 Jul 2025 15:38:58 +0000 Subject: [PATCH 14/17] Update lib/OptimizationODE/test/runtests.jl --- lib/OptimizationODE/test/runtests.jl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/OptimizationODE/test/runtests.jl b/lib/OptimizationODE/test/runtests.jl index ad8b7f599..779989da2 100644 --- a/lib/OptimizationODE/test/runtests.jl +++ b/lib/OptimizationODE/test/runtests.jl @@ -5,26 +5,26 @@ using LinearAlgebra, ForwardDiff using OrdinaryDiffEq, DifferentialEquations, SteadyStateDiffEq, Sundials # Test helper functions -function rosenbrock(x, p,args...) +function rosenbrock(x, p) return (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2 end -function rosenbrock_grad!(grad, x, p,args...) 
+function rosenbrock_grad!(grad, x, p) grad[1] = -2.0 * (p[1] - x[1]) - 4.0 * p[2] * (x[2] - x[1]^2) * x[1] grad[2] = 2.0 * p[2] * (x[2] - x[1]^2) end -function quadratic(x, p,args...) +function quadratic(x, p) return (x[1] - p[1])^2 + (x[2] - p[2])^2 end -function quadratic_grad!(grad, x, p,args...) +function quadratic_grad!(grad, x, p) grad[1] = 2.0 * (x[1] - p[1]) grad[2] = 2.0 * (x[2] - p[2]) end # Constrained optimization problem -function constrained_objective(x, p,args...) +function constrained_objective(x, p) return x[1]^2 + x[2]^2 end From 0691c24de6cac79bd1a00c5f4c8046939387c7b8 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Fri, 4 Jul 2025 13:16:40 +0530 Subject: [PATCH 15/17] Update runtests.jl Removed redundant tests. --- lib/OptimizationODE/test/runtests.jl | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/lib/OptimizationODE/test/runtests.jl b/lib/OptimizationODE/test/runtests.jl index 779989da2..c98f5a78e 100644 --- a/lib/OptimizationODE/test/runtests.jl +++ b/lib/OptimizationODE/test/runtests.jl @@ -240,19 +240,7 @@ end @test isapprox(J, expected_J, atol=1e-6) end end - - @testset "Solver Type Detection" begin - @testset "Mass Matrix Solvers" begin - opt = DAEMassMatrix() - @test OptimizationODE.get_solver_type(opt) == :mass_matrix - end - - @testset "Index Method Solvers" begin - opt = DAEIndexing() - @test OptimizationODE.get_solver_type(opt) == :indexing - end - end - + @testset "Error Handling and Edge Cases" begin @testset "Empty Constraints" begin x0 = [1.5, 0.5] From 17204c496f3761653caa572344f42f9e056ef2d2 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Fri, 4 Jul 2025 13:39:19 +0530 Subject: [PATCH 16/17] Update OptimizationODE.jl Mismatch on problem and algorithm fixed. --- lib/OptimizationODE/src/OptimizationODE.jl | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/lib/OptimizationODE/src/OptimizationODE.jl b/lib/OptimizationODE/src/OptimizationODE.jl index 030d0c675..bcfe80338 100644 --- a/lib/OptimizationODE/src/OptimizationODE.jl +++ b/lib/OptimizationODE/src/OptimizationODE.jl @@ -23,7 +23,7 @@ struct DAEOptimizer{T} solver::T end -DAEMassMatrix() = DAEOptimizer(Rodas5()) +DAEMassMatrix() = DAEOptimizer(Rosenbrock23(autodiff = false)) DAEIndexing() = DAEOptimizer(IDA()) @@ -63,15 +63,6 @@ function SciMLBase.__init(prob::OptimizationProblem, opt::DAEOptimizer; end -function get_solver_type(opt::DAEOptimizer) - if opt.solver isa Union{Rodas5, RadauIIA5, ImplicitEuler, Trapezoid} - return :mass_matrix - else - return :indexing - end -end - - function handle_parameters(p) if p isa SciMLBase.NullParameters return Float64[] @@ -107,8 +98,7 @@ function SciMLBase.__solve( if cache.opt isa ODEOptimizer return solve_ode(cache, dt, maxit, u0, p) else - solver_method = get_solver_type(cache.opt) - if solver_method == :mass_matrix + if cache.opt.solver == Rosenbrock23(autodiff = false) return solve_dae_mass_matrix(cache, dt, maxit, u0, p) else return solve_dae_indexing(cache, dt, maxit, u0, p, differential_vars) From 11bd6651c54b6d934f23a4bcf36a90aa5a42ddf9 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Fri, 4 Jul 2025 23:43:26 +0530 Subject: [PATCH 17/17] Update runtests.jl Correcting function definition. 
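For reference, Optimization.jl expects the objective as `f(x, p)` and the in-place gradient as `grad!(G, x, p)`, which is why the stray `args...` is removed below. A minimal sketch of the corrected convention, mirroring the quadratic test problem:

```julia
using Optimization, SciMLBase

quadratic(x, p) = (x[1] - p[1])^2 + (x[2] - p[2])^2
quadratic_grad!(G, x, p) = (G .= 2 .* (x .- p))

optf = OptimizationFunction(quadratic, SciMLBase.NoAD(); grad = quadratic_grad!)
prob = OptimizationProblem(optf, [0.0, 0.0], [1.0, 1.0])
```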
--- lib/OptimizationODE/test/runtests.jl | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/lib/OptimizationODE/test/runtests.jl b/lib/OptimizationODE/test/runtests.jl index c98f5a78e..c0c3aa867 100644 --- a/lib/OptimizationODE/test/runtests.jl +++ b/lib/OptimizationODE/test/runtests.jl @@ -102,21 +102,21 @@ end # Minimize f(x) = x₁² + x₂² # Subject to x₁ - x₂ = 1 - function constrained_objective(x, p,args...) + function constrained_objective(x, p) return x[1]^2 + x[2]^2 end - function constrained_objective_grad!(g, x, p, args...) + function constrained_objective_grad!(g, x, p) g .= 2 .* x .* p[1] return nothing end # Constraint: x₁ - x₂ - p[1] = 0 (p[1] = 1 → x₁ - x₂ = 1) - function constraint_func(x, p, args...) + function constraint_func(x, p) return x[1] - x[2] - p[1] end - function constraint_jac!(J, x,args...) + function constraint_jac!(J, x) J[1, 1] = 1.0 J[1, 2] = -1.0 return nothing @@ -159,8 +159,8 @@ end x0 = [0.0, 0.0] p=Float64[] # No parameters provided # Create a problem with NullParameters - optf = OptimizationFunction((x, p, args...) -> sum(x.^2), - grad=(grad, x, p, args...) -> (grad .= 2.0 .* x)) + optf = OptimizationFunction((x, p) -> sum(x.^2), + grad=(grad, x, p) -> (grad .= 2.0 .* x)) prob = OptimizationProblem(optf, x0,p) # No parameters provided opt = ODEGradientDescent() @@ -262,8 +262,8 @@ end x0 = [0.5] p = [1.0] - single_var_func(x, p,args...) = (x[1] - p[1])^2 - single_var_grad!(grad, x, p,args...) = (grad[1] = 2.0 * (x[1] - p[1])) + single_var_func(x, p) = (x[1] - p[1])^2 + single_var_grad!(grad, x, p) = (grad[1] = 2.0 * (x[1] - p[1])) optf = OptimizationFunction(single_var_func; grad=single_var_grad!) prob = OptimizationProblem(optf, x0, p)