Commit 0b9872a

Merge 69aa822 into 5fde281
2 parents (5fde281 + 69aa822), commit 0b9872a

3 files changed: +181 -6 lines changed

src/controller/construct.jl

Lines changed: 6 additions & 1 deletion
@@ -439,7 +439,12 @@ function setconstraint!(
         JuMP.delete(optim, optim[:linconstraint])
         JuMP.unregister(optim, :linconstraint)
         @constraint(optim, linconstraint, A*Z̃var .≤ b)
-        set_nonlincon!(mpc, model, transcription, optim)
+        # TODO: change this !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+        if JuMP.solver_name(optim) ≠ "Ipopt"
+            set_nonlincon!(mpc, model, transcription, optim)
+        else
+            set_nonlincon_exp(mpc, optim)
+        end
     else
         i_b, i_g = init_matconstraint_mpc(
             model, transcription, nc,
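
Note: the new branch keys on JuMP.solver_name, so the traditional set_nonlincon! path is kept for every backend except Ipopt. A minimal sketch of that check, assuming JuMP and Ipopt are loaded (the model below is a throwaway, not the controller's optim):

using JuMP, Ipopt

optim = JuMP.Model(Ipopt.Optimizer)
JuMP.solver_name(optim) == "Ipopt"  # true, so the ≠ "Ipopt" test fails and set_nonlincon_exp is called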

src/controller/nonlinmpc.jl

Lines changed: 159 additions & 3 deletions
@@ -513,7 +513,9 @@ end
 
 Init the nonlinear optimization for [`NonLinMPC`](@ref) controllers.
 """
-function init_optimization!(mpc::NonLinMPC, model::SimModel, optim::JuMP.GenericModel)
+function init_optimization!(
+    mpc::NonLinMPC, model::SimModel, optim::JuMP.GenericModel{JNT}
+) where JNT<:Real
     # --- variables and linear constraints ---
     con, transcription = mpc.con, mpc.transcription
     nZ̃ = length(mpc.Z̃)
@@ -542,8 +544,162 @@ function init_optimization!(mpc::NonLinMPC, model::SimModel, optim::JuMP.Generic
     )
     @operator(optim, J, nZ̃, Jfunc, ∇Jfunc!)
     @objective(optim, Min, J(Z̃var...))
-    init_nonlincon!(mpc, model, transcription, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs!)
-    set_nonlincon!(mpc, model, transcription, optim)
+    if JuMP.solver_name(optim) ≠ "Ipopt"
+        init_nonlincon!(mpc, model, transcription, gfuncs, ∇gfuncs!, geqfuncs, ∇geqfuncs!)
+        set_nonlincon!(mpc, model, transcription, optim)
+    else
+        set_nonlincon_exp(mpc, optim)
+    end
+    return nothing
+end
+
+set_nonlincon_exp(::PredictiveController, ::JuMP.GenericModel) = nothing
+# TODO: cleanup this function, this is super dirty
+function set_nonlincon_exp(mpc::NonLinMPC, optim::JuMP.GenericModel{JNT}) where JNT<:Real
+    # ========= Test new experimental feature ========================================
+
+    nonlin_constraints = JuMP.all_constraints(optim, JuMP.Vector{JuMP.VariableRef}, Ipopt._VectorNonlinearOracle)
+    map(con_ref -> JuMP.delete(optim, con_ref), nonlin_constraints)
+
+
+    Z̃var = optim[:Z̃var]
+
+    model = mpc.estim.model
+    jac = mpc.jacobian
+    nu, ny, nx̂, nϵ, nk = model.nu, model.ny, mpc.estim.nx̂, mpc.nϵ, model.nk
+    Hp, Hc = mpc.Hp, mpc.Hc
+    ng, nc, neq = length(mpc.con.i_g), mpc.con.nc, mpc.con.neq
+    nZ̃, nU, nŶ, nX̂, nK = length(mpc.Z̃), Hp*nu, Hp*ny, Hp*nx̂, Hp*nk
+    nΔŨ, nUe, nŶe = nu*Hc + nϵ, nU + nu, nŶ + ny
+    strict = Val(true)
+    myNaN = convert(JNT, NaN)
+    myInf = convert(JNT, Inf)
+
+
+    ΔŨ::Vector{JNT} = zeros(JNT, nΔŨ)
+    x̂0end::Vector{JNT} = zeros(JNT, nx̂)
+    K0::Vector{JNT} = zeros(JNT, nK)
+    Ue::Vector{JNT}, Ŷe::Vector{JNT} = zeros(JNT, nUe), zeros(JNT, nŶe)
+    U0::Vector{JNT}, Ŷ0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nŶ)
+    Û0::Vector{JNT}, X̂0::Vector{JNT} = zeros(JNT, nU), zeros(JNT, nX̂)
+    gc::Vector{JNT}, g::Vector{JNT} = zeros(JNT, nc), zeros(JNT, ng)
+    geq::Vector{JNT} = zeros(JNT, neq)
+
+
+
+    # TODO: transfer all the following in set_nonlincon!, including a copy-paste
+    # of all the vectors above.
+    function gfunc!(g, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, geq)
+        update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃)
+        return nothing
+    end
+    Z̃_∇g = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call
+    ∇g_context = (
+        Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0),
+        Cache(Û0), Cache(K0), Cache(X̂0),
+        Cache(gc), Cache(geq),
+    )
+    # temporarily enable all the inequality constraints for sparsity detection:
+    mpc.con.i_g[1:end-nc] .= true
+    ∇g_prep = prepare_jacobian(gfunc!, g, jac, Z̃_∇g, ∇g_context...; strict)
+    mpc.con.i_g[1:end-nc] .= false
+    ∇g = init_diffmat(JNT, jac, ∇g_prep, nZ̃, ng)
+
+
+    function update_con!(g, ∇g, Z̃_∇g, Z̃_arg)
+        if isdifferent(Z̃_arg, Z̃_∇g)
+            Z̃_∇g .= Z̃_arg
+            value_and_jacobian!(gfunc!, g, ∇g, ∇g_prep, jac, Z̃_∇g, ∇g_context...)
+        end
+        return nothing
+    end
+    function gfunc_set!(g_arg, Z̃_arg)
+        update_con!(g, ∇g, Z̃_∇g, Z̃_arg)
+        g_arg .= @views g[mpc.con.i_g]
+        return nothing
+    end
+    ∇g_i_g = ∇g[mpc.con.i_g, :]
+    function ∇gfunc_set!(∇g_arg, Z̃_arg)
+        update_con!(g, ∇g, Z̃_∇g, Z̃_arg)
+        ∇g_i_g .= @views ∇g[mpc.con.i_g, :]
+        diffmat2vec!(∇g_arg, ∇g_i_g)
+        return nothing
+    end
+
+    g_min = fill(-myInf, sum(mpc.con.i_g))
+    g_max = zeros(JNT, sum(mpc.con.i_g))
+
+    ∇g_structure = init_diffstructure(∇g[mpc.con.i_g, :])
+
+    g_set = Ipopt._VectorNonlinearOracle(;
+        dimension = nZ̃,
+        l = g_min,
+        u = g_max,
+        eval_f = gfunc_set!,
+        jacobian_structure = ∇g_structure,
+        eval_jacobian = ∇gfunc_set!
+    )
+    @constraint(optim, Z̃var in g_set)
+
+
+
+    function geqfunc!(geq, Z̃, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g)
+        update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃)
+        return nothing
+    end
+    Z̃_∇geq = fill(myNaN, nZ̃) # NaN to force update_predictions! at first call
+    ∇geq_context = (
+        Cache(ΔŨ), Cache(x̂0end), Cache(Ue), Cache(Ŷe), Cache(U0), Cache(Ŷ0),
+        Cache(Û0), Cache(K0), Cache(X̂0),
+        Cache(gc), Cache(g)
+    )
+    ∇geq_prep = prepare_jacobian(geqfunc!, geq, jac, Z̃_∇geq, ∇geq_context...; strict)
+    ∇geq = init_diffmat(JNT, jac, ∇geq_prep, nZ̃, neq)
+
+
+    function update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg)
+        if isdifferent(Z̃_arg, Z̃_∇geq)
+            Z̃_∇geq .= Z̃_arg
+            value_and_jacobian!(geqfunc!, geq, ∇geq, ∇geq_prep, jac, Z̃_∇geq, ∇geq_context...)
+        end
+        return nothing
+    end
+    function geqfunc_set!(geq_arg, Z̃_arg)
+        update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg)
+        geq_arg .= geq
+        return nothing
+    end
+    function ∇geqfunc_set!(∇geq_arg, Z̃_arg)
+        update_con_eq!(geq, ∇geq, Z̃_∇geq, Z̃_arg)
+        diffmat2vec!(∇geq_arg, ∇geq)
+        return nothing
+    end
+
+    geq_min = zeros(JNT, mpc.con.neq)
+    geq_max = zeros(JNT, mpc.con.neq)
+
+    ∇geq_structure = init_diffstructure(∇geq)
+
+    #=
+    # Lagrangian of the optimization problem:
+    function Lfunc!(Z̃, μ, ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq)
+        update_predictions!(ΔŨ, x̂0end, Ue, Ŷe, U0, Ŷ0, Û0, K0, X̂0, gc, g, geq, mpc, Z̃)
+        J = obj_nonlinprog!(Ŷ0, U0, mpc, model, Ue, Ŷe, ΔŨ)
+        L = J + dot(μ, geq)
+        return L
+    end
+    =#
+
+
+    geq_set = Ipopt._VectorNonlinearOracle(;
+        dimension = nZ̃,
+        l = geq_min,
+        u = geq_max,
+        eval_f = geqfunc_set!,
+        jacobian_structure = ∇geq_structure,
+        eval_jacobian = ∇geqfunc_set!
+    )
+    @constraint(optim, Z̃var in geq_set)
     return nothing
 end
 
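
Note: Ipopt._VectorNonlinearOracle is a private, experimental Ipopt.jl API (hence the leading underscore), so its exact signature may change between releases. The sketch below mirrors only the keyword arguments used in the diff above, with a hypothetical two-variable constraint g(x) = [x1 + x2, x1*x2] and hand-written Jacobian callbacks standing in for the DifferentiationInterface machinery; the names g!, ∇g!, jac_structure and g_set are illustrative only:

using JuMP, Ipopt

# toy constraint -Inf .≤ [x[1] + x[2], x[1]*x[2]] .≤ [1, 2], for illustration only
function g!(ret, x)
    ret[1] = x[1] + x[2]
    ret[2] = x[1]*x[2]
    return nothing
end
# (row, col) of every Jacobian entry, in the same format init_diffstructure builds:
jac_structure = [(1, 1), (1, 2), (2, 1), (2, 2)]
# fill the Jacobian values in the same order as jac_structure:
function ∇g!(ret, x)
    ret[1] = 1.0
    ret[2] = 1.0
    ret[3] = x[2]
    ret[4] = x[1]
    return nothing
end
g_set = Ipopt._VectorNonlinearOracle(;
    dimension = 2,                   # length of the decision vector, like nZ̃ above
    l = [-Inf, -Inf],
    u = [1.0, 2.0],
    eval_f = g!,
    jacobian_structure = jac_structure,
    eval_jacobian = ∇g!,
)
optim = JuMP.Model(Ipopt.Optimizer)
@variable(optim, x[1:2])
@constraint(optim, x in g_set)       # same `Z̃var in g_set` form as in set_nonlincon_exp
# optimize!(optim) would then solve a feasibility problem subject to this oracle constraint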

src/general.jl

Lines changed: 16 additions & 2 deletions
@@ -58,8 +58,22 @@ function limit_solve_time(optim::GenericModel, Ts)
 end
 
 "Init a differentiation result matrix as dense or sparse matrix, as required by `backend`."
-init_diffmat(T, backend::AbstractADType, _ , nx , ny) = Matrix{T}(undef, ny, nx)
-init_diffmat(T, backend::AutoSparse ,prep , _ , _ ) = similar(sparsity_pattern(prep), T)
+init_diffmat(T, ::AbstractADType, _ , nx, ny) = zeros(T, ny, nx)
+function init_diffmat(T, ::AutoSparse, prep , _ , _ )
+    A = similar(sparsity_pattern(prep), T)
+    return A .= 0
+end
+
+"Init the sparsity structure of matrix `A` as required by `JuMP.jl`."
+function init_diffstructure(A::AbstractSparseMatrix)
+    I, J = findnz(A)
+    return collect(zip(I, J))
+end
+init_diffstructure(A::AbstractMatrix) = Tuple.(CartesianIndices(A))[:]
+
+"Store the differentiation matrix `A` in the vector `v` as required by `JuMP.jl`."
+diffmat2vec!(v::AbstractVector, A::AbstractSparseMatrix) = v .= nonzeros(A)
+diffmat2vec!(v::AbstractVector, A::AbstractMatrix) = v[:] = A
 
 backend_str(backend::AbstractADType) = string(nameof(typeof(backend)))
 function backend_str(backend::AutoSparse)
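
Note: a quick sketch of what the two new helpers produce for a small sparse Jacobian, assuming the definitions above are in scope and SparseArrays is loaded (it provides AbstractSparseMatrix, findnz, nonzeros and nnz); the matrix A is a made-up stand-in for a sparse ∇g:

using SparseArrays

A = sparse([1.0 0.0 3.0; 0.0 2.0 0.0])   # stand-in for a sparse Jacobian
init_diffstructure(A)                    # [(1, 1), (2, 2), (1, 3)]: (row, col) pairs in CSC storage order
v = zeros(nnz(A))
diffmat2vec!(v, A)                       # v == [1.0, 2.0, 3.0], matching the structure's order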
