-
Notifications
You must be signed in to change notification settings - Fork 23
/
KnitroSolverInterface.jl
161 lines (139 loc) · 5.17 KB
/
KnitroSolverInterface.jl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
importall MathProgBase.SolverInterface
###############################################################################
# Solver objects
export KnitroSolver

# Thin MathProgBase solver object.  It carries only the keyword options that
# are later forwarded to the model constructor (see `model(::KnitroSolver)`).
# `options` is left untyped because it stores the raw kwargs collection.
immutable KnitroSolver <: AbstractMathProgSolver
    options
end
# Capture every keyword argument as the solver's option set.
KnitroSolver(;kwargs...) = KnitroSolver(kwargs)
# Mutable model wrapper around the low-level KNITRO problem object.
# `inner` starts as `nothing` and is populated by `loadnonlinearproblem!`
# (via `createProblem()`); `options` is the option set passed through from
# the `KnitroSolver` that built this model.
type KnitroMathProgModel <: AbstractMathProgModel
    inner
    options
end
function KnitroMathProgModel(;options...)
    # No inner KNITRO problem yet; it is created in loadnonlinearproblem!.
    KnitroMathProgModel(nothing,options)
end
# MathProgBase entry point: build an empty model carrying the solver options.
model(s::KnitroSolver) = KnitroMathProgModel(;s.options...)
export model
###############################################################################
# Begin interface implementation
# Merge duplicate (row, col) triplets of a Hessian sparsity pattern.
#
# Each input triplet i is tagged with a singleton bucket `Int[i]`; entries are
# mirrored into the upper triangle, and `sparse` with `combine=vcat`
# concatenates the buckets of coincident entries.  Returns
# `(rows, cols, buckets)` where `buckets[k]` lists the original triplet
# positions that fold into merged nonzero k (so raw evaluator output can be
# summed into the merged pattern).
#
# Fix: the function has no `!` suffix, so it must not mutate the caller's
# index vectors — the previous version swapped I/J entries in place.  Work on
# copies instead; the in-file caller never relied on that mutation.
function sparse_merge_hess_duplicates(I,J, m, n)
    I = copy(I)
    J = copy(J)
    V = [Int[i] for i in 1:length(I)]
    for i in 1:length(I) # make upper triangular
        if I[i] > J[i]
            I[i],J[i] = J[i],I[i]
        end
    end
    findnz(sparse(I,J,V,m,n,vcat))
end
# Merge duplicate (row, col) triplets of a Jacobian sparsity pattern.
#
# Bucket k starts out holding only its own triplet index; `sparse` with
# `combine=vcat` concatenates the buckets of coincident entries, so the
# returned `(rows, cols, buckets)` gives, for every structural nonzero, the
# list of original triplet positions that map onto it.
function sparse_merge_jac_duplicates(I,J, m, n)
    buckets = [Int[k] for k in 1:length(I)]
    return findnz(sparse(I, J, buckets, m, n, vcat))
end
# generic nonlinear interface
#
# Build the KNITRO problem held in `m.inner` from a MathProgBase nonlinear
# description: variable bounds x_l/x_u, constraint bounds g_lb/g_ub, the
# objective sense, and an NLP evaluator `d` supplying objective, constraints,
# gradient, Jacobian and Hessian-of-Lagrangian values.
function loadnonlinearproblem!(m::KnitroMathProgModel,
                               numVar::Integer,
                               numConstr::Integer,
                               x_l, x_u, g_lb, g_ub,
                               sense::Symbol,
                               d::AbstractNLPEvaluator)
    # Request gradient, Jacobian and Hessian capabilities from the evaluator.
    initialize(d, [:Grad, :Jac, :Hess])
    # Sparsity patterns as parallel (row, col) triplet lists.
    Ijac, Jjac = jac_structure(d)
    Ihess, Jhess = hesslag_structure(d)
    nnzJ = length(Ijac)
    nnzH = length(Ihess)
    # Scratch buffers the evaluator fills with raw (possibly duplicated)
    # nonzero values on every callback invocation.
    jac_tmp = Array(Float64, nnzJ)
    hess_tmp = Array(Float64, nnzH)
    @assert length(Ijac) == length(Jjac)
    @assert length(Ihess) == length(Jhess)
    # Merge duplicate triplets; *_indices[k] lists the positions in the raw
    # evaluator output whose values must be summed into merged nonzero k.
    jac_con, jac_var, jac_indices = sparse_merge_jac_duplicates(Ijac, Jjac,
                                                                numConstr,
                                                                numVar)
    hess_row, hess_col, hess_indices = sparse_merge_hess_duplicates(Ihess,
                                                                    Jhess,
                                                                    numVar,
                                                                    numVar)
    n_jac_indices = length(jac_indices)
    n_hess_indices = length(hess_indices)
    x_l, x_u, g_lb, g_ub = float(x_l), float(x_u), float(g_lb), float(g_ub)
    @assert length(x_l) == length(x_u)
    @assert length(g_lb) == length(g_ub)
    # Replace IEEE infinities with KNITRO's finite "infinite bound" sentinel
    # (KTR_INFBOUND — presumably defined by the KNITRO wrapper; not visible
    # in this file).
    # NOTE(review): `float` can return its argument unchanged for Float64
    # arrays, so these loops may mutate the caller's bound vectors — confirm.
    for i in 1:length(x_l)
        if x_l[i] == -Inf
            x_l[i] = -KTR_INFBOUND
        end
        if x_u[i] == Inf
            x_u[i] = KTR_INFBOUND
        end
    end
    for i in 1:length(g_lb)
        if g_lb[i] == -Inf
            g_lb[i] = -KTR_INFBOUND
        end
        if g_ub[i] == Inf
            g_ub[i] = KTR_INFBOUND
        end
    end
    @assert sense == :Min || sense == :Max
    # Objective sense
    if sense == :Min
        objGoal = KTR_OBJGOAL_MINIMIZE
    else
        objGoal = KTR_OBJGOAL_MAXIMIZE
    end
    # allow for the possibility of specializing to LINEAR or QUADRATIC?
    objType = KTR_OBJTYPE_GENERAL
    c_Type = fill(KTR_CONTYPE_GENERAL, numConstr)
    # Objective callback
    eval_f_cb(x) = eval_f(d,x)
    # Objective gradient callback; reorders arguments into MathProgBase's
    # (evaluator, output, x) convention.
    eval_grad_f_cb(x, grad_f) = eval_grad_f(d, grad_f, x)
    # Constraint value callback
    eval_g_cb(x, g) = eval_g(d, g, x)
    # Jacobian callback: evaluate the raw triplet values, then sum duplicate
    # entries into the merged pattern KNITRO was given.
    function eval_jac_g_cb(x, jac)
        eval_jac_g(d, jac_tmp, x)
        for i in 1:n_jac_indices
            jac[i] = sum(jac_tmp[jac_indices[i]])
        end
    end
    # Hessian callback: same duplicate-summing scheme for the Lagrangian
    # Hessian evaluated at x with objective weight sigma and multipliers
    # lambda.
    function eval_h_cb(x, lambda, sigma, hess)
        eval_hesslag(d, hess_tmp, x, sigma, lambda)
        for i in 1:n_hess_indices
            hess[i] = sum(hess_tmp[hess_indices[i]])
        end
    end
    # Hessian-vector callback
    eval_hv_cb(x, lambda, sigma, hv) = eval_hesslag_prod(d, hv, x, sigma,
                                                         lambda)
    m.inner = createProblem()
    # ---
    # set options/parameters here
    # ---
    # The sparsity indices are shifted to 0-based form for KNITRO's C API.
    initializeProblem(m.inner, objGoal, objType, x_l, x_u, c_Type, g_lb, g_ub,
                      int32(jac_var-1), int32(jac_con-1), int32(hess_row-1),
                      int32(hess_col-1))
    setCallbacks(m.inner, eval_f_cb, eval_g_cb, eval_grad_f_cb, eval_jac_g_cb,
                 eval_h_cb, eval_hv_cb)
end
# --- Accessors over the wrapped KNITRO problem -------------------------------

# Optimization sense as stored on the inner problem, as an Int32.
function getsense(m::KnitroMathProgModel)
    return int32(m.inner.sense)
end

# Number of decision variables.
function numvar(m::KnitroMathProgModel)
    return int32(m.inner.n)
end

# Number of constraints.
function numconstr(m::KnitroMathProgModel)
    return int32(m.inner.m)
end

# Hand the fully-specified problem to KNITRO.
function optimize!(m::KnitroMathProgModel)
    return solveProblem(m.inner)
end

# Solver status as reported by KNITRO's application return code.
status(m::KnitroMathProgModel) = applicationReturnStatus(m.inner)

# First entry of the stored objective-value buffer.
function getobjval(m::KnitroMathProgModel)
    return m.inner.obj_val[1]
end

# Primal solution vector.
getsolution(m::KnitroMathProgModel) = m.inner.x

# Constraint values at the solution.
getconstrsolution(m::KnitroMathProgModel) = m.inner.g

# Duals are not extracted from KNITRO here: report zero vectors of the
# appropriate lengths.
function getreducedcosts(m::KnitroMathProgModel)
    return fill(0.0, m.inner.n)
end

function getconstrduals(m::KnitroMathProgModel)
    return fill(0.0, m.inner.m)
end

# Expose the underlying KNITRO problem object directly.
getrawsolver(m::KnitroMathProgModel) = m.inner
# Seed KNITRO with starting point `x`, keeping the current multipliers.
# The inner problem must already have been built by loadnonlinearproblem!.
function warmstart(m::KnitroMathProgModel, x)
    if m.inner.status != :Uninitialized
        restartProblem(m.inner, float64(x), m.inner.lambda)
    else
        error("KNITRO.jl: Error setting warm start. Initialize the problem using loadnonlinearproblem! first.")
    end
end

# MathProgBase-facing alias for `warmstart`.
setwarmstart!(m::KnitroMathProgModel, x) = warmstart(m,x)