Skip to content

Commit

Permalink
Updated demo files for differentiator
Browse files Browse the repository at this point in the history
  • Loading branch information
Felix-Mac committed May 23, 2023
1 parent f9f32ac commit 0ebe06d
Show file tree
Hide file tree
Showing 2 changed files with 96 additions and 2 deletions.
60 changes: 60 additions & 0 deletions examples/nlpdifferentiator/cstr_mpc_differentiater.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@

# This file is part of do-mpc
#
# do-mpc: An environment for the easy, modular and efficient implementation of
# robust nonlinear model predictive control
#
# Copyright (c) 2014-2019 Sergio Lucia, Alexandru Tatulea-Codrean
# TU Dortmund. All rights reserved
#
# do-mpc is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version.
#
# do-mpc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with do-mpc. If not, see <http://www.gnu.org/licenses/>.

# %%
import sys
import os
import numpy as np
import casadi.tools as castools

sys.path.append(os.path.join('..','..'))
sys.path.append(os.path.join('..','CSTR'))
import do_mpc
from template_model import template_model
from template_mpc import template_mpc
from template_simulator import template_simulator


# %%
# Assemble the CSTR control loop from the example templates:
# symbolic model, MPC controller and plant simulator.
model = template_model()
mpc = template_mpc(model)
simulator = template_simulator(model)

# %%
# Attach a differentiator to the MPC optimization problem to obtain
# parametric sensitivities of the optimal NLP solution.
nlp_diff = do_mpc.differentiator.DoMPCDifferentiator(mpc)
# Skip the regularity checks (rank / LICQ) for this demo run.
nlp_diff.settings.check_rank = False
nlp_diff.settings.check_LICQ = False

# %%
# Initial condition as a column vector
# (presumably [C_a, C_b, T_R, T_K] — confirm against template_model).
x0 = np.array([0.5, 0.5, 134.14, 130.0]).reshape(-1, 1)
simulator.x0 = x0

# %%
# Solve the MPC problem once so the differentiator has an optimal
# solution to linearize around.
mpc.make_step(simulator.x0)

# %%
# Primal (dx/dp) and dual (dlam/dp) sensitivities of the solution.
dxdp, dlamdp = nlp_diff.differentiate()

# %%
dlamdp.shape

# %%
nlp_diff.status

# %%
# Sensitivity of the first control input with respect to the initial state.
nlp_diff.sens_num['dxdp', castools.indexf['_u', 0, 0], castools.indexf['_x0']]

# %%
38 changes: 36 additions & 2 deletions examples/nlpdifferentiator/demo_nlp_differentiator.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,35 @@

# This file is part of do-mpc
#
# do-mpc: An environment for the easy, modular and efficient implementation of
# robust nonlinear model predictive control
#
# Copyright (c) 2014-2019 Sergio Lucia, Alexandru Tatulea-Codrean
# TU Dortmund. All rights reserved
#
# do-mpc is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version.
#
# do-mpc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with do-mpc. If not, see <http://www.gnu.org/licenses/>.

# %%
import casadi as ca
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
import pdb

import os
import sys
plt.style.use('dark_background')

sys.path.append(os.path.join('..','..'))
import do_mpc
Expand Down Expand Up @@ -62,14 +87,21 @@ def get_optim_diff(nlp, nlp_bounds):
dxdp_test[i] = dxdp.full().flatten()
# %%
# Plot the optimal solution x*(p) and its parametric sensitivity dx*/dp.
fig, ax = plt.subplots(2, sharex=True)

colors = mpl.rcParams['axes.prop_cycle'].by_key()['color']

ax[0].plot(p_test, x_test[:,0], label='$x_0^*(p)$')
ax[1].plot(p_test, x_test[:,1], label='$x_1^*(p)$')

# Raw strings for the LaTeX labels: '\p' in a non-raw literal is an
# invalid escape sequence (DeprecationWarning; SyntaxWarning in 3.12+).
# The rendered text is byte-identical.
ax[0].plot(p_test, dxdp_test[:,0], label=r'$\partial_p x_0^*(p)$')
ax[1].plot(p_test, dxdp_test[:,1], label=r'$\partial_p x_1^*(p)$')

# Draw sensitivity arrows only at every fifth sample to avoid clutter.
every_nth = 5

s = slice(None, None, every_nth)

ax[0].quiver(p_test[s], x_test[s,0], np.ones_like(p_test[s]), dxdp_test[s,0], angles='xy', color=colors[1])
ax[1].quiver(p_test[s], x_test[s,1], np.ones_like(p_test[s]), dxdp_test[s,1], angles='xy', color=colors[1])

ax[1].set_xlabel('$p$')

Expand All @@ -78,5 +110,7 @@ def get_optim_diff(nlp, nlp_bounds):

ax[0].set_title('Optimal solution and sensitivity depending on parameter $p$')

# Save the figure before the blocking show() call; the '_dark' suffix
# reflects the dark_background style selected at the top of the file.
fig.savefig('demo_nlp_differentiator_dark.svg', format='svg')
plt.show(block=True)

# %%

0 comments on commit 0ebe06d

Please sign in to comment.