Dynamic HTML update on maximize in Jupyter
jpn-- committed Jan 27, 2017
1 parent 062d2e7 commit 6b26acf
Showing 4 changed files with 61 additions and 13 deletions.
9 changes: 6 additions & 3 deletions py/jupyter.py
@@ -148,11 +148,9 @@ def ipython_status(magic_matplotlib=True):
# message_set.add('IPython inline plotting not available')
# else:
message_set.add('IPython')

# Caution: cfg is an IPython.config.loader.Config
if cfg['IPKernelApp']:
message_set.add('IPython QtConsole')

try:
if cfg['IPKernelApp']['pylab'] == 'inline':
message_set.add('pylab inline')
@@ -176,7 +174,12 @@ def ipython_status(magic_matplotlib=True):
try:
stylesheet()
larch_tag()
jupyter_active = True
except:
pass
jupyter_active = False
else:
jupyter_active = True
else:
jupyter_active = False
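
The net effect of this hunk is a module-level jupyter_active flag: the notebook-only setup calls (stylesheet() and larch_tag()) are attempted once, and the flag records whether they succeeded. A minimal self-contained sketch of the same detection idea, using only IPython's public get_ipython() API (the _detect_jupyter name is hypothetical, not Larch code):

from IPython import get_ipython

jupyter_active = False

def _detect_jupyter():
    # Probe the running shell and record the outcome in a module-level
    # flag, mirroring the try/except/else structure above.
    global jupyter_active
    shell = get_ipython()
    # ZMQInteractiveShell is the shell class used by notebook kernels;
    # plain terminal IPython uses TerminalInteractiveShell instead.
    jupyter_active = (shell is not None
                      and type(shell).__name__ == 'ZMQInteractiveShell')

_detect_jupyter()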


38 changes: 32 additions & 6 deletions py/model_reporter/art.py
@@ -1370,8 +1370,14 @@ def new_xhtml_idco_variable_analysis(self, figurename, *names, description_catal


def art_simple_parameters(self, foot=None):
a = ART(columns=('PARAM','VALUE','GRAD','HOLD'), n_head_rows=1, title="<larch.Model> "+self.title, short_title="<larch.Model>", n_rows=len(self)+1)
a.set_jrow_kwd_strings(0, PARAM="Parameter", VALUE="Value", GRAD="Gradient", HOLD="Holdfast")
any_holdfast = numpy.any(self.parameter_holdfast_array)
if any_holdfast:
a = ART(columns=('PARAM','VALUE','GRAD','HOLD'), n_head_rows=1, title="<larch.Model> "+self.title, short_title="<larch.Model>", n_rows=len(self)+1)
a.set_jrow_kwd_strings(0, PARAM="Parameter", VALUE=" Value", GRAD=" Gradient", HOLD="Holdfast")
else:
a = ART(columns=('PARAM','VALUE','GRAD'), n_head_rows=1, title="<larch.Model> "+self.title, short_title="<larch.Model>", n_rows=len(self)+1)
a.set_jrow_kwd_strings(0, PARAM="Parameter", VALUE=" Value", GRAD=" Gradient")

names = self.parameter_names()

try:
@@ -1382,13 +1388,16 @@ def art_simple_parameters(self, foot=None):
for j in range(len(self)):
j1 = j+1
a.set_jrow_iloc(j1, 0, names[j], attrib=None, anchorlabel=None)
a.set_jrow_iloc(j1, 1, self.parameter_array[j], attrib=None, anchorlabel=None)
a.set_jrow_iloc(j1, 1, "{:< 20.12g}".format(self.parameter_array[j]).replace(" ","\u00a0"), attrib=None, anchorlabel=None)
if g is None:
a.set_jrow_iloc(j1, 2, 'N/A', attrib=None, anchorlabel=None)
else:
a.set_jrow_iloc(j1, 2, g[j], attrib=None, anchorlabel=None)
if self.parameter_holdfast_array[j]:
a.set_jrow_iloc(j1, 3, self.parameter_holdfast_array[j], attrib=None, anchorlabel=None)
a.set_jrow_iloc(j1, 2, "{:< 20.12g}".format(g[j]).replace(" ","\u00a0"), attrib=None, anchorlabel=None)
if any_holdfast:
if self.parameter_holdfast_array[j]:
a.set_jrow_iloc(j1, 3, self.parameter_holdfast_array[j], attrib=None, anchorlabel=None)
else:
a.set_jrow_iloc(j1, 3, " ", attrib=None, anchorlabel=None)

if isinstance(foot,str):
a.footnotes.append(foot)
Expand All @@ -1398,3 +1407,20 @@ def art_simple_parameters(self, foot=None):
elif foot is not None:
a.footnotes.append(str(foot))
return a
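
The VALUE and GRAD cells above are rendered with the format spec {:< 20.12g} and then have their spaces swapped for non-breaking spaces, so the numbers stay aligned when the ART table is emitted as HTML (where runs of ordinary spaces collapse to one). A standalone illustration of that formatting, independent of Larch:

# '<' left-aligns, ' ' reserves a sign column so positive and negative
# values line up, 20 is the field width, and .12g keeps 12 significant
# digits; U+00A0 (no-break space) survives HTML whitespace collapsing.
for value in (3.14159, -0.00012345, 1234567.0):
    cell = "{:< 20.12g}".format(value).replace(" ", "\u00a0")
    print(repr(cell))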

def _art_simple_status(self, *arg_ignored, **kwarg_ignored):
from ..jupyter import jupyter_active
if jupyter_active:
try:
iterat = self._iteration
except AttributeError:
self._iteration = iterat = 1
else:
self._iteration += 1

from IPython import display
display.clear_output(wait=True)
display.display_html(self.art_simple_parameters(foot=[
"At iteration {}".format(iterat),
"Convergence Tolerance = {}".format(self.bhhh_tolerance()),
]))
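
_art_simple_status is the piece that produces the dynamic HTML update named in the commit title: each time the optimizer calls it, the cell's previous output is cleared and the parameter table is redrawn in place. clear_output(wait=True) and display are the standard IPython.display API; the loop below is a self-contained sketch of the same redraw pattern (the HTML body is a stand-in for the ART table):

import time
from IPython.display import HTML, clear_output, display

# Redraw-in-place pattern: wait=True defers the clear until replacement
# output arrives, which avoids visible flicker between iterations.
for iteration in range(1, 6):
    clear_output(wait=True)
    display(HTML("<b>At iteration {}</b>".format(iteration)))
    time.sleep(0.5)  # stand-in for one optimizer step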
1 change: 1 addition & 0 deletions py/util/optimize/__init__.py
@@ -42,6 +42,7 @@ def optimizers(model, *arg, ctol=1e-7):
ot = OptimizeTechniques(ctol=ctol, ctol_fun=model.bhhh_tolerance, logger=model.logger(),
fun = model.negative_loglike, bhhh = model.bhhh,
jac = model.negative_d_loglike,
callback=model._art_simple_status,
)
for a in arg:
if isinstance(a,dict):
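
The single added line threads the model's status callback into the optimizer wiring. In scipy-style optimizers, a callback is a function invoked once per iteration with the current parameter vector, which is exactly the contract _art_simple_status is plugged into; a minimal sketch with scipy.optimize.minimize directly (Rosenbrock is just a convenient test function):

import numpy as np
from scipy.optimize import minimize

def rosen(x):
    # Rosenbrock test function, minimized at x = (1, 1).
    return sum(100.0 * (x[1:] - x[:-1]**2)**2 + (1.0 - x[:-1])**2)

def status(xk):
    # Invoked by the optimizer once per iteration with the current point.
    print("current point:", xk)

result = minimize(rosen, np.array([1.3, 0.7]), method="BFGS", callback=status)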
26 changes: 22 additions & 4 deletions py/util/optimize/grinder.py
@@ -8,6 +8,7 @@
import numpy
from ...core import LarchError, LarchCacheError, runstats
import time as _time
from ...jupyter import jupyter_active

class outcomes(Enum):
success = 1
@@ -91,12 +92,23 @@ def __call__(self, x):
raise ProgressTooSlow('average improvement only {:.4g} over {} iterations'.format(-avg,length), x, self.count, slowness=(-avg,length))
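
ProgressTooSlow lets the meta-optimizer abandon a technique whose recent average improvement has fallen below a threshold and move on to another. A self-contained sketch of that watcher idea (only the raise message is taken from the diff; the class body and names here are illustrative, not the actual Watcher internals):

from collections import deque

class SlowProgress(Exception):
    pass

class ProgressWatcher:
    # Remember the last `length` objective values; when minimizing, the
    # per-iteration improvement is the drop in the objective.
    def __init__(self, fun, length=10, thresh=1e-6):
        self.fun, self.length, self.thresh = fun, length, thresh
        self.history = deque(maxlen=length)
        self.count = 0
    def __call__(self, x):
        self.count += 1
        self.history.append(self.fun(x))
        if len(self.history) == self.length:
            avg = (self.history[-1] - self.history[0]) / (self.length - 1)
            if -avg < self.thresh:
                raise SlowProgress('average improvement only {:.4g} over {} iterations'.format(-avg, self.length))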



def minimize_with_watcher(fun, x0, args=(), *, slow_len=(), slow_thresh=(), ctol_fun=None, ctol=1e-6, logger=None, callback=None, method_str="default method", options={}, **k):
watcher = Watcher(fun, x0, ctol_fun=ctol_fun, ctol=ctol, logger=logger, slow_len=slow_len, slow_thresh=slow_thresh)
if callback:
new_callback = lambda z: (callback(z), watcher(z), )
if 'callback' in options:
option_callback = options['callback']
new_callback = lambda z: (callback(z), watcher(z), option_callback(z))
del options['callback']
else:
new_callback = lambda z: (callback(z), watcher(z), )
else:
new_callback = watcher
if 'callback' in options:
option_callback = options['callback']
new_callback = lambda z: (watcher(z), option_callback(z))
del options['callback']
else:
new_callback = lambda z: (watcher(z), )
if method_str in ("bhhh","bhhh-wolfe",):
options['logger'] = logger
try:
@@ -142,12 +154,13 @@ def __str__(self):
def __repr__(self):
return self.__str__()
def __init__(self, method, fun, *, slow_len=(), slow_thresh=(), ctol=None, ctol_fun=None, options=None,
bhhh=None, init_inv_hess=None, jac=None, logger=None, hess=None):
bhhh=None, init_inv_hess=None, jac=None, logger=None, hess=None, callback=None):
self.fun = fun
self.bhhh = bhhh
self.init_inv_hess = init_inv_hess
self.hess = hess
self.method_str = str(method)
self.callback = callback
self.options = {} if options is None else options
if isinstance(method,str) and method.lower()=='bfgs-init':
from .algorithms import _minimize_bfgs_1
@@ -197,6 +210,7 @@ def __call__(self, ignored_fun, x0, args=(), options={}, **kwargs):
jac = self.jac,
method_str = self.method_str,
logger = self.logger,
callback = self.callback,
)

if 'ctol' in kwargs and kwargs['ctol'] is not None:
@@ -299,7 +313,8 @@ def messages(self):
return s

class OptimizeTechniques():
def __init__(self, techniques=None, ctol_fun=None, ctol=1e-6, logger=None, fun=None, jac=None, hess=None, bhhh=None, start_timer=None, end_timer=None):
def __init__(self, techniques=None, ctol_fun=None, ctol=1e-6, logger=None, fun=None, jac=None, hess=None, bhhh=None,
start_timer=None, end_timer=None, callback=None):
self._techniques = [] if techniques is None else list(techniques)
self.meta_iteration = 0
self.ctol_fun = ctol_fun
@@ -311,6 +326,7 @@ def __init__(self, techniques=None, ctol_fun=None, ctol=1e-6, logger=None, fun=N
self._bhhh = bhhh
self._start_timer = start_timer
self._end_timer = end_timer
self._callback = callback
def unfail_all(self, but=None):
for i in self._techniques:
if i is not but:
@@ -326,6 +342,8 @@ def add(self, *arg, **kwarg):
kwarg['hess'] = self._hess
if self.ctol is not None and 'ctol' not in kwarg:
kwarg['ctol'] = self.ctol
if self._callback is not None and 'callback' not in kwarg:
kwarg['callback'] = self._callback
self._techniques.append(OptimizeTechnique(*arg, **kwarg))
def __len__(self):
return len(self._techniques)
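
Earlier in this file, minimize_with_watcher folds up to three per-iteration callbacks (the caller's, the watcher's, and any callback already present in options) into the single callable a scipy-style minimizer accepts. The same composition can be expressed once as a small helper (chain_callbacks is a hypothetical name, not part of this codebase):

def chain_callbacks(*callbacks):
    # Collapse any number of per-iteration callbacks into one callable,
    # skipping Nones, so the minimizer sees a single callback argument.
    active = [cb for cb in callbacks if cb is not None]
    def combined(xk):
        for cb in active:
            cb(xk)
    return combined

# e.g. new_callback = chain_callbacks(callback, watcher, options.pop('callback', None))

Written this way, the four lambda branches in minimize_with_watcher reduce to one code path.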
