
Commit

Make logging output more relevant for modeling.
Leif Johnson committed Sep 9, 2015
1 parent 5b173e0 commit f46f6ef
Showing 3 changed files with 6 additions and 8 deletions.
theanets/graph.py: 8 changes (6 additions, 2 deletions)
@@ -260,7 +260,6 @@ def set_loss(self, *args, **kwargs):
         All parameters and keyword arguments are passed to :func:`add_loss`
         after clearing the current losses.
         '''
-        logging.info('resetting model losses!')
         self.losses = []
         self.add_loss(*args, **kwargs)

@@ -441,9 +440,13 @@ def build_graph(self, regularizers=()):
             A list of updates that should be performed by a theano function that
             computes something using this graph.
         '''
+        regularizers = tuple(regularizers)
         key = self._hash(regularizers)
         if key not in self._graphs:
-            logging.info('building computation graph')
+            for loss in self.losses:
+                loss.log()
+            for reg in regularizers:
+                reg.log()
             outputs = {i.name: i for i in self.inputs}
             updates = []
             for layer in self.layers:
@@ -543,6 +546,7 @@ def feed_forward(self, x, **kwargs):
         if key not in self._functions:
             outputs, updates = self.build_graph(regs)
             labels, exprs = list(outputs.keys()), list(outputs.values())
+            logging.info('compiling feed_forward function')
             self._functions[key] = (labels, theano.function(
                 self.inputs, exprs, updates=updates))
         labels, f = self._functions[key]
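Taken together, the graph.py changes move diagnostic logging out of component constructors (see the losses.py and regularizers.py hunks below) and into build_graph, so losses and regularizers are described once per newly built graph rather than every time an object is created; the added tuple() call presumably also lets the regularizers be both hashed and iterated even when a caller passes a one-shot generator. A minimal, self-contained sketch of the resulting pattern follows; the class bodies are illustrative stand-ins, not the real theanets implementations:

import logging

class Loss:
    '''Stand-in for theanets.losses.Loss; after this commit its
    __init__ no longer calls self.log().'''

    def __init__(self, output_name='out'):
        self.output_name = output_name

    def log(self):
        logging.info('loss on %s', self.output_name)

class Regularizer:
    '''Stand-in for theanets.regularizers.Regularizer.'''

    def __init__(self, pattern=None, weight=0.):
        self.pattern = pattern
        self.weight = weight

    def log(self):
        logging.info('regularizer %s, weight %s', self.pattern, self.weight)

class Network:
    def __init__(self):
        self.losses = [Loss()]
        self._graphs = {}

    def build_graph(self, regularizers=()):
        # tuple() lets us hash the regularizers and also iterate them
        # below, even if the caller passed a one-shot generator.
        regularizers = tuple(regularizers)
        key = hash(regularizers)
        if key not in self._graphs:
            # Describe the model exactly once per distinct graph.
            for loss in self.losses:
                loss.log()
            for reg in regularizers:
                reg.log()
            self._graphs[key] = 'graph'  # placeholder for the real graph
        return self._graphs[key]

logging.basicConfig(level=logging.INFO)
Network().build_graph([Regularizer(pattern='hid*', weight=0.1)])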
theanets/losses.py: 2 changes (0 additions, 2 deletions)
@@ -53,8 +53,6 @@ def __init__(self, target, weight=1., weighted=False, output_name='out'):
         if ':' not in self.output_name:
             self.output_name += ':out'
 
-        self.log()
-
     @property
     def variables(self):
         '''A list of Theano variables used in this loss.'''
theanets/regularizers.py: 4 changes (0 additions, 4 deletions)
@@ -152,7 +152,6 @@ class Regularizer(util.Registrar(str('Base'), (), {})):
     def __init__(self, pattern=None, weight=0.):
         self.pattern = pattern
         self.weight = weight
-        self.log()
 
     def log(self):
         '''Log some diagnostic info about this regularizer.'''
@@ -396,7 +395,6 @@ def loss(self, layers, outputs):
         pattern = ns[0] if len(ns) == 1 else '{' + ','.join(ns) + '}'
         matches = util.outputs_matching(outputs, pattern)
         hiddens = [expr for _, expr in matches]
-        logging.info('found %s matching %s', hiddens, pattern)
         if not hiddens:
             return 0
         return sum(abs(h).mean() for h in hiddens) / len(hiddens)
@@ -490,9 +488,7 @@ def loss(self, layers_, outputs):
         ns = [l.output_name() for l in layers_[1:-1]]
         pattern = ns[0] if len(ns) == 1 else '{' + ','.join(ns) + '}'
         targets = [expr for _, expr in util.outputs_matching(outputs, pattern)]
-        logging.info('found %s matching %s', targets, pattern)
         if not targets:
-            raise ValueError
             return 0
         wrt = [l.input for l in layers_
                if isinstance(l, layers.Input) and
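Note that the last hunk does slightly more than trim logging: with the raise ValueError line gone, a pattern that matches no outputs now yields a loss contribution of 0 (mirroring the hidden-activity hunk above) instead of raising. For reference, here is a hedged sketch of what a helper like util.outputs_matching might do, guessed from how the patterns are built in these hunks ('{a,b}' read as alternation over glob-style output names); this is an assumption, not the actual theanets code:

import fnmatch

def outputs_matching(outputs, pattern):
    # Expand a '{a,b,c}' alternation into separate glob patterns.
    if pattern.startswith('{') and pattern.endswith('}'):
        patterns = pattern[1:-1].split(',')
    else:
        patterns = [pattern]
    # Yield (name, expression) pairs whose name matches any alternative.
    for name, expr in sorted(outputs.items()):
        if any(fnmatch.fnmatch(name, p) for p in patterns):
            yield name, expr

# Example: match two hidden-layer outputs, as the diff's pattern would.
exprs = {'hid1:out': 'expr1', 'hid2:out': 'expr2', 'in:out': 'expr0'}
print([name for name, _ in outputs_matching(exprs, '{hid1:out,hid2:out}')])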

