/
summary.py
86 lines (76 loc) · 2.68 KB
/
summary.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
#!/usr/bin/env python2
# -*- coding: UTF-8 -*-
# File: summary.py
# Author: Yuxin Wu <ppwwyyxx@gmail.com>
import tensorflow as tf
import logger
from .naming import *
def create_summary(name, v):
    """Build a ``tf.Summary`` protobuf holding one scalar value.

    Args:
        name: tag under which the value is recorded; must be a string.
        v: the value; anything convertible to ``float``.

    Returns:
        A ``tf.Summary`` containing a single ``simple_value`` entry.
    """
    assert isinstance(name, basestring), type(name)
    summ = tf.Summary()
    summ.value.add(tag=name, simple_value=float(v))
    return summ
def add_activation_summary(x, name=None):
    """Add histogram and sparsity summaries for an activation tensor.

    Args:
        x: the activation tensor; must have rank >= 2.
        name: tag prefix for the summaries; defaults to ``x.name``.
    """
    assert x.get_shape().ndims >= 2, \
        "Summary a scalar with histogram? Maybe use scalar instead. FIXME!"
    tag = x.name if name is None else name
    tf.histogram_summary(tag + '/activations', x)
    tf.scalar_summary(tag + '/sparsity', tf.nn.zero_fraction(x))
    # TODO avoid repeating activations on multiple GPUs
def add_histogram_summary(regex):
    """
    Add histogram summary for all trainable variables matching the regex

    Args:
        regex: a regular-expression string matched (via ``search``) against
            each trainable variable's full name.
    """
    import re
    # Compile once instead of re-resolving the pattern string per variable.
    pattern = re.compile(regex)
    params = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
    for p in params:
        if pattern.search(p.name):
            tf.histogram_summary(p.name, p)
def summary_moving_average(cost_var):
    """Maintain exponential moving averages and summarize them.

    Tracks ``cost_var`` together with every variable registered under
    SUMMARY_VARS_KEY and COST_VARS_KEY using an ExponentialMovingAverage
    (decay 0.9, stepped by the global-step tensor), and adds a scalar
    summary of each variable's running average.

    Returns:
        The op that updates all the moving averages.
    """
    step = tf.get_default_graph().get_tensor_by_name(GLOBAL_STEP_VAR_NAME)
    ema = tf.train.ExponentialMovingAverage(
        0.9, num_updates=step, name='avg')
    tracked = [cost_var]
    tracked.extend(tf.get_collection(SUMMARY_VARS_KEY))
    tracked.extend(tf.get_collection(COST_VARS_KEY))
    maintain_op = ema.apply(tracked)
    for var in tracked:
        tf.scalar_summary(var.op.name, ema.average(var))
    return maintain_op
def describe_model():
    """Log the name, shape and element count of every trainable variable,
    followed by the total parameter count."""
    lines = [""]
    total_dim = 0
    for var in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES):
        shape = var.get_shape()
        dim = shape.num_elements()
        total_dim += dim
        lines.append("{}: shape={}, dim={}".format(
            var.name, shape.as_list(), dim))
    lines.append("Total dim={}".format(total_dim))
    logger.info("Model Params: {}".format('\n'.join(lines)))
def get_shape_str(tensors):
    """Return the shape string for a tensor or a list of tensors.

    Args:
        tensors: a single tensor, or a list/tuple of tensors; each must
            expose ``get_shape().as_list()``.

    Returns:
        ``str(shape_list)`` for a single tensor (e.g. ``"[2, 3]"``), or the
        comma-joined shape strings for a sequence (e.g. ``"[2],[3, 4]"``).
    """
    if isinstance(tensors, (list, tuple)):
        # Bugfix: the old code did map(str(x...), tensors), which called
        # str() immediately on an undefined name `x` (NameError) instead of
        # mapping a function over the tensors.
        shape_str = ",".join(
            str(t.get_shape().as_list()) for t in tensors)
    else:
        shape_str = str(tensors.get_shape().as_list())
    return shape_str