Fix PEP8 #15378

Merged 1 commit on Dec 18, 2017
2 changes: 1 addition & 1 deletion tensorflow/contrib/opt/__init__.py
@@ -47,7 +47,7 @@
'VariableClippingOptimizer',
'MultitaskOptimizerWrapper',
'clip_gradients_by_global_norm',
'ElasticAverageOptimizer',
'ElasticAverageOptimizer',
'ElasticAverageCustomGetter'
]

@@ -79,9 +79,9 @@ def __call__(self, getter, name, trainable, collections, *args, **kwargs):
if trainable:
with ops.device(self._worker_device):
local_var = getter(name, trainable=True,
collections=[ops.GraphKeys.LOCAL_VARIABLES],
collections=[ops.GraphKeys.LOCAL_VARIABLES],
*args, **kwargs)

global_center_variable = variable_scope.variable(
name='%s/%s' %
(GLOBAL_VARIABLE_NAME,
@@ -96,7 +96,7 @@ def __call__(self, getter, name, trainable, collections, *args, **kwargs):
initial_value=local_var.initialized_value(),
trainable=False,
collections=[ops.GraphKeys.LOCAL_VARIABLES])

self._local_map[local_var] = local_center_variable
self._global_map[local_var] = global_center_variable
return local_var
@@ -173,7 +173,7 @@ def compute_gradients(self, loss, var_list=None,
colocate_gradients_with_ops=False,
grad_loss=None):
"""Compute gradients of `loss` for the variables in `var_list`.

Add rho*elastic_difference to loss to control the exploration
This is the first part of `minimize()`. It returns a list
of (gradient, variable) pairs where "gradient" is the gradient
Expand Down Expand Up @@ -204,7 +204,7 @@ def compute_gradients(self, loss, var_list=None,
"""
if not var_list:
var_list = variables.trainable_variables()

elastic_difference = [math_ops.subtract(v, lv) for v, lv in zip(
variables.trainable_variables(),
[self._local_map[var] for var in var_list])]
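The docstring in the hunk above says a rho*elastic_difference term is added to the loss to control exploration. As a rough illustration of that idea only (not the optimizer's actual implementation; the variable values, center values, rho, and the squared-distance form below are all illustrative assumptions), the penalty amounts to something like:

import numpy as np

# Hypothetical stand-ins for the current trainable values and the
# local center copies tracked by ElasticAverageCustomGetter.
trainable_values = [np.array([1.0, 2.0]), np.array([3.0])]
center_values = [np.array([0.5, 1.5]), np.array([2.0])]
rho = 0.1  # attraction strength; illustrative value only

# Elementwise differences, mirroring the elastic_difference list in the diff.
elastic_difference = [v - c for v, c in zip(trainable_values, center_values)]

# A rho-scaled distance penalty of this general form is what gets folded
# into the loss; the exact scaling used by the optimizer may differ.
penalty = rho * sum(np.sum(d * d) for d in elastic_difference)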
2 changes: 1 addition & 1 deletion tensorflow/contrib/tpu/profiler/pip_package/setup.py
@@ -70,7 +70,7 @@
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
],
license='Apache 2.0',
@@ -62,7 +62,7 @@ def decode(serialized_example):

# Convert label from a scalar uint8 tensor to an int32 scalar.
label = tf.cast(features['label'], tf.int32)

return image, label

def augment(image, label):
@@ -172,7 +172,7 @@ def run_training():
step += 1
except tf.errors.OutOfRangeError:
print('Done training for %d epochs, %d steps.' % (FLAGS.num_epochs, step))

def main(_):
run_training()

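The last hunk above sits inside the standard TF 1.x pattern of running the training op until the input pipeline signals exhaustion with tf.errors.OutOfRangeError. A minimal sketch of that loop, assuming a session, a train_op, and num_epochs are built as in the surrounding example (the helper name run_until_exhausted is made up for illustration):

import tensorflow as tf

def run_until_exhausted(sess, train_op, num_epochs):
  # Repeatedly run one training step; the epoch-limited input pipeline
  # raises OutOfRangeError once all data has been consumed.
  step = 0
  try:
    while True:
      sess.run(train_op)
      step += 1
  except tf.errors.OutOfRangeError:
    print('Done training for %d epochs, %d steps.' % (num_epochs, step))
  return step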