Commit
Merge pull request #1010 from backyes/fix_gradient_clipping
fix bug: gradient_clipping_threshold should be allowed to be set at parameter granularity
reyoung committed Dec 26, 2016
2 parents 87170a7 + 224e5fc commit 9ae7a10
Showing 1 changed file with 25 additions and 15 deletions.
40 changes: 25 additions & 15 deletions python/paddle/trainer_config_helpers/attrs.py
@@ -19,34 +19,34 @@


def convert_and_compare(x, Type):
"""
Convert x to be the same type as Type and then convert back to
check whether there is a loss of information
:param x: object to be checked
:param Type: target type to check x over
"""
Convert x to be the same type as Type and then convert back to
check whether there is a loss of information
:param x: object to be checked
:param Type: target type to check x over
"""
return type(x)(Type(x)) == x
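# For example, the round-trip check detects lossy conversions:
#   convert_and_compare(3, float)  -> True   (int(float(3)) == 3)
#   convert_and_compare(3.5, int)  -> False  (float(int(3.5)) == 3.0 != 3.5)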


def is_compatible_with(x, Type):
"""
Check if x has a type compatible with Type
:param x: object to be checked
:param Type: target type to check x over
"""
Check if x has a type compatible with Type
:param x: object to be checked
:param Type: target type to check x over
"""
if type(x) == Type:
return True
try:
if float == Type or int == Type:
# avoid those types that can be converted to float/int but not very
# meaningful and could potentially lead to error
# i.e., str and bool typed value should not be used for initializing float/int variable
# avoid those types that can be converted to float/int but not very
# meaningful and could potentially lead to error
# i.e., str and bool typed value should not be used for initializing float/int variable
if not isinstance(x, str) and not isinstance(x, bool):
return convert_and_compare(x, Type)
elif bool == Type:
# should not use string type to initialize bool variable
# should not use string type to initialize bool variable
if not isinstance(x, str):
return convert_and_compare(x, Type)
else:
@@ -88,6 +88,10 @@ class ParameterAttribute(object):
    :type learning_rate: float or None
    :param momentum: The parameter momentum. None means use global value.
    :type momentum: float or None
    :param gradient_clipping_threshold: gradient clipping threshold. If a
                                        gradient value is larger than this
                                        threshold, it will be clipped.
    :type gradient_clipping_threshold: float
    :param sparse_update: Enable sparse update for this parameter. It will
                          enable both local and remote sparse update.
    :type sparse_update: bool
@@ -104,6 +108,7 @@ def __init__(self,
                 l2_rate=None,
                 learning_rate=None,
                 momentum=None,
                 gradient_clipping_threshold=None,
                 sparse_update=False):
        # initialize strategy.
        if is_static:
@@ -152,6 +157,11 @@ def __init__(self,
            self.attr['sparse_update'] = True
            self.attr['sparse_remote_update'] = True

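        # Record the per-parameter clipping threshold only when the value is
        # float-compatible; non-numeric values are silently ignored.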
        if gradient_clipping_threshold is not None and \
                is_compatible_with(gradient_clipping_threshold, float):
            self.attr['gradient_clipping_threshold'] = \
                gradient_clipping_threshold

    def set_default_parameter_name(self, name):
        """
        Set the default parameter name. If the parameter name is not set, then the default
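With this change, a clipping threshold can be attached to an individual parameter instead of only being set globally. A minimal usage sketch, assuming the v1-era trainer_config_helpers API (the data_layer/fc_layer network below is illustrative, not part of this commit):

from paddle.trainer_config_helpers import *

# Clip only this layer's weight gradients at 5.0; other parameters keep
# the global clipping behavior.
data = data_layer(name='input', size=784)
hidden = fc_layer(
    input=data,
    size=128,
    param_attr=ParameterAttribute(gradient_clipping_threshold=5.0))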
