Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update glorot_uniform_initializer to match other Initializers #20108

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
8 changes: 4 additions & 4 deletions tensorflow/python/keras/initializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,8 @@
# These imports are brought in so that keras.initializers.deserialize
# has them available in module_objects.
from tensorflow.python.ops.init_ops import Constant
from tensorflow.python.ops.init_ops import glorot_normal_initializer
from tensorflow.python.ops.init_ops import glorot_uniform_initializer
from tensorflow.python.ops.init_ops import GlorotNormal
from tensorflow.python.ops.init_ops import GlorotUniform
from tensorflow.python.ops.init_ops import he_normal # pylint: disable=unused-import
from tensorflow.python.ops.init_ops import he_uniform # pylint: disable=unused-import
from tensorflow.python.ops.init_ops import Identity
Expand Down Expand Up @@ -56,8 +56,8 @@
truncated_normal = TruncatedNormal
identity = Identity
orthogonal = Orthogonal
glorot_normal = glorot_normal_initializer
glorot_uniform = glorot_uniform_initializer
glorot_normal = GlorotNormal
glorot_uniform = GlorotUniform

# pylint: enable=invalid-name

Expand Down
91 changes: 55 additions & 36 deletions tensorflow/python/ops/init_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -1116,29 +1116,9 @@ def __call__(self, shape, dtype=None, partition_info=None):
def get_config(self):
return {"gain": self.gain, "dtype": self.dtype.name}

# Aliases.

# pylint: disable=invalid-name
zeros_initializer = Zeros
ones_initializer = Ones
constant_initializer = Constant
random_uniform_initializer = RandomUniform
random_normal_initializer = RandomNormal
truncated_normal_initializer = TruncatedNormal
uniform_unit_scaling_initializer = UniformUnitScaling
variance_scaling_initializer = VarianceScaling
orthogonal_initializer = Orthogonal
identity_initializer = Identity
convolutional_delta_orthogonal = ConvolutionDeltaOrthogonal
convolutional_orthogonal_1d = ConvolutionOrthogonal1D
convolutional_orthogonal_2d = ConvolutionOrthogonal2D
convolutional_orthogonal_3d = ConvolutionOrthogonal3D
# pylint: enable=invalid-name


@tf_export("glorot_uniform_initializer", "keras.initializers.glorot_uniform",
"initializers.glorot_uniform")
def glorot_uniform_initializer(seed=None, dtype=dtypes.float32):
class GlorotUniform(VarianceScaling):
  """The Glorot uniform initializer, also called Xavier uniform initializer.

  It draws samples from a uniform distribution within [-limit, limit]
  where `limit` is `sqrt(6 / (fan_in + fan_out))`, with `fan_in` the number
  of input units in the weight tensor and `fan_out` the number of output
  units in the weight tensor.

  Reference: http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf

  Args:
    seed: A Python integer. Used to create random seeds. See
      `tf.set_random_seed` for behavior.
    dtype: The data type. Only floating point types are supported.
  """

  def __init__(self,
               seed=None,
               dtype=dtypes.float32):
    # Glorot uniform is variance scaling with scale=1, mode="fan_avg" and a
    # uniform distribution; only `seed` and `dtype` are user-configurable.
    super(GlorotUniform, self).__init__(
        scale=1.0,
        mode="fan_avg",
        distribution="uniform",
        seed=seed,
        dtype=dtype)

  def get_config(self):
    # scale/mode/distribution are fixed by the Glorot scheme, so the config
    # only needs to round-trip `seed` and `dtype`.
    return {
        "seed": self.seed,
        "dtype": self.dtype.name
    }


@tf_export("glorot_normal_initializer", "keras.initializers.glorot_normal",
"initializers.glorot_normal")
def glorot_normal_initializer(seed=None, dtype=dtypes.float32):
class GlorotNormal(VarianceScaling):
  """The Glorot normal initializer, also called Xavier normal initializer.

  It draws samples from a truncated normal distribution centered on 0
  with standard deviation `sqrt(2 / (fan_in + fan_out))`, with `fan_in` the
  number of input units in the weight tensor and `fan_out` the number of
  output units in the weight tensor.

  Reference: http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf

  Args:
    seed: A Python integer. Used to create random seeds. See
      `tf.set_random_seed` for behavior.
    dtype: The data type. Only floating point types are supported.
  """

  def __init__(self,
               seed=None,
               dtype=dtypes.float32):
    # Glorot normal is variance scaling with scale=1, mode="fan_avg" and a
    # truncated normal distribution; only `seed` and `dtype` are
    # user-configurable.
    super(GlorotNormal, self).__init__(
        scale=1.0,
        mode="fan_avg",
        distribution="truncated_normal",
        seed=seed,
        dtype=dtype)

  def get_config(self):
    # scale/mode/distribution are fixed by the Glorot scheme, so the config
    # only needs to round-trip `seed` and `dtype`.
    return {
        "seed": self.seed,
        "dtype": self.dtype.name
    }



# Aliases.
#
# snake_case names mirroring the CamelCase initializer classes above.
# Kept for backward compatibility: per the review discussion, at least the
# convolutional_* names are imported elsewhere (tf.contrib.framework), so
# none of these may be removed.

# pylint: disable=invalid-name
zeros_initializer = Zeros
ones_initializer = Ones
constant_initializer = Constant
random_uniform_initializer = RandomUniform
random_normal_initializer = RandomNormal
truncated_normal_initializer = TruncatedNormal
uniform_unit_scaling_initializer = UniformUnitScaling
variance_scaling_initializer = VarianceScaling
glorot_uniform_initializer = GlorotUniform
glorot_normal_initializer = GlorotNormal
orthogonal_initializer = Orthogonal
identity_initializer = Identity
convolutional_delta_orthogonal = ConvolutionDeltaOrthogonal
convolutional_orthogonal_1d = ConvolutionOrthogonal1D
convolutional_orthogonal_2d = ConvolutionOrthogonal2D
convolutional_orthogonal_3d = ConvolutionOrthogonal3D
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

While we're here, could we remove these aliases (are they even effective? Do they do anything, given that the tf_export decorators determine what's in the API)?

@annarev do you think these actually do anything?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Looks like these are used in contrib:
https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/framework/__init__.py
I suppose they can be imported with "as convolutional_orthogonal_*d" instead.

# pylint: enable=invalid-name


@tf_export("keras.initializers.lecun_normal", "initializers.lecun_normal")
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
path: "tensorflow.glorot_normal_initializer"
tf_class {
is_instance: "<class \'tensorflow.python.ops.init_ops.GlorotNormal\'>"
is_instance: "<class \'tensorflow.python.ops.init_ops.VarianceScaling\'>"
is_instance: "<class \'tensorflow.python.ops.init_ops.Initializer\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'seed\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \"<dtype: \'float32\'>\"], "
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
path: "tensorflow.glorot_uniform_initializer"
tf_class {
is_instance: "<class \'tensorflow.python.ops.init_ops.GlorotUniform\'>"
is_instance: "<class \'tensorflow.python.ops.init_ops.VarianceScaling\'>"
is_instance: "<class \'tensorflow.python.ops.init_ops.Initializer\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'seed\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \"<dtype: \'float32\'>\"], "
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
path: "tensorflow.initializers.glorot_normal"
tf_class {
is_instance: "<class \'tensorflow.python.ops.init_ops.GlorotNormal\'>"
is_instance: "<class \'tensorflow.python.ops.init_ops.VarianceScaling\'>"
is_instance: "<class \'tensorflow.python.ops.init_ops.Initializer\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'seed\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \"<dtype: \'float32\'>\"], "
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
path: "tensorflow.initializers.glorot_uniform"
tf_class {
is_instance: "<class \'tensorflow.python.ops.init_ops.GlorotUniform\'>"
is_instance: "<class \'tensorflow.python.ops.init_ops.VarianceScaling\'>"
is_instance: "<class \'tensorflow.python.ops.init_ops.Initializer\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'seed\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \"<dtype: \'float32\'>\"], "
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
}
16 changes: 8 additions & 8 deletions tensorflow/tools/api/golden/v1/tensorflow.initializers.pbtxt
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,14 @@ tf_module {
name: "constant"
mtype: "<type \'type\'>"
}
member {
name: "glorot_normal"
mtype: "<type \'type\'>"
}
member {
name: "glorot_uniform"
mtype: "<type \'type\'>"
}
member {
name: "identity"
mtype: "<type \'type\'>"
Expand Down Expand Up @@ -44,14 +52,6 @@ tf_module {
name: "global_variables"
argspec: "args=[], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "glorot_normal"
argspec: "args=[\'seed\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \"<dtype: \'float32\'>\"], "
}
member_method {
name: "glorot_uniform"
argspec: "args=[\'seed\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \"<dtype: \'float32\'>\"], "
}
member_method {
name: "he_normal"
argspec: "args=[\'seed\'], varargs=None, keywords=None, defaults=[\'None\'], "
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
path: "tensorflow.keras.initializers.glorot_normal"
tf_class {
is_instance: "<class \'tensorflow.python.ops.init_ops.GlorotNormal\'>"
is_instance: "<class \'tensorflow.python.ops.init_ops.VarianceScaling\'>"
is_instance: "<class \'tensorflow.python.ops.init_ops.Initializer\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'seed\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \"<dtype: \'float32\'>\"], "
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
path: "tensorflow.keras.initializers.glorot_uniform"
tf_class {
is_instance: "<class \'tensorflow.python.ops.init_ops.GlorotUniform\'>"
is_instance: "<class \'tensorflow.python.ops.init_ops.VarianceScaling\'>"
is_instance: "<class \'tensorflow.python.ops.init_ops.Initializer\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'seed\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \"<dtype: \'float32\'>\"], "
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,14 @@ tf_module {
name: "constant"
mtype: "<type \'type\'>"
}
member {
name: "glorot_normal"
mtype: "<type \'type\'>"
}
member {
name: "glorot_uniform"
mtype: "<type \'type\'>"
}
member {
name: "identity"
mtype: "<type \'type\'>"
Expand Down Expand Up @@ -88,14 +96,6 @@ tf_module {
name: "get"
argspec: "args=[\'identifier\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "glorot_normal"
argspec: "args=[\'seed\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \"<dtype: \'float32\'>\"], "
}
member_method {
name: "glorot_uniform"
argspec: "args=[\'seed\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \"<dtype: \'float32\'>\"], "
}
member_method {
name: "he_normal"
argspec: "args=[\'seed\'], varargs=None, keywords=None, defaults=[\'None\'], "
Expand Down
16 changes: 8 additions & 8 deletions tensorflow/tools/api/golden/v1/tensorflow.pbtxt
Original file line number Diff line number Diff line change
Expand Up @@ -364,6 +364,14 @@ tf_module {
name: "gfile"
mtype: "<type \'module\'>"
}
member {
name: "glorot_normal_initializer"
mtype: "<type \'type\'>"
}
member {
name: "glorot_uniform_initializer"
mtype: "<type \'type\'>"
}
member {
name: "graph_util"
mtype: "<type \'module\'>"
Expand Down Expand Up @@ -1192,14 +1200,6 @@ tf_module {
name: "global_variables_initializer"
argspec: "args=[], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "glorot_normal_initializer"
argspec: "args=[\'seed\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \"<dtype: \'float32\'>\"], "
}
member_method {
name: "glorot_uniform_initializer"
argspec: "args=[\'seed\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \"<dtype: \'float32\'>\"], "
}
member_method {
name: "gradients"
argspec: "args=[\'ys\', \'xs\', \'grad_ys\', \'name\', \'colocate_gradients_with_ops\', \'gate_gradients\', \'aggregation_method\', \'stop_gradients\'], varargs=None, keywords=None, defaults=[\'None\', \'gradients\', \'False\', \'False\', \'None\', \'None\'], "
Expand Down