Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

framework.py enhancement #8471

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 16 additions & 11 deletions python/paddle/v2/fluid/framework.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,7 +152,7 @@ class Variable(object):
shape(tuple|list|None): The shape of variable. -1 means the batch size.
Some kinds of variable do not contain shape, just set it to None.
dtype(np.dtype|core.VarDesc.VarType|str): The data type of variable.
lod_level(int): The level of lod tensor. 0 means there is not a time
lod_level(int): The level of lod tensor. 0 means it is not a time
series data.
persistable(bool): True if the variable should be saved as check point.
Defaults to False.
Expand Down Expand Up @@ -342,7 +342,7 @@ def instance(cls):
def __init__(self):
assert not hasattr(
self.__class__,
'_instance'), 'Please use `instance()` to get OpProtoHolder opject!'
'_instance'), 'Please use `instance()` to get OpProtoHolder object!'
op_protos = get_all_op_protos()
self.op_proto_map = {}
for proto in op_protos:
Expand All @@ -364,8 +364,8 @@ def get_op_proto(self, type):

class Operator(object):
"""
Python Operator class. The operator represents the build in instructs in a
Block. Users can use the build in instructs to describe their neural
Python Operator class. The operator represents the build in instructions in a
Block. Users can use the build in instructions to describe their neural
network.
"""

Expand Down Expand Up @@ -474,7 +474,7 @@ def find_name(var_list, name):
raise TypeError("'attrs' should be a dict.")
for attr in proto.attrs:
attr_name = attr.name
if (not attr_name in attrs) or (attrs[attr_name] is None):
if (attr_name not in attrs) or (attrs[attr_name] is None):
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hard to believe Python can do the same thing in so many different ways...

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

From PEP8

Use is not operator rather than not ... is. While both expressions are 
functionally identical, the former is more readable and preferred.

Yes:

if foo is not None:

No:

if not foo is None:

Copy link
Contributor

@helinwang helinwang Feb 22, 2018

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks for the info. It's better if only one is allowed by the syntax.

continue
if isinstance(attrs[attr_name], Block):
self.desc.set_block_attr(attr_name, attrs[attr_name].desc)
Expand Down Expand Up @@ -709,7 +709,7 @@ def iter_parameters(self):
if isinstance(item[1], Parameter))

def create_var(self, *args, **kwargs):
var = Variable(self, *args, **kwargs)
var = Variable(block=self, *args, **kwargs)
if 'initializer' in kwargs:
kwargs['initializer'](var, self)
return var
Expand All @@ -726,13 +726,13 @@ def create_parameter(self, *args, **kwargs):

def append_op(self, *args, **kwargs):
op_desc = self.desc.append_op()
op = Operator(self, op_desc, *args, **kwargs)
op = Operator(block=self, desc=op_desc, *args, **kwargs)
self.ops.append(op)
return op

def delete_ops(self, ops):
# remove from cpp
# FIXME(typhoonzero): remove only the first occuracy.
# FIXME(typhoonzero): remove only the first occurrence.
try:
start = list(self.ops).index(ops[0])
end = list(self.ops).index(ops[-1])
Expand All @@ -750,6 +750,11 @@ def prepend_op(self, *args, **kwargs):
return op

def sync_with_cpp(self):
"""
Sync with the desc on the c++ end.

This method is used to synchronize the c++ desc instance generated by backward.
"""
# sync variables from cpp
for var in self.desc.all_vars():
if not self.has_var(var.name()):
Expand Down Expand Up @@ -795,9 +800,9 @@ def sync_with_cpp(self):

def copy_param_info_from(self, other):
"""
Copy the information of parameters from other block
Copy the information of parameters from the other block
Args:
other(Block): other block
other(Block): the other block

Returns:
None
Expand Down Expand Up @@ -1143,6 +1148,6 @@ def get_var(name, program=None):
if program is None:
program = default_main_program()
assert isinstance(name, str)
assert isinstance(name, Program)
assert isinstance(program, Program)

return program.global_block().var(name)
2 changes: 1 addition & 1 deletion python/paddle/v2/fluid/layers/nn.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ def fc(input,
* :math:`X_i`: The input tensor.
* :math:`W`: The weights created by this layer.
* :math:`b`: The bias parameter created by this layer (if needed).
* :math:`Act`: The activation funtion.
* :math:`Act`: The activation function.
* :math:`Out`: The output tensor.

Args:
Expand Down