diff --git a/paddle/fluid/jit/compilation_unit.cc b/paddle/fluid/jit/compilation_unit.cc index 1a2351048f90a..110f012c8e361 100644 --- a/paddle/fluid/jit/compilation_unit.cc +++ b/paddle/fluid/jit/compilation_unit.cc @@ -27,7 +27,7 @@ std::shared_ptr CompilationUnit::GetEngine( engine_map_.count(name), 1, phi::errors::InvalidArgument( - "Funciton named %s is not existed in engine_map_.", name)); + "Function named %s does not exist in engine_map_.", name)); return engine_map_.at(name); } diff --git a/paddle/fluid/jit/layer.cc b/paddle/fluid/jit/layer.cc index 0b2e20f77837a..c1a493db17ea9 100644 --- a/paddle/fluid/jit/layer.cc +++ b/paddle/fluid/jit/layer.cc @@ -65,7 +65,7 @@ const std::shared_ptr& Layer::FunctionInfo( info_map_.count(name), 1, phi::errors::InvalidArgument( - "FuncitonInfo named %s is not existed in info_map_.", name)); + "FunctionInfo named %s does not exist in info_map_.", name)); return info_map_.at(name); } @@ -77,7 +77,7 @@ std::vector Layer::FunctionNames() const { return names; } -#define PD_SPECIALZE_ATTRIBUTE_TYPE(T) \ +#define PD_SPECIALIZE_ATTRIBUTE_TYPE(T) \ template <> \ T Layer::Attribute(const std::string& name) const { \ if (attrs_map_->find(name) == attrs_map_->end()) { \ @@ -90,12 +90,12 @@ std::vector Layer::FunctionNames() const { return ret; \ } -PD_SPECIALZE_ATTRIBUTE_TYPE(int) -PD_SPECIALZE_ATTRIBUTE_TYPE(float) -PD_SPECIALZE_ATTRIBUTE_TYPE(framework::String) -PD_SPECIALZE_ATTRIBUTE_TYPE(std::vector) -PD_SPECIALZE_ATTRIBUTE_TYPE(std::vector) -PD_SPECIALZE_ATTRIBUTE_TYPE(std::vector) +PD_SPECIALIZE_ATTRIBUTE_TYPE(int) +PD_SPECIALIZE_ATTRIBUTE_TYPE(float) +PD_SPECIALIZE_ATTRIBUTE_TYPE(framework::String) +PD_SPECIALIZE_ATTRIBUTE_TYPE(std::vector) +PD_SPECIALIZE_ATTRIBUTE_TYPE(std::vector) +PD_SPECIALIZE_ATTRIBUTE_TYPE(std::vector) std::shared_ptr Layer::Clone(void* stream) { std::shared_ptr x = diff --git a/paddle/fluid/operators/generator/parse_utils.py b/paddle/fluid/operators/generator/parse_utils.py index 
3395f265e2647..0370d6cfba4b3 100644 --- a/paddle/fluid/operators/generator/parse_utils.py +++ b/paddle/fluid/operators/generator/parse_utils.py @@ -627,11 +627,11 @@ def validate_backward_attrs(op, forward_attrs, backward_attrs): def validate_backward_inputs( op, forward_inputs, forward_outputs, backward_inputs ): - foward_input_names = [item["name"] for item in forward_inputs] + forward_input_names = [item["name"] for item in forward_inputs] forward_output_names = [item["name"] for item in forward_outputs] backward_input_names = [item["name"] for item in backward_inputs] - assert len(backward_input_names) <= len(foward_input_names) + 2 * len( + assert len(backward_input_names) <= len(forward_input_names) + 2 * len( forward_output_names ), f"{op } has too many inputs." diff --git a/paddle/fluid/pir/dialect/op_generator/op_gen.py b/paddle/fluid/pir/dialect/op_generator/op_gen.py index d4c2d6eb6b6fa..161aa4ac9a1b5 100644 --- a/paddle/fluid/pir/dialect/op_generator/op_gen.py +++ b/paddle/fluid/pir/dialect/op_generator/op_gen.py @@ -95,7 +95,7 @@ extern std::set onednn_only_op_set; """ -GET_OP_LIST_TEMPALTE = """{} +GET_OP_LIST_TEMPLATE = """{} """ DECLARE_OP_TYPE_ID = """ @@ -338,7 +338,7 @@ class {TEST_API} {op_name} : public pir::Op<{op_name}{interfaces}{traits}> {{ def to_phi_and_fluid_op_name(op_item): - # Templat: - op : phi_name (fluid_name) + # Template: - op : phi_name (fluid_name) names = op_item.split('(') if len(names) == 1: phi_fluid_name = names[0].strip() @@ -350,7 +350,7 @@ def to_phi_and_fluid_op_name(op_item): def to_phi_and_fluid_grad_op_name(op_item): - # Templat: sum_grad (reduce_sum_grad), sum_double_grad + # Template: sum_grad (reduce_sum_grad), sum_double_grad rtn = [] all_names = op_item.split(', ') for name in all_names: @@ -1499,7 +1499,7 @@ def AutoCodeGen(op_info_items, all_op_info_items, namespaces, dialect_name): # =================================== # # gen GetOpInfo func str # # =================================== # - # generate get op 
info funciton: inputs + # generate get op info function: inputs input_info_list = [] for idx in range(len(op_input_name_list)): input_info_list.append( @@ -1529,7 +1529,7 @@ def AutoCodeGen(op_info_items, all_op_info_items, namespaces, dialect_name): inputs_info_str = ", ".join(input_info_list) else: inputs_info_str = "" - # generate get op info funciton: outputs + # generate get op info function: outputs outputs_info_str = "" if len(op_output_name_list) > 0: output_info_list = [] @@ -1543,7 +1543,7 @@ def AutoCodeGen(op_info_items, all_op_info_items, namespaces, dialect_name): ) ) outputs_info_str = ", ".join(output_info_list) - # generate get op info funciton: attributes + # generate get op info function: attributes attribute_info_str = "" if len(op_non_mutable_attribute_name_list) > 0: attribute_info_list = [] @@ -1866,7 +1866,7 @@ def AutoCodeGen(op_info_items, all_op_info_items, namespaces, dialect_name): ops_name_with_namespace_list = [] for name in ops_name_list: ops_name_with_namespace_list.append(op_namespaces_prev + name) - op_list_str = GET_OP_LIST_TEMPALTE.format( + op_list_str = GET_OP_LIST_TEMPLATE.format( ", ".join(ops_name_with_namespace_list) ) @@ -2154,7 +2154,7 @@ def OpGenerator( # (6) write to files for xx_vjp_op.cc.tmp # NOTE(Aurelius84): op_gen.py is called multiply times, - # and vjp is only avaible for pd dialect. + # and vjp is only available for pd dialect. 
vjp_source_file_str = "\n".join(vjp_source_file_strs) vjp_source_file_str = VJP_CC_FILE_TEMPLATE.format(input=vjp_source_file_str) if ( diff --git a/paddle/fluid/pir/dialect/op_generator/op_infermeta_gen.py b/paddle/fluid/pir/dialect/op_generator/op_infermeta_gen.py index 2c38dd43701aa..a900a378cfd77 100644 --- a/paddle/fluid/pir/dialect/op_generator/op_infermeta_gen.py +++ b/paddle/fluid/pir/dialect/op_generator/op_infermeta_gen.py @@ -98,7 +98,7 @@ def get_infermeta_inputs_str( infermeta_inputs_str += "\n" infermeta_inputs_str += ' VLOG(4) << "Builder construction outputs";\n' - # Prepar input type + # Prepare input type for idx in range(len(op_input_name_list)): if op_input_name_list[idx] not in inuse_infer_meta_args: continue @@ -367,7 +367,7 @@ def GenBuildOutputsPart2( elif attr_dtype[0] == "pir::StrAttribute": build_output_str += "" else: - assert "mutable attribtue type is not right." + assert False, "mutable attribute type is not right." build_output_str += "\n" # Prepare inputs_meta_tensor & attributes for infer meta diff --git a/paddle/fluid/pir/dialect/op_generator/op_interface_gen.py b/paddle/fluid/pir/dialect/op_generator/op_interface_gen.py index 08090ba434bcf..21fcc02b11634 100644 --- a/paddle/fluid/pir/dialect/op_generator/op_interface_gen.py +++ b/paddle/fluid/pir/dialect/op_generator/op_interface_gen.py @@ -96,7 +96,7 @@ VLOG(6) << "Vjp prepare Prepare attributes of {op_grad_name}"; {attribute_code} - VLOG(6) << "Vjp prepare call {op_phi_name}'s vjp inteface"; + VLOG(6) << "Vjp prepare call {op_phi_name}'s vjp interface"; {call_vjp_code} VLOG(6) << "Vjp prepare stop gradient of {op_grad_name}"; diff --git a/paddle/fluid/pir/dialect/operator/ir/control_flow_op.cc b/paddle/fluid/pir/dialect/operator/ir/control_flow_op.cc index 30d5ce5a1b685..7fa9570035eaf 100644 --- a/paddle/fluid/pir/dialect/operator/ir/control_flow_op.cc +++ b/paddle/fluid/pir/dialect/operator/ir/control_flow_op.cc @@ -469,7 +469,7 @@ std::vector> WhileOp::Vjp( 
PADDLE_ENFORCE_EQ(push_op.container().use_empty(), true, phi::errors::InvalidArgument( - "The last container in foward while op must used " + "The last container in forward while op must used " "empty while construct while_grad op")); break; } diff --git a/paddle/phi/api/yaml/generator/api_gen.py b/paddle/phi/api/yaml/generator/api_gen.py index 6905db3f8089b..7741b9daca613 100644 --- a/paddle/phi/api/yaml/generator/api_gen.py +++ b/paddle/phi/api/yaml/generator/api_gen.py @@ -494,12 +494,12 @@ def generate_api( source_file.write(namespace[0]) for api in apis: - foward_api = ForwardAPI(api) - if foward_api.is_dygraph_api: - foward_api.is_dygraph_api = False + forward_api = ForwardAPI(api) + if forward_api.is_dygraph_api: + forward_api.is_dygraph_api = False - header_file.write(foward_api.gene_api_declaration()) - source_file.write(foward_api.gene_api_code()) + header_file.write(forward_api.gene_api_declaration()) + source_file.write(forward_api.gene_api_code()) header_file.write(namespace[1]) source_file.write(namespace[1]) diff --git a/paddle/phi/api/yaml/generator/dist_api_gen.py b/paddle/phi/api/yaml/generator/dist_api_gen.py index e7dfc4c50563b..b1a775d912f27 100644 --- a/paddle/phi/api/yaml/generator/dist_api_gen.py +++ b/paddle/phi/api/yaml/generator/dist_api_gen.py @@ -1889,15 +1889,15 @@ def generate_api( source_file.write(namespace[0]) for api in apis: - dist_foward_api = DistForwardAPI(api) - if dist_foward_api.is_dygraph_api: - dist_foward_api.is_dygraph_api = False + dist_forward_api = DistForwardAPI(api) + if dist_forward_api.is_dygraph_api: + dist_forward_api.is_dygraph_api = False - header_file.write(dist_foward_api.gene_api_declaration()) + header_file.write(dist_forward_api.gene_api_declaration()) if is_fused_ops_yaml is True: - source_file.write(dist_foward_api.gene_api_code()) + source_file.write(dist_forward_api.gene_api_code()) else: - source_file.write(dist_foward_api.gene_api_code()) + source_file.write(dist_forward_api.gene_api_code()) 
header_file.write(namespace[1]) source_file.write(namespace[1]) diff --git a/paddle/phi/api/yaml/generator/intermediate_api_gen.py b/paddle/phi/api/yaml/generator/intermediate_api_gen.py index efae34934408e..31e23252bc9c6 100644 --- a/paddle/phi/api/yaml/generator/intermediate_api_gen.py +++ b/paddle/phi/api/yaml/generator/intermediate_api_gen.py @@ -121,10 +121,12 @@ def generate_intermediate_api( apis.extend(api_list) for api in apis: - foward_api = DistForwardAPI(api) if gen_dist_branch else ForwardAPI(api) - if foward_api.is_dygraph_api: - dygraph_header_file.write(foward_api.gene_api_declaration()) - dygraph_source_file.write(foward_api.gene_api_code()) + forward_api = ( + DistForwardAPI(api) if gen_dist_branch else ForwardAPI(api) + ) + if forward_api.is_dygraph_api: + dygraph_header_file.write(forward_api.gene_api_declaration()) + dygraph_source_file.write(forward_api.gene_api_code()) dygraph_header_file.write(sparse_namespace_pair[0]) dygraph_source_file.write(sparse_namespace_pair[0])