Skip to content

Commit

Permalink
Fix global LGTM Python/C++ warnings and fully enable it (onnx#4467)
Browse files Browse the repository at this point in the history
* Fix global LGTM warnings and fully enable it

Signed-off-by: Chun-Wei Chen <jacky82226@gmail.com>

* - Exclude: py/import-and-import-from

Signed-off-by: Chun-Wei Chen <jacky82226@gmail.com>

* Fix the rest of the warnings

Signed-off-by: Chun-Wei Chen <jacky82226@gmail.com>

* Remove an unnecessary `if`

Signed-off-by: Chun-Wei Chen <jacky82226@gmail.com>

Signed-off-by: Chun-Wei Chen <jacky82226@gmail.com>
Co-authored-by: G. Ramalingam <grama@microsoft.com>
  • Loading branch information
2 people authored and Bjarke Roune committed May 6, 2023
1 parent e1fcb74 commit 361d788
Show file tree
Hide file tree
Showing 10 changed files with 32 additions and 23 deletions.
5 changes: 5 additions & 0 deletions .lgtm.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,3 +10,8 @@ extraction:
# Build protobuf from source with -fPIC
- mkdir protobuf && cd protobuf
- source ../workflow_scripts/protobuf/build_protobuf_unix.sh $(nproc) $(pwd)/protobuf_install
path_classifiers:
test:
- exclude: / # ignore our default classification for test code
queries:
- exclude: py/import-and-import-from
5 changes: 2 additions & 3 deletions onnx/backend/test/case/node/resize.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,10 +288,9 @@ def W(x: float) -> float:
x_3 = x * x_2
if x <= 1:
return (A + 2) * x_3 - (A + 3) * x_2 + 1
elif x > 1 and x < 2:
if x < 2:
return A * x_3 - 5 * A * x_2 + 8 * A * x - 4 * A
else:
return 0.0
return 0.0

i_start = int(np.floor(-2 / scale) + 1)
i_end = 2 - i_start
Expand Down
4 changes: 4 additions & 0 deletions onnx/checker.h
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,10 @@ class LexicalScopeContext final {
// instance with the default constructor and populate output_names with the
// values from the parent scope so the values are copied instead.
LexicalScopeContext(const LexicalScopeContext& parent_context) : parent_context_{&parent_context} {}
LexicalScopeContext& operator=(const LexicalScopeContext& parent_context) {
parent_context_ = &parent_context;
return *this;
}

void add(const std::string& name) {
output_names.insert(name);
Expand Down
4 changes: 2 additions & 2 deletions onnx/defs/function.h
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ class FunctionBodyHelper {
}

template <typename T>
AttributeProtoWrapper(const std::string& attr_name, T value) {
AttributeProtoWrapper(const std::string& attr_name, const T& value) {
proto = MakeAttribute(attr_name, value);
}
};
Expand Down Expand Up @@ -142,7 +142,7 @@ class FunctionBuilder {
}

template <typename T>
FunctionBuilder& Add(const char* node_txt, const std::string& attr_name, T attr_value) {
FunctionBuilder& Add(const char* node_txt, const std::string& attr_name, const T& attr_value) {
return Add(node_txt, MakeAttribute(attr_name, attr_value));
}

Expand Down
2 changes: 1 addition & 1 deletion onnx/shape_inference/implementation.cc
Original file line number Diff line number Diff line change
Expand Up @@ -831,7 +831,7 @@ std::vector<const TypeProto*> GraphInferencerImpl::doInferencing(
return graph_output_types;
}

std::string GetErrorWithNodeInfo(NodeProto n, std::runtime_error err) {
std::string GetErrorWithNodeInfo(const NodeProto& n, std::runtime_error err) {
std::string op_name = n.has_name() ? (", node name: " + n.name()) : "";
return "(op_type:" + n.op_type() + op_name + "): " + err.what();
}
Expand Down
2 changes: 1 addition & 1 deletion onnx/shape_inference/implementation.h
Original file line number Diff line number Diff line change
Expand Up @@ -472,7 +472,7 @@ void InferShapeForFunctionNode(
SymbolTable* symbolTable = nullptr,
std::unordered_map<std::string, TensorShapeProto>* generated_shape_data_by_name = nullptr);

std::string GetErrorWithNodeInfo(NodeProto n, std::runtime_error err);
std::string GetErrorWithNodeInfo(const NodeProto& n, std::runtime_error err);

void TraverseGraphsToAddExistingSymbols(const GraphProto& g, SymbolTable& symbolTable);

Expand Down
12 changes: 6 additions & 6 deletions onnx/test/basic_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,14 +41,14 @@ def test_save_and_load_model(self) -> None:

# Test if input is string
loaded_proto = onnx.load_model_from_string(proto_string)
self.assertTrue(proto == loaded_proto)
self.assertEqual(proto, loaded_proto)

# Test if input has a read function
f = io.BytesIO()
onnx.save_model(proto_string, f)
f = io.BytesIO(f.getvalue())
loaded_proto = onnx.load_model(f, cls)
self.assertTrue(proto == loaded_proto)
self.assertEqual(proto, loaded_proto)

# Test if input is a file name
try:
Expand All @@ -57,7 +57,7 @@ def test_save_and_load_model(self) -> None:
fi.close()

loaded_proto = onnx.load_model(fi.name, cls)
self.assertTrue(proto == loaded_proto)
self.assertEqual(proto, loaded_proto)
finally:
os.remove(fi.name)

Expand All @@ -68,14 +68,14 @@ def test_save_and_load_tensor(self) -> None:

# Test if input is string
loaded_proto = onnx.load_tensor_from_string(proto_string)
self.assertTrue(proto == loaded_proto)
self.assertEqual(proto, loaded_proto)

# Test if input has a read function
f = io.BytesIO()
onnx.save_tensor(loaded_proto, f)
f = io.BytesIO(f.getvalue())
loaded_proto = onnx.load_tensor(f, cls)
self.assertTrue(proto == loaded_proto)
self.assertEqual(proto, loaded_proto)

# Test if input is a file name
try:
Expand All @@ -84,7 +84,7 @@ def test_save_and_load_tensor(self) -> None:
tfile.close()

loaded_proto = onnx.load_tensor(tfile.name, cls)
self.assertTrue(proto == loaded_proto)
self.assertEqual(proto, loaded_proto)
finally:
os.remove(tfile.name)

Expand Down
1 change: 1 addition & 0 deletions onnx/test/checker_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ def test_check_node_input_marked_optional(self) -> None:

# Explicitly pass the empty string as optional
node = helper.make_node("GivenTensorFill", [""], ["Y"], name="test")
checker.check_node(node)

# Input of RELU is not optional
node = helper.make_node("Relu", [""], ["Y"], name="test")
Expand Down
12 changes: 6 additions & 6 deletions onnx/test/parser_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,10 @@

class TestBasicFunctions(unittest.TestCase):
def check_graph(self, graph: GraphProto) -> None:
self.assertTrue(len(graph.node) == 3)
self.assertTrue(graph.node[0].op_type == "MatMul")
self.assertTrue(graph.node[1].op_type == "Add")
self.assertTrue(graph.node[2].op_type == "Softmax")
self.assertEqual(len(graph.node), 3)
self.assertEqual(graph.node[0].op_type, "MatMul")
self.assertEqual(graph.node[1].op_type, "Add")
self.assertEqual(graph.node[2].op_type, "Softmax")

def test_parse_graph(self) -> None:
input = """
Expand Down Expand Up @@ -38,8 +38,8 @@ def test_parse_model(self) -> None:
}
"""
model = onnx.parser.parse_model(input)
self.assertTrue(model.ir_version == 7)
self.assertTrue(len(model.opset_import) == 2)
self.assertEqual(model.ir_version, 7)
self.assertEqual(len(model.opset_import), 2)
self.check_graph(model.graph)

def test_parse_graph_error(self) -> None:
Expand Down
8 changes: 4 additions & 4 deletions onnx/test/printer_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,10 @@

class TestBasicFunctions(unittest.TestCase):
def check_graph(self, graph: onnx.GraphProto) -> None:
self.assertTrue(len(graph.node) == 3)
self.assertTrue(graph.node[0].op_type == "MatMul")
self.assertTrue(graph.node[1].op_type == "Add")
self.assertTrue(graph.node[2].op_type == "Softmax")
self.assertEqual(len(graph.node), 3)
self.assertEqual(graph.node[0].op_type, "MatMul")
self.assertEqual(graph.node[1].op_type, "Add")
self.assertEqual(graph.node[2].op_type, "Softmax")

def test_parse_graph(self) -> None:
text0 = """
Expand Down

0 comments on commit 361d788

Please sign in to comment.