From 48adb6d6b5698754d72c14c5e0ab0bb181c4e95d Mon Sep 17 00:00:00 2001
From: Jignesh Parmar
Date: Fri, 27 Sep 2019 22:12:45 +0000
Subject: [PATCH] resolve pylint errors

---
 tests/backend_test_base.py               |  2 +-
 tests/common.py                          |  1 +
 tests/run_pretrained_models.py           |  2 +-
 tf2onnx/graph.py                         | 16 +++++++++-------
 tf2onnx/optimizer/transpose_optimizer.py |  3 +--
 tf2onnx/rewriter/transpose_rewriter.py   |  2 +-
 tf2onnx/schemas.py                       |  2 +-
 tf2onnx/tfonnx.py                        |  4 ++--
 tools/save_pretrained_model.py           |  4 +++-
 9 files changed, 20 insertions(+), 16 deletions(-)

diff --git a/tests/backend_test_base.py b/tests/backend_test_base.py
index 3010beb53..222964367 100644
--- a/tests/backend_test_base.py
+++ b/tests/backend_test_base.py
@@ -21,7 +21,7 @@
 from tf2onnx import optimizer
 
 
-# pylint: disable=missing-docstring,invalid-name,unused-argument,using-constant-test
+# pylint: disable=missing-docstring,invalid-name,unused-argument,using-constant-test, import-outside-toplevel
 
 class Tf2OnnxBackendTestBase(unittest.TestCase):
     def setUp(self):
diff --git a/tests/common.py b/tests/common.py
index b540af727..41fa6c6f0 100644
--- a/tests/common.py
+++ b/tests/common.py
@@ -15,6 +15,7 @@
 import tensorflow as tf
 from tf2onnx import constants, logging, utils
 
+# pylint: disable=import-outside-toplevel
 __all__ = [
     "TestConfig",
     "get_test_config",
diff --git a/tests/run_pretrained_models.py b/tests/run_pretrained_models.py
index 06d6872c4..07f728319 100644
--- a/tests/run_pretrained_models.py
+++ b/tests/run_pretrained_models.py
@@ -30,7 +30,7 @@
 from tf2onnx import loader, logging, optimizer, utils
 from tf2onnx.tfonnx import process_tf_graph
 
-# pylint: disable=broad-except,logging-not-lazy,unused-argument,unnecessary-lambda
+# pylint: disable=broad-except,logging-not-lazy,unused-argument,unnecessary-lambda,import-outside-toplevel
 
 logger = logging.getLogger("run_pretrained")
 
diff --git a/tf2onnx/graph.py b/tf2onnx/graph.py
index 65e205281..bc5fa626b 100644
--- a/tf2onnx/graph.py
+++ b/tf2onnx/graph.py
@@ -40,8 +40,8 @@ def __init__(self, node, graph, skip_conversion=False):
         """
         self._op = node
         self.graph = graph
-        self._input = [i for i in node.input]
-        self._output = [i for i in node.output]
+        self._input = list(node.input)
+        self._output = list(node.output)
         self._attr = {}
 
         graph.set_node_by_name(self)
@@ -306,11 +306,11 @@ def set_body_graph_as_attr(self, attr_name, graph):
 
     def update_proto(self):
         """Update protobuf from internal structure."""
-        nodes = [n for n in self._op.input]
+        nodes = list(self._op.input)
         for node in nodes:
             self._op.input.remove(node)
         self._op.input.extend(self.input)
-        nodes = [n for n in self._op.output]
+        nodes = list(self._op.output)
         for node in nodes:
             self._op.output.remove(node)
         self._op.output.extend(self.output)
@@ -325,7 +325,7 @@ def update_proto(self):
                 graph_proto = sub_graph.make_graph("graph for " + self.name + " " + attr_name)
                 self.set_attr(attr_name, graph_proto)
 
-        attr = [a for a in self.attr_onnx.values()]
+        attr = list(self.attr_onnx.values())
         if attr:
             self._op.attribute.extend(attr)
 
@@ -772,10 +772,12 @@ def get_shape(self, name):
         if shape:
             for i, v in enumerate(shape):
                 if v is None:
+                    # pylint: disable=unsupported-assignment-operation
                     shape[i] = -1
             # hack to allow utils.ONNX_UNKNOWN_DIMENSION to override batchsize if needed.
             # default is -1.
             if shape[0] == -1:
+                # pylint: disable=unsupported-assignment-operation
                 shape[0] = utils.ONNX_UNKNOWN_DIMENSION
             return shape
         return shape
@@ -839,7 +841,7 @@ def _get_unvisited_child(g, node, not_visited):
         label = [-1 for _ in range(n)]
         stack = []
         in_stack = dict()
-        not_visited = dict.fromkeys([i for i in range(n)])
+        not_visited = dict.fromkeys(range(n))
         label_counter = n - 1
 
         while not_visited:
@@ -884,7 +886,7 @@ def make_graph(self, doc, graph_name="tf2onnx"):
             if op.is_const():
                 const_ops.append(op)
                 continue
-            elif op.is_graph_input():
+            if op.is_graph_input():
                 if op not in self._order_sensitive_inputs:
                     order_non_sensitive_placeholders.append(op)
                 continue
diff --git a/tf2onnx/optimizer/transpose_optimizer.py b/tf2onnx/optimizer/transpose_optimizer.py
index 3d053837d..e92a11297 100644
--- a/tf2onnx/optimizer/transpose_optimizer.py
+++ b/tf2onnx/optimizer/transpose_optimizer.py
@@ -227,8 +227,7 @@ def _get_input_index_for_trans(self, node, trans):
         for i in node.input:
             if i == trans.output[0]:
                 break
-            else:
-                input_index += 1
+            input_index += 1
         return input_index
 
     # the assumption is: both node and trans have only 1 output
diff --git a/tf2onnx/rewriter/transpose_rewriter.py b/tf2onnx/rewriter/transpose_rewriter.py
index 59f39646d..fb46add6b 100644
--- a/tf2onnx/rewriter/transpose_rewriter.py
+++ b/tf2onnx/rewriter/transpose_rewriter.py
@@ -26,7 +26,7 @@ def rewrite_transpose(g, ops):
     for match in match_results:
         output = match.get_op('output')
         shape = g.get_shape(output.input[0])
-        dims = [i for i in range(len(shape) - 1, -1, -1)]
+        dims = range(len(shape) - 1, -1, -1)
         output.set_attr("perm", dims)
         g.remove_input(output, output.input[1])
         to_delete = [n for n in match.get_nodes() if n != output]
diff --git a/tf2onnx/schemas.py b/tf2onnx/schemas.py
index 65a3eb320..5091dc10f 100644
--- a/tf2onnx/schemas.py
+++ b/tf2onnx/schemas.py
@@ -136,7 +136,7 @@ def build_onnx_op(node):
                 copied_sub_graph = copy.deepcopy(sub_graph)
                 graph_proto = copied_sub_graph.make_graph("graph for " + node.name + " " + attr_name)
                 attr.append(helper.make_attribute(attr_name, graph_proto))
-        attr.extend([a for a in node.attr_onnx.values()])
+        attr.extend(node.attr_onnx.values())
         if attr:
             onnx_node.attribute.extend(attr)
         return onnx_node
diff --git a/tf2onnx/tfonnx.py b/tf2onnx/tfonnx.py
index 28a68892f..0abdd2210 100644
--- a/tf2onnx/tfonnx.py
+++ b/tf2onnx/tfonnx.py
@@ -233,7 +233,7 @@ def rewrite_incomplete_type_support(g, ops, impacted_ops):
         "Where": [0],  # Where's first input is bool
     }
     new_ops = []
-    org_ops = [n for n in ops]
+    org_ops = list(ops)
     for op in org_ops:
         if op.type in impacted_ops:
             cast_inserted = []
@@ -312,7 +312,7 @@ def tensorflow_onnx_mapping(g, ops_mapping):
     unmapped_op = collections.Counter()
     exceptions = []
 
-    ops = [n for n in g.get_nodes()]
+    ops = list(g.get_nodes())
     for node in ops:
         logger.debug("Process node: %s\n%s", node.name, node.summary)
 
diff --git a/tools/save_pretrained_model.py b/tools/save_pretrained_model.py
index 94b8d5cdd..36b1c5d20 100644
--- a/tools/save_pretrained_model.py
+++ b/tools/save_pretrained_model.py
@@ -7,7 +7,8 @@
 import tensorflow as tf
 import numpy as np
 
-# pylint: disable=redefined-outer-name,reimported
+
+# pylint: disable=redefined-outer-name,reimported,import-outside-toplevel
 
 def save_pretrained_model(sess, outputs, feeds, out_dir, model_name="pretrained"):
     """Save pretrained model and config"""
@@ -54,6 +55,7 @@ def save_pretrained_model(sess, outputs, feeds, out_dir, model_name="pretrained"
 
     # save graph and weights
     from tensorflow.saved_model import simple_save
+    # pylint: disable=unnecessary-comprehension
     simple_save(sess, saved_model,
                 {n: i for n, i in zip(inputs.keys(), feeds.keys())},
                 {op.name: op for op in outputs})