Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add transpose optimizer, and integrate it after tf graph conversion #108

Merged
merged 7 commits into from
Aug 24, 2018
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 18 additions & 17 deletions tests/run_pretrained_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@
from tensorflow.core.framework import graph_pb2
from tf2onnx.tfonnx import process_tf_graph
from tensorflow.python.framework.graph_util import convert_variables_to_constants
from tf2onnx.optimizer.onnx_graph import OnnxGraph
from tf2onnx.optimizer.transpose_optimizer import TransposeOptimizer

TMPPATH = tempfile.mkdtemp()
PERFITER = 1000
def to_onnx(tf_graph, opset=None, shape_override=None):
    """Convert a TensorFlow graph to an ONNX graph.

    Args:
        tf_graph: the frozen TensorFlow graph to convert.
        opset: optional ONNX opset version to target.
        shape_override: optional dict overriding inferred input shapes.

    Returns:
        The converted graph produced by process_tf_graph.
    """
    # NOTE: original docstring said "Convert graph to tensorflow", which is
    # backwards — this converts *from* TensorFlow *to* ONNX.
    return process_tf_graph(tf_graph, continue_on_error=False, opset=opset, shape_override=shape_override)

def run_caffe2(self, name, onnx_graph, inputs):
def run_caffe2(self, name, model_proto, inputs):
"""Run test against caffe2 backend."""
import caffe2.python.onnx.backend
model_proto = onnx_graph.make_model("test", inputs.keys(), self.output_names)
prepared_backend = caffe2.python.onnx.backend.prepare(model_proto)
results = prepared_backend.run(inputs)
if self.perf:
Expand All @@ -189,12 +190,11 @@ def run_caffe2(self, name, onnx_graph, inputs):
self.onnx_runtime = time.time() - start
return results

def run_onnxmsrt(self, name, onnx_graph, inputs):
def run_onnxmsrt(self, name, model_proto, inputs):
"""Run test against onnxmsrt backend."""
import lotus
# create model and datafile in tmp path.
model_path = os.path.join(TMPPATH, name + "_model.pb")
model_proto = onnx_graph.make_model("test", inputs.keys(), self.output_names)
with open(model_path, "wb") as f:
f.write(model_proto.SerializeToString())
m = lotus.ModelExecutor(model_path)
Expand All @@ -206,11 +206,10 @@ def run_onnxmsrt(self, name, onnx_graph, inputs):
self.onnx_runtime = time.time() - start
return results

def run_onnxmsrtnext(self, name, onnx_graph, inputs):
def run_onnxmsrtnext(self, name, model_proto, inputs):
"""Run test against msrt-next backend."""
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

when running this with your changes:
mobilenet_v1_75_192
downloaded /tmp/pre-trained\mobilenet_v1_0.75_192_frozen\mobilenet_v1_0.75_192/frozen_graph.pb
2018-08-22 14:28:57.717831: I T:\src\github\tensorflow\tensorflow\tools\graph_transforms\transform_graph.cc:318] Applying fold_constants
2018-08-22 14:28:57.870910: I T:\src\github\tensorflow\tensorflow\tools\graph_transforms\transform_graph.cc:318] Applying fold_batch_norms
2018-08-22 14:28:58.012064: I T:\src\github\tensorflow\tensorflow\tools\graph_transforms\transform_graph.cc:318] Applying fold_old_batch_norms
tensorflow OK
before optimization: ops statistics: Counter({'Transpose': 58, 'Conv': 28, 'Add': 28, 'Max': 27, 'Min': 27, 'Mul': 13, 'Identity': 1, 'Softmax': 1, 'Reshape': 1, 'AveragePool': 1, 'Squeeze': 1})
to_onnx FAIL name 'TensorShapeProto' is not defined
run_onnx FAIL 'NoneType' object has no attribute 'SerializeToString'

mobilenet_v1_100_224
downloaded /tmp/pre-trained\mobilenet_v1_1.0_224_frozen\mobilenet_v1_1.0_224/frozen_graph.pb
2018-08-22 14:28:15.857182: I T:\src\github\tensorflow\tensorflow\tools\graph_transforms\transform_graph.cc:318] Applying fold_constants
2018-08-22 14:28:16.066649: I T:\src\github\tensorflow\tensorflow\tools\graph_transforms\transform_graph.cc:318] Applying fold_batch_norms
2018-08-22 14:28:16.295635: I T:\src\github\tensorflow\tensorflow\tools\graph_transforms\transform_graph.cc:318] Applying fold_old_batch_norms
tensorflow OK
before optimization: ops statistics: Counter({'Transpose': 58, 'Conv': 28, 'Add': 28, 'Max': 27, 'Min': 27, 'Mul': 13, 'Identity': 1, 'Softmax': 1, 'Reshape': 1, 'AveragePool': 1, 'Squeeze': 1})
to_onnx FAIL name 'TensorShapeProto' is not defined
run_onnx FAIL 'NoneType' object has no attribute 'SerializeToString'

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

oh, my bad, it's a last minute change. I'll fix it. sorry.

import lotus
model_path = os.path.join(TMPPATH, name + ".pb")
model_proto = onnx_graph.make_model("test", inputs.keys(), self.output_names)
with open(model_path, "wb") as f:
f.write(model_proto.SerializeToString())
m = lotus.InferenceSession(model_path)
Expand All @@ -222,11 +221,10 @@ def run_onnxmsrtnext(self, name, onnx_graph, inputs):
self.onnx_runtime = time.time() - start
return results

def run_onnxcntk(self, name, onnx_graph, inputs):
def run_onnxcntk(self, name, model_proto, inputs):
"""Run test against cntk backend."""
import cntk as C
model_path = os.path.join(TMPPATH, name + "_model.pb")
model_proto = onnx_graph.make_model("test", inputs.keys(), self.output_names)
with open(model_path, "wb") as f:
f.write(model_proto.SerializeToString())
z = C.Function.load(model_path, format=C.ModelFormat.ONNX)
Expand All @@ -242,10 +240,9 @@ def run_onnxcntk(self, name, onnx_graph, inputs):
self.onnx_runtime = time.time() - start
return results

def create_onnx_file(self, name, model_proto, inputs, outdir):
    """Serialize *model_proto* to ``<outdir>/<name>.onnx`` and report the path.

    Args:
        name: base filename (without extension) for the model file.
        model_proto: an ONNX ModelProto; only SerializeToString() is used.
        inputs: accepted for signature compatibility; unused here.
        outdir: destination directory, created if it does not exist.
    """
    os.makedirs(outdir, exist_ok=True)
    target = os.path.join(outdir, "{}.onnx".format(name))
    with open(target, "wb") as model_file:
        model_file.write(model_proto.SerializeToString())
    print("\tcreated", target)
Expand Down Expand Up @@ -300,29 +297,33 @@ def run_test(self, name, backend="caffe2", debug=False, onnx_file=None, opset=No

# run the model with tensorflow
tf_results = self.run_tensorflow(sess, inputs)
onnx_graph = None
opt_model_proto = None
print("\ttensorflow", "OK")
try:
# convert model to onnx
onnx_graph = self.to_onnx(sess.graph, opset=opset, shape_override=shape_override)
model_proto = onnx_graph.make_model("test", inputs.keys(), self.output_names)
# optimize the onnx graph with TransposeOptimizer
optimizer = TransposeOptimizer(OnnxGraph(model_proto.graph))
opt_model_proto = optimizer.optimize()
print("\tto_onnx", "OK")
if debug:
onnx_graph.dump_graph()
opt_model_proto.dump_graph()
if onnx_file:
self.create_onnx_file(name, onnx_graph, inputs, onnx_file)
self.create_onnx_file(name, opt_model_proto, inputs, onnx_file)
except Exception as ex:
print("\tto_onnx", "FAIL", ex)

try:
onnx_results = None
if backend == "caffe2":
onnx_results = self.run_caffe2(name, onnx_graph, inputs)
onnx_results = self.run_caffe2(name, opt_model_proto, inputs)
elif backend == "onnxmsrt":
onnx_results = self.run_onnxmsrt(name, onnx_graph, inputs)
onnx_results = self.run_onnxmsrt(name, opt_model_proto, inputs)
elif backend == "onnxmsrtnext":
onnx_results = self.run_onnxmsrtnext(name, onnx_graph, inputs)
onnx_results = self.run_onnxmsrtnext(name, opt_model_proto, inputs)
elif backend == "cntk":
onnx_results = self.run_onnxcntk(name, onnx_graph, inputs)
onnx_results = self.run_onnxcntk(name, opt_model_proto, inputs)
else:
raise ValueError("unknown backend")
print("\trun_onnx OK")
Expand Down
9 changes: 7 additions & 2 deletions tf2onnx/convert.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@
import tf2onnx.utils
from tf2onnx.tfonnx import process_tf_graph, tf_optimize, DEFAULT_TARGET, POSSIBLE_TARGETS
from onnx import helper

from tf2onnx.optimizer.onnx_graph import OnnxGraph
from tf2onnx.optimizer.transpose_optimizer import TransposeOptimizer

_TENSORFLOW_DOMAIN = "ai.onnx.converters.tensorflow"

Expand Down Expand Up @@ -94,10 +95,14 @@ def main():
"converted from {}".format(args.input), args.inputs, args.outputs,
optimize=not args.continue_on_error)

onnx_graph = OnnxGraph(model_proto.graph)
optimizer = TransposeOptimizer(onnx_graph)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

can you make this optional via command line, don't think this tested well enough

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

okay, let me do that.

opt_model_proto = optimizer.optimize()

# write onnx graph
if args.output:
with open(args.output, "wb") as f:
f.write(model_proto.SerializeToString())
f.write(opt_model_proto.SerializeToString())


main()
11 changes: 11 additions & 0 deletions tf2onnx/optimizer/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT license.

"""tf2onnx.optimizer package: exposes the onnx_graph and transpose_optimizer modules."""

from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

# Public submodules of this package.
__all__ = ["onnx_graph", "transpose_optimizer"]

#import tf2onnx
#from tf2onnx import tfonnx, utils, graph, graph_matcher
Loading