diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..e77bd090e --- /dev/null +++ b/LICENSE @@ -0,0 +1,203 @@ +Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
+      Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/README.md b/README.md
new file mode 100644
index 000000000..e479d81c2
--- /dev/null
+++ b/README.md
@@ -0,0 +1,46 @@
+### WORK-IN-PROGRESS
+
+# PaddlePaddle to ONNX model converter
+
+Converts a PaddlePaddle model (`ProgramDesc` + parameters) into an ONNX graph. Uses the ONNX pip library and targets PaddlePaddle **Fluid**.
+
+To understand PaddlePaddle's (non-)graph way of representing a deep learning program, a `ProgramDesc`, refer to: https://github.com/PaddlePaddle/Paddle/blob/develop/doc/fluid/design/concepts/program.md.
+
+## Status
+
+Targets Paddle->ONNX conversion for now; the reverse direction will be supported subsequently.
+
+Currently a work-in-progress tool, since there are features in PaddlePaddle that are not supported in ONNX today and vice versa.
+
+## Usage
+
+First, generate a model directory by running any fluid test / example and writing the model out with the `fluid.io.save_inference_model` API.
+
+Then, run `convert.py`, passing the generated model directory to the `--modeldir` argument. A minimal end-to-end sketch is given in the "Example" section below.
+
+
+## Installation
+
+(TBD)
+
+Create a virtual environment and install ONNX using pip.
+```
+pip install onnx==1.1
+```
+
+Build PaddlePaddle's `develop` branch from source using the info here:
+http://paddlepaddle.org/docs/develop/documentation/en/build_and_install/build_from_source_en.html
+
+## Testing
+
+TBD
+
+## Supported models
+
+We aim to at least support all the models from our model bank. During our preliminary stage, we plan to support the models generated from:
+
+- [fit_a_line](https://github.com/PaddlePaddle/Paddle/blob/develop/python/paddle/fluid/tests/book/test_fit_a_line.py)
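+
+## Example
+
+A minimal sketch of the intended workflow (untested; the tiny network and the paths below are illustrative only, not part of the tool):
+
+```
+import paddle.fluid as fluid
+
+# A trivial fit_a_line-style network.
+x = fluid.layers.data(name='x', shape=[13], dtype='float32')
+y_predict = fluid.layers.fc(input=x, size=1)
+
+place = fluid.CPUPlace()
+exe = fluid.Executor(place)
+exe.run(fluid.default_startup_program())
+
+# Write the ProgramDesc and parameters to a model directory.
+fluid.io.save_inference_model('./fit_a_line.inference.model',
+                              ['x'], [y_predict], exe)
+```
+
+Then point the converter at the generated directory:
+
+```
+python convert.py --modeldir ./fit_a_line.inference.model
+```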
+
+## License
+Provided under the [Apache-2.0 license](LICENSE).
diff --git a/convert.py b/convert.py
new file mode 100644
index 000000000..d6e3451cf
--- /dev/null
+++ b/convert.py
@@ -0,0 +1,29 @@
+import argparse
+
+from onnx import helper
+import paddle.fluid as fluid
+
+# import ops
+
+
+def convert(dirname=''):
+    # Read the model files.
+    place = fluid.CPUPlace()
+    exe = fluid.Executor(place)
+
+    inference_scope = fluid.core.Scope()
+    with fluid.scope_guard(inference_scope):
+        [inference_program, feed_target_names,
+         fetch_targets] = fluid.io.load_inference_model(dirname, exe)
+
+    # TODO: walk the blocks of inference_program and, for each op, build
+    # ONNX nodes with helper.make_node, dispatching through
+    # ops.PADDLE_TO_ONNX, then assemble them with helper.make_graph.
+
+
+if __name__ == "__main__":
+    # Read arguments: path to model.
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--modeldir", required=True, help="input model")
+    args = parser.parse_args()
+
+    convert(args.modeldir)
diff --git a/ops.py b/ops.py
new file mode 100644
index 000000000..b29ef16aa
--- /dev/null
+++ b/ops.py
@@ -0,0 +1,548 @@
+"""
+Priority list of (unique) ops to figure out support for.
+
+test_fit_a_line.py
+- mean
+- mul
+- elementwise_add
+- elementwise_sub
+- fill_constant
+
+^ Try to make this run before proceeding.
+
+test_machine_translation.py
+- lookup_table
+- tanh
+- lstm
+- sequence_pool
+- lod_rank_table
+- max_sequence_len
+- less_than
+- lod_tensor_to_array
+- write_to_array
+- while
+- array_to_lod_tensor
+- cross_entropy
+- read_from_array
+- sum
+- scale
+- adagrad
+- shrink_rnn_memory
+- softmax
+- increment
+"""
+
+# The PADDLE_TO_ONNX mapping table lives at the bottom of this module,
+# after the modifier functions it references.
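+
+
+# Illustrative sketch only, not wired up anywhere yet: each modifier is
+# expected to take the fluid operator and return the equivalent ONNX node.
+# The name `_example_add_modifier` is hypothetical.
+def _example_add_modifier(fluid_op):
+    """Hypothetical modifier mapping 'elementwise_add' to ONNX 'Add'."""
+    from onnx import helper  # local import keeps the sketch self-contained
+    return helper.make_node('Add',
+                            inputs=fluid_op.input_arg_names,
+                            outputs=fluid_op.output_arg_names)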
+
+
+def abs_op():
+    pass
+
+
+def add_op():
+    pass
+
+
+def and_op():
+    """
+    Need to support broadcast.
+    """
+    pass
+
+
+def argmax_op():
+    pass
+
+
+def argmin_op():
+    pass
+
+
+def averagepool_op():
+    """
+    Need to support more pad modes.
+    """
+    pass
+
+
+def batchnorm_op():
+    pass
+
+
+def cast_op():
+    pass
+
+
+def ceil_op():
+    pass
+
+
+def clip_op():
+    pass
+
+
+def concat_op():
+    pass
+
+
+def constant_op():
+    pass
+
+
+def conv_op():
+    """
+    Need to support broadcast.
+    """
+    pass
+
+
+def convtranspose_op():
+    pass
+
+
+def depthtospace_op():
+    pass
+
+
+def div_op():
+    pass
+
+
+def dropout_op():
+    pass
+
+
+def elu_op():
+    pass
+
+
+def equal_op():
+    pass
+
+
+def exp_op():
+    pass
+
+
+def flatten_op():
+    pass
+
+
+def floor_op():
+    pass
+
+
+def gru_op():
+    pass
+
+
+def gather_op():
+    pass
+
+
+def gemm_op():
+    pass
+
+
+def globalaveragepool_op():
+    pass
+
+
+def globallppool_op():
+    pass
+
+
+def globalmaxpool_op():
+    pass
+
+
+def greater_op():
+    pass
+
+
+def hardsigmoid_op():
+    pass
+
+
+def hardmax_op():
+    pass
+
+
+def instancenormalization_op():
+    pass
+
+
+def lrn_op():
+    pass
+
+
+def lstm_op():
+    pass
+
+
+def leakyrelu_op():
+    pass
+
+
+def less_op():
+    pass
+
+
+def log_op():
+    pass
+
+
+def logsoftmax_op():
+    pass
+
+
+def lpnormalization_op():
+    pass
+
+
+def lppool_op():
+    pass
+
+
+def matmul_op():
+    pass
+
+
+def max_op():
+    pass
+
+
+def maxpool_op():
+    """
+    Need to support broadcast.
+    """
+    pass
+
+
+def maxroipool_op():
+    pass
+
+
+def mean_op():
+    pass
+
+
+def min_op():
+    pass
+
+
+def mul_op():
+    pass
+
+
+def neg_op():
+    pass
+
+
+def not_op():
+    """
+    Need to support broadcast.
+    """
+    pass
+
+
+def or_op():
+    """
+    Need to support broadcast.
+    """
+    pass
+
+
+def prelu_op():
+    pass
+
+
+def pad_op():
+    pass
+
+
+def pow_op():
+    pass
+
+
+def rnn_op():
+    pass
+
+
+def randomnormal_op():
+    pass
+
+
+def randomnormallike_op():
+    pass
+
+
+def randomuniform_op():
+    pass
+
+
+def randomuniformlike_op():
+    pass
+
+
+def reciprocal_op():
+    pass
+
+
+def reducel1_op():
+    pass
+
+
+def reducel2_op():
+    pass
+
+
+def reducelogsum_op():
+    pass
+
+
+def reducelogsumexp_op():
+    pass
+
+
+def reducemax_op():
+    pass
+
+
+def reducemean_op():
+    pass
+
+
+def reducemin_op():
+    pass
+
+
+def reduceprod_op():
+    pass
+
+
+def reducesum_op():
+    pass
+
+
+def reducesumsquare_op():
+    pass
+
+
+def relu_op():
+    pass
+
+
+def reshape_op():
+    pass
+
+
+def selu_op():
+    pass
+
+
+def shape_op():
+    pass
+
+
+def sigmoid_op():
+    pass
+
+
+def size_op():
+    pass
+
+
+def slice_op():
+    pass
+
+
+def softmax_op():
+    pass
+
+
+def softplus_op():
+    pass
+
+
+def softsign_op():
+    pass
+
+
+def spacetodepth_op():
+    pass
+
+
+def split_op():
+    pass
+
+
+def sqrt_op():
+    pass
+
+
+def squeeze_op():
+    pass
+
+
+def sub_op():
+    pass
+
+
+def sum_op():
+    pass
+
+
+def tanh_op():
+    pass
+
+
+def tile_op():
+    pass
+
+
+def topk_op():
+    pass
+
+
+def transpose_op():
+    pass
+
+
+def unsqueeze_op():
+    pass
+
+
+def xor_op():
+    """
+    Need to support broadcast.
+    """
+    pass
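+
+
+# The PADDLE_TO_ONNX table below binds each supported Paddle op to its
+# ONNX op name and the modifier implementing the translation. The
+# converter is expected to dispatch through it, roughly (illustrative,
+# not implemented yet):
+#
+#     onnx_name, modifier = PADDLE_TO_ONNX[fluid_op.type]
+#     node = modifier(fluid_op)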
+
+
+# Based on the ONNX 1.0 operator list generated on March 26th, 2018.
+# Reference for paddle operator availability taken from:
+# https://github.com/PaddlePaddle/Paddle/issues/8028
+
+# ONNX ops that are built from multiple Paddle ops are keyed by a
+# comma-separated string of the Paddle op names, which is fed into the
+# modifier.
+PADDLE_TO_ONNX = {
+    # Paddle op name : (ONNX op name, modifier)
+    'abs': ('Abs', abs_op),
+    'elementwise_add': ('Add', add_op),
+
+    # ('And', and_op),  # source Paddle op TBD
+    # 'ArgMax', NEEDS ATTENTION.
+    # 'ArgMin', NEEDS ATTENTION.
+    # ('AveragePool', averagepool_op),  # source Paddle op TBD
+    'batch_norm': ('BatchNormalization', batchnorm_op),
+    'cast': ('Cast', cast_op),
+    # 'Ceil', NEEDS ATTENTION.
+    'clip': ('Clip', clip_op),
+    'concat': ('Concat', concat_op),
+
+    # ('Constant', constant_op),  # source Paddle ops TBD
+    'conv': ('Conv', conv_op),
+
+    # Need to continue the mapping below.
+    # 'ConvTranspose',
+    # 'DepthToSpace',
+    # 'Div',
+    # 'Dropout',
+    # 'Elu',
+    # 'Equal',
+    # 'Exp',
+    # 'Flatten',
+    # 'Floor', NEEDS ATTENTION.
+    # 'GRU',
+    # 'Gather',
+    # 'Gemm',
+    # 'GlobalAveragePool',
+    # 'GlobalLpPool',
+    # 'GlobalMaxPool',
+    # 'Greater',
+    # 'HardSigmoid',
+    # 'Hardmax', NEEDS ATTENTION.
+    # 'InstanceNormalization', NEEDS ATTENTION.
+    # 'LRN',
+    # 'LSTM',
+    # 'LeakyRelu',
+    # 'Less',
+    # 'Log',
+    # 'LogSoftmax',  # maps from multiple Paddle ops
+    # 'LpNormalization',
+    # 'LpPool',
+    # 'MatMul',
+    # 'Max',
+    # 'MaxPool', NEEDS ATTENTION.
+    # 'MaxRoiPool',
+    'mean': ('Mean', mean_op),
+    # 'Min',
+    'mul': ('Mul', mul_op),
+    # 'Neg',  # maps from multiple Paddle ops
+    # 'Not',
+    # 'Or',
+    # 'PRelu',
+    # 'Pad',
+    # 'Pow',
+    # 'RNN',  # maps from multiple Paddle ops
+    # 'RandomNormal',
+    # 'RandomNormalLike', NEEDS ATTENTION.
+    # 'RandomUniform', NEEDS ATTENTION.
+    # 'RandomUniformLike', NEEDS ATTENTION.
+    # 'Reciprocal',
+    # 'ReduceL1',
+    # 'ReduceL2',
+    # 'ReduceLogSum',  # maps from multiple Paddle ops
+    # 'ReduceLogSumExp',  # maps from multiple Paddle ops
+    # 'ReduceMax',
+    # 'ReduceMean',
+    # 'ReduceMin',
+    # 'ReduceProd', NEEDS ATTENTION.
+    # 'ReduceSum',
+    # 'ReduceSumSquare',  # maps from multiple Paddle ops
+    # 'Relu',
+    # 'Reshape',
+    # 'Selu', NEEDS ATTENTION.
+    # 'Shape',
+    # 'Sigmoid',
+    # 'Size',
+    # 'Slice', NEEDS ATTENTION.
+    # 'Softmax',
+    # 'Softplus',
+    # 'Softsign',
+    # 'SpaceToDepth',
+    # 'Split',
+    # 'Sqrt',
+    # 'Squeeze', NEEDS ATTENTION.
+    'elementwise_sub': ('Sub', sub_op),
+    # 'Sum',
+    # 'Tanh',
+    # 'Tile',
+    # 'TopK',
+    # 'Transpose',
+    # 'Unsqueeze', NEEDS ATTENTION.
+    # 'Xor',
+    # 'experimental ATen'
+    # 'experimental Affine'
+    # 'experimental ConstantFill'
+    # 'experimental Crop'
+    # 'experimental FC'
+    # 'experimental GRUUnit'
+    # 'experimental GivenTensorFill'
+    # 'assign': 'experimental Identity'
+    # 'experimental If'
+    # 'experimental ImageScaler'
+    # 'experimental Loop'
+    # 'experimental LoopIndexTensor'
+    # 'experimental MeanVarianceNormalization'
+    # 'experimental ParametricSoftplus'
+    # 'experimental Scale'
+    # 'experimental ScaledTanh'
+    # 'experimental ThresholdedRelu'
+    # 'experimental Upsample'
+}
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/types.py b/types.py
new file mode 100644
index 000000000..459ee9032
--- /dev/null
+++ b/types.py
@@ -0,0 +1,19 @@
+from onnx import onnx_pb2
+import paddle.fluid.core as core
+
+
+PADDLE_TO_ONNX_DTYPE = {
+    core.VarDesc.VarType.FP32: onnx_pb2.TensorProto.FLOAT,
+    core.VarDesc.VarType.FP16: onnx_pb2.TensorProto.FLOAT16,
+    core.VarDesc.VarType.FP64: onnx_pb2.TensorProto.DOUBLE,
+    core.VarDesc.VarType.INT32: onnx_pb2.TensorProto.INT32,
+    core.VarDesc.VarType.INT16: onnx_pb2.TensorProto.INT16,
+    core.VarDesc.VarType.INT64: onnx_pb2.TensorProto.INT64,
+    core.VarDesc.VarType.BOOL: onnx_pb2.TensorProto.BOOL,
+    # No Paddle equivalents yet for TensorProto.INT8, UINT8, UINT16,
+    # STRING, COMPLEX64 and COMPLEX128.
+}
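+
+
+# A convenience lookup is sketched below; the name `onnx_dtype` is
+# hypothetical and nothing calls it yet. It fails loudly on fluid dtypes
+# that have no ONNX equivalent.
+def onnx_dtype(fluid_dtype):
+    try:
+        return PADDLE_TO_ONNX_DTYPE[fluid_dtype]
+    except KeyError:
+        raise NotImplementedError(
+            'Fluid dtype %s has no ONNX equivalent yet' % fluid_dtype)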