diff --git a/ngraph_bridge/ngraph_builder.cc b/ngraph_bridge/ngraph_builder.cc
index b41a4de3c..5f9d67b39 100644
--- a/ngraph_bridge/ngraph_builder.cc
+++ b/ngraph_bridge/ngraph_builder.cc
@@ -2513,6 +2513,24 @@ static Status TranslateLogSoftmaxOp(
   return Status::OK();
 }
 
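+// Translate TF "Softplus" as log(exp(x) + 1), composed from nGraph Exp,
+// Constant, Add and Log nodes. Note that exp(x) can overflow for large x;
+// a numerically stable variant is max(x, 0) + log1p(exp(-|x|)).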
+static Status TranslateSoftplusOp(
+    const Node* op, const std::vector<const Tensor*>& static_input_map,
+    Builder::OpMap& ng_op_map) {
+  shared_ptr<ng::Node> ng_inp;
+  TF_RETURN_IF_ERROR(GetInputNodes(ng_op_map, op, &ng_inp));
+  auto ng_exp = ConstructNgNode<ng::op::Exp>(op->name(), ng_inp);
+  auto constant_1 = ConstructNgNode<ng::op::Constant>(
+      op->name(), ng_inp->get_element_type(), ng_inp->get_shape(),
+      std::vector<std::string>(ng::shape_size(ng_inp->get_shape()), "1"));
+  auto ng_output = ConstructNgNode<ng::op::Log>(
+      op->name(), ConstructNgNode<ng::op::Add>(op->name(), ng_exp, constant_1));
+  SaveNgOp(ng_op_map, op->name(), ng_output);
+  return Status::OK();
+}
+
 static Status TranslateMatMulOp(
     const Node* op, const std::vector<const Tensor*>& static_input_map,
     Builder::OpMap& ng_op_map) {
@@ -4870,7 +4885,7 @@ const static std::map<
       {"Sigmoid", TranslateSigmoidOp}, {"SigmoidGrad", TranslateSigmoidGradOp},
       {"Size", TranslateSizeOp}, {"Sign", TranslateUnaryOp<ng::op::Sign>},
       {"Slice", TranslateSliceOp}, {"Snapshot", TranslateIdentityOp},
-      {"Softmax", TranslateSoftmaxOp},
+      {"Softmax", TranslateSoftmaxOp}, {"Softplus", TranslateSoftplusOp},
       {"SpaceToDepth", TranslateSpaceToDepthOp},
       {"SparseSoftmaxCrossEntropyWithLogits",
        TranslateSparseSoftmaxCrossEntropyWithLogitsOp},
diff --git a/ngraph_bridge/ngraph_mark_for_clustering.cc b/ngraph_bridge/ngraph_mark_for_clustering.cc
index 6aa4fea2f..28c03ef1e 100644
--- a/ngraph_bridge/ngraph_mark_for_clustering.cc
+++ b/ngraph_bridge/ngraph_mark_for_clustering.cc
@@ -375,6 +375,7 @@ Status MarkForClustering(Graph* graph, const std::set<string> skip_these_nodes,
   confirmation_function_map["Slice"] = SimpleConfirmationFunction();
   confirmation_function_map["Snapshot"] = SimpleConfirmationFunction();
   confirmation_function_map["Softmax"] = SimpleConfirmationFunction();
+  confirmation_function_map["Softplus"] = SimpleConfirmationFunction();
   confirmation_function_map["SpaceToDepth"] =
       confirmation_function_map["DepthToSpace"];
   confirmation_function_map["SparseSoftmaxCrossEntropyWithLogits"] =
@@ -569,6 +570,7 @@ Status MarkForClustering(Graph* graph, const std::set<string> skip_these_nodes,
   type_constraint_map["Slice"]["Index"] = NGraphIndexDTypes();
   type_constraint_map["Snapshot"]["T"] = NGraphDTypes();
   type_constraint_map["Softmax"]["T"] = NGraphNumericDTypes();
+  type_constraint_map["Softplus"]["T"] = NGraphRealDTypes();
   type_constraint_map["SpaceToDepth"]["T"] = NGraphDTypes();
   type_constraint_map["SparseSoftmaxCrossEntropyWithLogits"]["T"] =
       NGraphNumericDTypes();
diff --git a/test/python/test_softplus.py b/test/python/test_softplus.py
new file mode 100644
index 000000000..25b7ff684
--- /dev/null
+++ b/test/python/test_softplus.py
@@ -0,0 +1,55 @@
+# ==============================================================================
+# Copyright 2018-2019 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""nGraph TensorFlow softplus test
+
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import pytest
+
+import numpy as np
+import tensorflow as tf
+
+from common import NgraphTest
+
+
+class TestSoftplus(NgraphTest):
+
+    def test_softplus(self):
+        x = tf.placeholder(tf.float32, shape=(2, 3))
+        y = tf.placeholder(tf.float32, shape=(2, 3))
+        z = tf.placeholder(tf.float32, shape=(2, 3))
+
+        a = x + y + z
+        b = x + y + z
+        c = a * b
+        d = tf.nn.softplus(c)
+
+        # input values
+        x_np = np.full((2, 3), 1.0)
+        y_np = np.full((2, 3), 1.0)
+        z_np = np.full((2, 3), 1.0)
+
+        sess_fn = lambda sess: sess.run((a, c, d),
+                                        feed_dict={
+                                            x: x_np,
+                                            y: y_np,
+                                            z: z_np
+                                        })
+        assert np.allclose(
+            self.with_ngraph(sess_fn), self.without_ngraph(sess_fn))
diff --git a/test/test_nn_ops.cpp b/test/test_nn_ops.cpp
index f898acc39..63c14ae65 100644
--- a/test/test_nn_ops.cpp
+++ b/test/test_nn_ops.cpp
@@ -1575,6 +1575,30 @@ TEST(NNOps, SoftmaxZeroDimTest2) {
   opexecuter.RunTest();
 }
 
+// Test Op: "Softplus"
+TEST(NNOps, Softplus) {
+  std::vector<std::vector<int64>> input_sizes = {
+      {3}, {3, 2}, {5, 6}, {3, 4, 5}, {2, 3, 4, 5}};
+
+  vector<int> static_input_indexes = {};
+
+  for (auto const& input_size : input_sizes) {
+    Scope root = Scope::NewRootScope();
+
+    Tensor input_data(DT_FLOAT, TensorShape(input_size));
+    AssignInputValuesRandom<float>(input_data, -2, 2);
+
+    auto R = ops::Softplus(root, input_data);
+    vector<DataType> output_datatypes = {DT_FLOAT};
+    std::vector<Output> sess_run_fetchoutputs = {R};
+
+    OpExecuter opexecuter(root, "Softplus", static_input_indexes,
+                          output_datatypes, sess_run_fetchoutputs);
+
+    opexecuter.RunTest();
+  }
+}
+
 // Computes softmax cross entropy cost and gradients to backpropagate.
 TEST(NNOps, SparseSoftmaxCrossEntropyWithLogits) {
   Scope root = Scope::NewRootScope();
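
A quick NumPy sanity check of the identity the translation above relies on,
softplus(x) = log(exp(x) + 1), against the overflow-safe form (a minimal
sketch, assuming nothing beyond NumPy):

    import numpy as np

    x = np.linspace(-5.0, 5.0, 11)
    # naive form, as built by the bridge from Exp, Add and Log
    naive = np.log(np.exp(x) + 1.0)
    # numerically stable form: max(x, 0) + log1p(exp(-|x|))
    stable = np.maximum(x, 0.0) + np.log1p(np.exp(-np.abs(x)))
    assert np.allclose(naive, stable)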