diff --git a/src/ngraph_builder.cc b/src/ngraph_builder.cc index af6eee6ca..3705b2213 100644 --- a/src/ngraph_builder.cc +++ b/src/ngraph_builder.cc @@ -2232,6 +2232,34 @@ static Status TranslateL2LossOp( return Status::OK(); } +static Status TranslateLogSoftmaxOp( + const Node* op, const std::vector& static_input_map, + Builder::OpMap& ng_op_map) { + shared_ptr ng_inp; + TF_RETURN_IF_ERROR(GetInputNodes(ng_op_map, op, &ng_inp)); + auto inp_shape = ng_inp->get_shape(); + int rank = inp_shape.size(); + auto ng_axis = ng::AxisSet{rank - 1}; + // Batch i, class j + // logsoftmax[i, j] = logits[i, j] - log(sum(exp(logits[i]))) + // Actually implementing: logsoftmax[i, j] = logits[i, j] - max(logits[i]) - + // log(sum(exp(logits[i] - max(logits[i])))) + auto ng_max = ConstructNgNode( + op->name(), ConstructNgNode(op->name(), ng_inp, ng_axis), + inp_shape, ng_axis); + auto ng_inp_minus_max = + ConstructNgNode(op->name(), ng_inp, ng_max); + auto ng_exp = ConstructNgNode(op->name(), ng_inp_minus_max); + auto ng_log_sum = ConstructNgNode( + op->name(), ConstructNgNode(op->name(), ng_exp, ng_axis)); + auto ng_broadcast = ConstructNgNode( + op->name(), ng_log_sum, ng_inp->get_shape(), ng_axis); + auto ng_output = ConstructNgNode( + op->name(), ng_inp_minus_max, ng_broadcast); + SaveNgOp(ng_op_map, op->name(), ng_output); + return Status::OK(); +} + static Status TranslateMatMulOp( const Node* op, const std::vector& static_input_map, Builder::OpMap& ng_op_map) { @@ -4525,6 +4553,7 @@ const static std::map< {"HorovodAllreduce", TranslateAllreduceOp}, {"Identity", TranslateIdentityOp}, {"L2Loss", TranslateL2LossOp}, + {"LogSoftmax", TranslateLogSoftmaxOp}, {"Less", TranslateBinaryOp}, {"LessEqual", TranslateBinaryOp}, {"Log", TranslateUnaryOp}, diff --git a/src/ngraph_mark_for_clustering.cc b/src/ngraph_mark_for_clustering.cc index 035d2e055..b12faa750 100644 --- a/src/ngraph_mark_for_clustering.cc +++ b/src/ngraph_mark_for_clustering.cc @@ -281,6 +281,7 @@ Status MarkForClustering(Graph* graph, #endif confirmation_function_map["Identity"] = SimpleConfirmationFunction(); confirmation_function_map["L2Loss"] = SimpleConfirmationFunction(); + confirmation_function_map["LogSoftmax"] = SimpleConfirmationFunction(); confirmation_function_map["Less"] = SimpleConfirmationFunction(); confirmation_function_map["LessEqual"] = SimpleConfirmationFunction(); confirmation_function_map["Log"] = SimpleConfirmationFunction(); @@ -449,6 +450,7 @@ Status MarkForClustering(Graph* graph, #endif type_constraint_map["Identity"]["T"] = NGraphDTypes(); type_constraint_map["L2Loss"]["T"] = NGraphNumericDTypes(); + type_constraint_map["LogSoftmax"]["T"] = NGraphRealDTypes(); type_constraint_map["Less"]["T"] = NGraphDTypes(); type_constraint_map["LessEqual"]["T"] = NGraphDTypes(); type_constraint_map["Log"]["T"] = NGraphNumericDTypes(); diff --git a/test/python/tensorflow/python_tests_list_gpu.txt b/test/python/tensorflow/python_tests_list_gpu.txt index 04b3320c0..ee660b612 100644 --- a/test/python/tensorflow/python_tests_list_gpu.txt +++ b/test/python/tensorflow/python_tests_list_gpu.txt @@ -407,10 +407,10 @@ slice_op_test.SliceTest.testSliceOfSlice #softmax_op_test.SoftmaxTest.test1DTensorAsInputNoReshape #softmax_op_test.SoftmaxTest.test3DTensorAsInput #softmax_op_test.SoftmaxTest.test3DTensorAsInputNoReshape -softmax_op_test.SoftmaxTest.testAlongFirstDimension -softmax_op_test.SoftmaxTest.testAlongSecondDimension +#softmax_op_test.SoftmaxTest.testAlongFirstDimension +#softmax_op_test.SoftmaxTest.testAlongSecondDimension softmax_op_test.SoftmaxTest.testDimTooLarge -softmax_op_test.SoftmaxTest.testDouble +#softmax_op_test.SoftmaxTest.testDouble softmax_op_test.SoftmaxTest.testEmptyInput softmax_op_test.SoftmaxTest.testFloat #softmax_op_test.SoftmaxTest.testFloatGPU diff --git a/test/test_nn_ops.cpp b/test/test_nn_ops.cpp index 6845f3687..a05a582f9 100644 --- a/test/test_nn_ops.cpp +++ b/test/test_nn_ops.cpp @@ -1116,6 +1116,30 @@ TEST(NNOps, L2Loss) { } } +// Test Op :"LogSoftmax" +TEST(NNOps, LogSoftmax) { + std::vector> input_sizes = { + {3}, {3, 2}, {5, 6}, {3, 4, 5}, {2, 3, 4, 5}}; + + vector static_input_indexes = {}; + + for (auto const& input_size : input_sizes) { + Scope root = Scope::NewRootScope(); + + Tensor input_data(DT_FLOAT, TensorShape(input_size)); + AssignInputValuesRandom(input_data, -2, 2); + + auto R = ops::LogSoftmax(root, input_data); + vector output_datatypes = {DT_FLOAT}; + std::vector sess_run_fetchoutputs = {R}; + + OpExecuter opexecuter(root, "LogSoftmax", static_input_indexes, + output_datatypes, sess_run_fetchoutputs); + + opexecuter.RunTest(); + } +} + // Test Op :"MaxPool3D" TEST(NNOps, MaxPool3DNDHWCSame) { std::vector> input_sizes;