diff --git a/CMakeLists.txt b/CMakeLists.txt index fbb5232bb..15d4896db 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -248,7 +248,7 @@ if (NOT USE_PRE_BUILT_NGRAPH) ExternalProject_Add( ext_ngraph GIT_REPOSITORY https://github.com/NervanaSystems/ngraph - GIT_TAG v0.25.1-rc.7 + GIT_TAG v0.25.1-rc.8 CMAKE_ARGS -DNGRAPH_DISTRIBUTED_ENABLE=${NGRAPH_DISTRIBUTED_ENABLE} -DNGRAPH_INSTALL_PREFIX=${NGRAPH_ARTIFACTS_DIR} diff --git a/README.md b/README.md index c6a9dff82..416f93d8e 100644 --- a/README.md +++ b/README.md @@ -88,7 +88,7 @@ Once TensorFlow's dependencies are installed, clone the `ngraph-bridge` repo: git clone https://github.com/tensorflow/ngraph-bridge.git cd ngraph-bridge - git checkout v0.19.0-rc4 + git checkout v0.19.0-rc5 Run the following Python script to build TensorFlow, nGraph, and the bridge. Use Python 3.5: diff --git a/bazel/WORKSPACE b/bazel/WORKSPACE index 8e9341462..0a26e0fa9 100644 --- a/bazel/WORKSPACE +++ b/bazel/WORKSPACE @@ -55,11 +55,11 @@ tf_workspace(path_prefix = "", tf_repo_name = "org_tensorflow") http_archive( name = "ngraph", build_file = "//:bazel/ngraph.BUILD", - sha256 = "345f0566c3ae4a968daea116c26ce9a90a94305148d6b8b56b6309448432492e", - strip_prefix = "ngraph-0.25.1-rc.7", + sha256 = "4cd939dcf07544dc71f46f422d82a175b210fe7b21417fa67436168d5c40c9dc", + strip_prefix = "ngraph-0.25.1-rc.8", urls = [ - "https://mirror.bazel.build/github.com/NervanaSystems/ngraph/archive/v0.25.1-rc.7.tar.gz", - "https://github.com/NervanaSystems/ngraph/archive/v0.25.1-rc.7.tar.gz" + "https://mirror.bazel.build/github.com/NervanaSystems/ngraph/archive/v0.25.1-rc.8.tar.gz", + "https://github.com/NervanaSystems/ngraph/archive/v0.25.1-rc.8.tar.gz" ], ) diff --git a/bazel/ngraph.BUILD b/bazel/ngraph.BUILD index 0b5b7aa84..67b6238cc 100644 --- a/bazel/ngraph.BUILD +++ b/bazel/ngraph.BUILD @@ -84,7 +84,7 @@ cc_library( "-fstack-protector-all", '-D SHARED_LIB_PREFIX=\\"lib\\"', '-D SHARED_LIB_SUFFIX=\\".so\\"', - '-D 
NGRAPH_VERSION=\\"v0.25.1-rc.7\\"', + '-D NGRAPH_VERSION=\\"v0.25.1-rc.8\\"', "-D NGRAPH_DEX_ONLY", '-D PROJECT_ROOT_DIR=\\"\\"', '-D NGRAPH_STATIC_LIB_ENABLE' @@ -117,7 +117,7 @@ cc_library( "-fstack-protector-all", '-D SHARED_LIB_PREFIX=\\"lib\\"', '-D SHARED_LIB_SUFFIX=\\".so\\"', - '-D NGRAPH_VERSION=\\"v0.25.1-rc.7\\"', + '-D NGRAPH_VERSION=\\"v0.25.1-rc.8\\"', "-D NGRAPH_DEX_ONLY", '-D PROJECT_ROOT_DIR=\\"\\"', ] + CXX_ABI, diff --git a/build_ngtf.py b/build_ngtf.py index f649dd326..9a9f46cee 100755 --- a/build_ngtf.py +++ b/build_ngtf.py @@ -53,7 +53,7 @@ def main(): ''' # Component versions - ngraph_version = "v0.25.1-rc.7" + ngraph_version = "v0.25.1-rc.8" tf_version = "v1.14.0" # Command line parser options diff --git a/ngraph_bridge/ngraph_builder.cc b/ngraph_bridge/ngraph_builder.cc index 51adb9c12..035e663ae 100644 --- a/ngraph_bridge/ngraph_builder.cc +++ b/ngraph_bridge/ngraph_builder.cc @@ -92,15 +92,20 @@ static void SaveNgOp(Builder::OpMap& ng_op_map, const std::string& op_name, ng_op_map[op_name].push_back(output_node); } -template -std::shared_ptr ConstructNgNode(const std::string& op_name, - TArg&&... Args) { - auto ng_node = std::make_shared(std::forward(Args)...); +void Builder::SetTracingInfo(const std::string& op_name, + const shared_ptr ng_node) { ng_node->set_friendly_name(op_name); ng_node->add_provenance_tag(op_name); if (config::IsLoggingPlacement()) { cout << "TF_to_NG: " << op_name << " --> " << ng_node->get_name() << "\n"; } +} + +template +std::shared_ptr ConstructNgNode(const std::string& op_name, + TArg&&... 
Args) { + auto ng_node = std::make_shared(std::forward(Args)...); + Builder::SetTracingInfo(op_name, ng_node); return ng_node; } @@ -346,6 +351,25 @@ Builder::TF_NGRAPH_CONST_MAP() { return the_map; } +std::pair, std::shared_ptr> +Builder::PerformNgBroadcast(const string& prov_tag, + std::shared_ptr ng_lhs, + std::shared_ptr ng_rhs) { + // builder::numpy_broadcast is the only known builder that has the possibility + // that the output node is same as the input node + // So we take special care to check, before calling SetTracingInfo + std::shared_ptr ng_lhs_new, ng_rhs_new; + std::tie(ng_lhs_new, ng_rhs_new) = + ng::builder::numpy_broadcast(std::make_pair(ng_lhs, ng_rhs)); + if (ng_lhs_new != ng_lhs) { + Builder::SetTracingInfo(prov_tag, ng_lhs_new); + } + if (ng_rhs_new != ng_rhs) { + Builder::SetTracingInfo(prov_tag, ng_rhs_new); + } + return make_pair(ng_lhs_new, ng_rhs_new); +} + // Helper function to translate a unary op. // // Parameters: @@ -376,7 +400,9 @@ static Status TranslateUnaryOp( shared_ptr ng_input; TF_RETURN_IF_ERROR(GetInputNodes(ng_op_map, op, &ng_input)); auto ng_node = create_unary_op(ng_input); - ng_node->add_provenance_tag(op->name()); + if (ng_node != ng_input) { + Builder::SetTracingInfo(op->name(), ng_node); + } SaveNgOp(ng_op_map, op->name(), ng_node); return Status::OK(); } @@ -435,12 +461,12 @@ static Status TranslateBinaryOp( TF_RETURN_IF_ERROR(GetInputNodes(ng_op_map, op, &ng_lhs, &ng_rhs)); std::tie(ng_lhs, ng_rhs) = - ng::builder::numpy_broadcast(std::make_pair(ng_lhs, ng_rhs)); - ng_lhs->add_provenance_tag(op->name()); - ng_rhs->add_provenance_tag(op->name()); + Builder::PerformNgBroadcast(op->name(), ng_lhs, ng_rhs); auto ng_node = create_binary_op(ng_lhs, ng_rhs); - ng_node->add_provenance_tag(op->name()); + if (ng_node != ng_lhs && ng_node != ng_rhs) { + Builder::SetTracingInfo(op->name(), ng_node); + } SaveNgOp(ng_op_map, op->name(), ng_node); @@ -502,8 +528,7 @@ static Status TranslateQuantizedPoolOp(const Node* op, 
BatchedOpParamToNGraph(is_nhwc, ng_input->get_output_shape(0), ng_image_shape); BatchedOpParamToNGraph(is_nhwc, tf_ksize, ng_kernel_shape); - BatchToNGraph(is_nhwc, ng_input); - ng_input->add_provenance_tag(op->name()); + BatchToNGraph(op->name(), is_nhwc, ng_input); NGRAPH_VLOG(3) << "ng_strides: " << ng::join(ng_strides); NGRAPH_VLOG(3) << "ng_image_shape: " << ng::join(ng_image_shape); @@ -533,10 +558,9 @@ static Status TranslateQuantizedPoolOp(const Node* op, ng_input, ng_kernel_shape, ng_strides, ng_padding_below, ng_padding_above, dummy_min, dummy_max); } - ng_quant_pool->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_quant_pool); - BatchToTensorflow(is_nhwc, ng_quant_pool); - ng_quant_pool->add_provenance_tag(op->name()); + BatchToTensorflow(op->name(), is_nhwc, ng_quant_pool); SaveNgOp(ng_op_map, op->name(), ng_quant_pool); // For QuantizedAvgPool and QuantizedMaxPool input min-max remains unchanged // and is just propagated along @@ -641,8 +665,7 @@ static Status TranslateAvgPoolOp(const Node* op, BatchedOpParamToNGraph(is_nhwc, tf_strides, ng_strides); BatchedOpParamToNGraph(is_nhwc, ng_input->get_shape(), ng_image_shape); BatchedOpParamToNGraph(is_nhwc, tf_ksize, ng_kernel_shape); - BatchToNGraph(is_nhwc, ng_input); - ng_input->add_provenance_tag(op->name()); + BatchToNGraph(op->name(), is_nhwc, ng_input); NGRAPH_VLOG(3) << "ng_strides: " << ng::join(ng_strides); NGRAPH_VLOG(3) << "ng_image_shape: " << ng::join(ng_image_shape); NGRAPH_VLOG(3) << "ng_kernel_shape: " << ng::join(ng_kernel_shape); @@ -661,8 +684,7 @@ static Status TranslateAvgPoolOp(const Node* op, op->name(), ng_input, ng_kernel_shape, ng_strides, ng_padding_below, ng_padding_above, false); - BatchToTensorflow(is_nhwc, ng_avgpool); - ng_avgpool->add_provenance_tag(op->name()); + BatchToTensorflow(op->name(), is_nhwc, ng_avgpool); NGRAPH_VLOG(3) << "avgpool outshape: {" << ng::join(ng_avgpool->get_shape()) << "}"; @@ -712,8 +734,7 @@ static Status 
TranslateAvgPoolGradOp( ng::Shape ng_window_shape(2); BatchedOpParamReshape(is_nhwc, ng_orig_input_shape, ng_forward_arg_shape); - BatchToNGraph(is_nhwc, ng_grad); - ng_grad->add_provenance_tag(op->name()); + BatchToNGraph(op->name(), is_nhwc, ng_grad); BatchedOpParamToNGraph(is_nhwc, tf_strides, ng_strides); BatchedOpParamToNGraph(is_nhwc, ng_orig_input_shape, ng_image_shape); BatchedOpParamToNGraph(is_nhwc, tf_ksize, ng_window_shape); @@ -738,8 +759,7 @@ static Status TranslateAvgPoolGradOp( op->name(), ng_forward_arg_shape, ng_grad, ng_window_shape, ng_strides, ng_padding_below, ng_padding_above, false); - BatchToTensorflow(is_nhwc, ng_avgpool_backprop); - ng_avgpool_backprop->add_provenance_tag(op->name()); + BatchToTensorflow(op->name(), is_nhwc, ng_avgpool_backprop); NGRAPH_VLOG(3) << "avgpoolbackprop outshape: {" << ng::join(ng_avgpool_backprop->get_shape()) << "}"; @@ -798,7 +818,7 @@ static Status TranslateBatchMatMulOp( ng_lhs_axes.push_back(n_dims - 1); ng_lhs_axes.push_back(n_dims - 2); ng_lhs = ng::builder::numpy_transpose(ng_lhs, ng_lhs_axes); - ng_lhs->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_lhs); ng_lhs_shape = ng_lhs->get_shape(); } else { ng_lhs_axes.push_back(n_dims - 2); @@ -809,7 +829,7 @@ static Status TranslateBatchMatMulOp( ng_rhs_axes.push_back(n_dims - 1); ng_rhs_axes.push_back(n_dims - 2); ng_rhs = ng::builder::numpy_transpose(ng_rhs, ng_rhs_axes); - ng_rhs->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_rhs); ng_rhs_shape = ng_rhs->get_shape(); } else { ng_rhs_axes.push_back(n_dims - 2); @@ -853,18 +873,18 @@ static Status TranslateBatchMatMulOp( ng_lhs_axes.push_back(n_dims - 1); ng_lhs_axes.push_back(n_dims - 2); ng_lhs = ng::builder::numpy_transpose(ng_lhs, ng_lhs_axes); - ng_lhs->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_lhs); } if (tf_adj_y) { ng_rhs_axes.insert(ng_rhs_axes.begin(), n_dims - 2); ng_rhs_axes.insert(ng_rhs_axes.begin(), n_dims - 
1); ng_rhs = ng::builder::numpy_transpose(ng_rhs, ng_rhs_axes); - ng_rhs->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_rhs); } else { ng_rhs_axes.insert(ng_rhs_axes.begin(), n_dims - 1); ng_rhs_axes.insert(ng_rhs_axes.begin(), n_dims - 2); ng_rhs = ng::builder::numpy_transpose(ng_rhs, ng_rhs_axes); - ng_rhs->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_rhs); } ng_lhs_shape = ng_lhs->get_shape(); @@ -1129,6 +1149,7 @@ static Status TranslateCombinedNonMaxSuppressionOp( op->name(), " backend could not return valid ngraph node"); } + Builder::SetTracingInfo(op->name(), ng_cnms); shared_ptr ng_nmsed_boxes = ConstructNgNode(op->name(), ng_cnms, 0); shared_ptr ng_nmsed_scores = @@ -1249,8 +1270,7 @@ static Status TranslateConv2DOp(const Node* op, BatchedOpParamToNGraph(is_nhwc, tf_strides, ng_strides); BatchedOpParamToNGraph(is_nhwc, ng_input->get_shape(), ng_image_shape); BatchedOpParamToNGraph(is_nhwc, tf_dilations, ng_dilations); - BatchToNGraph(is_nhwc, ng_input); - ng_input->add_provenance_tag(op->name()); + BatchToNGraph(op->name(), is_nhwc, ng_input); NGRAPH_VLOG(3) << "ng_strides: " << ng::join(ng_strides); NGRAPH_VLOG(3) << "ng_dilations: " << ng::join(ng_dilations); @@ -1260,7 +1280,7 @@ static Status TranslateConv2DOp(const Node* op, ng_kernel_shape[0] = ng_filter_shape[0]; ng_kernel_shape[1] = ng_filter_shape[1]; Reshape<3, 2, 0, 1>(ng_filter); - ng_filter->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_filter); NGRAPH_VLOG(3) << "ng_kernel_shape: " << ng::join(ng_kernel_shape); @@ -1275,8 +1295,7 @@ static Status TranslateConv2DOp(const Node* op, op->name(), ng_input, ng_filter, ng_strides, ng_dilations, ng_padding_below, ng_padding_above); - BatchToTensorflow(is_nhwc, ng_conv); - ng_conv->add_provenance_tag(op->name()); + BatchToTensorflow(op->name(), is_nhwc, ng_conv); SaveNgOp(ng_op_map, op->name(), ng_conv); return Status::OK(); } @@ -1347,8 +1366,7 @@ static Status 
TranslateConv2DBackpropFilterOp( // nGraph Padding Below [f] // nGraph Padding Above [f] // nGraph Dilation Stride [f] - BatchToNGraph(is_nhwc, ng_data_batch); - ng_data_batch->add_provenance_tag(op->name()); + BatchToNGraph(op->name(), is_nhwc, ng_data_batch); // tf_filter shape : // [filter_height, filter_width, in_channels, out_channels] // reshape for nGraph @@ -1356,8 +1374,7 @@ static Status TranslateConv2DBackpropFilterOp( static_cast(tf_filter_sizes[2]), static_cast(tf_filter_sizes[0]), static_cast(tf_filter_sizes[1])}; - BatchToNGraph(is_nhwc, ng_output_delta); - ng_output_delta->add_provenance_tag(op->name()); + BatchToNGraph(op->name(), is_nhwc, ng_output_delta); BatchedOpParamToNGraph(is_nhwc, tf_strides, ng_window_movement_strides_forward); BatchedOpParamToNGraph(is_nhwc, tf_dilations, @@ -1399,7 +1416,7 @@ static Status TranslateConv2DBackpropFilterOp( // Reshape the output to tf format : [filter_height, filter_width, // in_channels, out_channels] Reshape<2, 3, 1, 0>(ng_back_prop_filter); - ng_back_prop_filter->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_back_prop_filter); SaveNgOp(ng_op_map, op->name(), ng_back_prop_filter); return Status::OK(); @@ -1454,8 +1471,7 @@ static Status TranslateConv2DBackpropInputOp( BatchedOpParamToNGraph(is_nhwc, tf_strides, ng_strides); BatchedOpParamToNGraph(is_nhwc, tf_input_sizes, ng_image_shape); BatchedOpParamToNGraph(is_nhwc, tf_dilations, ng_dilations); - BatchToNGraph(is_nhwc, ng_out_backprop); - ng_out_backprop->add_provenance_tag(op->name()); + BatchToNGraph(op->name(), is_nhwc, ng_out_backprop); if (is_nhwc) { ng_batch_shape = {static_cast(tf_input_sizes[0]), static_cast(tf_input_sizes[3]), @@ -1476,7 +1492,7 @@ static Status TranslateConv2DBackpropInputOp( ng_kernel_shape[0] = ng_filter_shape[0]; ng_kernel_shape[1] = ng_filter_shape[1]; Reshape<3, 2, 0, 1>(ng_filter); - ng_filter->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_filter); NGRAPH_VLOG(3) << 
"ng_kernel_shape: " << ng::join(ng_kernel_shape); @@ -1493,8 +1509,7 @@ static Status TranslateConv2DBackpropInputOp( ng_dilations, ng_padding_below, ng_padding_above, ng::Strides(ng_batch_shape.size() - 2, 1)); - BatchToTensorflow(is_nhwc, ng_data); - ng_data->add_provenance_tag(op->name()); + BatchToTensorflow(op->name(), is_nhwc, ng_data); SaveNgOp(ng_op_map, op->name(), ng_data); return Status::OK(); @@ -1545,8 +1560,7 @@ static Status TranslateConv3DOp(const Node* op, BatchedOpParam3DToNGraph(is_ndhwc, tf_strides, ng_strides); BatchedOpParam3DToNGraph(is_ndhwc, ng_input->get_shape(), ng_image_shape); BatchedOpParam3DToNGraph(is_ndhwc, tf_dilations, ng_dilations); - BatchToNGraph3D(is_ndhwc, ng_input); - ng_input->add_provenance_tag(op->name()); + BatchToNGraph3D(op->name(), is_ndhwc, ng_input); NGRAPH_VLOG(3) << "ng_strides: " << ng::join(ng_strides); NGRAPH_VLOG(3) << "ng_dilations: " << ng::join(ng_dilations); @@ -1557,7 +1571,7 @@ static Status TranslateConv3DOp(const Node* op, ng_kernel_shape[1] = ng_filter_shape[1]; ng_kernel_shape[2] = ng_filter_shape[2]; Reshape3D<4, 3, 0, 1, 2>(ng_filter); - ng_filter->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_filter); NGRAPH_VLOG(3) << "ng_kernel_shape: " << ng::join(ng_kernel_shape); @@ -1572,8 +1586,7 @@ static Status TranslateConv3DOp(const Node* op, op->name(), ng_input, ng_filter, ng_strides, ng_dilations, ng_padding_below, ng_padding_above); - BatchToTensorflow3D(is_ndhwc, ng_conv); - ng_conv->add_provenance_tag(op->name()); + BatchToTensorflow3D(op->name(), is_ndhwc, ng_conv); SaveNgOp(ng_op_map, op->name(), ng_conv); return Status::OK(); } @@ -1727,7 +1740,7 @@ static Status TranslateDepthToSpaceOp(const Node* op, auto transposed = ng::builder::numpy_transpose(reshaped, ng_transpose_permutation); - transposed->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), transposed); ng::AxisVector ng_axis_order_second_reshape(transposed->get_shape().size()); 
std::iota(ng_axis_order_second_reshape.begin(), @@ -1774,8 +1787,7 @@ static Status TranslateDepthwiseConv2dNativeOp( BatchedOpParamToNGraph(is_nhwc, ng_input->get_shape(), ng_image_shape); BatchedOpParamToNGraph(is_nhwc, tf_strides, ng_strides); BatchedOpParamToNGraph(is_nhwc, tf_dilations, ng_dilations); - BatchToNGraph(is_nhwc, ng_input); - ng_input->add_provenance_tag(op->name()); + BatchToNGraph(op->name(), is_nhwc, ng_input); NGRAPH_VLOG(3) << "ng_strides: " << ng::join(ng_strides); NGRAPH_VLOG(3) << "ng_dilations: " << ng::join(ng_dilations); @@ -1785,7 +1797,7 @@ static Status TranslateDepthwiseConv2dNativeOp( ng_kernel_shape[0] = ng_filter_shape[0]; ng_kernel_shape[1] = ng_filter_shape[1]; Reshape<3, 2, 0, 1>(ng_filter); - ng_filter->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_filter); NGRAPH_VLOG(3) << "ng_kernel_shape: " << ng::join(ng_kernel_shape); @@ -1830,8 +1842,7 @@ static Status TranslateDepthwiseConv2dNativeOp( std::shared_ptr ng_concat = ConstructNgNode( op->name(), ng_args, ng_concatenation_axis); - BatchToTensorflow(is_nhwc, ng_concat); - ng_concat->add_provenance_tag(op->name()); + BatchToTensorflow(op->name(), is_nhwc, ng_concat); SaveNgOp(ng_op_map, op->name(), ng_concat); return Status::OK(); } @@ -1960,8 +1971,7 @@ static Status TranslateFusedBatchNormOp( NGRAPH_VLOG(3) << "epsilon: " << tf_epsilon; - BatchToNGraph(is_nhwc, ng_input); - ng_input->add_provenance_tag(op->name()); + BatchToNGraph(op->name(), is_nhwc, ng_input); std::shared_ptr ng_batch_norm; @@ -1989,8 +1999,7 @@ static Status TranslateFusedBatchNormOp( auto variance = ConstructNgNode(op->name(), ng_variance, Bessel_scale); - BatchToTensorflow(is_nhwc, ng_y); - ng_y->add_provenance_tag(op->name()); + BatchToTensorflow(op->name(), is_nhwc, ng_y); SaveNgOp(ng_op_map, op->name(), ng_y); SaveNgOp(ng_op_map, op->name(), ng_mean); @@ -2014,8 +2023,7 @@ static Status TranslateFusedBatchNormOp( ng_batch_norm = ConstructNgNode( op->name(), tf_epsilon, 
ng_scale, ng_offset, ng_input, ng_mean, ng_variance); - BatchToTensorflow(is_nhwc, ng_batch_norm); - ng_batch_norm->add_provenance_tag(op->name()); + BatchToTensorflow(op->name(), is_nhwc, ng_batch_norm); SaveNgOp(ng_op_map, op->name(), ng_batch_norm); if (is_v3) { SaveNgOp(ng_op_map, op->name(), ng_mean); @@ -2094,10 +2102,8 @@ static Status TranslateFusedBatchNormGradOp(const Node* op, op->name(), ng_scale->get_element_type(), ng_scale->get_shape(), std::vector{ng::shape_size(ng_scale->get_shape()), "0"}); - BatchToNGraph(is_nhwc, ng_input); - ng_input->add_provenance_tag(op->name()); - BatchToNGraph(is_nhwc, ng_delta); - ng_delta->add_provenance_tag(op->name()); + BatchToNGraph(op->name(), is_nhwc, ng_input); + BatchToNGraph(op->name(), is_nhwc, ng_delta); std::shared_ptr ng_batch_norm_backprop; @@ -2115,8 +2121,7 @@ static Status TranslateFusedBatchNormGradOp(const Node* op, ConstructNgNode(op->name(), ng_batch_norm_backprop, 2); - BatchToTensorflow(is_nhwc, ng_input_delta_op); - ng_input_delta_op->add_provenance_tag(op->name()); + BatchToTensorflow(op->name(), is_nhwc, ng_input_delta_op); SaveNgOp(ng_op_map, op->name(), ng_input_delta_op); SaveNgOp(ng_op_map, op->name(), ng_scale_delta_op); @@ -2189,12 +2194,12 @@ static Status TranslateFusedMatMulOp(const Node* op, if (GetNodeAttr(op->attrs(), "transpose_a", &transpose_a) == Status::OK() && transpose_a) { ng_lhs = ng::builder::numpy_transpose(ng_lhs, ng::AxisVector{1, 0}); - ng_lhs->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_lhs); } if (GetNodeAttr(op->attrs(), "transpose_b", &transpose_b) == Status::OK() && transpose_b) { ng_rhs = ng::builder::numpy_transpose(ng_rhs, ng::AxisVector{1, 0}); - ng_rhs->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_rhs); } // The default axis count for nGraph's Dot op is 1, which is just what @@ -2307,6 +2312,7 @@ static Status TranslateGatherV2Op( return errors::Internal("In translating GatherV2 op ", op->name(), " 
backend could not return valid ngraph node"); } + Builder::SetTracingInfo(op->name(), ng_gather); SaveNgOp(ng_op_map, op->name(), ng_gather); return Status::OK(); @@ -2361,8 +2367,7 @@ static Status TranslateFusedConv2DOp(const Node* op, BatchedOpParamToNGraph(is_nhwc, tf_strides, ng_strides); BatchedOpParamToNGraph(is_nhwc, ng_input->get_shape(), ng_image_shape); BatchedOpParamToNGraph(is_nhwc, tf_dilations, ng_dilations); - BatchToNGraph(is_nhwc, ng_input); - ng_input->add_provenance_tag(op->name()); + BatchToNGraph(op->name(), is_nhwc, ng_input); NGRAPH_VLOG(3) << "ng_strides: " << ng::join(ng_strides); NGRAPH_VLOG(3) << "ng_dilations: " << ng::join(ng_dilations); @@ -2372,7 +2377,7 @@ static Status TranslateFusedConv2DOp(const Node* op, ng_kernel_shape[0] = ng_filter_shape[0]; ng_kernel_shape[1] = ng_filter_shape[1]; Reshape<3, 2, 0, 1>(ng_filter); - ng_filter->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_filter); NGRAPH_VLOG(3) << "ng_kernel_shape: " << ng::join(ng_kernel_shape); @@ -2416,8 +2421,7 @@ static Status TranslateFusedConv2DOp(const Node* op, TF_RETURN_IF_ERROR(CreateNgConv(ng_input, ng_filter, ng_conv)); - BatchToTensorflow(is_nhwc, ng_conv); - ng_conv->add_provenance_tag(op->name()); + BatchToTensorflow(op->name(), is_nhwc, ng_conv); auto ng_conv_shape = ng_conv->get_shape(); auto ng_bias_shape = ng_bias->get_shape(); @@ -2478,8 +2482,7 @@ static Status TranslateFusedConv2DOp(const Node* op, op->name() + "_FusedConv2D_BatchNorm", tf_epsilon, ng_scale, ng_offset, ng_conv, ng_mean, ng_variance); - BatchToTensorflow(is_nhwc, ng_batch_norm); - ng_batch_norm->add_provenance_tag(op->name()); + BatchToTensorflow(op->name(), is_nhwc, ng_batch_norm); if (VecStrCmp(fused_ops, {"FusedBatchNorm", "Relu"})) { SaveNgOp(ng_op_map, op->name(), @@ -2589,12 +2592,12 @@ static Status TranslateMatMulOp(const Node* op, if (GetNodeAttr(op->attrs(), "transpose_a", &transpose_a) == Status::OK() && transpose_a) { ng_lhs = 
ng::builder::numpy_transpose(ng_lhs, ng::AxisVector{1, 0}); - ng_lhs->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_lhs); } if (GetNodeAttr(op->attrs(), "transpose_b", &transpose_b) == Status::OK() && transpose_b) { ng_rhs = ng::builder::numpy_transpose(ng_rhs, ng::AxisVector{1, 0}); - ng_rhs->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_rhs); } // The default axis count for nGraph's Dot op is 1, which is just what @@ -2638,8 +2641,7 @@ static Status TranslateMaxPoolOp(const Node* op, BatchedOpParamToNGraph(is_nhwc, tf_strides, ng_strides); BatchedOpParamToNGraph(is_nhwc, ng_input->get_shape(), ng_image_shape); BatchedOpParamToNGraph(is_nhwc, tf_ksize, ng_kernel_shape); - BatchToNGraph(is_nhwc, ng_input); - ng_input->add_provenance_tag(op->name()); + BatchToNGraph(op->name(), is_nhwc, ng_input); NGRAPH_VLOG(3) << "ng_strides: " << ng::join(ng_strides); NGRAPH_VLOG(3) << "ng_image_shape: " << ng::join(ng_image_shape); NGRAPH_VLOG(3) << "ng_kernel_shape: " << ng::join(ng_kernel_shape); @@ -2658,8 +2660,7 @@ static Status TranslateMaxPoolOp(const Node* op, op->name(), ng_input, ng_kernel_shape, ng_strides, ng_padding_below, ng_padding_above); - BatchToTensorflow(is_nhwc, ng_maxpool); - ng_maxpool->add_provenance_tag(op->name()); + BatchToTensorflow(op->name(), is_nhwc, ng_maxpool); NGRAPH_VLOG(3) << "maxpool outshape: {" << ng::join(ng_maxpool->get_shape()) << "}"; @@ -2702,8 +2703,7 @@ static Status TranslateMaxPool3DOp(const Node* op, BatchedOpParam3DToNGraph(is_ndhwc, tf_strides, ng_strides); BatchedOpParam3DToNGraph(is_ndhwc, ng_input->get_shape(), ng_image_shape); BatchedOpParam3DToNGraph(is_ndhwc, tf_ksize, ng_kernel_shape); - BatchToNGraph3D(is_ndhwc, ng_input); - ng_input->add_provenance_tag(op->name()); + BatchToNGraph3D(op->name(), is_ndhwc, ng_input); NGRAPH_VLOG(3) << "ng_strides: " << ng::join(ng_strides); NGRAPH_VLOG(3) << "ng_image_shape: " << ng::join(ng_image_shape); NGRAPH_VLOG(3) << 
"ng_kernel_shape: " << ng::join(ng_kernel_shape); @@ -2722,8 +2722,7 @@ static Status TranslateMaxPool3DOp(const Node* op, op->name(), ng_input, ng_kernel_shape, ng_strides, ng_padding_below, ng_padding_above); - BatchToTensorflow3D(is_ndhwc, ng_maxpool); - ng_maxpool->add_provenance_tag(op->name()); + BatchToTensorflow3D(op->name(), is_ndhwc, ng_maxpool); NGRAPH_VLOG(3) << "maxpool outshape: {" << ng::join(ng_maxpool->get_shape()) << "}"; @@ -2765,12 +2764,9 @@ static Status TranslateMaxPoolGradOp(const Node* op, BatchedOpParamToNGraph(is_nhwc, ng_input->get_shape(), ng_image_shape); BatchedOpParamToNGraph(is_nhwc, tf_strides, ng_strides); BatchedOpParamToNGraph(is_nhwc, tf_ksize, ng_kernel_shape); - BatchToNGraph(is_nhwc, ng_input); - ng_input->add_provenance_tag(op->name()); - BatchToNGraph(is_nhwc, ng_grad); - ng_grad->add_provenance_tag(op->name()); - BatchToNGraph(is_nhwc, ng_fwd); - ng_fwd->add_provenance_tag(op->name()); + BatchToNGraph(op->name(), is_nhwc, ng_input); + BatchToNGraph(op->name(), is_nhwc, ng_grad); + BatchToNGraph(op->name(), is_nhwc, ng_fwd); NGRAPH_VLOG(3) << "ng_strides: " << ng::join(ng_strides); NGRAPH_VLOG(3) << "ng_image_shape: " << ng::join(ng_image_shape); @@ -2786,8 +2782,7 @@ static Status TranslateMaxPoolGradOp(const Node* op, ConstructNgNode( op->name(), ng_input, ng_grad, ng_fwd, ng_kernel_shape, ng_strides, ng_padding_below, ng_padding_above); - BatchToTensorflow(is_nhwc, ng_maxpool_backprop); - ng_maxpool_backprop->add_provenance_tag(op->name()); + BatchToTensorflow(op->name(), is_nhwc, ng_maxpool_backprop); NGRAPH_VLOG(3) << "maxpoolbackprop outshape: {" << ng::join(ng_maxpool_backprop->get_shape()) << "}"; SaveNgOp(ng_op_map, op->name(), ng_maxpool_backprop); @@ -2858,6 +2853,7 @@ static Status TranslateNonMaxSuppressionV4Op( op->name(), " backend could not return valid ngraph node"); } + Builder::SetTracingInfo(op->name(), ng_nmsv4); shared_ptr ng_selected_indices = ConstructNgNode(op->name(), ng_nmsv4, 0); shared_ptr 
ng_valid_output = @@ -2898,7 +2894,7 @@ static Status TranslateReduceOp( std::shared_ptr ng_node = create_ng_node(ng_input, ng_reduction_axes); - ng_node->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_node); // If keep_dims is specified we need to reshape to put back the reduced // axes, with length 1. @@ -2930,7 +2926,7 @@ static Status TranslateMeanOp( ng::AxisSet ng_reduction_axes) { auto mean_node = ng::builder::mean(ng_input, ng_reduction_axes); - mean_node->add_provenance_tag(op_name); + Builder::SetTracingInfo(op_name, mean_node); return mean_node; }); } @@ -2995,13 +2991,9 @@ static Status TranslateOneHotOp( // broadcast to make all tensors same shape, as required by ngraph select op std::tie(ng_onehot_bool, ng_on) = - ng::builder::numpy_broadcast(std::make_pair(ng_onehot_bool, ng_on)); - ng_onehot_bool->add_provenance_tag(op->name()); - ng_on->add_provenance_tag(op->name()); + Builder::PerformNgBroadcast(op->name(), ng_onehot_bool, ng_on); std::tie(ng_onehot_bool, ng_off) = - ng::builder::numpy_broadcast(std::make_pair(ng_onehot_bool, ng_off)); - ng_onehot_bool->add_provenance_tag(op->name()); - ng_off->add_provenance_tag(op->name()); + Builder::PerformNgBroadcast(op->name(), ng_onehot_bool, ng_off); auto ng_onehot = ConstructNgNode(op->name(), ng_onehot_bool, ng_on, ng_off); @@ -3376,7 +3368,7 @@ static Status TranslateQuantizedConcatOpHelper( auto ng_qconcat = ng::builder::ScaledQuantizedConcat( ng_args, size_t(concat_axis), ng_all_mins, ng_all_maxs); - ng_qconcat->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_qconcat); SaveNgOp(ng_op_map, op->name(), ng_qconcat); SaveNgOp(ng_op_map, op->name(), ng_min_of_mins); @@ -3428,18 +3420,16 @@ static Status TranslateQuantizedConv( BatchedOpParamToNGraph(is_nhwc, node_inps[0]->get_shape(), ng_image_shape); BatchedOpParamToNGraph(is_nhwc, tf_dilations, ng_dilations); // Generally, the mapping is: 0->input, 1->filter, 2->bias, 3->sum input - 
BatchToNGraph(is_nhwc, node_inps[0]); - node_inps[0]->add_provenance_tag(op->name()); + BatchToNGraph(op->name(), is_nhwc, node_inps[0]); // QconvBiasAdd variants if (num_node_inputs == 12) { - BatchToNGraph(is_nhwc, node_inps[9]); - node_inps[9]->add_provenance_tag(op->name()); + BatchToNGraph(op->name(), is_nhwc, node_inps[9]); } auto& ng_filter_shape = node_inps[1]->get_shape(); ng_kernel_shape[0] = ng_filter_shape[0]; ng_kernel_shape[1] = ng_filter_shape[1]; Reshape<3, 2, 0, 1>(node_inps[1]); - node_inps[1]->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), node_inps[1]); ng::CoordinateDiff ng_padding_below{0, 0}; ng::CoordinateDiff ng_padding_above{0, 0}; Builder::MakePadding(tf_padding_type, ng_image_shape, ng_kernel_shape, @@ -3453,10 +3443,9 @@ static Status TranslateQuantizedConv( std::shared_ptr ng_quant_conv_bias = create_quantized_conv_node( node_inps, ng_strides, ng_dilations, ng_padding_below, ng_padding_above, ng_data_dilations); - ng_quant_conv_bias->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_quant_conv_bias); - BatchToTensorflow(is_nhwc, ng_quant_conv_bias); - ng_quant_conv_bias->add_provenance_tag(op->name()); + BatchToTensorflow(op->name(), is_nhwc, ng_quant_conv_bias); SaveNgOp(ng_op_map, op->name(), ng_quant_conv_bias); // QconvBiasAdd variants have summand and its min/max as the last input // nodes @@ -3482,7 +3471,7 @@ static Status TranslateQuantizedConv2DWithBiasMaybeReluAndRequantizeOp( ng_padding_below, ng_padding_above, ng_data_dilations, node_inps[3], node_inps[4], node_inps[5], node_inps[6], node_inps[7], node_inps[8], IsRelu); - ng_node->add_provenance_tag(op_name); + Builder::SetTracingInfo(op_name, ng_node); return ng_node; }; return TranslateQuantizedConv(op, ng_op_map, create_quantized_conv_node); @@ -3501,7 +3490,7 @@ static Status TranslateQuantizedConv2DWithBiasSumAndReluAndRequantizeOp( ng_dilations, ng_padding_below, ng_padding_above, ng_data_dilations, node_inps[3], 
node_inps[4], node_inps[5], node_inps[6], node_inps[7], node_inps[8], node_inps[10], node_inps[11], true); - ng_node->add_provenance_tag(op_name); + Builder::SetTracingInfo(op_name, ng_node); return ng_node; }; return TranslateQuantizedConv(op, ng_op_map, create_quantized_conv_node); @@ -3520,7 +3509,7 @@ static Status TranslateQuantizedConv2DWithBiasSignedSumAndReluAndRequantizeOp( ng_dilations, ng_padding_below, ng_padding_above, ng_data_dilations, node_inps[3], node_inps[4], node_inps[5], node_inps[6], node_inps[7], node_inps[8], node_inps[10], node_inps[11], true); - ng_node->add_provenance_tag(op_name); + Builder::SetTracingInfo(op_name, ng_node); return ng_node; }; return TranslateQuantizedConv(op, ng_op_map, create_quantized_conv_node); @@ -3552,7 +3541,7 @@ static Status TranslateQuantizeV2Op(const Node* op, auto ng_node = ng::builder::ScaledQuantize(ng_input, ng_min, ng_max, ng_et, ng::AxisSet(), ng_round_mode); - ng_node->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_node); SaveNgOp(ng_op_map, op->name(), ng_node); SaveNgOp(ng_op_map, op->name(), ng_min); SaveNgOp(ng_op_map, op->name(), ng_max); @@ -3569,7 +3558,7 @@ static Status TranslateDequantizeOp(const Node* op, // TF only dequantizes to fp32 auto ng_node = ng::builder::ScaledDequantize(ng_input, ng_min, ng_max, ng::element::f32, ng::AxisSet()); - ng_node->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_node); SaveNgOp(ng_op_map, op->name(), ng_node); return Status::OK(); } @@ -4744,7 +4733,7 @@ static Status TranslateTransposeOp( NGRAPH_VLOG(3) << ng::join(ng_axis_order); auto ng_node = ng::builder::numpy_transpose(ng_input, ng_axis_order); - ng_node->add_provenance_tag(op->name()); + Builder::SetTracingInfo(op->name(), ng_node); SaveNgOp(ng_op_map, op->name(), ng_node); return Status::OK(); } @@ -4851,14 +4840,10 @@ static Status TranslateSelectOp(const Node* op, op->name(), ng_input1, ng::AxisVector{0}, tmp_vector); } - std::tie(ng_input1, 
ng_input2) = ng::builder::numpy_broadcast( - std::make_pair(length != 0 ? ng_input_new : ng_input1, ng_input2)); - ng_input1->add_provenance_tag(op->name()); - ng_input2->add_provenance_tag(op->name()); + std::tie(ng_input1, ng_input2) = Builder::PerformNgBroadcast( + op->name(), (length != 0 ? ng_input_new : ng_input1), ng_input2); std::tie(ng_input2, ng_input3) = - ng::builder::numpy_broadcast(std::make_pair(ng_input2, ng_input3)); - ng_input2->add_provenance_tag(op->name()); - ng_input3->add_provenance_tag(op->name()); + Builder::PerformNgBroadcast(op->name(), ng_input2, ng_input3); ng_select = ConstructNgNode(op->name(), ng_input1, ng_input2, ng_input3); @@ -5153,12 +5138,43 @@ Status Builder::TranslateGraph( return n->is_parameter() || is_result; }; + size_t num_tags = 0; for (auto n : ng_function->get_ordered_ops()) { // Results and Parameters are not expected to have provenance tags if (!check_if_result_or_parameter(n)) { - if (n->get_provenance_tags().size() == 0) { - return errors::Internal("Found ngraph node ", n->get_name(), - " which does not have provenance tag set"); + num_tags = n->get_provenance_tags().size(); + if (num_tags != 1) { + // In case of an error (num_tags != 1), we dump the ngraph json + // However by default the json will not contain the provenance + // information. 
So enable NGRAPH_PROVENANCE_ENABLE, and then reset it + // back in the end after NgraphSerialize is done + char* original_provenance_flag_value = + getenv("NGRAPH_PROVENANCE_ENABLE"); + // No need to free original_provenance_flag_value according to the + // standard + + char enable_provenance[] = "NGRAPH_PROVENANCE_ENABLE=1"; + putenv(enable_provenance); + NgraphSerialize( + "tf_function_error_" + ng_function->get_name() + ".json", + ng_function); + if (original_provenance_flag_value == NULL) { + unsetenv("NGRAPH_PROVENANCE_ENABLE"); + } else { + string provenance_flag_original_val{original_provenance_flag_value}; + char* reset = new char[26 + provenance_flag_original_val.size()]; + strcpy(reset, + ("NGRAPH_PROVENANCE_ENABLE=" + provenance_flag_original_val) + .c_str()); + putenv(reset); + delete[] reset; + } + + return errors::Internal( + "Found ngraph node ", n->get_name(), + " which has provenance tag set of size ", num_tags, + ". Expected all ngraph nodes created in TranslateGraph to have " + "exactly one provenance tag"); } } } diff --git a/ngraph_bridge/ngraph_builder.h b/ngraph_bridge/ngraph_builder.h index c453e990b..cdaad09cf 100644 --- a/ngraph_bridge/ngraph_builder.h +++ b/ngraph_bridge/ngraph_builder.h @@ -137,6 +137,10 @@ class Builder { NGRAPH_VLOG(3) << "ng_padding_above: " << ngraph::join(ng_padding_above); } + static std::pair, std::shared_ptr> + PerformNgBroadcast(const string&, std::shared_ptr, + std::shared_ptr); + static const std::map< DataType, std::pair>& TF_NGRAPH_CONST_MAP(); + // This function is used to trace which ng node came from which tf node + // It does 3 things: + // 1. Attaches provenance tags. This is guaranteed to propagate the tag info + // to all nodes. + // The next 2 are not guaranteed to be present for all nodes. + // But when present they are correct and agree with provenance tags + // 2. Attaches friendly names. + // 3. 
Prints a log if NGRAPH_TF_LOG_PLACEMENT=1 + static void SetTracingInfo(const std::string& op_name, + const std::shared_ptr ng_node); + private: static void ComputeScaleOffsetFolded(const uint& num_bits, const bool& unsigned_type, diff --git a/ngraph_bridge/ngraph_conversions.cc b/ngraph_bridge/ngraph_conversions.cc index 9e23cc31e..12ee4b365 100644 --- a/ngraph_bridge/ngraph_conversions.cc +++ b/ngraph_bridge/ngraph_conversions.cc @@ -14,7 +14,8 @@ * limitations under the License. *******************************************************************************/ -#include "ngraph_conversions.h" +#include "ngraph_bridge/ngraph_conversions.h" +#include "ngraph_bridge/ngraph_api.h" namespace tensorflow { @@ -31,31 +32,38 @@ void NdhwcToNGraph(std::shared_ptr& ng_node) { } } // namespace detail -void BatchToNGraph(bool is_nhwc, std::shared_ptr& ng_input) { +void BatchToNGraph(const string& op_name, bool is_nhwc, + std::shared_ptr& ng_input) { if (is_nhwc) { detail::NhwcToNGraph(ng_input); + Builder::SetTracingInfo(op_name, ng_input); } } -void BatchToNGraph3D(bool is_ndhwc, std::shared_ptr& ng_input) { +void BatchToNGraph3D(const string& op_name, bool is_ndhwc, + std::shared_ptr& ng_input) { if (is_ndhwc) { detail::NdhwcToNGraph(ng_input); + Builder::SetTracingInfo(op_name, ng_input); } } -void BatchToTensorflow(bool is_nhwc, std::shared_ptr& ng_node) { +void BatchToTensorflow(const string& op_name, bool is_nhwc, + std::shared_ptr& ng_node) { if (!is_nhwc) { return; } Reshape<0, 2, 3, 1>(ng_node); + Builder::SetTracingInfo(op_name, ng_node); } -void BatchToTensorflow3D(bool is_ndhwc, +void BatchToTensorflow3D(const string& op_name, bool is_ndhwc, std::shared_ptr& ng_node) { if (!is_ndhwc) { return; } Reshape3D<0, 2, 3, 4, 1>(ng_node); + Builder::SetTracingInfo(op_name, ng_node); } } // namespace ngraph_bridge diff --git a/ngraph_bridge/ngraph_conversions.h b/ngraph_bridge/ngraph_conversions.h index a03a99dce..c829391c4 100644 --- a/ngraph_bridge/ngraph_conversions.h +++ 
b/ngraph_bridge/ngraph_conversions.h @@ -103,9 +103,11 @@ void NdhwcToNcdhw(const std::vector& src, std::vector& dst) { } } -void BatchToNGraph(bool is_nhwc, std::shared_ptr& ng_input); +void BatchToNGraph(const string& op_name, bool is_nhwc, + std::shared_ptr& ng_input); -void BatchToNGraph3D(bool is_ndhwc, std::shared_ptr& ng_input); +void BatchToNGraph3D(const string& op_name, bool is_ndhwc, + std::shared_ptr& ng_input); template void BatchedOpParamToNGraph(bool is_nhwc, const std::vector& src, @@ -147,9 +149,11 @@ void BatchedOpParamReshape3D(bool is_ndhwc, const std::vector& src, } } -void BatchToTensorflow(bool is_nhwc, std::shared_ptr& ng_node); +void BatchToTensorflow(const string& op_name, bool is_nhwc, + std::shared_ptr& ng_node); -void BatchToTensorflow3D(bool is_ndhwc, std::shared_ptr& ng_node); +void BatchToTensorflow3D(const string& op_name, bool is_ndhwc, + std::shared_ptr& ng_node); } // namespace ngraph_bridge } // namespace tensorflow diff --git a/ngraph_bridge/version.cc b/ngraph_bridge/version.cc index da3c6e649..84bb0704a 100644 --- a/ngraph_bridge/version.cc +++ b/ngraph_bridge/version.cc @@ -32,7 +32,7 @@ // candidate such as v0.7.0-rc0 // The code in master will always have the last released version number // with a suffix of '-master' -#define NG_TF_VERSION_SUFFIX "-rc4" +#define NG_TF_VERSION_SUFFIX "-rc5" #define VERSION_STR_HELPER(x) #x #define VERSION_STR(x) VERSION_STR_HELPER(x) diff --git a/python/setup.in.py b/python/setup.in.py index f71d432ad..fdbae3e51 100644 --- a/python/setup.in.py +++ b/python/setup.in.py @@ -59,7 +59,7 @@ def get_tag(self): setup( name='ngraph_tensorflow_bridge', - version='0.19.0rc4', + version='0.19.0rc5', description='Intel nGraph compiler and runtime for TensorFlow', long_description=long_description, long_description_content_type="text/markdown", diff --git a/test/conversions.cpp b/test/conversions.cpp index 37000009f..7a6bbe046 100644 --- a/test/conversions.cpp +++ b/test/conversions.cpp @@ -37,7 +37,7 
@@ TEST(conversions, batch_to_tensorflow_nchw) { auto shape = ng::Shape{2, 3, 4, 5}; std::shared_ptr ng_node = make_shared(ng::element::f32, shape); - BatchToTensorflow(false, ng_node); + BatchToTensorflow("tag", false, ng_node); ASSERT_EQ(ng_node->get_shape(), shape); } @@ -45,7 +45,7 @@ TEST(conversions, batch_to_tensorflow_nhwc) { auto shape = ng::Shape{2, 3, 4, 5}; std::shared_ptr ng_node = make_shared(ng::element::f32, shape); - BatchToTensorflow(true, ng_node); + BatchToTensorflow("tag", true, ng_node); ASSERT_EQ(ng_node->get_shape(), (ng::Shape{2, 4, 5, 3})); } @@ -53,7 +53,7 @@ TEST(conversions, batch_to_ngraph_nchw) { auto shape = ng::Shape{2, 3, 4, 5}; std::shared_ptr ng_node = make_shared(ng::element::f32, shape); - BatchToNGraph(false, ng_node); + BatchToNGraph("tag", false, ng_node); ASSERT_EQ(ng_node->get_shape(), shape); } @@ -69,7 +69,7 @@ TEST(conversions, batch_to_ngraph_nhwc) { auto shape = ng::Shape{2, 3, 4, 5}; std::shared_ptr ng_node = make_shared(ng::element::f32, shape); - BatchToNGraph(true, ng_node); + BatchToNGraph("tag", true, ng_node); ASSERT_EQ(ng_node->get_shape(), (ng::Shape{2, 5, 3, 4})); } diff --git a/test/python/test_provenance_tags_attachment.py b/test/python/test_provenance_tags_attachment.py new file mode 100644 index 000000000..e134e15eb --- /dev/null +++ b/test/python/test_provenance_tags_attachment.py @@ -0,0 +1,69 @@ +# ============================================================================== +# Copyright 2019 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""nGraph TensorFlow bridge provenance tags attachment test + +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import pytest +import numpy as np +import tensorflow as tf + +from common import NgraphTest + + +class TestProductOperations(NgraphTest): + + def test_provenance_for_no_effect_broadcast(self): + # Creates a network: y = x + |x| + # --------- + # / \ + # inp-----> + ---> out_node + # \ / + # ---abs---- + # The translation of the Add node first broadcasts the 2 inputs + # it receives and then creates an ngraph Add node. + # Since the shapes of the inputs to the TF add node are the same, + # the broadcast builder will return the exact same inputs (lhs and rhs) + # without creating new ng nodes + # If we do not take care, we could be adding a tag to the ng abs node + # when tagging the return of the broadcast builder + + # This test makes sure that TranslateGraph checks that the + # builder returned nodes are different from its inputs, + # and only in that case it adds provenance tags + + inp = tf.placeholder(tf.float64, shape=[1, 32, 32, 2], name='input') + out_node = tf.add(tf.math.abs(inp, name="abs"), inp, name="add") + self.with_ngraph(lambda sess: sess.run( + out_node, feed_dict={inp: np.ones([1, 32, 32, 2])})) + + def test_provenance_for_broadcast_with_effect(self): + # In this test, the broadcast actually produces new ng nodes + # as opposed to test_provenance_for_no_effect_broadcast, + # which is a dummy broadcast + # so test that they are tagged appropriately + inp0 = tf.placeholder(tf.float64, shape=[2, 2], name='input0') + inp1 = tf.placeholder(tf.float64, shape=[2], name='input1') + out_node0 = inp0 / inp1 + out_node1 = inp1 / inp0 + self.with_ngraph(lambda sess: sess.run([out_node0, out_node1], +
feed_dict={ + inp0: np.ones([2, 2]), + inp1: np.ones([2]) + }))