Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions bazel/ngraph.BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -166,6 +166,7 @@ cc_library(
"src/ngraph/runtime/cpu/builder/convert.cpp",
"src/ngraph/runtime/cpu/builder/convert_layout.cpp",
"src/ngraph/runtime/cpu/builder/convolution.cpp",
"src/ngraph/runtime/cpu/builder/cum_sum.cpp",
"src/ngraph/runtime/cpu/builder/dot.cpp",
"src/ngraph/runtime/cpu/builder/dropout.cpp",
"src/ngraph/runtime/cpu/builder/embedding_lookup.cpp",
Expand Down
116 changes: 113 additions & 3 deletions ngraph_bridge/ngraph_builder.cc
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
#include "ngraph/builder/quantize_builder.hpp"
#include "ngraph/op/argmax.hpp"
#include "ngraph/op/argmin.hpp"
#include "ngraph/op/experimental/layers/interpolate.hpp"
#include "ngraph/op/util/logical_reduction.hpp"
#include "ngraph/slice_plan.hpp"

Expand Down Expand Up @@ -1630,6 +1631,56 @@ static Status TranslateConv3DOp(const Node* op,
return Status::OK();
}

// Translate the TF CropAndResize op to nGraph's CropAndResize.
// Inputs (per TF): image, boxes, box_ind, crop_size.
static Status TranslateCropAndResizeOp(const Node* op,
                                       const std::vector<const Tensor*>&,
                                       Builder::OpMap& ng_op_map) {
  shared_ptr<ng::Node> image, boxes, box_ind, crop_size;
  TF_RETURN_IF_ERROR(
      GetInputNodes(ng_op_map, op, &image, &boxes, &box_ind, &crop_size));

  // Get the attributes
  float extrapolation_value;
  std::string method;
  TF_RETURN_IF_ERROR(
      GetNodeAttr(op->attrs(), "extrapolation_value", &extrapolation_value));
  TF_RETURN_IF_ERROR(GetNodeAttr(op->attrs(), "method", &method));

  // Map TF's "method" string attribute onto nGraph's resize-method enum.
  ng::op::CropAndResize::ResizeMethod ng_method =
      ng::op::CropAndResize::ResizeMethod::unspecified;
  if (method == "bilinear") {
    ng_method = ng::op::CropAndResize::ResizeMethod::bilinear;
  } else if (method == "nearest") {
    ng_method = ng::op::CropAndResize::ResizeMethod::nearest;
  } else {
    // Bug fix: report the offending method string; the original printed
    // extrapolation_value (a float) here, producing a misleading message.
    return errors::Internal(
        "Expected crop and resize's interpolation mode to be bilinear or "
        "nearest, but got ",
        method, " in op ", op->name());
  }

  SaveNgOp(ng_op_map, op->name(),
           ConstructNgNode<ng::op::CropAndResize>(op->name(), image, boxes,
                                                  box_ind, crop_size, ng_method,
                                                  extrapolation_value));
  return Status::OK();
}

// Translate the TF Cumsum op to nGraph's CumSum.
static Status TranslateCumsumOp(const Node* op,
                                const std::vector<const Tensor*>&,
                                Builder::OpMap& ng_op_map) {
  // Two inputs: the tensor to scan and the axis to scan along.
  shared_ptr<ng::Node> ng_x, ng_axis;
  TF_RETURN_IF_ERROR(GetInputNodes(ng_op_map, op, &ng_x, &ng_axis));

  // Fetch TF's "exclusive" and "reverse" attributes and forward them
  // unchanged to nGraph's CumSum constructor.
  bool exclusive, reverse;
  TF_RETURN_IF_ERROR(GetNodeAttr(op->attrs(), "exclusive", &exclusive));
  TF_RETURN_IF_ERROR(GetNodeAttr(op->attrs(), "reverse", &reverse));

  auto ng_cumsum = ConstructNgNode<ng::op::CumSum>(op->name(), ng_x, ng_axis,
                                                   exclusive, reverse);
  SaveNgOp(ng_op_map, op->name(), ng_cumsum);
  return Status::OK();
}

// Translate DepthToSpace op
static Status TranslateDepthToSpaceOp(const Node* op,
const std::vector<const Tensor*>&,
Expand Down Expand Up @@ -3763,6 +3814,33 @@ static Status TranslateReshapeOp(
return Status::OK();
}

// Translate the TF ResizeBilinear op using nGraph's Interpolate.
static Status TranslateResizeBilinearOp(
    const Node* op, const std::vector<const Tensor*>& static_input_map,
    Builder::OpMap& ng_op_map) {
  shared_ptr<ng::Node> images, size;
  TF_RETURN_IF_ERROR(GetInputNodes(ng_op_map, op, &images, &size));

  bool align_corners;
  TF_RETURN_IF_ERROR(GetNodeAttr(op->attrs(), "align_corners", &align_corners));

  // Configure Interpolate for bilinear ("linear") resizing without
  // antialiasing, honoring TF's align_corners attribute.
  ngraph::op::InterpolateAttrs attrs;
  attrs.align_corners = align_corners;
  attrs.mode = "linear";
  attrs.antialias = false;
  // The TF "images" tensor has dimensions [batch, height, width, channels],
  // so 1 and 2 are the spatial axes.
  // TODO check this parameter
  attrs.axes = {1, 2};
  // TODO: pads_begin and pads_end are not populated. Check correctness

  // Interpolate takes the target size as i64, so convert the size input.
  auto size_i64 =
      ConstructNgNode<ng::op::Convert>(op->name(), size, ngraph::element::i64);
  auto ng_resized = ConstructNgNode<ng::op::Interpolate>(op->name(), images,
                                                         size_i64, attrs);
  SaveNgOp(ng_op_map, op->name(), ng_resized);

  return Status::OK();
}

static Status TranslateRsqrtOp(
const Node* op, const std::vector<const Tensor*>& static_input_map,
Builder::OpMap& ng_op_map) {
Expand All @@ -3781,6 +3859,34 @@ static Status TranslateRsqrtOp(
});
}

// Translate the TF ScatterNd op: scatter "updates" at "indices" into a
// zero-initialized tensor of the (statically known) requested shape.
static Status TranslateScatterNdOp(
    const Node* op, const std::vector<const Tensor*>& static_input_map,
    Builder::OpMap& ng_op_map) {
  shared_ptr<ng::Node> ng_indices;
  shared_ptr<ng::Node> ng_updates;
  TF_RETURN_IF_ERROR(
      GetInputNodes(ng_op_map, op, &ng_indices, &ng_updates, nullptr));

  // Input 2 is the output shape; it must be available at translation time.
  std::vector<int> shape_ints;
  TF_RETURN_IF_ERROR(
      GetStaticInputVector(op, 2, static_input_map, &shape_ints));
  // ng::Shape holds size_t dimensions, so widen the int vector.
  ng::Shape ng_shape(shape_ints.begin(), shape_ints.end());

  // ScatterNDAdd needs a base tensor to add into; build an all-zeros
  // constant of the requested shape and the updates' element type.
  auto elem_type = ng_updates->get_element_type();
  std::vector<std::string> zero_values(ng::shape_size(ng_shape), "0");
  auto ng_base = ConstructNgNode<ng::op::Constant>(op->name(), elem_type,
                                                   ng_shape, zero_values);

  SaveNgOp(ng_op_map, op->name(),
           ConstructNgNode<ng::op::ScatterNDAdd>(op->name(), ng_base,
                                                 ng_indices, ng_updates));

  return Status::OK();
}

static Status TranslateRsqrtGradOp(const Node* op,
const std::vector<const Tensor*>&,
Builder::OpMap& ng_op_map) {
Expand Down Expand Up @@ -5005,6 +5111,7 @@ const static std::map<
{"All", TranslateDirectReduceOp<ng::op::All>},
{"ArgMax", TranslateArgMinMaxOp<ng::op::ArgMax>},
{"ArgMin", TranslateArgMinMaxOp<ng::op::ArgMin>},
{"Atan2", TranslateBinaryOp<ngraph::op::Atan2>},
{"AvgPool", TranslateAvgPoolOp}, {"AvgPoolGrad", TranslateAvgPoolGradOp},
{"BatchMatMul", TranslateBatchMatMulOp},
{"BatchMatMulV2", TranslateBatchMatMulV2Op},
Expand All @@ -5016,7 +5123,8 @@ const static std::map<
{"Conv2DBackpropFilter", TranslateConv2DBackpropFilterOp},
{"Conv2DBackpropInput", TranslateConv2DBackpropInputOp},
{"Conv3D", TranslateConv3DOp}, {"Cos", TranslateUnaryOp<ngraph::op::Cos>},
{"DepthToSpace", TranslateDepthToSpaceOp},
{"CropAndResize", TranslateCropAndResizeOp},
{"Cumsum", TranslateCumsumOp}, {"DepthToSpace", TranslateDepthToSpaceOp},
{"DepthwiseConv2dNative", TranslateDepthwiseConv2dNativeOp},
{"Dequantize", TranslateDequantizeOp},
{"Equal", TranslateBinaryOp<ngraph::op::Equal>},
Expand Down Expand Up @@ -5085,9 +5193,11 @@ const static std::map<
{"Reciprocal", TranslateReciprocalOp},
{"Relu", TranslateUnaryOp<ngraph::op::Relu>}, {"Relu6", TranslateRelu6Op},
{"ReluGrad", TranslateReluGradOp}, {"Reshape", TranslateReshapeOp},
{"ResizeBilinear", TranslateResizeBilinearOp},
{"Rsqrt", TranslateRsqrtOp}, {"RsqrtGrad", TranslateRsqrtGradOp},
{"Select", TranslateSelectOp}, {"Shape", TranslateShapeOp},
{"Sigmoid", TranslateSigmoidOp}, {"SigmoidGrad", TranslateSigmoidGradOp},
{"ScatterNd", TranslateScatterNdOp}, {"Select", TranslateSelectOp},
{"Shape", TranslateShapeOp}, {"Sigmoid", TranslateSigmoidOp},
{"SigmoidGrad", TranslateSigmoidGradOp},
{"Sin", TranslateUnaryOp<ngraph::op::Sin>}, {"Size", TranslateSizeOp},
{"Sign", TranslateUnaryOp<ngraph::op::Sign>}, {"Slice", TranslateSliceOp},
{"Snapshot", TranslateIdentityOp}, {"Softmax", TranslateSoftmaxOp},
Expand Down
23 changes: 22 additions & 1 deletion ngraph_bridge/ngraph_mark_for_clustering.cc
Original file line number Diff line number Diff line change
Expand Up @@ -241,6 +241,7 @@ Status MarkForClustering(Graph* graph, const std::set<string> skip_these_nodes,
{"All", {std::make_shared<ngraph::op::All>()}},
{"ArgMax", {std::make_shared<ngraph::op::ArgMax>()}},
{"ArgMin", {std::make_shared<ngraph::op::ArgMin>()}},
{"Atan2", {std::make_shared<ngraph::op::Atan2>()}},
{"AvgPool", {std::make_shared<ngraph::op::AvgPool>()}},
{"AvgPoolGrad", {std::make_shared<ngraph::op::AvgPoolBackprop>()}},
{"BatchMatMul",
Expand Down Expand Up @@ -275,6 +276,8 @@ Status MarkForClustering(Graph* graph, const std::set<string> skip_these_nodes,
{std::make_shared<ngraph::op::Convolution>(),
std::make_shared<ngraph::op::Reshape>()}},
{"Cos", {std::make_shared<ngraph::op::Cos>()}},
{"CropAndResize", {std::make_shared<ngraph::op::CropAndResize>()}},
{"Cumsum", {std::make_shared<ngraph::op::CumSum>()}},
{"DepthToSpace", {std::make_shared<ngraph::op::Reshape>()}},
{"DepthwiseConv2dNative",
{std::make_shared<ngraph::op::Slice>(),
Expand Down Expand Up @@ -481,7 +484,10 @@ Status MarkForClustering(Graph* graph, const std::set<string> skip_these_nodes,
{constant, std::make_shared<ngraph::op::Minimum>(),
std::make_shared<ngraph::op::Relu>()}},
{"ReluGrad", {relu}},
{"Reshape", {std::make_shared<ngraph::op::Reshape>()}},
// TODO: remove Convert later
{"ResizeBilinear",
{std::make_shared<ngraph::op::Convert>(),
std::make_shared<ngraph::op::Interpolate>()}},
{"Rsqrt", {constant, std::make_shared<ngraph::op::Power>()}},
{"RsqrtGrad",
{constant, std::make_shared<ngraph::op::Power>(),
Expand All @@ -491,6 +497,7 @@ Status MarkForClustering(Graph* graph, const std::set<string> skip_these_nodes,
std::make_shared<ngraph::op::Broadcast>(),
std::make_shared<ngraph::op::Select>()}},
{"Reshape", {constant}},
{"ScatterNd", {constant, std::make_shared<ngraph::op::ScatterNDAdd>()}},
{"Shape", {constant}},
{"Sigmoid",
{constant, std::make_shared<ngraph::op::Exp>(),
Expand Down Expand Up @@ -608,6 +615,7 @@ Status MarkForClustering(Graph* graph, const std::set<string> skip_these_nodes,
confirmation_function_map["All"] = SimpleConfirmationFunction();
confirmation_function_map["ArgMax"] = SimpleConfirmationFunction();
confirmation_function_map["ArgMin"] = SimpleConfirmationFunction();
confirmation_function_map["Atan2"] = SimpleConfirmationFunction();
confirmation_function_map["AvgPool"] = SimpleConfirmationFunction();
confirmation_function_map["AvgPoolGrad"] = SimpleConfirmationFunction();
confirmation_function_map["BatchMatMul"] = SimpleConfirmationFunction();
Expand All @@ -623,7 +631,9 @@ Status MarkForClustering(Graph* graph, const std::set<string> skip_these_nodes,
confirmation_function_map["Conv2DBackpropInput"] =
SimpleConfirmationFunction();
confirmation_function_map["Conv3D"] = SimpleConfirmationFunction();
confirmation_function_map["CropAndResize"] = SimpleConfirmationFunction();
confirmation_function_map["Cos"] = SimpleConfirmationFunction();
confirmation_function_map["Cumsum"] = SimpleConfirmationFunction();
confirmation_function_map["DepthwiseConv2dNative"] =
SimpleConfirmationFunction();
confirmation_function_map["DepthToSpace"] = [](Node* n, bool* result) {
Expand Down Expand Up @@ -747,8 +757,11 @@ Status MarkForClustering(Graph* graph, const std::set<string> skip_these_nodes,
confirmation_function_map["Relu6"] = SimpleConfirmationFunction();
confirmation_function_map["ReluGrad"] = SimpleConfirmationFunction();
confirmation_function_map["Reshape"] = SimpleConfirmationFunction();
confirmation_function_map["ResizeBilinear"] =
SimpleConfirmationFunction();
confirmation_function_map["Rsqrt"] = SimpleConfirmationFunction();
confirmation_function_map["RsqrtGrad"] = SimpleConfirmationFunction();
confirmation_function_map["ScatterNd"] = SimpleConfirmationFunction();
confirmation_function_map["Select"] = SimpleConfirmationFunction();
confirmation_function_map["Shape"] = SimpleConfirmationFunction();
confirmation_function_map["Sigmoid"] = SimpleConfirmationFunction();
Expand Down Expand Up @@ -808,6 +821,7 @@ Status MarkForClustering(Graph* graph, const std::set<string> skip_these_nodes,
type_constraint_map["ArgMax"]["Tidx"] = NGraphIndexDTypes();
type_constraint_map["ArgMin"]["T"] = NGraphNumericDTypes();
type_constraint_map["ArgMin"]["Tidx"] = NGraphIndexDTypes();
type_constraint_map["Atan2"]["T"] = NGraphRealDTypes();
type_constraint_map["AvgPool"]["T"] = NGraphNumericDTypes();
type_constraint_map["AvgPoolGrad"]["T"] = NGraphNumericDTypes();
type_constraint_map["BatchMatMul"]["T"] = NGraphNumericDTypes();
Expand All @@ -822,7 +836,10 @@ Status MarkForClustering(Graph* graph, const std::set<string> skip_these_nodes,
type_constraint_map["Conv2D"]["T"] = NGraphNumericDTypes();
type_constraint_map["Conv2DBackpropInput"]["T"] = NGraphNumericDTypes();
type_constraint_map["Conv3D"]["T"] = NGraphNumericDTypes();
type_constraint_map["CropAndResize"]["T"] = NGraphNumericDTypes();
type_constraint_map["Cos"]["T"] = NGraphRealDTypes();
type_constraint_map["Cumsum"]["T"] = NGraphNumericDTypes();
type_constraint_map["Cumsum"]["Tidx"] = NGraphIndexDTypes();
type_constraint_map["DepthToSpace"]["T"] = NGraphDTypes();
type_constraint_map["DepthwiseConv2dNative"]["T"] = NGraphNumericDTypes();
type_constraint_map["Dequantize"]["T"] = NGraphSupportedQuantizedDTypes();
Expand Down Expand Up @@ -939,8 +956,11 @@ Status MarkForClustering(Graph* graph, const std::set<string> skip_these_nodes,
type_constraint_map["ReluGrad"]["T"] = NGraphNumericDTypes();
type_constraint_map["Reshape"]["T"] = NGraphDTypes();
type_constraint_map["Reshape"]["Tshape"] = NGraphIndexDTypes();
type_constraint_map["ResizeBilinear"]["T"] = NGraphNumericDTypes();
type_constraint_map["Rsqrt"]["T"] = NGraphDTypes();
type_constraint_map["RsqrtGrad"]["T"] = NGraphRealDTypes();
type_constraint_map["ScatterNd"]["T"] = NGraphDTypes();
type_constraint_map["ScatterNd"]["Tindices"] = NGraphIndexDTypes();
type_constraint_map["Select"]["T"] = NGraphDTypes();
type_constraint_map["Shape"]["T"] = NGraphDTypes();
type_constraint_map["Shape"]["out_type"] = NGraphIndexDTypes();
Expand Down Expand Up @@ -1035,6 +1055,7 @@ Status MarkForClustering(Graph* graph, const std::set<string> skip_these_nodes,
};
set_attributes_map["RandomUniform"] = SetStaticInputs({0});
set_attributes_map["Reshape"] = SetStaticInputs({1});
set_attributes_map["ScatterNd"] = SetStaticInputs({2});
set_attributes_map["Slice"] = SetStaticInputs({1, 2});
set_attributes_map["Split"] = SetStaticInputs({0});
set_attributes_map["SplitV"] = SetStaticInputs({1, 2});
Expand Down
1 change: 1 addition & 0 deletions test/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ set(SRC
test_index_library.cpp
test_ngraph_data_cache.cpp
test_utilities.cpp
test_image_ops.cpp
test_math_ops.cpp
test_nn_ops.cpp
test_array_ops.cpp
Expand Down
86 changes: 86 additions & 0 deletions test/python/tensorflow/python_tests_list.txt
Original file line number Diff line number Diff line change
Expand Up @@ -398,6 +398,92 @@ relu_op_test.ReluTest.testGradientFloat32
relu_op_test.ReluTest.testGradientFloat64
relu_op_test.ReluTest.testGradientScalar

scan_ops_test.CumsumTest.test1D
scan_ops_test.CumsumTest.test2D
scan_ops_test.CumsumTest.test3D
scan_ops_test.CumsumTest.test6D
scan_ops_test.CumsumTest.testAxisType
scan_ops_test.CumsumTest.testEmpty
scan_ops_test.CumsumTest.testGradient
scan_ops_test.CumsumTest.testGradient2D
scan_ops_test.CumsumTest.testGradientExclusive
scan_ops_test.CumsumTest.testGradientExclusiveReverse
scan_ops_test.CumsumTest.testGradientReverse
#This test fails due to error string matching issue
#scan_ops_test.CumsumTest.testInvalidAxis
scan_ops_test.CumsumTest.testLarge
scan_ops_test.CumsumTest.test_session

scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testBool
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testEmptyOutputShape1
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testEmptyOutputShape2
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testEmptyOutputShape3
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testExtraIndicesDimensions
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testGradientsRank2ElementUpdate
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testGradientsRank2SliceUpdate
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testGradientsRank3SliceUpdate
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testGradientsRank7SliceUpdate
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testInvalidShape
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testRank3InvalidShape1
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testRank3InvalidShape2
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testRank3ValidShape
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testScatterNdRepatedIndicesAdd
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testSmokeScatterNdBatch1DSliceDim2
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testSmokeScatterNdBatch1DSliceDim3ShapeRank7
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testSmokeScatterNdBatch2DSliceDim2
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testSmokeScatterNdBatch2DSliceDim3ShapeRank7
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testString
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testUndefinedIndicesShape
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testUndefinedOutputShape
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.testUndefinedUpdatesShape
scatter_nd_ops_test.ScatterNdNonAliasingAddTest.test_session
scatter_nd_ops_test.ScatterNdTensorTest.testTensorScatterUpdateWithForwarding
scatter_nd_ops_test.ScatterNdTensorTest.testUpdateAddSub
scatter_nd_ops_test.ScatterNdTensorTest.testUpdateAddSubGradients
scatter_nd_ops_test.ScatterNdTensorTest.test_session
# This test fails on CPU backend on char (and maybe other data types)
# scatter_nd_ops_test.ScatterNdTest.testBool
scatter_nd_ops_test.ScatterNdTest.testEmptyOutputShape1
# TODO: Test failing due to incorrect error message
# scatter_nd_ops_test.ScatterNdTest.testEmptyOutputShape2
scatter_nd_ops_test.ScatterNdTest.testEmptyOutputShape3
# This test fails on CPU backend on some unsupported data types
# scatter_nd_ops_test.ScatterNdTest.testExtraIndicesDimensions
scatter_nd_ops_test.ScatterNdTest.testGradientsRank2ElementUpdate
scatter_nd_ops_test.ScatterNdTest.testGradientsRank2SliceUpdate
scatter_nd_ops_test.ScatterNdTest.testGradientsRank3SliceUpdate
scatter_nd_ops_test.ScatterNdTest.testGradientsRank7SliceUpdate
scatter_nd_ops_test.ScatterNdTest.testInvalidShape
scatter_nd_ops_test.ScatterNdTest.testRank3InvalidShape1
scatter_nd_ops_test.ScatterNdTest.testRank3InvalidShape2
scatter_nd_ops_test.ScatterNdTest.testRank3ValidShape
scatter_nd_ops_test.ScatterNdTest.testScatterNdRepatedIndicesAdd
scatter_nd_ops_test.ScatterNdTest.testSmokeScatterNdBatch1DSliceDim2
scatter_nd_ops_test.ScatterNdTest.testSmokeScatterNdBatch1DSliceDim3ShapeRank7
scatter_nd_ops_test.ScatterNdTest.testSmokeScatterNdBatch2DSliceDim2
scatter_nd_ops_test.ScatterNdTest.testSmokeScatterNdBatch2DSliceDim3ShapeRank7
scatter_nd_ops_test.ScatterNdTest.testString
scatter_nd_ops_test.ScatterNdTest.testUndefinedIndicesShape
scatter_nd_ops_test.ScatterNdTest.testUndefinedOutputShape
scatter_nd_ops_test.ScatterNdTest.testUndefinedUpdatesShape
scatter_nd_ops_test.ScatterNdTest.test_session
scatter_nd_ops_test.StatefulScatterNdTest.testConcurrentUpdates
scatter_nd_ops_test.StatefulScatterNdTest.testExtraIndicesDimensions
scatter_nd_ops_test.StatefulScatterNdTest.testRank3InvalidShape1
scatter_nd_ops_test.StatefulScatterNdTest.testRank3InvalidShape2
scatter_nd_ops_test.StatefulScatterNdTest.testRank3ValidShape
scatter_nd_ops_test.StatefulScatterNdTest.testResVarInvalidOutputShape
scatter_nd_ops_test.StatefulScatterNdTest.testScatterOutOfRangeCpu
#scatter_nd_ops_test.StatefulScatterNdTest.testScatterRepeatIndices
scatter_nd_ops_test.StatefulScatterNdTest.testSimple
scatter_nd_ops_test.StatefulScatterNdTest.testSimple2
scatter_nd_ops_test.StatefulScatterNdTest.testSimple3
scatter_nd_ops_test.StatefulScatterNdTest.testSimpleResource
#scatter_nd_ops_test.StatefulScatterNdTest.testVariableRankAdd
#scatter_nd_ops_test.StatefulScatterNdTest.testVariableRankSub
#scatter_nd_ops_test.StatefulScatterNdTest.testVariableRankUpdate
scatter_nd_ops_test.StatefulScatterNdTest.test_session

slice_op_test.SliceTest.testComplex
slice_op_test.SliceTest.testEmpty
slice_op_test.SliceTest.testGradientsAll
Expand Down
Loading