Skip to content

Commit

Permalink
Merge pull request #16699 from case540/branch_184236409
Browse files Browse the repository at this point in the history
Branch 184236409
  • Loading branch information
Amit Patankar committed Feb 2, 2018
2 parents 0464602 + 60266f5 commit 995d836
Show file tree
Hide file tree
Showing 7 changed files with 104 additions and 49 deletions.
4 changes: 3 additions & 1 deletion tensorflow/cc/saved_model/loader.cc
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,9 @@ Status FindMetaGraphDefToLoad(const SavedModel& saved_model_proto,
// Creates a new Session from `session_options` and loads `meta_graph_def`'s
// graph into it, handing ownership of the session to the caller via
// `session`. Returns a non-OK status if session creation or graph creation
// fails.
Status LoadMetaGraphIntoSession(const MetaGraphDef& meta_graph_def,
                                const SessionOptions& session_options,
                                std::unique_ptr<Session>* session) {
  // Use the status-returning NewSession overload so a creation failure
  // (e.g. an invalid session target) is propagated to the caller instead of
  // silently leaving *session holding a null pointer.
  Session* session_p = nullptr;
  TF_RETURN_IF_ERROR(NewSession(session_options, &session_p));
  session->reset(session_p);
  return (*session)->Create(meta_graph_def.graph_def());
}

Expand Down
18 changes: 18 additions & 0 deletions tensorflow/cc/saved_model/loader_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,24 @@ TEST_F(LoaderTest, NoTagMatchMultiple) {
<< st.error_message();
}

TEST_F(LoaderTest, SessionCreationFailure) {
  SavedModelBundle bundle;
  RunOptions run_options;

  // Default SessionOptions create a session successfully, so poison the
  // `target` field with a value no session factory accepts; this forces
  // LoadSavedModel to fail during session creation.
  constexpr char kInvalidTarget[] = "invalid target";
  SessionOptions session_options;
  session_options.target = kInvalidTarget;

  const string export_dir =
      io::JoinPath(testing::TensorFlowSrcRoot(), kTestDataSharded);
  const Status status = LoadSavedModel(session_options, run_options,
                                       export_dir, {kSavedModelTagServe},
                                       &bundle);
  EXPECT_FALSE(status.ok());
  // The error surfaced to the caller should mention the offending target.
  EXPECT_TRUE(StringPiece(status.error_message()).contains(kInvalidTarget))
      << status.error_message();
}

TEST_F(LoaderTest, PbtxtFormat) {
SavedModelBundle bundle;
SessionOptions session_options;
Expand Down
9 changes: 9 additions & 0 deletions tensorflow/contrib/lite/kernels/internal/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,13 @@ config_setting(
},
)

# Matches builds where --cpu=darwin_x86_64 (macOS on x86_64), so the
# Intel-specific TF Lite dependencies can be selected for this platform
# alongside the existing ":darwin" setting.
config_setting(
    name = "darwin_x86_64",
    values = {
        "cpu": "darwin_x86_64",
    },
)

config_setting(
name = "freebsd",
values = {
Expand Down Expand Up @@ -154,6 +161,7 @@ cc_library(
":x86": tflite_deps_intel,
":x86_64": tflite_deps_intel,
":darwin": tflite_deps_intel,
":darwin_x86_64": tflite_deps_intel,
":freebsd": tflite_deps_intel,
"//conditions:default": [],
}),
Expand Down Expand Up @@ -232,6 +240,7 @@ cc_library(
":x86": tflite_deps_intel,
":x86_64": tflite_deps_intel,
":darwin": tflite_deps_intel,
":darwin_x86_64": tflite_deps_intel,
":freebsd": tflite_deps_intel,
"//conditions:default": [],
}),
Expand Down
4 changes: 4 additions & 0 deletions tensorflow/core/grappler/costs/op_level_cost_estimator.cc
Original file line number Diff line number Diff line change
Expand Up @@ -353,6 +353,9 @@ OpLevelCostEstimator::DeviceInfo OpLevelCostEstimator::GetDeviceInfo(
VLOG(1) << "Device: " << device.type() << " gflops: " << gflops
<< " gb_per_sec: " << gb_per_sec;

DCHECK_LT(0, gflops) << device.DebugString();
DCHECK_LT(0, gb_per_sec) << device.DebugString();

return {gflops, gb_per_sec};
}

Expand Down Expand Up @@ -408,6 +411,7 @@ Costs OpLevelCostEstimator::PredictCostOfAnUnknownOp(
Costs OpLevelCostEstimator::PredictOpCountBasedCost(
double operations, const OpInfo& op_features) const {
DeviceInfo device_perf = GetDeviceInfo(op_features.device());

Costs::NanoSeconds compute_cost(std::ceil(operations / device_perf.gigaops));
VLOG(1) << "Op:" << op_features.op() << " GOps:" << operations / 1e9
<< " Execution Time (ns):" << compute_cost.count();
Expand Down
2 changes: 2 additions & 0 deletions tensorflow/python/grappler/layout_optimizer_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,6 +179,8 @@ def _get_cluster():
named_device = device_properties_pb2.NamedDevice()
named_device.name = '/GPU:0'
named_device.properties.type = 'GPU'
named_device.properties.num_cores = 24
named_device.properties.frequency = 1000
named_device.properties.environment['architecture'] = '4'
cluster = gcluster.Cluster(devices=[named_device])
return cluster
Expand Down
53 changes: 38 additions & 15 deletions tensorflow/tools/graph_transforms/sparsify_gather.cc
Original file line number Diff line number Diff line change
Expand Up @@ -86,48 +86,71 @@ void CreateConstNode(const Tensor& tensor, const string& name,
SetNodeTensorAttr<float>("value", tensor, node_def);
}

// Maps a (possibly partitioned) tensor slice name such as "v/part_0" back to
// the key of the monolithic tensor ("v"). A name whose final path component
// does not start with "part_" is returned unchanged.
string GetMonolithicTensorKey(const string& tensor_slice_name) {
  std::vector<string> components = Split(tensor_slice_name, "/");
  const StringPiece last_component(components.back());
  if (last_component.starts_with("part_")) {
    // A partition suffix implies at least "<name>/part_<i>".
    CHECK_GE(components.size(), 2);
    components.pop_back();
  }
  return Join(components, "/");
}

// Looks up the shape-and-slice string recorded by the RestoreV2 op that
// restores `target_name`, writing it to `shape_slice_string`. Returns an
// Internal error if no matching RestoreV2 entry or slice can be found.
//
// NOTE(review): this body reconstructs the post-merge version of the
// function; the pasted diff interleaved removed lines (the old `tensor_name`
// parameter, the whole-node RestoreV2 match, the hard-coded offset 0, and the
// superseded GetMonolithicTensorKey definition) with their replacements,
// which does not compile as written.
Status ObtainTensorSlice(const GraphDef& input_graph_def,
                         const string& target_name,
                         string* shape_slice_string) {
  // Find the "save*/Assign*" node feeding `target_name`; its second input
  // names the restore node (possibly with an output index, "name:idx").
  string restore_node_name;
  for (const auto& node : input_graph_def.node()) {
    std::vector<string> node_name_parts = Split(node.name(), "/");
    if (node_name_parts.size() == 2 &&
        StringPiece(node_name_parts[0]).starts_with("save") &&
        StringPiece(node_name_parts[1]).starts_with("Assign") &&
        node.input(0) == target_name) {
      restore_node_name = node.input(1);
      break;
    }
  }

  // Strip an optional ":<output_index>" suffix to recover the node name.
  std::vector<string> restore_node_parts = Split(restore_node_name, ":");
  CHECK_LE(restore_node_parts.size(), 2);
  string tensor_names_node;
  string shape_and_slices_node;
  for (const auto& node : input_graph_def.node()) {
    if ((node.name() == restore_node_parts[0]) && (node.op() == "RestoreV2")) {
      tensor_names_node = node.input(1);
      shape_and_slices_node = node.input(2);
      break;
    }
  }

  // A single RestoreV2 may restore many tensors; locate the index of the
  // entry matching the monolithic checkpoint key of `target_name`.
  int offset = -1;
  for (const auto& node : input_graph_def.node()) {
    if (node.name() == tensor_names_node) {
      Tensor tensor_names_tensor;
      TF_RETURN_IF_ERROR(GetNodeAttr(node, "value", &tensor_names_tensor));
      const auto& tensor_names_value = tensor_names_tensor.flat<string>();
      for (int i = 0; i < tensor_names_value.size(); i++) {
        if (tensor_names_value(i) == GetMonolithicTensorKey(target_name)) {
          offset = i;
          break;
        }
      }
    }
  }
  if (offset == -1) {
    return errors::Internal("Unable to find RestoreV2 entry for variable: ",
                            target_name);
  }
  // Read the shape-and-slice string at the same index.
  for (const auto& node : input_graph_def.node()) {
    if (node.name() == shape_and_slices_node) {
      Tensor shape_and_slices_tensor;
      TF_RETURN_IF_ERROR(GetNodeAttr(node, "value", &shape_and_slices_tensor));
      const auto& shape_and_slices_value =
          shape_and_slices_tensor.flat<string>();
      *shape_slice_string = shape_and_slices_value(offset);
      return Status::OK();
    }
  }
  return errors::Internal("Unable to find slice for variable: ", target_name);
}

Status ReadTensorFromCheckpoint(
Expand Down
63 changes: 30 additions & 33 deletions tensorflow/tools/graph_transforms/sparsify_gather_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -106,11 +106,15 @@ class SparsifyGatherTest : public ::testing::Test {
NodeDef* save_const_node =
CreateNode("save/Const", "Const", {}, &graph_def);

Tensor tensor_names_values(DT_STRING, TensorShape({1}));
test::FillValues<string>(&tensor_names_values, {"w"});
NodeDef* tensor_names_node =
CreateNode("save/RestoreV2/tensor_names", "Const", {}, &graph_def);
SetNodeTensorAttr<string>("value", tensor_names_values,
tensor_names_node);

NodeDef* tensor_shapes_slices_node = CreateNode(
"save/RestoreV2/shape_and_slices", "Const", {}, &graph_def);

Tensor shapes_slices_val(DT_STRING, TensorShape({1}));
shapes_slices_val.flat<string>()(0) = "4 1 0,4:0,1";
SetNodeTensorAttr<string>("value", shapes_slices_val,
Expand Down Expand Up @@ -310,6 +314,29 @@ class SparsifyGatherTest : public ::testing::Test {
SetNodeTensorAttr<float>("value", weights, w_node1);
SetNodeTensorAttr<float>("value", weights, w_node2);
} else {
NodeDef* save_const_node =
CreateNode("save/Const", "Const", {}, &graph_def);

NodeDef* tensor_names_node =
CreateNode("save/RestoreV2/tensor_names", "Const", {}, &graph_def);
Tensor tensor_names_values(DT_STRING, TensorShape({2}));
test::FillValues<string>(&tensor_names_values, {"w1", "w2"});
SetNodeTensorAttr<string>("value", tensor_names_values,
tensor_names_node);

NodeDef* tensor_shapes_slices_node = CreateNode(
"save/RestoreV2/shape_and_slices", "Const", {}, &graph_def);
Tensor shapes_slices_val(DT_STRING, TensorShape({2}));
shapes_slices_val.flat<string>()(0) = "4 1 0,4:0,1";
shapes_slices_val.flat<string>()(1) = "4 1 0,4:0,1";
SetNodeTensorAttr<string>("value", shapes_slices_val,
tensor_shapes_slices_node);

NodeDef* restore_node = CreateNode(
"save/RestoreV2", "RestoreV2",
{save_const_node, tensor_names_node, tensor_shapes_slices_node},
&graph_def);

w_node1 = CreateNode("w1/part_1", "VariableV2", {}, &graph_def);

zeros_shape1 = CreateNode("w1/part_1/Initializer/zeros/shape_as_tensor",
Expand All @@ -321,23 +348,7 @@ class SparsifyGatherTest : public ::testing::Test {
assign_node1 = CreateNode("w1/part_1/Assign", "Assign",
{w_node1, zeros_node1}, &graph_def);

NodeDef* save_const_node =
CreateNode("save/Const", "Const", {}, &graph_def);
NodeDef* tensor_names_node1 =
CreateNode("save/RestoreV2/tensor_names", "Const", {}, &graph_def);
NodeDef* tensor_shapes_slices_node1 = CreateNode(
"save/RestoreV2/shape_and_slices", "Const", {}, &graph_def);

Tensor shapes_slices_val1(DT_STRING, TensorShape({1}));
shapes_slices_val1.flat<string>()(0) = "4 1 0,4:0,1";
SetNodeTensorAttr<string>("value", shapes_slices_val1,
tensor_shapes_slices_node1);

NodeDef* restore_node1 = CreateNode(
"save/RestoreV2", "RestoreV2",
{save_const_node, tensor_names_node1, tensor_shapes_slices_node1},
&graph_def);
CreateNode("save/Assign", "Assign", {w_node1, restore_node1}, &graph_def);
CreateNode("save/Assign", "Assign", {w_node1, restore_node}, &graph_def);

w_node2 = CreateNode("w2/part_1", "VariableV2", {}, &graph_def);
zeros_shape2 = CreateNode("w2/part_1/Initializer/zeros/shape_as_tensor",
Expand All @@ -349,21 +360,7 @@ class SparsifyGatherTest : public ::testing::Test {
assign_node2 = CreateNode("w2/part_1/Assign", "Assign",
{w_node2, zeros_node2}, &graph_def);

NodeDef* tensor_names_node2 =
CreateNode("save/RestoreV2_1/tensor_names", "Const", {}, &graph_def);
NodeDef* tensor_shapes_slices_node2 = CreateNode(
"save/RestoreV2_1/shape_and_slices", "Const", {}, &graph_def);

Tensor shapes_slices_val2(DT_STRING, TensorShape({1}));
shapes_slices_val2.flat<string>()(0) = "4 1 0,4:0,1";
SetNodeTensorAttr<string>("value", shapes_slices_val2,
tensor_shapes_slices_node2);

NodeDef* restore_node2 = CreateNode(
"save/RestoreV2_1", "RestoreV2",
{save_const_node, tensor_names_node2, tensor_shapes_slices_node2},
&graph_def);
CreateNode("save/Assign_1", "Assign", {w_node2, restore_node2},
CreateNode("save/Assign_1", "Assign", {w_node2, restore_node},
&graph_def);

BundleWriter writer(Env::Default(), checkpoint_path);
Expand Down

0 comments on commit 995d836

Please sign in to comment.