Support TensorFlow pluggable devices #2144

Open · wants to merge 17 commits into master
3 changes: 3 additions & 0 deletions .bazelrc
@@ -41,6 +41,9 @@ build:nativeopt --copt=-march=native
build:nativeopt --host_copt=-march=native
build:nativeopt --copt=-O3

# Support TF pluggable devices
build --copt=-DSUPPORT_TF_PLUGINS --define=with_plugins_support=true

build --keep_going
build --verbose_failures=true
build --spawn_strategy=standalone
7 changes: 7 additions & 0 deletions tensorflow_serving/BUILD
@@ -24,3 +24,10 @@ filegroup(
],
),
)


config_setting(
name = "with_plugins_support",
define_values = {"with_plugins_support": "true"},
visibility = ["//visibility:public"],
)
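
Together with the .bazelrc change above, this config_setting forms a two-part switch: --define=with_plugins_support=true satisfies the setting (selected on by the if_with_plugins_support() helper added in serving.bzl further down), while --copt=-DSUPPORT_TF_PLUGINS gates the plugin code paths in C++. A minimal sketch of the preprocessor half, mirroring the guards used later in main.cc and server.h (illustrative only, not code from this PR):

#ifdef SUPPORT_TF_PLUGINS
// Compiled only when .bazelrc passes --copt=-DSUPPORT_TF_PLUGINS.
constexpr bool kPluginSupportCompiledIn = true;
#else
constexpr bool kPluginSupportCompiledIn = false;
#endif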
19 changes: 13 additions & 6 deletions tensorflow_serving/model_servers/BUILD
@@ -4,6 +4,7 @@ load("@rules_pkg//:pkg.bzl", "pkg_deb", "pkg_tar")

# Placeholder: load py_binary
# Placeholder: load py_test
load("//tensorflow_serving:serving.bzl", "if_with_plugins_support")
load("@org_tensorflow//tensorflow:tensorflow.bzl", "if_google", "if_libtpu", "if_with_tpu_support")
load("//tensorflow_serving:tensorflow_version.bzl", "if_not_v2", "if_v2")

@@ -423,7 +424,11 @@ cc_library(
"@org_tensorflow//tensorflow/core:protos_all_cc",
"@org_tensorflow//tensorflow/core:tensorflow",
"@org_tensorflow//tensorflow/core/profiler/rpc:profiler_service_impl",
] + SUPPORTED_TENSORFLOW_OPS,
] + SUPPORTED_TENSORFLOW_OPS + if_with_plugins_support([
"@org_tensorflow//tensorflow/c:c_api_experimental",
"@org_tensorflow//tensorflow/c:kernels_experimental",
"@org_tensorflow//tensorflow/c/experimental/next_pluggable_device:c_api",
]),
)

cc_library(
@@ -441,7 +446,6 @@ cc_library(
],
deps = [
":server_lib",
"@org_tensorflow//tensorflow/c:c_api",
"@org_tensorflow//tensorflow/compiler/jit:xla_cpu_jit",
"@org_tensorflow//tensorflow/core:lib",
"@org_tensorflow//tensorflow/core/platform/cloud:gcs_file_system",
@@ -458,10 +462,13 @@

cc_binary(
name = "tensorflow_model_server",
linkopts = [
# Exports Tensorflow APIs
"-rdynamic",
],
additional_linker_inputs =
if_with_plugins_support([
"tf_c_api_exported_symbols.lds",
]),
linkopts = if_with_plugins_support([
"-Wl,-dynamic-list,$(location :tf_c_api_exported_symbols.lds)",
]),
stamp = 1,
visibility = [
":testing",
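
The linkopts change above swaps the blanket -rdynamic export for -Wl,-dynamic-list with tf_c_api_exported_symbols.lds (the new file shown further down), so only TF_*-prefixed symbols stay in the server's dynamic symbol table. That is what lets a plugin .so loaded at runtime resolve the TensorFlow C API against the tensorflow_model_server binary instead of carrying its own copy of TensorFlow. A hypothetical plugin-side fragment illustrating that resolution (ExamplePluginInit is an invented name used only for illustration):

#include <cstdio>

#include "tensorflow/c/c_api.h"

// TF_Version() is not defined in the plugin; the dynamic loader resolves it
// against the serving binary's exported TF_* symbols when the .so is loaded.
extern "C" void ExamplePluginInit() {
  std::printf("Plugin sees TF C API version %s\n", TF_Version());
}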
35 changes: 34 additions & 1 deletion tensorflow_serving/model_servers/main.cc
@@ -79,7 +79,12 @@ void InitializeTPU(tensorflow::serving::main::Server::Options& server_options) {
}
#endif

int main(int argc, char** argv) {
#ifdef SUPPORT_TF_PLUGINS
#include <filesystem>
#include "tensorflow/c/c_api_experimental.h"
#endif

int main(int argc, char **argv) {
tensorflow::serving::main::Server::Options options;
bool display_version = false;
bool xla_cpu_compilation_enabled = false;
@@ -296,6 +301,12 @@ int main(int argc, char** argv) {
&options.thread_pool_factory_config_file,
"If non-empty, read an ascii ThreadPoolConfig protobuf "
"from the supplied file name."),
#ifdef SUPPORT_TF_PLUGINS
tensorflow::Flag("tensorflow_plugins", &options.tensorflow_plugins,
"Enable tensorflow plugins by giving a path to folder. "
"If non-empty, load all .so files under this folder "
"as tensorflow plugins."),
#endif
tensorflow::Flag("skip_initialize_tpu", &options.skip_initialize_tpu,
"Whether to skip auto initializing TPU.")};

@@ -306,6 +317,28 @@ int main(int argc, char** argv) {
}

tensorflow::port::InitMain(argv[0], &argc, &argv);

#ifdef SUPPORT_TF_PLUGINS
if (std::filesystem::exists(options.tensorflow_plugins)) {
for (const auto &entry :
std::filesystem::directory_iterator(options.tensorflow_plugins)) {
std::string plugin_file = entry.path().string();
if (plugin_file.size() > 3 &&
plugin_file.compare(plugin_file.size() - 3, 3, ".so") == 0) {
TF_Status *plugin_status = TF_NewStatus();
TF_LoadPluggableDeviceLibrary(entry.path().c_str(), plugin_status);
TF_Code code = TF_GetCode(plugin_status);
if (code == TF_OK) {
VLOG(0) << "Plugin library " << entry.path() << " loaded successfully.";
} else {
std::string status_msg(TF_Message(plugin_status));
VLOG(0) << "Could not load " << entry.path() << ": " << status_msg;
}
TF_DeleteStatus(plugin_status);  // Release the status created for this library.
}
}
}
#endif

#if defined(LIBTPU_ON_GCE) || defined(PLATFORM_CLOUD_TPU)
InitializeTPU(options);
#endif
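
For context on what the loaded .so files are expected to provide: TF_LoadPluggableDeviceLibrary looks up the PluggableDevice entry points declared under tensorflow/c/experimental. The skeleton below is a hedged sketch based on the upstream stream_executor and kernel C API headers; the exact hooks and struct fields vary across TensorFlow versions and are not part of this PR:

#include "tensorflow/c/experimental/stream_executor/stream_executor.h"
#include "tensorflow/c/tf_status.h"

// Device registration hook: a real plugin fills in params->platform and
// params->platform_fns with its StreamExecutor implementation.
extern "C" void SE_InitPlugin(SE_PlatformRegistrationParams* params,
                              TF_Status* status) {
  TF_SetStatus(status, TF_OK, "");
}

// Kernel registration hook: a real plugin registers its kernels for the
// newly added device type here.
extern "C" void TF_InitKernel() {}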
3 changes: 3 additions & 0 deletions tensorflow_serving/model_servers/server.h
@@ -101,6 +101,9 @@ class Server {
tensorflow::string thread_pool_factory_config_file;
bool enable_signature_method_name_check = false;
bool enable_profiler = true;
#ifdef SUPPORT_TF_PLUGINS
tensorflow::string tensorflow_plugins = "";
#endif
bool skip_initialize_tpu = false;
Options();
};
3 changes: 3 additions & 0 deletions tensorflow_serving/model_servers/tf_c_api_exported_symbols.lds
@@ -0,0 +1,3 @@
{
*TF_*;
};
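
The single *TF_*; pattern keeps every TF_*-prefixed symbol visible to dynamically loaded code. Below is a small optional check a plugin could run once it has been loaded into the server process, to confirm the host really exports the C API (HostExportsTfCApi is an invented helper name, not part of this PR):

#include <dlfcn.h>

#include <cstdio>

// RTLD_DEFAULT searches the process-wide symbol scope, which includes the
// server's exported TF_* symbols thanks to the dynamic list above.
extern "C" bool HostExportsTfCApi() {
  void* sym = dlsym(RTLD_DEFAULT, "TF_NewStatus");
  std::printf("TF_NewStatus is %s in the host process\n",
              sym ? "visible" : "missing");
  return sym != nullptr;
}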
7 changes: 7 additions & 0 deletions tensorflow_serving/serving.bzl
@@ -78,6 +78,13 @@ def serving_tensorflow_proto_dep(dep):
"""
return "{}_cc".format(dep)

def if_with_plugins_support(if_true, if_false = []):
"""Shorthand for select()ing whether to build API support for TensorFlow Plugins"""
return select({
"//tensorflow_serving:with_plugins_support": if_true,
"//conditions:default": if_false,
})

def oss_only_cc_test(name, srcs = [], deps = [], data = [], size = "medium", linkstatic = 0):
"""cc_test that is only run in open source environment."""
return native.cc_test(