From d18bccc675dbd476afc4374fb34630a8fa95c681 Mon Sep 17 00:00:00 2001 From: Hui Zhang Date: Tue, 7 Jul 2020 08:10:44 +0000 Subject: [PATCH] fix log of deltann --- deltann/README.md | 29 ++++++++++++++++++++++++ deltann/core/config.cc | 11 ++++----- deltann/core/graph.cc | 35 ++++++++++++++++------------- deltann/core/io.h | 9 ++++---- deltann/core/runtime.cc | 33 ++++++++++++++------------- deltann/core/shape.h | 1 + deltann/examples/speaker/model.yaml | 2 +- deltann/examples/speaker/test.cc | 21 ++++++++++++++--- 8 files changed, 97 insertions(+), 44 deletions(-) create mode 100644 deltann/README.md diff --git a/deltann/README.md b/deltann/README.md new file mode 100644 index 00000000..706522cb --- /dev/null +++ b/deltann/README.md @@ -0,0 +1,29 @@ +# DELTA-NN + +```bash +total 104 +-rw-rw-r-- 1 60847 60847 3541 Jul 7 01:24 Makefile +-rw-r--r-- 1 root root 0 Jul 7 07:55 README.md +drwxrwxr-x 2 60847 60847 47 Jul 7 01:24 api/ +-rwxrwxr-x 1 60847 60847 405 Jul 7 01:24 build.sh* +drwxrwxr-x 3 60847 60847 4096 Jul 7 07:52 core/ +-rw-rw-r-- 1 60847 60847 46 Jul 7 01:24 deltann_version_script.lds +lrwxrwxrwx 1 60847 60847 7 Jul 7 01:24 run.sh -> test.sh* +drwxrwxr-x 2 60847 60847 97 Jul 7 01:24 targets/ +drwxrwxr-x 2 60847 60847 28 Jul 7 04:34 test/ +-rwxrwxr-x 1 60847 60847 726 Jul 7 01:24 test.sh* + +drwxrwxr-x 4 60847 60847 85 Jul 7 01:24 examples/ + +drwxrwxr-x 8 60847 60847 4096 Jul 7 01:24 infer/ + +drwxrwxr-x 5 60847 60847 118 Jul 7 01:24 server/ +``` + +`api`, `core`, `targets`, `test`, `deltann_version_script.lds` are src for `deltann` + +`examples` are some demos using `deltann` + +`server` are RESTful API for `deltann` wrapped by golang + +`infer` are Graph Compiler using `TensorFlow grappler` diff --git a/deltann/core/config.cc b/deltann/core/config.cc index f919a13e..0cb3e706 100644 --- a/deltann/core/config.cc +++ b/deltann/core/config.cc @@ -37,9 +37,9 @@ BaseConfig::BaseConfig(std::string path) : _file_path(path) { auto search = 
_global_config.find(_file_path); if (search != _global_config.end()) { - LOG_WARN << "config path:" << _file_path << " already loaded!"; + LOG_WARN << "config path: [" << _file_path << "] already loaded!"; } else { - LOG_INFO << "config path:" << _file_path; + LOG_INFO << "config path: [" << _file_path << "]"; try { _global_config[path] = YAML::LoadFile(_file_path); LOG_INFO << "load config success"; @@ -50,7 +50,7 @@ BaseConfig::BaseConfig(std::string path) : _file_path(path) { } _custom_ops_path = config()["model"]["custom_ops_path"].as(); - LOG_INFO << "custom ops path:" << _custom_ops_path; + LOG_INFO << "custom ops path: [" << _custom_ops_path << "]"; } BaseConfig::~BaseConfig() {} @@ -70,7 +70,8 @@ DeltaStatus Config::load_graphs() { const YAML::Node& model_cfg = config()["model"]; int graph_num = model_cfg["graphs"].size(); - LOG_INFO << "graph num is: " << graph_num; + LOG_INFO << "graph num is: [" << graph_num << "]"; + for (int i = 0; i < graph_num; ++i) { const YAML::Node& graph_cfg = model_cfg["graphs"][i]; std::string name = model_cfg["graphs"][i]["name"].as(); @@ -90,7 +91,7 @@ RuntimeConfig::RuntimeConfig(std::string path) : Config(path) { DELTA_ASSERT_OK(load_runtime()); _rt_cfg = config()["runtime"]; _num_threads = _rt_cfg["num_threads"].as(); - LOG_WARN << "_num_threads is " << _num_threads; + LOG_WARN << "_num_threads is: [" << _num_threads << "]"; } RuntimeConfig::~RuntimeConfig() {} diff --git a/deltann/core/graph.cc b/deltann/core/graph.cc index 02784fbd..c206cfed 100644 --- a/deltann/core/graph.cc +++ b/deltann/core/graph.cc @@ -31,17 +31,18 @@ Graph::Graph(const YAML::Node& cfg) : _cfg(cfg) { _engine_type = cfg["engine"].as(); string model_path = cfg["local"]["path"].as(); DELTA_CHECK(version > 0); - LOG_INFO << "graph name: [" << _name << "]"; - LOG_INFO << "server type: " << server_type; - LOG_INFO << "engine: " << _engine_type; - LOG_INFO << "version: " << version; + + LOG_INFO << "graph name: [" << _name << " ]"; + LOG_INFO << "server 
type: [ " << server_type << " ]"; + LOG_INFO << "engine: [ " << _engine_type << " ]"; + LOG_INFO << "version: [ " << version << " ]"; _model_meta.server_type = server_type; _model_meta.version = version; if (server_type == "local") { LOG_INFO << "load local model"; _model_meta.local.model_path = model_path + "/" + std::to_string(version); - LOG_INFO << "model path: " << _model_meta.local.model_path; + LOG_INFO << "model path: [ " << _model_meta.local.model_path << " ]"; string model_type = cfg["local"]["model_type"].as(); if (model_type == kSavedModel) { @@ -54,9 +55,10 @@ Graph::Graph(const YAML::Node& cfg) : _cfg(cfg) { LOG_FATAL << "Error, not support model_type " << model_type; } - LOG_INFO << "model type : " + LOG_INFO << "model type : [ " << static_cast::type>( - _model_meta.local.model_type); + _model_meta.local.model_type) + << " ]"; } else if (server_type == "remote") { LOG_INFO << "load remote model"; @@ -87,17 +89,18 @@ Graph::~Graph() { // inputs DeltaStatus Graph::add_inputs() { int in_num = _cfg["inputs"].size(); - LOG_INFO << "inputs num is " << in_num; + LOG_INFO << "Inputs num : [ " << in_num << " ]"; for (int i = 0; i < in_num; ++i) { - LOG_INFO << "in name is " << _cfg["inputs"][i]["name"]; - LOG_INFO << "in dtype is " << _cfg["inputs"][i]["dtype"]; + LOG_INFO << "inputs : " << i; + LOG_INFO << "\t name [ " << _cfg["inputs"][i]["name"] << " ]"; + LOG_INFO << "\t dtype [ " << _cfg["inputs"][i]["dtype"] << " ]"; string name = _cfg["inputs"][i]["name"].as(); string dtype = _cfg["inputs"][i]["dtype"].as(); int id = _cfg["inputs"][i]["id"].as(); if (_cfg["inputs"][i]["shape"]) { - LOG_INFO << "in shape is " << _cfg["inputs"][i]["shape"]; + LOG_INFO << "\t shape [ " << _cfg["inputs"][i]["shape"] << " ]"; YAML::Node s = _cfg["inputs"][i]["shape"]; std::vector v; for (std::size_t i = 0; i < s.size(); i++) { @@ -106,7 +109,7 @@ DeltaStatus Graph::add_inputs() { _inputs.insert(pair( name, Input(name, id, Shape(v), delta_str_dtype(dtype)))); } else { - 
LOG_INFO << "graph " << _name << " shape is None" + LOG_INFO << "graph [" << _name << "] shape is None" << _cfg["inputs"][i]["shape"]; _inputs.insert( pair(name, Input(name, id, delta_str_dtype(dtype)))); @@ -119,11 +122,13 @@ DeltaStatus Graph::add_inputs() { // outputs DeltaStatus Graph::add_outputs() { int out_num = _cfg["outputs"].size(); - LOG_INFO << "output num is " << out_num; + LOG_INFO << "Output num : [ " << out_num << " ]"; for (int i = 0; i < out_num; ++i) { - LOG_INFO << "out name is " << _cfg["outputs"][i]["name"]; - LOG_INFO << "out dtype is " << _cfg["outputs"][i]["dtype"]; + LOG_INFO << "\tout name is [ " << _cfg["outputs"][i]["name"] << " ]"; + LOG_INFO << "\tout dtype is [ " << _cfg["outputs"][i]["dtype"] << " ]"; + LOG_INFO << "\tout shape is [ " << _cfg["outputs"][i]["shape"] << " ]"; + string name = _cfg["outputs"][i]["name"].as(); string dtype = _cfg["outputs"][i]["dtype"].as(); diff --git a/deltann/core/io.h b/deltann/core/io.h index 628f55f0..abbac19f 100644 --- a/deltann/core/io.h +++ b/deltann/core/io.h @@ -54,6 +54,7 @@ class BaseInOut { Shape& shape(void) { return _shape; } + // num elements const size_t size(void) const { return _shape.size(); } DataType dtype(void) const { return _dtype; } @@ -71,12 +72,12 @@ class BaseInOut { friend std::ostream& operator<<(std::ostream& os, const BaseInOut& inout) { if (inout.inout_type() == InOut::DELTA_IN) { - os << "Input: "; + os << "Input: ["; } else { - os << "Output: "; + os << "Output: ["; } - os << inout._name << " id:" << inout._id << " shape: " << inout._shape - << " dtype: " << delta_dtype_str(inout._dtype); + os << inout._name << "] id: [" << inout._id << "] shape: [" << inout._shape + << "] dtype: [" << delta_dtype_str(inout._dtype) << "]"; return os; } diff --git a/deltann/core/runtime.cc b/deltann/core/runtime.cc index 8b6cc060..8ce8a17a 100644 --- a/deltann/core/runtime.cc +++ b/deltann/core/runtime.cc @@ -29,7 +29,7 @@ DeltaStatus load_custom_ops_lib(const std::string& lib) { } 
#ifdef USE_TF - LOG_INFO << "custom op lib is: " << lib; + LOG_INFO << "custom op lib is: [" << lib << "]"; TF_Status* status = TF_NewStatus(); TF_Library* custom_op_lib = TF_LoadLibrary(lib.c_str(), status); @@ -39,13 +39,13 @@ DeltaStatus load_custom_ops_lib(const std::string& lib) { if (TF_OK != code) { LOG_FATAL << status_msg; } - LOG_INFO << "custom op lib load succesfully" << lib; + LOG_INFO << "custom op lib load successfully!"; TF_Buffer op_list_buf = TF_GetOpList(custom_op_lib); tensorflow::OpList op_list; DELTA_CHECK(op_list.ParseFromArray(op_list_buf.data, op_list_buf.length)); for (int i = 0; i != op_list.op_size(); ++i) { - LOG_INFO << "cutsom op: " << op_list.op(i).name(); + LOG_INFO << "custom op: [" << op_list.op(i).name() << "]"; } TF_DeleteLibraryHandle(custom_op_lib); @@ -59,12 +59,12 @@ DeltaStatus load_models(const RuntimeConfig& rt_cfg, const int num_threads = rt_cfg.num_threads(); if (!graphs->size()) { - LOG_WARN << "graphs size is empty, " << graphs->size(); + LOG_WARN << "graphs size is empty, size: [" << graphs->size() << "]"; return DeltaStatus::STATUS_ERROR; } for (auto& iter : *graphs) { - LOG_INFO << "Load model for graph " << iter.first; + LOG_INFO << "Load model for graph: [" << iter.first << "]"; Graph& graph = iter.second; std::string engine_type = graph.engine_type(); @@ -76,25 +76,25 @@ DeltaStatus load_models(const RuntimeConfig& rt_cfg, if (search != _global_engines.end()) { #ifdef USE_TF if (EngineType::DELTA_EIGINE_TF == _global_engines[engine_type]) { - LOG_INFO << "User engine tf"; + LOG_INFO << "User engine: [TF]"; model = new TFModel(model_meta, num_threads); #endif } else if (EngineType::DELTA_EIGINE_TFTRT == _global_engines[engine_type]) { - LOG_INFO << "User engine tftrt"; + LOG_INFO << "User engine: [TFTRT]"; #ifdef USE_TFLITE } else if (EngineType::DELTA_EIGINE_TFLITE == _global_engines[engine_type]) { - LOG_INFO << "User engine tf lite"; + LOG_INFO << "User engine: [TFLITE]"; model = new TFLiteModel(model_meta, 
num_threads); #endif #ifdef USE_TF_SERVING } else if (EngineType::DELTA_EIGINE_TFSERVING == _global_engines[engine_type]) { - LOG_INFO << "User engine TFSERVING"; + LOG_INFO << "User engine: [TFSERVING]"; model = new TFServingModel(model_meta, num_threads); #endif @@ -136,12 +136,13 @@ DeltaStatus Runtime::set_inputs(const std::vector& ins) { _inputs_data.clear(); for (auto& in : ins) { - LOG_INFO << "Graph name: " << in._graph_name; + LOG_INFO << "Graph name: [" << in._graph_name << "]"; + auto search = _graphs.find(in._graph_name); if (search != _graphs.end()) { Graph& graph = search->second; - LOG_INFO << "input name: " << in._input_name; + LOG_INFO << "input name: [" << in._input_name << "]"; try { Input& input = graph.get_inputs().at(in._input_name); @@ -162,7 +163,7 @@ DeltaStatus Runtime::set_inputs(const std::vector& ins) { << in._graph_name << "] graph: " << e.what(); } } else { - LOG_FATAL << "Error, Graph " << in._graph_name << " not exist!"; + LOG_FATAL << "Error, Graph [" << in._graph_name << "] not exist!"; } } return DeltaStatus::STATUS_OK; @@ -189,15 +190,15 @@ DeltaStatus Runtime::warmup() { for (auto& input : inputs) { Input& in(input.second); - LOG_INFO << in; - LOG_INFO << in.shape().ndim(); + LOG_INFO << " in : " << in; + LOG_INFO << " ndim : " << in.shape().ndim(); if (in.shape().is_partial()) { in.shape().set_dim(0, 1); } - LOG_INFO << in; - LOG_INFO << in.size(); + LOG_INFO << " in : " << in; + LOG_INFO << " size : " << in.size(); InputData in_data(in); in_data.feed_random_data(); diff --git a/deltann/core/shape.h b/deltann/core/shape.h index eebeb64f..6d58832c 100644 --- a/deltann/core/shape.h +++ b/deltann/core/shape.h @@ -52,6 +52,7 @@ class Shape { const int operator[](int i) const; + // num elements size_t size(void) const; void set_dim(int idx, int size); diff --git a/deltann/examples/speaker/model.yaml b/deltann/examples/speaker/model.yaml index a57f3440..e48fd96d 100644 --- a/deltann/examples/speaker/model.yaml +++ 
b/deltann/examples/speaker/model.yaml @@ -44,8 +44,8 @@ model: - id: 0 name: "speaker_res_net_raw_model_1/model/output_layer/dense-bn/batchnorm/add_1:0" - dtype: "float" shape: [-1, 512] + dtype: "float" runtime: num_threads: 10 diff --git a/deltann/examples/speaker/test.cc b/deltann/examples/speaker/test.cc index e4c49072..0765d70f 100644 --- a/deltann/examples/speaker/test.cc +++ b/deltann/examples/speaker/test.cc @@ -16,6 +16,7 @@ limitations under the License. #include #include +#include #include #include #include @@ -86,6 +87,10 @@ struct DeltaModel { ins[0].input_name = "inputs:0"; ins[0].shape = shape; ins[0].ndims = ndims; + printf("%d\n", sizeof(ins)); + printf("%d\n", sizeof(ins[0])); + printf("%d\n", sizeof(ins) / sizeof(ins[0])); + printf("%s\n", ins[0].graph_name); return DeltaSetInputs(inf_, ins, sizeof(ins) / sizeof(ins[0])); } @@ -158,29 +163,38 @@ template struct DeltaModel; int main(int argc, char** argv) { const char* yaml_file = argv[1]; + if (argc != 2){ + std::cout << "usage: " << argv[0] << " config.yaml" << "\n"; + return 0; + } DeltaModel m(yaml_file); float avg = 0; - int cnt = 1000; + int cnt = 10; for (int i = 0; i < cnt; i++) { + fprintf(stderr, "====================\n"); std::vector shape = {1, 260, 40, 1}; - shape[0] = i + 1; + //shape[0] = i + 1; + // set inputs float* buf = m.AllocInputs(shape); auto nelems = m.NumElems(shape); for (auto i = 0; i < nelems; i++) { - buf[i] = 0; + buf[i] = 0.01; } m.SetInputs(buf, shape); + //run float dur = m.TimeRun(); fprintf(stderr, "Duration %04f sec.\n", dur); avg += dur; + // free buf free(buf); buf = nullptr; + //get output Output* outs = m.AllocOutputs(); m.GetOutputs(outs); @@ -195,6 +209,7 @@ int main(int argc, char** argv) { } } + // free output m.FreeOutputs(outs); } fprintf(stderr, "Avg Duration %04f sec.\n", avg / cnt);