fix log of deltann #218

Merged
merged 1 commit on Jul 7, 2020
29 changes: 29 additions & 0 deletions deltann/README.md
@@ -0,0 +1,29 @@
# DELTA-NN

```bash
total 104
-rw-rw-r-- 1 60847 60847 3541 Jul 7 01:24 Makefile
-rw-r--r-- 1 root root 0 Jul 7 07:55 README.md
drwxrwxr-x 2 60847 60847 47 Jul 7 01:24 api/
-rwxrwxr-x 1 60847 60847 405 Jul 7 01:24 build.sh*
drwxrwxr-x 3 60847 60847 4096 Jul 7 07:52 core/
-rw-rw-r-- 1 60847 60847 46 Jul 7 01:24 deltann_version_script.lds
lrwxrwxrwx 1 60847 60847 7 Jul 7 01:24 run.sh -> test.sh*
drwxrwxr-x 2 60847 60847 97 Jul 7 01:24 targets/
drwxrwxr-x 2 60847 60847 28 Jul 7 04:34 test/
-rwxrwxr-x 1 60847 60847 726 Jul 7 01:24 test.sh*

drwxrwxr-x 4 60847 60847 85 Jul 7 01:24 examples/

drwxrwxr-x 8 60847 60847 4096 Jul 7 01:24 infer/

drwxrwxr-x 5 60847 60847 118 Jul 7 01:24 server/
```

`api`, `core`, `targets`, `test`, and `deltann_version_script.lds` are the sources of `deltann`.

`examples` contains demos that use `deltann`.

`server` is a RESTful API wrapper around `deltann`, written in Go.

`infer` is a graph compiler built on TensorFlow Grappler.
11 changes: 6 additions & 5 deletions deltann/core/config.cc
@@ -37,9 +37,9 @@ BaseConfig::BaseConfig(std::string path) : _file_path(path) {
auto search = _global_config.find(_file_path);

if (search != _global_config.end()) {
LOG_WARN << "config path:" << _file_path << " already loaded!";
LOG_WARN << "config path: [" << _file_path << "] already loaded!";
} else {
LOG_INFO << "config path:" << _file_path;
LOG_INFO << "config path: [" << _file_path << "]";
try {
_global_config[path] = YAML::LoadFile(_file_path);
LOG_INFO << "load config success";
@@ -50,7 +50,7 @@ BaseConfig::BaseConfig(std::string path) : _file_path(path) {
}

_custom_ops_path = config()["model"]["custom_ops_path"].as<std::string>();
LOG_INFO << "custom ops path:" << _custom_ops_path;
LOG_INFO << "custom ops path: [" << _custom_ops_path << "]";
}

BaseConfig::~BaseConfig() {}
@@ -70,7 +70,8 @@ DeltaStatus Config::load_graphs() {
const YAML::Node& model_cfg = config()["model"];

int graph_num = model_cfg["graphs"].size();
LOG_INFO << "graph num is: " << graph_num;
LOG_INFO << "graph num is: [" << graph_num << "]";

for (int i = 0; i < graph_num; ++i) {
const YAML::Node& graph_cfg = model_cfg["graphs"][i];
std::string name = model_cfg["graphs"][i]["name"].as<std::string>();
@@ -90,7 +91,7 @@ RuntimeConfig::RuntimeConfig(std::string path) : Config(path) {
DELTA_ASSERT_OK(load_runtime());
_rt_cfg = config()["runtime"];
_num_threads = _rt_cfg["num_threads"].as<int>();
LOG_WARN << "_num_threads is " << _num_threads;
LOG_WARN << "_num_threads is: [" << _num_threads << "]";
}

RuntimeConfig::~RuntimeConfig() {}
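The `config.cc` changes only touch log formatting, but they show which YAML keys the loader expects: `model.custom_ops_path`, `model.graphs[i].name`, and `runtime.num_threads`, all read through yaml-cpp. Below is a minimal standalone sketch (not part of the PR) of reading those same keys and printing them in the bracketed style the PR adopts; the file name `model.yaml` is an assumption for illustration.

```cpp
// Standalone illustration, not deltann code: load the keys that
// BaseConfig / Config / RuntimeConfig read in the diff above.
#include <iostream>
#include <string>
#include <yaml-cpp/yaml.h>

int main() {
  try {
    YAML::Node cfg = YAML::LoadFile("model.yaml");  // assumed file name

    // model.custom_ops_path, as read in BaseConfig's constructor
    std::string custom_ops = cfg["model"]["custom_ops_path"].as<std::string>();
    std::cout << "custom ops path: [" << custom_ops << "]\n";

    // model.graphs[i].name, as iterated in Config::load_graphs()
    const YAML::Node& graphs = cfg["model"]["graphs"];
    std::cout << "graph num is: [" << graphs.size() << "]\n";
    for (std::size_t i = 0; i < graphs.size(); ++i) {
      std::cout << "graph name: [" << graphs[i]["name"].as<std::string>() << "]\n";
    }

    // runtime.num_threads, as read in RuntimeConfig's constructor
    std::cout << "num_threads: [" << cfg["runtime"]["num_threads"].as<int>() << "]\n";
  } catch (const YAML::Exception& e) {
    std::cerr << "failed to load config: " << e.what() << "\n";
    return 1;
  }
  return 0;
}
```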
35 changes: 20 additions & 15 deletions deltann/core/graph.cc
@@ -31,17 +31,18 @@ Graph::Graph(const YAML::Node& cfg) : _cfg(cfg) {
_engine_type = cfg["engine"].as<string>();
string model_path = cfg["local"]["path"].as<string>();
DELTA_CHECK(version > 0);
LOG_INFO << "graph name: [" << _name << "]";
LOG_INFO << "server type: " << server_type;
LOG_INFO << "engine: " << _engine_type;
LOG_INFO << "version: " << version;

LOG_INFO << "graph name: [" << _name << " ]";
LOG_INFO << "server type: [ " << server_type << " ]";
LOG_INFO << "engine: [ " << _engine_type << " ]";
LOG_INFO << "version: [ " << version << " ]";

_model_meta.server_type = server_type;
_model_meta.version = version;
if (server_type == "local") {
LOG_INFO << "load local model";
_model_meta.local.model_path = model_path + "/" + std::to_string(version);
LOG_INFO << "model path: " << _model_meta.local.model_path;
LOG_INFO << "model path: [ " << _model_meta.local.model_path << " ]";

string model_type = cfg["local"]["model_type"].as<string>();
if (model_type == kSavedModel) {
@@ -54,9 +55,10 @@ Graph::Graph(const YAML::Node& cfg) : _cfg(cfg) {
LOG_FATAL << "Error, not support model_type " << model_type;
}

LOG_INFO << "model type : "
LOG_INFO << "model type : [ "
<< static_cast<std::underlying_type<ModelType>::type>(
_model_meta.local.model_type);
_model_meta.local.model_type)
<< " ]";

} else if (server_type == "remote") {
LOG_INFO << "load remote model";
@@ -87,17 +89,18 @@ Graph::~Graph() {
// inputs
DeltaStatus Graph::add_inputs() {
int in_num = _cfg["inputs"].size();
LOG_INFO << "inputs num is " << in_num;
LOG_INFO << "Inputs num : [ " << in_num << " ]";
for (int i = 0; i < in_num; ++i) {
LOG_INFO << "in name is " << _cfg["inputs"][i]["name"];
LOG_INFO << "in dtype is " << _cfg["inputs"][i]["dtype"];
LOG_INFO << "inputs : " << i;
LOG_INFO << "\t name [ " << _cfg["inputs"][i]["name"] << " ]";
LOG_INFO << "\t dtype [ " << _cfg["inputs"][i]["dtype"] << " ]";

string name = _cfg["inputs"][i]["name"].as<string>();
string dtype = _cfg["inputs"][i]["dtype"].as<string>();
int id = _cfg["inputs"][i]["id"].as<int>();

if (_cfg["inputs"][i]["shape"]) {
LOG_INFO << "in shape is " << _cfg["inputs"][i]["shape"];
LOG_INFO << "\t shape [ " << _cfg["inputs"][i]["shape"] << " ]";
YAML::Node s = _cfg["inputs"][i]["shape"];
std::vector<int> v;
for (std::size_t i = 0; i < s.size(); i++) {
@@ -106,7 +109,7 @@ DeltaStatus Graph::add_inputs() {
_inputs.insert(pair<string, Input>(
name, Input(name, id, Shape(v), delta_str_dtype(dtype))));
} else {
LOG_INFO << "graph " << _name << " shape is None"
LOG_INFO << "graph [" << _name << "] shape is None"
<< _cfg["inputs"][i]["shape"];
_inputs.insert(
pair<string, Input>(name, Input(name, id, delta_str_dtype(dtype))));
@@ -119,11 +122,13 @@
// outputs
DeltaStatus Graph::add_outputs() {
int out_num = _cfg["outputs"].size();
LOG_INFO << "output num is " << out_num;
LOG_INFO << "Output num : [ " << out_num << " ]";

for (int i = 0; i < out_num; ++i) {
LOG_INFO << "out name is " << _cfg["outputs"][i]["name"];
LOG_INFO << "out dtype is " << _cfg["outputs"][i]["dtype"];
LOG_INFO << "\tout name is [ " << _cfg["outputs"][i]["name"] << " ]";
LOG_INFO << "\tout dtype is [ " << _cfg["outputs"][i]["dtype"] << " ]";
LOG_INFO << "\tout shape is [ " << _cfg["outputs"][i]["shape"] << " ]";

string name = _cfg["outputs"][i]["name"].as<string>();
string dtype = _cfg["outputs"][i]["dtype"].as<string>();

9 changes: 5 additions & 4 deletions deltann/core/io.h
@@ -54,6 +54,7 @@ class BaseInOut {

Shape& shape(void) { return _shape; }

// number of elements
const size_t size(void) const { return _shape.size(); }

DataType dtype(void) const { return _dtype; }
@@ -71,12 +72,12 @@ class BaseInOut {

friend std::ostream& operator<<(std::ostream& os, const BaseInOut& inout) {
if (inout.inout_type() == InOut::DELTA_IN) {
os << "Input: ";
os << "Input: [";
} else {
os << "Output: ";
os << "Output: [";
}
os << inout._name << " id:" << inout._id << " shape: " << inout._shape
<< " dtype: " << delta_dtype_str(inout._dtype);
os << inout._name << "] id: [" << inout._id << "] shape: [" << inout._shape
<< "] dtype: [" << delta_dtype_str(inout._dtype) << "]";
return os;
}

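The updated `operator<<` for `BaseInOut` prints every field inside square brackets, which makes empty strings and values containing spaces easy to spot in a log line. A minimal sketch of the same pattern follows; the struct and field names are illustrative, not deltann's actual types.

```cpp
// Standalone sketch of the bracketed-field logging pattern; TensorInfo is
// a made-up stand-in, not deltann's BaseInOut.
#include <iostream>
#include <string>

struct TensorInfo {
  std::string name;
  int id;
  std::string dtype;
};

std::ostream& operator<<(std::ostream& os, const TensorInfo& t) {
  // Brackets delimit each value, so an empty name or dtype is still visible.
  os << "Input: [" << t.name << "] id: [" << t.id
     << "] dtype: [" << t.dtype << "]";
  return os;
}

int main() {
  TensorInfo t{"inputs:0", 0, "float"};
  std::cout << t << "\n";  // prints: Input: [inputs:0] id: [0] dtype: [float]
  return 0;
}
```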
33 changes: 17 additions & 16 deletions deltann/core/runtime.cc
@@ -29,7 +29,7 @@ DeltaStatus load_custom_ops_lib(const std::string& lib) {
}

#ifdef USE_TF
LOG_INFO << "custom op lib is: " << lib;
LOG_INFO << "custom op lib is: [" << lib << "]";
TF_Status* status = TF_NewStatus();
TF_Library* custom_op_lib = TF_LoadLibrary(lib.c_str(), status);

@@ -39,13 +39,13 @@ DeltaStatus load_custom_ops_lib(const std::string& lib) {
if (TF_OK != code) {
LOG_FATAL << status_msg;
}
LOG_INFO << "custom op lib load succesfully" << lib;
LOG_INFO << "custom op lib load succesfully!";

TF_Buffer op_list_buf = TF_GetOpList(custom_op_lib);
tensorflow::OpList op_list;
DELTA_CHECK(op_list.ParseFromArray(op_list_buf.data, op_list_buf.length));
for (int i = 0; i != op_list.op_size(); ++i) {
LOG_INFO << "cutsom op: " << op_list.op(i).name();
LOG_INFO << "cutsom op: [" << op_list.op(i).name() << "]";
}

TF_DeleteLibraryHandle(custom_op_lib);
Expand All @@ -59,12 +59,12 @@ DeltaStatus load_models(const RuntimeConfig& rt_cfg,
const int num_threads = rt_cfg.num_threads();

if (!graphs->size()) {
LOG_WARN << "graphs size is empty, " << graphs->size();
LOG_WARN << "graphs size is empty, size: [" << graphs->size() << "]";
return DeltaStatus::STATUS_ERROR;
}

for (auto& iter : *graphs) {
LOG_INFO << "Load model for graph " << iter.first;
LOG_INFO << "Load model for graph: [" << iter.first << "]";

Graph& graph = iter.second;
std::string engine_type = graph.engine_type();
@@ -76,25 +76,25 @@ DeltaStatus load_models(const RuntimeConfig& rt_cfg,
if (search != _global_engines.end()) {
#ifdef USE_TF
if (EngineType::DELTA_EIGINE_TF == _global_engines[engine_type]) {
LOG_INFO << "User engine tf";
LOG_INFO << "User engine: [TF]";
model = new TFModel(model_meta, num_threads);
#endif

} else if (EngineType::DELTA_EIGINE_TFTRT ==
_global_engines[engine_type]) {
LOG_INFO << "User engine tftrt";
LOG_INFO << "User engine: [TFTRT]";

#ifdef USE_TFLITE
} else if (EngineType::DELTA_EIGINE_TFLITE ==
_global_engines[engine_type]) {
LOG_INFO << "User engine tf lite";
LOG_INFO << "User engine: [TFLITE]";
model = new TFLiteModel(model_meta, num_threads);
#endif

#ifdef USE_TF_SERVING
} else if (EngineType::DELTA_EIGINE_TFSERVING ==
_global_engines[engine_type]) {
LOG_INFO << "User engine TFSERVING";
LOG_INFO << "User engine: [TFSERVING]";
model = new TFServingModel(model_meta, num_threads);
#endif

@@ -136,12 +136,13 @@ DeltaStatus Runtime::set_inputs(const std::vector<In>& ins) {
_inputs_data.clear();

for (auto& in : ins) {
LOG_INFO << "Graph name: " << in._graph_name;
LOG_INFO << "Graph name: [" << in._graph_name << "]";

auto search = _graphs.find(in._graph_name);
if (search != _graphs.end()) {
Graph& graph = search->second;

LOG_INFO << "input name: " << in._input_name;
LOG_INFO << "input name: [" << in._input_name << "]";
try {
Input& input = graph.get_inputs().at(in._input_name);

@@ -162,7 +163,7 @@ DeltaStatus Runtime::set_inputs(const std::vector<In>& ins) {
<< in._graph_name << "] graph: " << e.what();
}
} else {
LOG_FATAL << "Error, Graph " << in._graph_name << " not exist!";
LOG_FATAL << "Error, Graph [" << in._graph_name << "] not exist!";
}
}
return DeltaStatus::STATUS_OK;
@@ -189,15 +190,15 @@ DeltaStatus Runtime::warmup() {
for (auto& input : inputs) {
Input& in(input.second);

LOG_INFO << in;
LOG_INFO << in.shape().ndim();
LOG_INFO << " in : " << in;
LOG_INFO << " ndim : " << in.shape().ndim();

if (in.shape().is_partial()) {
in.shape().set_dim(0, 1);
}

LOG_INFO << in;
LOG_INFO << in.size();
LOG_INFO << " in : " << in;
LOG_INFO << " size : " << in.size();

InputData in_data(in);
in_data.feed_random_data();
1 change: 1 addition & 0 deletions deltann/core/shape.h
@@ -52,6 +52,7 @@ class Shape {

const int operator[](int i) const;

// number of elements
size_t size(void) const;

void set_dim(int idx, int size);
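The new comment documents that `Shape::size()` returns the number of elements, i.e. the product of all dimensions. That is the contract `Runtime::warmup()` relies on: a partial shape (presumably one with a -1 dimension, as in the `shape: [-1, 512]` entry in model.yaml) first has its batch dimension pinned to 1, and only then is `size()` used to allocate random input data. The toy sketch below mirrors that contract; it is not deltann's actual `Shape` implementation.

```cpp
// Illustrative only: a made-up ToyShape that mimics the documented meaning
// of size() and the partial-shape handling seen in Runtime::warmup().
#include <cassert>
#include <cstddef>
#include <vector>

struct ToyShape {
  std::vector<int> dims;

  // A dim of -1 stands for an unknown (e.g. dynamic batch) dimension.
  bool is_partial() const {
    for (int d : dims) if (d < 0) return true;
    return false;
  }

  void set_dim(int idx, int size) { dims[idx] = size; }

  // Number of elements: the product of all dimensions.
  // Only meaningful once every dimension is concrete.
  std::size_t size() const {
    std::size_t n = 1;
    for (int d : dims) n *= static_cast<std::size_t>(d);
    return n;
  }
};

int main() {
  ToyShape s{{-1, 260, 40, 1}};         // partial: unknown batch dimension
  if (s.is_partial()) s.set_dim(0, 1);  // warmup pins the batch dim to 1
  assert(s.size() == 1 * 260 * 40 * 1); // 10400 elements to allocate and fill
  return 0;
}
```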
2 changes: 1 addition & 1 deletion deltann/examples/speaker/model.yaml
@@ -44,8 +44,8 @@ model:
-
id: 0
name: "speaker_res_net_raw_model_1/model/output_layer/dense-bn/batchnorm/add_1:0"
dtype: "float"
shape: [-1, 512]
dtype: "float"

runtime:
num_threads: 10
21 changes: 18 additions & 3 deletions deltann/examples/speaker/test.cc
@@ -16,6 +16,7 @@ limitations under the License.

#include <assert.h>
#include <stdio.h>
#include <iostream>
#include <stdlib.h>
#include <string.h>
#include <sys/time.h>
@@ -86,6 +87,10 @@ struct DeltaModel {
ins[0].input_name = "inputs:0";
ins[0].shape = shape;
ins[0].ndims = ndims;
printf("%d\n", sizeof(ins));
printf("%d\n", sizeof(ins[0]));
printf("%d\n", sizeof(ins) / sizeof(ins[0]));
printf("%s\n", ins[0].graph_name);
return DeltaSetInputs(inf_, ins, sizeof(ins) / sizeof(ins[0]));
}

@@ -158,29 +163,38 @@ template struct DeltaModel<float>;

int main(int argc, char** argv) {
const char* yaml_file = argv[1];
if (argc != 2) {
std::cout << "usage: " << argv[0] << " config.yaml" << "\n";
return 0;
}

DeltaModel<float> m(yaml_file);

float avg = 0;
int cnt = 1000;
int cnt = 10;
for (int i = 0; i < cnt; i++) {
fprintf(stderr, "====================\n");
std::vector<int> shape = {1, 260, 40, 1};
shape[0] = i + 1;
//shape[0] = i + 1;

// set inputs
float* buf = m.AllocInputs(shape);
auto nelems = m.NumElems(shape);
for (auto i = 0; i < nelems; i++) {
buf[i] = 0;
buf[i] = 0.01;
}
m.SetInputs(buf, shape);

//run
float dur = m.TimeRun();
fprintf(stderr, "Duration %04f sec.\n", dur);
avg += dur;

// free buf
free(buf);
buf = nullptr;

//get output
Output* outs = m.AllocOutputs();
m.GetOutputs(outs);

@@ -195,6 +209,7 @@ int main(int argc, char** argv) {
}
}

// free output
m.FreeOutputs(outs);
}
fprintf(stderr, "Avg Duration %04f sec.\n", avg / cnt);