Islive (#5)
* modified:   mace/tools/mace_run.cc
	modified:   third_party/nnlib/arm64-v8a/libcdsprpc.so
	modified:   third_party/nnlib/v66/libhexagon_nn_skel.so
	modified:   tools/python/run_model.py
	modified:   tools/python/transform/hexagon_converter.py
	modified:   tools/python/utils/util.py
	modified:   tools/python/validate.py

* modified:   mace/tools/mace_run.cc
	modified:   third_party/nnlib/arm64-v8a/libcdsprpc.so
	modified:   third_party/nnlib/v66/libhexagon_nn_skel.so
	modified:   tools/python/run_model.py
	modified:   tools/python/transform/hexagon_converter.py
	modified:   tools/python/utils/util.py
	modified:   tools/python/validate.py

* modified:   tools/python/run_model.py
	modified:   tools/python/validate.py

* modified:   tools/python/validate.py
islive233 committed May 22, 2024
1 parent ae23482 commit b1023af
Showing 7 changed files with 48 additions and 31 deletions.
14 changes: 9 additions & 5 deletions mace/tools/mace_run.cc
@@ -452,6 +452,7 @@ bool RunModel(const std::string &model_name,
tutor,
FLAGS_fake_warmup);
#endif
//std::this_thread::sleep_for(std::chrono::seconds(20));
int64_t t1 = NowMicros();

if (create_engine_status != MaceStatus::MACE_SUCCESS) {
@@ -701,12 +702,13 @@ bool RunModel(const std::string &model_name,
}

int Main(int argc, char **argv) {
LOG(INFO) << "hello";
std::string usage = "MACE run model tool, please specify proper arguments.\n"
"usage: " + std::string(argv[0])
+ " --help";
gflags::SetUsageMessage(usage);
gflags::ParseCommandLineFlags(&argc, &argv, true);

std::vector<std::string> input_names = Split(FLAGS_input_node, ',');
std::vector<std::string> output_names = Split(FLAGS_output_node, ',');
if (input_names.empty() || output_names.empty()) {
@@ -726,8 +728,8 @@ int Main(int argc, char **argv) {
LOG(INFO) << "input shape: " << FLAGS_input_shape;
LOG(INFO) << "input data_type: " << FLAGS_input_data_type;
LOG(INFO) << "input data_format: " << FLAGS_input_data_format;
LOG(INFO) << "output node: " << FLAGS_output_node;
LOG(INFO) << "output shape: " << FLAGS_output_shape;
//LOG(INFO) << "output node: " << FLAGS_output_node;
//LOG(INFO) << "output shape: " << FLAGS_output_shape;
LOG(INFO) << "input data_type: " << FLAGS_output_data_type;
LOG(INFO) << "output data_format: " << FLAGS_output_data_format;
LOG(INFO) << "input_file: " << FLAGS_input_file;
@@ -747,6 +749,7 @@ int Main(int argc, char **argv) {
LOG(INFO) << "gpu_priority_hint: " << FLAGS_gpu_priority_hint;
LOG(INFO) << "num_threads: " << FLAGS_num_threads;
LOG(INFO) << "cpu_affinity_policy: " << FLAGS_cpu_affinity_policy;

auto limit_opencl_kernel_time = getenv("MACE_LIMIT_OPENCL_KERNEL_TIME");
if (limit_opencl_kernel_time) {
LOG(INFO) << "limit_opencl_kernel_time: "
@@ -788,8 +791,8 @@ int Main(int argc, char **argv) {
std::vector<IDataType> output_data_types(output_count);
for (size_t i = 0; i < output_count; ++i) {
output_data_types[i] = ParseDataType(raw_output_data_types[i]);
LOG(INFO) << "raw_output_data_types[" << i << "] is "
<< raw_output_data_types[i];
//LOG(INFO) << "raw_output_data_types[" << i << "] is "
// << raw_output_data_types[i];
}

std::vector<std::string> raw_input_data_formats =
@@ -812,6 +815,7 @@ int Main(int argc, char **argv) {
cpu_float32_performance = cpu_capability.float32_performance.exec_time;
}
bool ret = false;

for (int i = 0; i < FLAGS_restart_round; ++i) {
VLOG(0) << "restart round " << i;
ret = RunModel(FLAGS_model_name, input_names, input_shape_vec,
Binary file modified third_party/nnlib/arm64-v8a/libcdsprpc.so
100755 → 100644
Binary file not shown.
Empty file modified third_party/nnlib/v66/libhexagon_nn_skel.so
100644 → 100755
Empty file.
11 changes: 7 additions & 4 deletions tools/python/run_model.py
@@ -259,20 +259,18 @@ def run_model_for_device(flags, args, dev, model_name, model_conf):
opencl_dir, model_name,
dev.info()["ro.product.model"].replace(' ', ''),
dev.info()["ro.board.platform"]))

if flags.validate:
validate_model_file = util.download_or_get_model(
model_conf[ModelKeys.model_file_path],
model_conf[ModelKeys.model_sha256_checksum],
tmpdirname)

validate_weight_file = ""
if ModelKeys.weight_file_path in model_conf:
validate_weight_file = util.download_or_get_model(
model_conf[ModelKeys.weight_file_path],
model_conf[ModelKeys.weight_sha256_checksum],
tmpdirname)

dev.pull(Target(target_output_dir), tmpdirname + "/validate_out")
output_file_prefix = tmpdirname + "/validate_out/" + model_name
validation_outputs_data = \
@@ -292,7 +290,7 @@ def run_model_for_device(flags, args, dev, model_name, model_conf):
input_tensors_info[ModelKeys.input_data_types],
flags.backend,
validation_outputs_data,
"")
flags.accuracy_log)
if should_generate_data:
shutil.rmtree(tmpdirname)

@@ -429,6 +427,11 @@ def parse_args():
"--quantize_stat",
action="store_true",
help="whether to stat quantization range.")
parser.add_argument(
"--accuracy_log",
type=str,
default="",
help="the file of result of validate accuracy")

return parser.parse_known_args()

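The run_model.py hunk above introduces an --accuracy_log option and forwards flags.accuracy_log to the validation call in place of the previous hard-coded empty string. A minimal standalone sketch of that plumbing, assuming argparse semantics identical to the diff (the main block and the print are illustrative, not part of the repository):

import argparse

def parse_args():
    parser = argparse.ArgumentParser()
    # Only the new option is shown here; run_model.py defines many more flags.
    parser.add_argument(
        "--accuracy_log",
        type=str,
        default="",
        help="the file of result of validate accuracy")
    return parser.parse_known_args()

if __name__ == "__main__":
    flags, _ = parse_args()
    # An empty value keeps the old behaviour (no accuracy file is written);
    # otherwise validate.py appends one row per output tensor to this file.
    print("accuracy log:", flags.accuracy_log or "<disabled>")

A typical invocation would then add something like --accuracy_log /path/to/accuracy.log to an existing --validate run; the rest of the command line is unchanged.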
5 changes: 3 additions & 2 deletions tools/python/transform/hexagon_converter.py
@@ -1215,14 +1215,15 @@ def convert_slice(self, op):
size.append(int((ipt_shape[dim] - onnx_start) / step))
else:
size.append(int((end - onnx_start) / step))
nn_starts.append(onnx_start)
elif onnx_start <0 and step > 0:
size.append(int((abs(onnx_start) - abs(end)) + 1 / step))
nn_starts.append(onnx_start)
nn_starts.append(onnx_start + ipt_shape[dim])
else:
nn_starts.append(0)
size.append(-1)
print(f'nn_starts: {nn_starts}, size: {size}')

del op.input[1:]
self.add_arg_const_node(op, '/starts:0', [len(nn_starts)], nn_starts)
self.add_arg_const_node(op, '/sizes:0', [len(size)], size)
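The convert_slice change above handles a negative ONNX start index by shifting it by the input dimension, so the nnlib Slice node receives a non-negative offset. A rough illustration of that index arithmetic, assuming NumPy-style negative indexing (the helper name is made up for the example):

def onnx_start_to_nn_start(onnx_start, dim_size):
    # Negative ONNX starts count back from the end of the axis; nnlib expects
    # an absolute, non-negative offset, so shift by the axis length.
    return onnx_start + dim_size if onnx_start < 0 else onnx_start

# Example: start=-3 on an axis of length 10 becomes offset 7.
assert onnx_start_to_nn_start(-3, 10) == 7
assert onnx_start_to_nn_start(2, 10) == 2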
1 change: 1 addition & 0 deletions tools/python/utils/util.py
@@ -168,6 +168,7 @@ def download_or_get_file(file,
shutil.copyfile(file, output_file)

if sha256_checksum:
print(file_checksum(output_file))
mace_check(file_checksum(output_file) == sha256_checksum,
"checksum validate failed")

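The util.py change simply prints the computed checksum before mace_check compares it with the expected SHA-256, which makes checksum mismatches easier to diagnose. For reference, a sketch of what a file_checksum-style helper typically does — this is an assumption about its behaviour, not the repository's implementation:

import hashlib

def sha256_of_file(path, chunk_size=1 << 20):
    # Stream the file in chunks so large model/weight files need not fit in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()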
48 changes: 28 additions & 20 deletions tools/python/validate.py
@@ -89,27 +89,24 @@ def compare_output(output_name, mace_out_value,
assert len(out_value) == len(mace_out_value)
sqnr = calculate_sqnr(out_value, mace_out_value)
similarity = calculate_similarity(out_value, mace_out_value)
util.MaceLogger.summary(
'''util.MaceLogger.summary(
output_name + ' MACE VS training platform'
+ ' similarity: ' + str(similarity) + ' , sqnr: ' + str(sqnr)
+ ' , pixel_accuracy: ' + str(pixel_accuracy))
+ ' , pixel_accuracy: ' + str(pixel_accuracy))'''
if log_file:
if not os.path.exists(log_file):
with open(log_file, 'w') as f:
f.write('output_name,similarity,sqnr,pixel_accuracy\n')
summary = '{output_name},{similarity},{sqnr},{pixel_accuracy}\n' \
.format(output_name=output_name,
similarity=similarity,
sqnr=sqnr,
pixel_accuracy=pixel_accuracy)
with open(log_file, "a") as f:
f.write(summary)
elif similarity > validation_threshold:
util.MaceLogger.summary(
util.StringFormatter.block("Similarity Test Passed"))
with open(log_file, 'a') as f:
f.write('%-20s%-30s%-30s%-20s' %(output_name,similarity,sqnr,pixel_accuracy))
if similarity > validation_threshold:
f.write('%-20s'%'PASS'+'\n')
else:
f.write('%-20s'%'NOPASS'+'\n')
else:
util.MaceLogger.summary(
util.StringFormatter.block("Similarity Test Failed"))
if similarity > validation_threshold:
util.MaceLogger.summary(
util.StringFormatter.block("Similarity Test Passed"))
else:
util.MaceLogger.summary(
util.StringFormatter.block("Similarity Test Failed"))
else:
util.MaceLogger.error(
"", util.StringFormatter.block(
@@ -401,17 +398,28 @@ def validate_onnx_model(platform, model_file,

sess = onnxrt.InferenceSession(model.SerializeToString())
output_values = sess.run(output_names, input_dict)

if log_file:
with open(log_file, 'w') as f:
f.write('%-20s%-30s%-30s%-20s' %('output_name','similarity','sqnr','pixel_accuracy') + '\n')
for i in range(len(output_names)):
value = output_values[i].flatten()
output_file_name = util.formatted_file_name(mace_out_file,
output_names[i])
mace_out_value = load_data(output_file_name)
mace_out_value, real_output_shape, real_output_data_format = \
real_output_shape = output_shapes[i]
real_output_data_format = DataFormat.NONE
'''mace_out_value, real_output_shape, real_output_data_format = \
get_real_out_value_shape_df(platform,
mace_out_value,
output_shapes[i],
output_data_formats[i])
output_data_formats[i])'''
## The following dumps the concrete values of a tensor after running on MACE and on the nn library, respectively, to files
'''tensor_names = ['1669','1670','1671','1674','1689','3689','3690','3691']
if output_names[i] in tensor_names:
mace_output_file = "/root/workspace/tensors/mace_" + output_names[i]
onnx_output_file = "/root/workspace/tensors/onnxruntime_" + output_names[i]
np.savetxt(mace_output_file, mace_out_value, '%.6f')
np.savetxt(onnx_output_file, value, '%.6f')'''
compare_output(output_names[i],
mace_out_value, value,
validation_threshold, log_file,
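Taken together, the validate.py changes move the per-output summary from the console into a fixed-width accuracy log: validate_onnx_model writes the column header when a log file is given, and compare_output appends one row per output plus a PASS/NOPASS verdict against the validation threshold. A simplified sketch of that layout, with the format strings copied from the diff and the function names invented for illustration:

def write_accuracy_header(log_file):
    # validate_onnx_model opens the log in 'w' mode once per run and writes the header row.
    with open(log_file, 'w') as f:
        f.write('%-20s%-30s%-30s%-20s'
                % ('output_name', 'similarity', 'sqnr', 'pixel_accuracy') + '\n')

def append_accuracy_row(log_file, output_name, similarity, sqnr,
                        pixel_accuracy, validation_threshold):
    # compare_output appends one fixed-width row per output tensor,
    # followed by a PASS/NOPASS column based on the similarity threshold.
    with open(log_file, 'a') as f:
        f.write('%-20s%-30s%-30s%-20s'
                % (output_name, similarity, sqnr, pixel_accuracy))
        f.write('%-20s' % ('PASS' if similarity > validation_threshold else 'NOPASS')
                + '\n')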
