diff --git a/tools/modelwriter.h b/tools/modelwriter.h
index 9fc24369368..8707fe9eb81 100644
--- a/tools/modelwriter.h
+++ b/tools/modelwriter.h
@@ -796,7 +796,7 @@ int ModelWriter::save(const char* parampath, const char* binpath)
                 continue;
             }
 
-            ncnn::Layer* layer_default = ncnn::create_layer(layer->typeindex);
+            ncnn::Layer* layer_default = ncnn::create_layer_cpu(layer->typeindex);
 
             ncnn::ParamDict pd;
             layer_default->load_param(pd);
@@ -1764,6 +1764,19 @@ int ModelWriter::save(const char* parampath, const char* binpath)
             fprintf_param_value(" 20=%d", constant_TILE_M)
             fprintf_param_value(" 21=%d", constant_TILE_N)
             fprintf_param_value(" 22=%d", constant_TILE_K)
+
+            if (op->constantA == 1)
+            {
+                fwrite_weight_tag_data(op->A_data, bp);
+            }
+            if (op->constantB == 1)
+            {
+                fwrite_weight_tag_data(op->B_data, bp);
+            }
+            if (op->constantC == 1 && op->constant_broadcast_type_C != -1)
+            {
+                fwrite_weight_tag_data(op->C_data, bp);
+            }
         }
         else if (layer->type == "GLU")
         {
diff --git a/tools/ncnnoptimize.cpp b/tools/ncnnoptimize.cpp
index d1a0cf63633..001e62a3445 100644
--- a/tools/ncnnoptimize.cpp
+++ b/tools/ncnnoptimize.cpp
@@ -1993,7 +1993,7 @@ int NetOptimize::fuse_binaryop_eltwise()
 
         fprintf(stderr, "fuse_binaryop_eltwise %s %s %s\n", binaryop0->name.c_str(), binaryop1->name.c_str(), binaryop->name.c_str());
 
-        ncnn::Eltwise* eltwise = (ncnn::Eltwise*)ncnn::create_layer("Eltwise");
+        ncnn::Eltwise* eltwise = (ncnn::Eltwise*)ncnn::create_layer_cpu("Eltwise");
 
         eltwise->type = "Eltwise";
         eltwise->name = binaryop->name;
@@ -2554,7 +2554,7 @@ int NetOptimize::replace_reduction_with_global_pooling()
 
         fprintf(stderr, "replace_reduction_with_global_pooling %s %s\n", reduction1->name.c_str(), reduction2->name.c_str());
 
-        ncnn::Pooling* pooling = (ncnn::Pooling*)ncnn::create_layer("Pooling");
+        ncnn::Pooling* pooling = (ncnn::Pooling*)ncnn::create_layer_cpu("Pooling");
 
         pooling->type = "Pooling";
         pooling->name = reduction2->name;
@@ -2593,7 +2593,7 @@ int NetOptimize::replace_prelu_with_leaky_relu()
 
        fprintf(stderr, "replace_prelu_with_leaky_relu %s\n", prelu->name.c_str());
 
-        ncnn::ReLU* relu = (ncnn::ReLU*)ncnn::create_layer("ReLU");
+        ncnn::ReLU* relu = (ncnn::ReLU*)ncnn::create_layer_cpu("ReLU");
 
         relu->type = "ReLU";
         relu->name = prelu->name;
@@ -2647,7 +2647,7 @@ int NetOptimize::replace_convolution_with_innerproduct_after_global_pooling()
 
         fprintf(stderr, "replace_convolution_with_innerproduct_after_global_pooling %s %s\n", pooling->name.c_str(), convolution->name.c_str());
 
-        ncnn::InnerProduct* innerproduct = (ncnn::InnerProduct*)ncnn::create_layer("InnerProduct");
+        ncnn::InnerProduct* innerproduct = (ncnn::InnerProduct*)ncnn::create_layer_cpu("InnerProduct");
 
         innerproduct->type = "InnerProduct";
         innerproduct->name = convolution->name;
@@ -2715,7 +2715,7 @@ int NetOptimize::replace_convolution_with_innerproduct_after_innerproduct()
 
         fprintf(stderr, "replace_convolution_with_innerproduct_after_innerproduct %s %s\n", innerproduct->name.c_str(), convolution->name.c_str());
 
-        ncnn::InnerProduct* innerproduct2 = (ncnn::InnerProduct*)ncnn::create_layer("InnerProduct");
+        ncnn::InnerProduct* innerproduct2 = (ncnn::InnerProduct*)ncnn::create_layer_cpu("InnerProduct");
 
         innerproduct2->type = "InnerProduct";
         innerproduct2->name = convolution->name;
diff --git a/tools/quantize/ncnn2table.cpp b/tools/quantize/ncnn2table.cpp
index 9fbafa2d181..386bfff2a09 100644
--- a/tools/quantize/ncnn2table.cpp
+++ b/tools/quantize/ncnn2table.cpp
@@ -1112,7 +1112,7 @@ int QuantNet::quantize_EQ()
         ncnn::Mat out;
         ex.extract(conv_top_blobs[i], out);
 
-        ncnn::Layer* layer_int8 = ncnn::create_layer(layer->typeindex);
+        ncnn::Layer* layer_int8 = ncnn::create_layer_cpu(layer->typeindex);
 
         ncnn::ParamDict pd;
         get_layer_param(layer, pd);
@@ -1222,7 +1222,7 @@ int QuantNet::quantize_EQ()
         ncnn::Mat out;
         ex.extract(conv_top_blobs[i], out);
 
-        ncnn::Layer* layer_int8 = ncnn::create_layer(layer->typeindex);
+        ncnn::Layer* layer_int8 = ncnn::create_layer_cpu(layer->typeindex);
 
         ncnn::ParamDict pd;
         get_layer_param(layer, pd);
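Aside from the Gemm hunk in modelwriter.h (which additionally serializes the constant A/B/C weights when `constantA`/`constantB`/`constantC` are set), every hunk above applies the same mechanical substitution: layers instantiated by the offline tools now come from `ncnn::create_layer_cpu()` instead of `ncnn::create_layer()`, so the tools always get the CPU implementation. Below is a minimal sketch of the before/after pattern; it uses only the `create_layer_cpu` overloads the patch itself calls, but the helper name and error handling are illustrative, not part of the patch.

```cpp
#include <stdio.h>

#include "layer.h" // ncnn::Layer, ncnn::create_layer_cpu

// Illustrative helper (not in the patch): instantiate a layer the way the
// tools now do. create_layer_cpu() returns the CPU implementation, so
// offline tools such as ncnnoptimize and ncnn2table no longer depend on
// whichever backend create_layer() would otherwise select.
static ncnn::Layer* make_tool_layer(const char* type)
{
    // before: ncnn::Layer* layer = ncnn::create_layer(type);
    ncnn::Layer* layer = ncnn::create_layer_cpu(type);
    if (!layer)
        fprintf(stderr, "create_layer_cpu %s failed\n", type);

    return layer;
}
```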