feat: get to run successfully
problem: still need to test on yolo5 mlmodel
improve: need to add more type support in coreml
JoDio-zd committed Oct 3, 2023
1 parent da936c3 commit 19d767b
Showing 4 changed files with 46 additions and 31 deletions.
4 changes: 2 additions & 2 deletions include/nndeploy/inference/coreml/coreml_convert.h
@@ -30,9 +30,9 @@ class CoremlConvert {
  // You need to free it manually
  static NSObject *convertFromDeviceType(const base::DeviceType &src);

-  static device::Tensor *convertToTensor(MLFeatureValue *src, std::string name,
+  static device::Tensor *convertToTensor(MLFeatureDescription *src, NSString *name,
                                          device::Device *device);
-  static MLFeatureValue *convertFromTensor(device::Tensor *src);
+  static MLFeatureDescription *convertFromTensor(device::Tensor *src);

  static base::Status convertFromInferenceParam(CoremlInferenceParam *src,
                                                MLModelConfiguration *dst);
4 changes: 2 additions & 2 deletions include/nndeploy/inference/coreml/coreml_inference.h
@@ -48,8 +48,8 @@ class CoremlInference : public Inference {
  MLModel *mlmodel_ = nullptr;
  NSError *err_ = nil;
  MLModelConfiguration *config_ = nullptr;
-  NSDictionary *dict_ = nullptr;
-  NSDictionary *result_ = nullptr;
+  NSMutableDictionary *dict_ = nullptr;
+  NSMutableDictionary *result_ = nullptr;
};

} // namespace inference
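Note (not part of the diff): the member type change above matters because NSDictionary is immutable, so the per-input feature values built in run() could not be inserted into it after allocation. A minimal sketch of the pattern the mutable type enables, with a placeholder key name:

#import <CoreML/CoreML.h>

// Build a mutable feature dictionary and add one entry; "input" is a placeholder key name.
NSMutableDictionary<NSString *, MLFeatureValue *> *features = [[NSMutableDictionary alloc] init];
[features setObject:[MLFeatureValue featureValueWithDouble:1.0] forKey:@"input"];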
32 changes: 32 additions & 0 deletions source/nndeploy/inference/coreml/coreml_convert.mm
@@ -66,5 +66,37 @@
  return base::kStatusCodeOk;
}

+device::Tensor *CoremlConvert::convertToTensor(MLFeatureDescription *src, NSString *name,
+                                               device::Device *device) {
+  MLFeatureType tensor_type = [src type];
+  device::Tensor *dst = nullptr;
+  device::TensorDesc desc;
+  switch (tensor_type) {
+    case MLFeatureTypeImage:
+    {
+      MLImageConstraint *image_attr = [src imageConstraint];
+      base::DataType data_type = CoremlConvert::convertToDataType([image_attr pixelFormatType]);
+      base::DataFormat format = base::kDataFormatNHWC;
+      base::IntVector shape = {1, int([image_attr pixelsHigh]), int([image_attr pixelsWide]), 3};
+      base::SizeVector stride = base::SizeVector();
+      desc = device::TensorDesc(data_type, format, shape, stride);
+      break;
+    }
+    case MLFeatureTypeDouble:
+    {
+      base::DataType data_type = base::DataType();
+      base::DataFormat format = base::kDataFormatN;
+      base::IntVector shape = {1};
+      base::SizeVector stride = base::SizeVector();
+      desc = device::TensorDesc(data_type, format, shape, stride);
+      break;
+    }
+    default:
+      break;
+  }
+  dst = new device::Tensor(desc, std::string([name cStringUsingEncoding:NSASCIIStringEncoding]));
+  return dst;
+}

} // namespace inference
} // namespace nndeploy
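The commit message notes that more type support is still needed in the CoreML conversion. A possible follow-up, sketched below and not part of this commit, is an extra case inside the switch in CoremlConvert::convertToTensor for MLFeatureTypeMultiArray; the convertToDataType overload for MLMultiArrayDataType and the NCHW layout are assumptions, not existing repository API.

    // Sketch only: an additional case for the switch in convertToTensor (assumptions noted above).
    case MLFeatureTypeMultiArray:
    {
      MLMultiArrayConstraint *array_attr = [src multiArrayConstraint];
      // Hypothetical helper mapping MLMultiArrayDataType (Float32/Double/Int32) to base::DataType.
      base::DataType data_type = CoremlConvert::convertToDataType([array_attr dataType]);
      base::DataFormat format = base::kDataFormatNCHW;  // assumed layout; depends on the model
      base::IntVector shape;
      for (NSNumber *dim in [array_attr shape]) {
        shape.push_back([dim intValue]);
      }
      base::SizeVector stride = base::SizeVector();
      desc = device::TensorDesc(data_type, format, shape, stride);
      break;
    }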
37 changes: 10 additions & 27 deletions source/nndeploy/inference/coreml/coreml_inference.mm
@@ -145,23 +145,23 @@

base::Status CoremlInference::run() {
  if (dict_ == nil) {
-    dict_ = [NSDictionary alloc];
+    dict_ = [[NSMutableDictionary alloc] init];
  }
  for (auto iter : external_input_tensors_) {
    CVPixelBufferRef photodata = NULL;
    int width = iter.second->getWidth();
    int height = iter.second->getHeight();
-    int stride = iter.second->getStride()[0];
+    int stride = width;
    OSType pixelFormat = kCVPixelFormatType_OneComponent8;
    CVReturn status = CVPixelBufferCreateWithBytes(
        kCFAllocatorDefault, width, height, pixelFormat, iter.second->getPtr(),
        stride, NULL, NULL, NULL, &photodata);
    if (status != 0) {
      NNDEPLOY_LOGE("Tensor create failed");
    }
-    auto input_data = [MLFeatureValue featureValueWithPixelBuffer:photodata];
-    [dict_ setValue:input_data
-             forKey:[NSString stringWithCString:iter.first.c_str() encoding:NSASCIIStringEncoding]];
+    MLFeatureValue *input_data = [MLFeatureValue featureValueWithPixelBuffer:photodata];
+    [dict_ setObject:input_data
+              forKey:[NSString stringWithCString:iter.first.c_str() encoding:NSASCIIStringEncoding]];
  }
  MLDictionaryFeatureProvider *provider =
      [[MLDictionaryFeatureProvider alloc] initWithDictionary:dict_
@@ -182,31 +182,14 @@
  MLModelDescription *model_description = [mlmodel_ modelDescription];
  NSDictionary<NSString *, MLFeatureDescription *> *model_input_feature = [model_description inputDescriptionsByName];
  for (NSString *iter in model_input_feature) {
-    std::string name([iter cStringUsingEncoding:NSASCIIStringEncoding]);
-    MLFeatureDescription *attr = model_input_feature[iter];
-    MLImageConstraint *constraint = [attr imageConstraint];
-    base::DataType data_type = CoremlConvert::convertToDataType([constraint pixelFormatType]);
-    base::DataFormat data_fmt = base::kDataFormatAuto;
-    base::IntVector shape = {(int)constraint.pixelsHigh, (int)constraint.pixelsWide};
-    base::SizeVector stride = base::SizeVector();
-    device::TensorDesc desc(data_type, data_fmt, shape, stride);
-    device::Tensor *dst = nullptr;
-    dst = new device::Tensor(desc, name);
-    input_tensors_.insert({name, dst});
+    device::Tensor *input_tensor =
+        CoremlConvert::convertToTensor(model_input_feature[iter], iter, device);
+    input_tensors_.insert({std::string([iter cStringUsingEncoding:NSASCIIStringEncoding]), input_tensor});
  }
  NSDictionary<NSString *, MLFeatureDescription *> *model_output_feature = [model_description outputDescriptionsByName];
  for (NSString *iter in model_output_feature) {
-    std::string name([iter cStringUsingEncoding:NSASCIIStringEncoding]);
-    MLFeatureDescription *attr = model_output_feature[iter];
-    MLImageConstraint *constraint = [attr imageConstraint];
-    base::DataType data_type = CoremlConvert::convertToDataType([constraint pixelFormatType]);
-    base::DataFormat data_fmt = base::kDataFormatAuto;
-    base::IntVector shape = {(int)constraint.pixelsHigh, (int)constraint.pixelsWide};
-    base::SizeVector stride = base::SizeVector();
-    device::TensorDesc desc(data_type, data_fmt, shape, stride);
-    device::Tensor *dst = nullptr;
-    dst = new device::Tensor(desc, name);
-    output_tensors_.insert({name, dst});
+    device::Tensor *dst = CoremlConvert::convertToTensor(model_output_feature[iter], iter, device);
+    output_tensors_.insert({std::string([iter cStringUsingEncoding:NSASCIIStringEncoding]), dst});
  }
  return base::kStatusCodeOk;
}
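For reference (not part of the diff), the provider built from dict_ is consumed by Core ML roughly as below; the output feature name is a placeholder and error handling is minimal.

#import <CoreML/CoreML.h>

// Sketch: completing the truncated MLDictionaryFeatureProvider call and running prediction.
NSError *err = nil;
MLDictionaryFeatureProvider *provider =
    [[MLDictionaryFeatureProvider alloc] initWithDictionary:dict_ error:&err];
id<MLFeatureProvider> output = [mlmodel_ predictionFromFeatures:provider error:&err];
// Read back one output feature by name; "output" is a placeholder name.
MLFeatureValue *value = [output featureValueForName:@"output"];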
