Skip to content

Commit

Permalink
修改状态码命名 (Rename status code: `kParseOperatorNullParam` → `kParseNullOperator`)
Browse files Browse the repository at this point in the history
  • Loading branch information
zjhellofss committed Mar 13, 2024
1 parent d263093 commit a44ec8c
Show file tree
Hide file tree
Showing 21 changed files with 39 additions and 30 deletions.
2 changes: 1 addition & 1 deletion include/status_code.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ enum class StatusCode {
kFunctionNotImplement = 5,
kParseWeightError = 6,
kParseParameterError = 7,
kParseOperatorNullParam = 8,
kParseNullOperator = 8,
};

} // namespace kuiper_infer
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/adaptive_avgpooling.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ StatusCode AdaptiveAveragePoolingLayer::CreateInstance(const std::shared_ptr<Run
std::shared_ptr<Layer<float>>& avg_layer) {
if (!op) {
LOG(ERROR) << "The adaptive pooling operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}

const auto& params = op->params;
Expand Down
20 changes: 14 additions & 6 deletions source/layer/details/base_convolution.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -146,13 +146,21 @@ StatusCode BaseConvolutionLayer::Forward(const std::vector<std::shared_ptr<Tenso
}

const uint32_t kernel_count = this->weights_.size();
if (!kernel_count) {
LOG(ERROR) << "The size of kernel matrix in the convolution layer should be greater "
"than zero";
return StatusCode::kInferParameterError;
}

const uint32_t kernel_h = this->weights_.at(0)->rows();
const uint32_t kernel_w = this->weights_.at(0)->cols();
const uint32_t kernel_channel = this->weights_.at(0)->channels();

uint32_t kernel_h = this->weights_.at(0)->rows();
uint32_t kernel_w = this->weights_.at(0)->cols();
CHECK(kernel_count > 0 && kernel_h > 0 && kernel_w > 0 && kernel_channel > 0)
<< "The size of kernel matrix in the convolution layer should be greater "
"than zero";
if (!kernel_h || !kernel_w || !kernel_channel) {
LOG(ERROR) << "The size of kernel matrix in the convolution layer should be greater "
"than zero";
return StatusCode::kInferParameterError;
}

for (uint32_t k = 0; k < kernel_count; ++k) {
const std::shared_ptr<Tensor<float>>& kernel = this->weights_.at(k);
Expand Down Expand Up @@ -218,7 +226,7 @@ StatusCode BaseConvolutionLayer::CreateInstance(const std::shared_ptr<RuntimeOpe
std::shared_ptr<Layer<float>>& conv_layer) {
if (!op) {
LOG(ERROR) << "The convolution operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}

const auto& params = op->params;
Expand Down
4 changes: 2 additions & 2 deletions source/layer/details/base_convolution.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,8 @@ class BaseConvolutionLayer : public ParamLayer {
static StatusCode CreateInstance(const std::shared_ptr<RuntimeOperator>& op,
std::shared_ptr<Layer<float>>& conv_layer);

StatusCode Forward(const std::vector<std::shared_ptr<Tensor<float>>>& inputs,
std::vector<std::shared_ptr<Tensor<float>>>& outputs) override;
[[nodiscard]] StatusCode Forward(const std::vector<std::shared_ptr<Tensor<float>>>& inputs,
std::vector<std::shared_ptr<Tensor<float>>>& outputs) override;

private:
virtual void ComputeOutput(sftensor input, sftensor output_tensor, uint32_t kernel_h,
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/batchnorm2d.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ StatusCode BatchNorm2dLayer::CreateInstance(const std::shared_ptr<RuntimeOperato
std::shared_ptr<Layer<float>>& batch_layer) {
if (!op) {
LOG(ERROR) << "The batchnorm operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}

const auto& params = op->params;
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/cat.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ StatusCode CatLayer::CreateInstance(const std::shared_ptr<RuntimeOperator>& op,
std::shared_ptr<Layer<float>>& cat_layer) {
if (!op) {
LOG(ERROR) << "The cat operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}

const auto& params = op->params;
Expand Down
9 changes: 5 additions & 4 deletions source/layer/details/convolution.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -58,10 +58,11 @@ class ConvolutionLayer : public BaseConvolutionLayer {
uint32_t kernel_index, uint32_t kernel_count_group, uint32_t output_h,
uint32_t output_w, bool is_1x1conv_nopadding) const;

arma::fmat ConvIm2Col(sftensor input, uint32_t kernel_h, uint32_t kernel_w, uint32_t input_h,
uint32_t input_w, uint32_t channels_per_group, uint32_t output_h,
uint32_t output_w, uint32_t group, uint32_t row_len,
uint32_t col_len) const;
[[nodiscard]] arma::fmat ConvIm2Col(sftensor input, uint32_t kernel_h, uint32_t kernel_w,
uint32_t input_h, uint32_t input_w,
uint32_t channels_per_group, uint32_t output_h,
uint32_t output_w, uint32_t group, uint32_t row_len,
uint32_t col_len) const;
};

} // namespace kuiper_infer
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/expression.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ StatusCode ExpressionLayer::CreateInstance(const std::shared_ptr<RuntimeOperator
std::shared_ptr<Layer<float>>& expression_layer) {
if (!op) {
LOG(ERROR) << "The expression operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}

const auto& params = op->params;
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/flatten.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ StatusCode FlattenLayer::CreateInstance(const std::shared_ptr<RuntimeOperator>&
std::shared_ptr<Layer<float>>& flatten_layer) {
if (!op) {
LOG(ERROR) << "The flatten operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}

const auto& params = op->params;
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/hardsigmoid.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ StatusCode HardSigmoid::CreateInstance(const std::shared_ptr<RuntimeOperator>& o
std::shared_ptr<Layer<float>>& hardsigmoid_layer) {
if (!op) {
LOG(ERROR) << "The hardsigmoid operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}
hardsigmoid_layer = std::make_shared<HardSigmoid>();
return StatusCode::kSuccess;
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/hardswish.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ StatusCode HardSwishLayer::CreateInstance(const std::shared_ptr<RuntimeOperator>
std::shared_ptr<Layer<float>>& hardswish_layer) {
if (!op) {
LOG(ERROR) << "The hardswish operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}
hardswish_layer = std::make_shared<HardSwishLayer>();
return StatusCode::kSuccess;
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/linear.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ StatusCode LinearLayer::CreateInstance(const std::shared_ptr<RuntimeOperator>& o
std::shared_ptr<Layer<float>>& linear_layer) {
if (!op) {
LOG(ERROR) << "The linear operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}

const auto& params = op->params;
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/maxpooling.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ StatusCode MaxPoolingLayer::CreateInstance(const std::shared_ptr<RuntimeOperator
std::shared_ptr<Layer<float>>& max_layer) {
if (!op) {
LOG(ERROR) << "The maxpooling operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}

const auto& params = op->params;
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/relu.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ StatusCode ReluLayer::CreateInstance(const std::shared_ptr<RuntimeOperator>& op,
std::shared_ptr<Layer<float>>& relu_layer) {
if (!op) {
LOG(ERROR) << "The relu operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}

relu_layer = std::make_shared<ReluLayer>();
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/relu6.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ StatusCode Relu6Layer::CreateInstance(const std::shared_ptr<RuntimeOperator>& op
std::shared_ptr<Layer<float>>& relu_layer) {
if (!op) {
LOG(ERROR) << "The relu6 operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}

relu_layer = std::make_shared<Relu6Layer>();
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/sigmoid.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ StatusCode SigmoidLayer::CreateInstance(const std::shared_ptr<RuntimeOperator>&
std::shared_ptr<Layer<float>>& sigmoid_layer) {
if (!op) {
LOG(ERROR) << "The sigmoid operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}
sigmoid_layer = std::make_shared<SigmoidLayer>();
return StatusCode::kSuccess;
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/silu.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ StatusCode SiLULayer::CreateInstance(const std::shared_ptr<RuntimeOperator>& op,
std::shared_ptr<Layer<float>>& silu_layer) {
if (!op) {
LOG(ERROR) << "The SiLU operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}
silu_layer = std::make_shared<SiLULayer>();
return StatusCode::kSuccess;
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/softmax.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -230,7 +230,7 @@ StatusCode SoftmaxLayer::CreateInstance(const std::shared_ptr<RuntimeOperator>&
std::shared_ptr<Layer<float>>& softmax_layer) {
if (!op) {
LOG(ERROR) << "The softmax operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}

const auto& params = op->params;
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/upsample.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,7 @@ StatusCode UpSampleLayer::CreateInstance(const std::shared_ptr<RuntimeOperator>&
std::shared_ptr<Layer<float>>& upsample_layer) {
if (!op) {
LOG(ERROR) << "The upsample operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}

const auto& params = op->params;
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/view.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ StatusCode ViewLayer::CreateInstance(const std::shared_ptr<RuntimeOperator>& op,
std::shared_ptr<Layer<float>>& view_layer) {
if (!op) {
LOG(ERROR) << "The view operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}

const auto& params = op->params;
Expand Down
2 changes: 1 addition & 1 deletion source/layer/details/yolo_detect.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -167,7 +167,7 @@ StatusCode YoloDetectLayer::CreateInstance(const std::shared_ptr<RuntimeOperator
std::shared_ptr<Layer<float>>& yolo_detect_layer) {
if (!op) {
LOG(ERROR) << "The yolo head operator parameter in the layer is null pointer.";
return StatusCode::kParseOperatorNullParam;
return StatusCode::kParseNullOperator;
}

const auto& attrs = op->attribute;
Expand Down

0 comments on commit a44ec8c

Please sign in to comment.