Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add interp layer supporting nearest and bilinear #155

Merged
merged 1 commit into from
Oct 9, 2017
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions src/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,7 @@ ncnn_add_layer(PriorBox)
ncnn_add_layer(ConcatV2)
ncnn_add_layer(SoftmaxV2)
ncnn_add_layer(DetectionOutput)
ncnn_add_layer(Interp)

add_library(ncnn STATIC ${ncnn_SRCS})

Expand Down
2 changes: 1 addition & 1 deletion src/layer/detectionoutput.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -322,4 +322,4 @@ int DetectionOutput::forward(const std::vector<Mat>& bottom_blobs, std::vector<M
return 0;
}

} // namespace ncnn
} // namespace ncnn
136 changes: 136 additions & 0 deletions src/layer/interp.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#include "interp.h"

namespace ncnn {

DEFINE_LAYER_CREATOR(Interp);

Interp::Interp()
{
    // Interp consumes exactly one input blob and produces one output blob.
    one_blob_only = true;
}

Interp::~Interp()
{
}

#if NCNN_STDIO
#if NCNN_STRING
// Parse the text-format layer params:
//   resize_type height_scale width_scale output_height output_width
// Returns 0 on success, -1 if fewer than five fields could be read.
int Interp::load_param(FILE *paramfp)
{
    const int matched = fscanf(paramfp, " %d %f %f %d %d",
                               &resize_type, &height_scale, &width_scale,
                               &output_height, &output_width);
    if (matched != 5)
    {
        fprintf(stderr, "Interp load_param failed %d\n", matched);
        return -1;
    }

    return 0;
}
#endif // NCNN_STRING
// Read the binary-format layer params: five consecutive 4-byte fields in
// declaration order (resize_type, height_scale, width_scale, output_height,
// output_width). Returns 0 on success, -1 on a short/failed read.
int Interp::load_param_bin(FILE *paramfp)
{
    // BUG FIX: the original ignored every fread return value, so a
    // truncated param file left the fields partially read without any error.
    if (fread(&resize_type, sizeof(int), 1, paramfp) != 1)
        return -1;

    if (fread(&height_scale, sizeof(float), 1, paramfp) != 1)
        return -1;

    if (fread(&width_scale, sizeof(float), 1, paramfp) != 1)
        return -1;

    if (fread(&output_height, sizeof(int), 1, paramfp) != 1)
        return -1;

    if (fread(&output_width, sizeof(int), 1, paramfp) != 1)
        return -1;

    return 0;
}
#endif // NCNN_STDIO

// Read the in-memory layer params: five consecutive 4-byte fields in
// declaration order. Advances `mem` past the consumed bytes; returns 0.
int Interp::load_param(const unsigned char *&mem)
{
    const unsigned char *cursor = mem;

    resize_type = *(int *) cursor;
    cursor += sizeof(int);

    height_scale = *(float *) cursor;
    cursor += sizeof(float);

    width_scale = *(float *) cursor;
    cursor += sizeof(float);

    output_height = *(int *) cursor;
    cursor += sizeof(int);

    output_width = *(int *) cursor;
    cursor += sizeof(int);

    mem = cursor;
    return 0;
}

// Resize the input blob to (output_width, output_height), or by
// (width_scale, height_scale) when the explicit output size is unset.
// resize_type 1 = nearest neighbor, 2 = bilinear.
// Returns 0 on success, -100 on allocation failure, -233 on an
// unsupported resize_type.
int Interp::forward(const Mat &bottom_blob, Mat &top_blob) const
{
    int h = bottom_blob.h;
    int w = bottom_blob.w;
    int c = bottom_blob.c;
    int oh = output_height;
    int ow = output_width;
    // A zero output dimension means "derive the size from the scales".
    // BUG FIX: the original tested `ow == 0 || ow == 0`, so an unset
    // output_height with a set output_width never fell back to the scales.
    if (oh == 0 || ow == 0)
    {
        oh = h * height_scale;
        ow = w * width_scale;
    }
    if (oh == h && ow == w)
    {
        // Identity resize: share the input blob, no copy needed.
        top_blob = bottom_blob;
        return 0;
    }
    top_blob.create(ow, oh, c);
    if (top_blob.empty())
        return -100;

    if (resize_type == 1)// nearest
    {
#pragma omp parallel for
        for (int q = 0; q < c; ++q)
        {
            const float *ptr = bottom_blob.channel(q);
            float *output_ptr = top_blob.channel(q);
            for (int y = 0; y < oh; ++y)
            {
                // Map each output row/column back to its nearest source
                // coordinate, clamped to the input bounds.
                const int in_y = std::min((int) (y / height_scale), (h - 1));
                for (int x = 0; x < ow; ++x)
                {
                    const int in_x = std::min((int) (x / width_scale), (w - 1));
                    output_ptr[ow * y + x] = ptr[in_y * w + in_x];
                }
            }
        }
        return 0;

    }
    else if (resize_type == 2)// bilinear
    {
        resize_bilinear(bottom_blob, top_blob, ow, oh);
        return 0;

    }
    else
    {
        // BUG FIX: the original passed `&resize_type` (a pointer) to the
        // %d conversion, which is undefined behavior and prints garbage.
        fprintf(stderr, "unsupported resize type %d %d %d\n", resize_type, oh, ow);
        return -233;
    }
}


} // namespace ncnn
49 changes: 49 additions & 0 deletions src/layer/interp.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef LAYER_INTERP_H
#define LAYER_INTERP_H

#include "layer.h"

namespace ncnn {

// Spatial interpolation (resize) layer: scales the input blob to a target
// size, either explicit (output_width/output_height) or derived from the
// per-axis scale factors. Supports nearest-neighbor and bilinear modes.
class Interp : public Layer
{
public:
    Interp();
    virtual ~Interp();

#if NCNN_STDIO
#if NCNN_STRING
    // Load params from a text param file.
    virtual int load_param(FILE *paramfp);
#endif // NCNN_STRING
    // Load params from a binary param file.
    virtual int load_param_bin(FILE *paramfp);
#endif // NCNN_STDIO
    // Load params from an in-memory buffer; advances mem past the bytes read.
    virtual int load_param(const unsigned char *&mem);

    virtual int forward(const Mat &bottom_blob, Mat &top_blob) const;

public:
    // param
    float width_scale;   // horizontal scale factor, used when output_width is 0
    float height_scale;  // vertical scale factor, used when output_height is 0
    int output_width;    // explicit output width; 0 means use width_scale
    int output_height;   // explicit output height; 0 means use height_scale
    int resize_type;// 1: nearest  2: bilinear
};

} // namespace ncnn

#endif // LAYER_INTERP_H
10 changes: 9 additions & 1 deletion tools/caffe.proto
Original file line number Diff line number Diff line change
Expand Up @@ -382,6 +382,7 @@ message LayerParameter {
optional InfogainLossParameter infogain_loss_param = 116;
optional InnerProductParameter inner_product_param = 117;
optional InputParameter input_param = 143;
optional InterpParameter interp_param = 205;
optional LogParameter log_param = 134;
optional LRNParameter lrn_param = 118;
optional MemoryDataParameter memory_data_param = 119;
Expand Down Expand Up @@ -939,7 +940,14 @@ message InputParameter {
// Define no shape to defer to reshaping manually.
repeated BlobShape shape = 1;
}

// Message that stores parameters used by InterpLayer (spatial resize).
// If height/width are 0, the output size is derived from the zoom/shrink
// factors applied to the (optionally padded) input size.
message InterpParameter {
optional int32 height = 1 [default = 0]; // Height of output
optional int32 width = 2 [default = 0]; // Width of output
optional int32 zoom_factor = 3 [default = 1]; // zoom factor
optional int32 shrink_factor = 4 [default = 1]; // shrink factor
optional int32 pad_beg = 5 [default = 0]; // padding at begin of input
optional int32 pad_end = 6 [default = 0]; // padding at end of input
}
// Message that stores parameters used by LogLayer
message LogParameter {
// LogLayer computes outputs y = log_base(shift + scale * x), for base > 0.
Expand Down
7 changes: 7 additions & 0 deletions tools/caffe2ncnn.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@

#include "caffe.pb.h"


static inline size_t alignSize(size_t sz, int n)
{
return (sz + n-1) & -n;
Expand Down Expand Up @@ -663,6 +664,12 @@ int main(int argc, char** argv)
fprintf(pp, " -233");
}
}
else if (layer.type() == "Interp")
{
const caffe::InterpParameter& interp_param = layer.interp_param();
fprintf(pp," %d %f %f %d %d",2, static_cast<float>(interp_param.zoom_factor()), \
static_cast<float>(interp_param.zoom_factor()),interp_param.height(),interp_param.width());
}
else if (layer.type() == "LRN")
{
const caffe::LRNParameter& lrn_param = layer.lrn_param();
Expand Down