#include <algorithm>
#include <cfloat>
#include <vector>
#include "caffe/layer.hpp"
#include "caffe/util/math_functions.hpp"
#include "caffe/vision_layers.hpp"
namespace caffe {
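
// SoftmaxWithLossLayer with per-sample weighting. It expects three
// bottoms: bottom[0] holds the raw class scores, bottom[1] the integer
// labels, and bottom[2] a weight for each (image, spatial position)
// pair. The loss is the weighted negative log-likelihood of the
// labeled class under the softmax of the scores.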
template <typename Dtype>
void SoftmaxWithLossLayer<Dtype>::LayerSetUp(
const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
LossLayer<Dtype>::LayerSetUp(bottom, top);
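  // Wire up the internal softmax layer: it consumes the raw scores in
  // bottom[0] and writes normalized probabilities into prob_.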
softmax_bottom_vec_.clear();
softmax_bottom_vec_.push_back(bottom[0]);
softmax_top_vec_.clear();
softmax_top_vec_.push_back(&prob_);
softmax_layer_->SetUp(softmax_bottom_vec_, &softmax_top_vec_);
}

template <typename Dtype>
void SoftmaxWithLossLayer<Dtype>::Reshape(
const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
LossLayer<Dtype>::Reshape(bottom, top);
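  // Keep the internal softmax layer (and hence prob_) shaped like the input.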
softmax_layer_->Reshape(softmax_bottom_vec_, &softmax_top_vec_);
if (top->size() >= 2) {
// softmax output
(*top)[1]->ReshapeLike(*bottom[0]);
}
}

template <typename Dtype>
void SoftmaxWithLossLayer<Dtype>::Forward_cpu(
const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
// The forward pass computes the softmax prob values.
softmax_layer_->Forward(softmax_bottom_vec_, &softmax_top_vec_);
const Dtype* prob_data = prob_.cpu_data();
const Dtype* label = bottom[1]->cpu_data();
const Dtype* sample_weight = bottom[2]->cpu_data();
int num = prob_.num();
int dim = prob_.count() / num;
int spatial_dim = prob_.height() * prob_.width();
Dtype loss = 0;
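  // prob_ is laid out as num x channels x height x width, so dim equals
  // channels * spatial_dim and the probability of class c at spatial
  // position j of image i is prob_data[i * dim + c * spatial_dim + j].
  // Each position contributes its weighted negative log-likelihood; the
  // max() with FLT_MIN guards against log(0).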
  for (int i = 0; i < num; ++i) {
    for (int j = 0; j < spatial_dim; ++j) {
      const Dtype w = sample_weight[i * spatial_dim + j];
      loss -= w * log(std::max(prob_data[i * dim +
          static_cast<int>(label[i * spatial_dim + j]) * spatial_dim + j],
          Dtype(FLT_MIN)));
    }
  }
(*top)[0]->mutable_cpu_data()[0] = loss / num / spatial_dim;
if (top->size() == 2) {
(*top)[1]->ShareData(prob_);
}
}

template <typename Dtype>
void SoftmaxWithLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
const vector<bool>& propagate_down,
vector<Blob<Dtype>*>* bottom) {
if (propagate_down[1]) {
LOG(FATAL) << this->type_name()
<< " Layer cannot backpropagate to label inputs.";
}
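  // Note that no gradient is computed for the sample weights (bottom[2]);
  // they are treated as fixed inputs.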
if (propagate_down[0]) {
Dtype* bottom_diff = (*bottom)[0]->mutable_cpu_diff();
const Dtype* prob_data = prob_.cpu_data();
caffe_copy(prob_.count(), prob_data, bottom_diff);
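    // Seed the gradient with the softmax probabilities: for softmax
    // cross-entropy, d(loss)/d(score) is p - 1 at the labeled class and
    // p elsewhere, before sample weighting.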
const Dtype* label = (*bottom)[1]->cpu_data();
const Dtype* sample_weight = (*bottom)[2]->cpu_data();
int num = prob_.num();
int dim = prob_.count() / num;
int spatial_dim = prob_.height() * prob_.width();
    for (int i = 0; i < num; ++i) {
      for (int j = 0; j < spatial_dim; ++j) {
        const Dtype w = sample_weight[i * spatial_dim + j];
        // Subtract 1 at the labeled class to form p - 1 there ...
        bottom_diff[i * dim + static_cast<int>(label[i * spatial_dim + j])
            * spatial_dim + j] -= 1;
        // ... then scale the gradient of every class channel at this
        // spatial position by the sample weight. Looping k over the full
        // dim would index past the blob whenever spatial_dim > 1, so the
        // loop runs over channels, stepping through memory by spatial_dim.
        for (int c = 0; c < prob_.channels(); ++c) {
          bottom_diff[i * dim + c * spatial_dim + j] *= w;
        }
      }
    }
    // Scale the gradient by the loss weight read from the top blob and
    // apply the same 1 / (num * spatial_dim) normalization as the
    // forward pass.
    const Dtype loss_weight = top[0]->cpu_diff()[0];
    caffe_scal(prob_.count(), loss_weight / num / spatial_dim, bottom_diff);
}
}

#ifdef CPU_ONLY
STUB_GPU(SoftmaxWithLossLayer);
#endif

INSTANTIATE_CLASS(SoftmaxWithLossLayer);
} // namespace caffe
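
// A minimal sketch of how this layer might be declared in a net prototxt,
// assuming the old-style (pre-layer-refactor) V1 syntax that matches this
// code's Layer API; the blob names are hypothetical:
//
//   layers {
//     name: "loss"
//     type: SOFTMAX_LOSS
//     bottom: "score"          # raw class scores (bottom[0])
//     bottom: "label"          # integer labels (bottom[1])
//     bottom: "sample_weight"  # per-position weights (bottom[2])
//     top: "loss"
//   }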