SoftmaxLayer and SoftmaxWithLossLayer Code Walkthrough

谭晓博
2023-12-01

 


Original author: Wang Xiao

 


  First, let's look at how SoftmaxWithLoss is defined in a prototxt file:

layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "fc8"
  bottom: "label"
  top: "loss"
}
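  Before diving into the source, it helps to recall what this layer computes: a softmax over the bottom[0] scores ("fc8"), followed by the negative log of the probability assigned to the ground-truth label. The snippet below is a minimal standalone sketch of that computation, not Caffe code; the function name and toy values are mine for illustration only:

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

// Softmax over raw class scores followed by the negative log-likelihood of
// the ground-truth label -- the scalar that SoftmaxWithLoss emits as "loss".
// Subtracting the max score is the usual numerical-stability trick.
double softmax_nll(const std::vector<double>& scores, int label) {
  double max_score = *std::max_element(scores.begin(), scores.end());
  double sum = 0.0;
  for (double s : scores) sum += std::exp(s - max_score);
  // log p(label) = (score[label] - max) - log(sum of shifted exponentials)
  double log_prob = (scores[label] - max_score) - std::log(sum);
  return -log_prob;
}

int main() {
  std::vector<double> fc8 = {2.0, 0.5, -1.0};  // toy scores for 3 classes
  std::printf("loss = %f\n", softmax_nll(fc8, /*label=*/0));
  return 0;
}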

 

 

  Next, look at the SoftmaxWithLossLayer .cpp file (softmax_loss_layer.cpp):

  

#include <algorithm>
#include <cfloat>
#include <vector>

#include "caffe/layers/softmax_loss_layer.hpp"
#include "caffe/util/math_functions.hpp"

namespace caffe {

template <typename Dtype>
void SoftmaxWithLossLayer<Dtype>::LayerSetUp(
    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
  LossLayer<Dtype>::LayerSetUp(bottom, top);
  // Create an internal Softmax layer that shares this layer's parameters.
  LayerParameter softmax_param(this->layer_param_);
  softmax_param.set_type("Softmax");
  softmax_layer_ = LayerRegistry<Dtype>::CreateLayer(softmax_param);
  softmax_bottom_vec_.clear();
  softmax_bottom_vec_.push_back(bottom[0]);  // bottom[0] feeds the internal softmax
  softmax_top_vec_.clear();
  softmax_top_vec_.push_back(&prob_);        // prob_ receives the softmax output
  softmax_layer_->SetUp(softmax_bottom_vec_, softmax_top_vec_);

  // Read the loss-related parameters from the layer definition.
  has_ignore_label_ =
      this->layer_param_.loss_param().has_ignore_label();
  if (has_ignore_label_) {
    ignore_label_ = this->layer_param_.loss_param().ignore_label();
  }
  if (!this->layer_param_.loss_param().has_normalization() &&
      this->layer_param_.loss_param().has_normalize()) {
    // Legacy "normalize" flag: true -> VALID, false -> BATCH_SIZE.
    normalization_ = this->layer_param_.loss_param().normalize() ?
                     LossParameter_NormalizationMode_VALID :
                     LossParameter_NormalizationMode_BATCH_SIZE;
  } else {
    normalization_ = this->layer_param_.loss_param().normalization();
  }
}
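  The normalization_ mode chosen at the end of LayerSetUp decides what the summed loss is later divided by. The sketch below spells out the semantics of the four modes; it assumes Caffe's generated caffe.pb.h is on the include path, and the helper name normalizer_for is mine, not part of the layer:

#include "caffe/proto/caffe.pb.h"  // for LossParameter_NormalizationMode

// Sketch of what each normalization mode divides the summed loss by.
// outer_num * inner_num is the total number of predictions (e.g. N*H*W);
// valid_count is the number of labels that are not ignore_label.
double normalizer_for(caffe::LossParameter_NormalizationMode mode,
                      int outer_num, int inner_num, int valid_count) {
  switch (mode) {
    case caffe::LossParameter_NormalizationMode_FULL:
      return outer_num * inner_num;   // every prediction counts
    case caffe::LossParameter_NormalizationMode_VALID:
      // fall back to FULL when no valid count is available (valid_count < 0)
      return valid_count < 0 ? outer_num * inner_num : valid_count;
    case caffe::LossParameter_NormalizationMode_BATCH_SIZE:
      return outer_num;               // divide by batch size only
    case caffe::LossParameter_NormalizationMode_NONE:
    default:
      return 1;                       // raw sum, no normalization
  }
}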

  Next comes the Reshape operation on the input data:

  

template <typename Dtype>
void SoftmaxWithLossLayer<Dtype>::Reshape(
    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
  LossLayer<Dtype>::Reshape(bottom, top);
  softmax_layer_->Reshape(softmax_bottom_vec_, softmax_top_vec_);
  softmax_axis_ =
      bottom[0]->CanonicalAxisIndex(this->layer_param_.softmax_param().axis());
  outer_num_ = bottom[0]->count(0, softmax_axis_);
  inner_num_ = bottom[0]->count(softmax_axis_ + 1);
  CHECK_EQ(outer_num_ * inner_num_, bottom[1]->count())
      << "Number of labels must match number of predictions; "
      << "e.g., if softmax axis == 1 and prediction shape is (N, C, H, W), "
      << "label count (number of labels) must be N*H*W, "
      << "with integer values in {0, 1, ..., C-1}.";
  if (top.size() >= 2) {
    // softmax output
    top[1]->ReshapeLike(*bottom[0]);
  }
}


Reposted from: https://www.cnblogs.com/wangxiaocvpr/p/5487558.html
