FCN Source Code Walkthrough: net.py

Reposted from https://blog.csdn.net/qq_21368481/article/details/80196819

To open this walkthrough, we start with FCN's net.py. This script builds the network: running it directly generates the required train.prototxt and val.prototxt.

This means that when the network needs to be modified, you do not have to edit train.prototxt and val.prototxt entry by entry; change the corresponding parts of net.py and rerun it to regenerate both files in full.

The analysis below uses the net.py file under voc-fcn32s in fcn.berkeleyvision.org-master. Its source code is as follows:

import caffe
from caffe import layers as L, params as P
from caffe.coord_map import crop

def conv_relu(bottom, nout, ks=3, stride=1, pad=1):
    conv = L.Convolution(bottom, kernel_size=ks, stride=stride,
        num_output=nout, pad=pad,
        param=[dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)])
    return conv, L.ReLU(conv, in_place=True)

def max_pool(bottom, ks=2, stride=2):
    return L.Pooling(bottom, pool=P.Pooling.MAX, kernel_size=ks, stride=stride)

def fcn(split):
    n = caffe.NetSpec()
    pydata_params = dict(split=split, mean=(104.00699, 116.66877, 122.67892),
            seed=1337)
    if split == 'train':
        pydata_params['sbdd_dir'] = '../data/sbdd/dataset'
        pylayer = 'SBDDSegDataLayer'
    else:
        pydata_params['voc_dir'] = '../data/pascal/VOC2011'
        pylayer = 'VOCSegDataLayer'
    n.data, n.label = L.Python(module='voc_layers', layer=pylayer,
            ntop=2, param_str=str(pydata_params))

    # the base net
    n.conv1_1, n.relu1_1 = conv_relu(n.data, 64, pad=100)
    n.conv1_2, n.relu1_2 = conv_relu(n.relu1_1, 64)
    n.pool1 = max_pool(n.relu1_2)
    n.conv2_1, n.relu2_1 = conv_relu(n.pool1, 128)
    n.conv2_2, n.relu2_2 = conv_relu(n.relu2_1, 128)
    n.pool2 = max_pool(n.relu2_2)
    n.conv3_1, n.relu3_1 = conv_relu(n.pool2, 256)
    n.conv3_2, n.relu3_2 = conv_relu(n.relu3_1, 256)
    n.conv3_3, n.relu3_3 = conv_relu(n.relu3_2, 256)
    n.pool3 = max_pool(n.relu3_3)
    n.conv4_1, n.relu4_1 = conv_relu(n.pool3, 512)
    n.conv4_2, n.relu4_2 = conv_relu(n.relu4_1, 512)
    n.conv4_3, n.relu4_3 = conv_relu(n.relu4_2, 512)
    n.pool4 = max_pool(n.relu4_3)
    n.conv5_1, n.relu5_1 = conv_relu(n.pool4, 512)
    n.conv5_2, n.relu5_2 = conv_relu(n.relu5_1, 512)
    n.conv5_3, n.relu5_3 = conv_relu(n.relu5_2, 512)
    n.pool5 = max_pool(n.relu5_3)

    # fully conv
    n.fc6, n.relu6 = conv_relu(n.pool5, 4096, ks=7, pad=0)
    n.drop6 = L.Dropout(n.relu6, dropout_ratio=0.5, in_place=True)
    n.fc7, n.relu7 = conv_relu(n.drop6, 4096, ks=1, pad=0)
    n.drop7 = L.Dropout(n.relu7, dropout_ratio=0.5, in_place=True)

    n.score_fr = L.Convolution(n.drop7, num_output=21, kernel_size=1, pad=0,
        param=[dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)])
    n.upscore = L.Deconvolution(n.score_fr,
        convolution_param=dict(num_output=21, kernel_size=64, stride=32,
            bias_term=False),
        param=[dict(lr_mult=0)])
    n.score = crop(n.upscore, n.data)
    n.loss = L.SoftmaxWithLoss(n.score, n.label,
            loss_param=dict(normalize=False, ignore_label=255))

    return n.to_proto()

def make_net():
    with open('train.prototxt', 'w') as f:
        f.write(str(fcn('train')))

    with open('val.prototxt', 'w') as f:
        f.write(str(fcn('seg11valid')))

if __name__ == '__main__':
    make_net()
A detailed walkthrough of the code follows.

1. The conv_relu() function

This helper calls Caffe's Convolution and ReLU constructors to define a convolution layer together with its ReLU activation layer.

# Defines a convolution layer plus its activation layer (ReLU in every case)
'''
bottom: the output of the previous layer
nout:   the number of outputs (for a conv layer, the number of output feature maps)
ks:     kernel size
stride: stride
pad:    padding
'''
def conv_relu(bottom, nout, ks=3, stride=1, pad=1):
    conv = L.Convolution(bottom, kernel_size=ks, stride=stride,
        num_output=nout, pad=pad,
        param=[dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)])
    # The first dict sets the learning rate and decay for the weights W; the
    # second sets them for the bias b (no weight decay on the bias).
    # lr_mult is a learning-rate multiplier: the effective rate is
    # lr_mult * base_lr from solver.prototxt.
    # decay_mult is a weight-decay multiplier: the effective decay is
    # decay_mult * weight_decay from solver.prototxt.
    # Returns the conv layer and its activation layer (whose input and output
    # are both the conv layer's output, i.e. ReLU runs in place).
    return conv, L.ReLU(conv, in_place=True)
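As a toy illustration of how these multipliers combine with the solver's global settings (the base_lr and weight_decay values below are assumptions for the sake of the example, not taken from this post):

# hypothetical solver.prototxt settings
base_lr, weight_decay = 1e-10, 0.0005

# weights: lr_mult=1, decay_mult=1 -> lr = 1e-10, decay = 5e-4
# bias:    lr_mult=2, decay_mult=0 -> lr = 2e-10, no weight decay
for name, lr_mult, decay_mult in [('weight', 1, 1), ('bias', 2, 0)]:
    print(name, base_lr * lr_mult, weight_decay * decay_mult)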

2. The max_pool() function

This helper calls Caffe's Pooling constructor to define a max-pooling layer (P.Pooling.MAX selects max pooling).


# Defines a pooling layer (max pooling in every case)
def max_pool(bottom, ks=2, stride=2):
    return L.Pooling(bottom, pool=P.Pooling.MAX, kernel_size=ks, stride=stride)

3. The fcn() function

# Builds the net written to train.prototxt / val.prototxt
# split is 'train' or 'val' (training vs. testing)
def fcn(split):
    n = caffe.NetSpec()  # define the net with pycaffe
    pydata_params = dict(split=split, mean=(104.00699, 116.66877, 122.67892),
            seed=1337)
    if split == 'train':
        pydata_params['sbdd_dir'] = '../data/sbdd/dataset'  # training set path
        pylayer = 'SBDDSegDataLayer'  # class name; see voc_layers.py
    else:
        pydata_params['voc_dir'] = '../data/pascal/VOC2011'  # test/validation set path
        pylayer = 'VOCSegDataLayer'
    '''
    Inputs
    module:    the module name, usually a .py file you write yourself that
               implements the layer (here, voc_layers.py)
    layer:     the name of the class defined in that module
    ntop:      the number of tops (outputs) of this layer (typically the
               first layer outputs data and label)
    param_str: the parameters the layer needs, passed as a string
    Outputs
    n.data and n.label, matching ntop=2
    '''
    n.data, n.label = L.Python(module='voc_layers', layer=pylayer,
            ntop=2, param_str=str(pydata_params))
    # the base net
    n.conv1_1, n.relu1_1 = conv_relu(n.data, 64, pad=100)  # conv1_1 (ReLU activation)
    n.conv1_2, n.relu1_2 = conv_relu(n.relu1_1, 64)  # conv1_2 (ReLU activation)
    n.pool1 = max_pool(n.relu1_2)  # pool1, the first max-pooling layer
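Since the data layer is written in Python, it is worth seeing how param_str reaches it. Below is a condensed sketch of the receiving side; the class skeleton is assumed from the general structure of voc_layers.py, and the real file does considerably more:

import caffe

class VOCSegDataLayer(caffe.Layer):
    def setup(self, bottom, top):
        # param_str is the str(pydata_params) built in fcn() above;
        # eval() turns it back into a dict
        params = eval(self.param_str)
        self.voc_dir = params['voc_dir']
        self.split = params['split']
        self.mean = params['mean']
        # ... load the image list for this split, seed the RNG, etc.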

The conv1_1 through pool1 calls above correspond to the following content in the generated .prototxt:

layer {
  name: "conv1_1"
  type: "Convolution"
  bottom: "data"
  top: "conv1_1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 2.0
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 64
    pad: 100
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "relu1_1"
  type: "ReLU"
  bottom: "conv1_1"
  top: "conv1_1"
}
layer {
  name: "conv1_2"
  type: "Convolution"
  bottom: "conv1_1"
  top: "conv1_2"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 2.0
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "relu1_2"
  type: "ReLU"
  bottom: "conv1_2"
  top: "conv1_2"
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1_2"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}

The first convolution layer, conv1_1, uses pad=100 to guard against inputs that are too small; this padding is also what lets FCN train and test on images of arbitrary size. The analysis is as follows:

As the code shows, every convolution layer in conv1 through conv5 has kernel size k=3, padding p=1 (except conv1_1), and stride s=1, so each convolution preserves its input size. Taking the height dimension as an example, and temporarily assuming conv1_1 also used p=1 (and allowing fractional sizes, i.e. ignoring the floor operation):

$$h_{out} = \frac{h_{in} - k + 2p}{s} + 1 = \frac{h_{in} - 3 + 2 \times 1}{1} + 1 = h_{in}$$

With the actual pad=100, conv1_1 instead outputs $h_{in} + 198$. Carrying the same idealized (no-rounding) arithmetic through the rest of the net, the five poolings divide by 32, fc6 (7x7 kernel, no padding) subtracts 6, and the kernel-64, stride-32 deconvolution gives $32\left(\frac{h_{in}+198}{32} - 6 - 1\right) + 64 = h_{in} + 38$. These extra 38 pixels are the surplus introduced by conv1_1's pad, and the score layer's job is to crop them away. How? Symmetrically: it keeps the middle portion (because the padding itself was symmetric), so the offset is 19 on each side. Cropping starts at the 20th pixel of each row and column (index 19, since Caffe is written in C++ and array indices start at 0) and takes a window of the original image size, producing an output the same size as the input. See the diagram below:

[Figure 1: symmetric cropping of the upscore output back to the input size]

If the logic in coord_map.py is still unclear, try changing conv1_1's pad yourself, regenerate the net by running the command below in a terminal, and watch how the offset of the score layer changes; this is an indirect way to understand what conv1_1's pad and the crop offset do.

python net.py
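For reference, with the default pad=100 the score layer that net.py generates in voc-fcn32s's train.prototxt is a Crop layer with offset 19:

layer {
  name: "score"
  type: "Crop"
  bottom: "upscore"
  bottom: "data"
  top: "score"
  crop_param {
    axis: 2
    offset: 19
  }
}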
[Figure 2]

    # The loss layer: softmax loss
    n.loss = L.SoftmaxWithLoss(n.score, n.label,
            loss_param=dict(normalize=False, ignore_label=255))
    # Pixels labeled 255 (the void/boundary label in PASCAL VOC) are ignored;
    # normalize=False leaves the loss summed over pixels rather than averaged
    return n.to_proto()  # return the whole generated net
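To make the two loss_param settings concrete, here is a small numpy sketch (my own illustration, not Caffe's implementation) of the per-image quantity this layer computes:

import numpy as np

def softmax_loss(score, label, ignore_label=255):
    # score: (C, H, W) raw class scores; label: (H, W) integer labels
    e = np.exp(score - score.max(axis=0))        # numerically stable softmax
    prob = e / e.sum(axis=0)
    ys, xs = np.nonzero(label != ignore_label)   # drop ignored (255) pixels
    # normalize=False: sum over pixels rather than averaging
    return -np.log(prob[label[ys, xs], ys, xs]).sum()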

4. make_net() and the program entry point

# Create train.prototxt and val.prototxt and write the corresponding nets
def make_net():
    with open('train.prototxt', 'w') as f:
        f.write(str(fcn('train')))

    with open('val.prototxt', 'w') as f:
        f.write(str(fcn('seg11valid')))

# program entry point
if __name__ == '__main__':
    make_net()

Below is part of the training log:

Using each layer's kernel size, stride, and padding, you can compute every layer's output size and compare it against Caffe's log. One point worth noting: Caffe's convolution layers round down when computing output sizes, but its pooling layers round up. I have not read the code deeply enough to know the exact reason, but my feeling is that the two are complementary: what the convolution layers lose, the pooling layers make up. This is why Caffe reports pool2's output as 144*175 rather than 144*174 (174 is what you would get by rounding down everywhere, as is conventional, but that is not what Caffe does).
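As a quick check, the following sketch (mine, built directly from the two formulas quoted below from conv_layer.cpp and pooling_layer.cpp) reproduces the shapes in the log for the 375*500 input:

import math

def conv_out(x, k, pad, stride=1):
    # conv_layer.cpp: integer (floor) division
    return (x + 2 * pad - k) // stride + 1

def pool_out(x, k=2, stride=2):
    # pooling_layer.cpp: ceil
    return int(math.ceil((x - k) / float(stride))) + 1

for x in (375, 500):            # input height, then width, as in the log
    x = conv_out(x, 3, 100)     # conv1_1 (pad=100); conv1_2..conv5_3 keep the size
    print('conv1_1:', x)        # 573 / 698
    for name in ('pool1', 'pool2', 'pool3', 'pool4', 'pool5'):
        x = pool_out(x)
        print(name + ':', x)    # 287/349, 144/175, 72/88, 36/44, 18/22
    x = conv_out(x, 7, 0)       # fc6: 7x7 convolution, pad=0
    print('fc6:', x)            # 12 / 16
    x = (x - 1) * 32 + 64       # upscore: deconvolution, kernel=64, stride=32
    print('upscore:', x)        # 416 / 544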

For the details, see conv_layer.cpp and pooling_layer.cpp under caffe/src/caffe/layers. The relevant parts are excerpted below for convenience:

(1) conv_layer.cpp (uses the integer division operator /, which rounds down)

template <typename Dtype>
void ConvolutionLayer<Dtype>::compute_output_shape() {
  const int* kernel_shape_data = this->kernel_shape_.cpu_data();
  const int* stride_data = this->stride_.cpu_data();
  const int* pad_data = this->pad_.cpu_data();
  const int* dilation_data = this->dilation_.cpu_data();
  this->output_shape_.clear();
  for (int i = 0; i < this->num_spatial_axes_; ++i) {
    // i + 1 to skip channel axis
    const int input_dim = this->input_shape(i + 1);
    const int kernel_extent = dilation_data[i] * (kernel_shape_data[i] - 1) + 1;
    const int output_dim = (input_dim + 2 * pad_data[i] - kernel_extent)
        / stride_data[i] + 1;
    this->output_shape_.push_back(output_dim);
  }
}

(2) pooling_layer.cpp (uses the ceil function, which rounds up)

template <typename Dtype>
void PoolingLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top) {
  CHECK_EQ(4, bottom[0]->num_axes()) << "Input must have 4 axes, "
      << "corresponding to (num, channels, height, width)";
  channels_ = bottom[0]->channels();
  height_ = bottom[0]->height();
  width_ = bottom[0]->width();
  if (global_pooling_) {
    kernel_h_ = bottom[0]->height();
    kernel_w_ = bottom[0]->width();
  }
  pooled_height_ = static_cast<int>(ceil(static_cast<float>(
      height_ + 2 * pad_h_ - kernel_h_) / stride_h_)) + 1;
  pooled_width_ = static_cast<int>(ceil(static_cast<float>(
      width_ + 2 * pad_w_ - kernel_w_) / stride_w_)) + 1;
  if (pad_h_ || pad_w_) {
    // If we have padding, ensure that the last pooling starts strictly
    // inside the image (instead of at the padding); otherwise clip the last.
    if ((pooled_height_ - 1) * stride_h_ >= height_ + pad_h_) {
      --pooled_height_;
    }
    if ((pooled_width_ - 1) * stride_w_ >= width_ + pad_w_) {
      --pooled_width_;
    }
    CHECK_LT((pooled_height_ - 1) * stride_h_, height_ + pad_h_);
    CHECK_LT((pooled_width_ - 1) * stride_w_, width_ + pad_w_);
  }
  top[0]->Reshape(bottom[0]->num(), channels_, pooled_height_,
      pooled_width_);
  if (top.size() > 1) {
    top[1]->ReshapeLike(*top[0]);
  }
  // If max pooling, we will initialize the vector index part.
  if (this->layer_param_.pooling_param().pool() ==
      PoolingParameter_PoolMethod_MAX && top.size() == 1) {
    max_idx_.Reshape(bottom[0]->num(), channels_, pooled_height_,
        pooled_width_);
  }
  // If stochastic pooling, we will initialize the random index part.
  if (this->layer_param_.pooling_param().pool() ==
      PoolingParameter_PoolMethod_STOCHASTIC) {
    rand_idx_.Reshape(bottom[0]->num(), channels_, pooled_height_,
        pooled_width_);
  }
}

The actual output log:

I0505 15:40:16.159765  5317 layer_factory.hpp:77] Creating layer data
I0505 15:40:16.174499  5317 net.cpp:84] Creating Layer data
I0505 15:40:16.174676  5317 net.cpp:380] data -> data
I0505 15:40:16.174806  5317 net.cpp:380] data -> label
I0505 15:40:16.330996  5317 net.cpp:122] Setting up data
I0505 15:40:16.331058  5317 net.cpp:129] Top shape: 1 3 375 500 (562500)
I0505 15:40:16.331099  5317 net.cpp:129] Top shape: 1 1 375 500 (187500)
I0505 15:40:16.331130  5317 net.cpp:137] Memory required for data: 3000000
I0505 15:40:16.331173  5317 layer_factory.hpp:77] Creating layer data_data_0_split
I0505 15:40:16.331220  5317 net.cpp:84] Creating Layer data_data_0_split
I0505 15:40:16.331256  5317 net.cpp:406] data_data_0_split <- data
I0505 15:40:16.331293  5317 net.cpp:380] data_data_0_split -> data_data_0_split_0
I0505 15:40:16.331348  5317 net.cpp:380] data_data_0_split -> data_data_0_split_1
I0505 15:40:16.331512  5317 net.cpp:122] Setting up data_data_0_split
I0505 15:40:16.331542  5317 net.cpp:129] Top shape: 1 3 375 500 (562500)
I0505 15:40:16.331569  5317 net.cpp:129] Top shape: 1 3 375 500 (562500)
I0505 15:40:16.331596  5317 net.cpp:137] Memory required for data: 7500000
I0505 15:40:16.331619  5317 layer_factory.hpp:77] Creating layer conv1_1
I0505 15:40:16.331663  5317 net.cpp:84] Creating Layer conv1_1
I0505 15:40:16.331687  5317 net.cpp:406] conv1_1 <- data_data_0_split_0
I0505 15:40:16.331720  5317 net.cpp:380] conv1_1 -> conv1_1
I0505 15:40:17.840531  5317 net.cpp:122] Setting up conv1_1
I0505 15:40:17.840585  5317 net.cpp:129] Top shape: 1 64 573 698 (25597056)
I0505 15:40:17.840610  5317 net.cpp:137] Memory required for data: 109888224
I0505 15:40:17.840651  5317 layer_factory.hpp:77] Creating layer relu1_1
I0505 15:40:17.840685  5317 net.cpp:84] Creating Layer relu1_1
I0505 15:40:17.840703  5317 net.cpp:406] relu1_1 <- conv1_1
I0505 15:40:17.840723  5317 net.cpp:367] relu1_1 -> conv1_1 (in-place)
I0505 15:40:17.842114  5317 net.cpp:122] Setting up relu1_1
I0505 15:40:17.842149  5317 net.cpp:129] Top shape: 1 64 573 698 (25597056)
I0505 15:40:17.842166  5317 net.cpp:137] Memory required for data: 212276448
I0505 15:40:17.842182  5317 layer_factory.hpp:77] Creating layer conv1_2
I0505 15:40:17.842221  5317 net.cpp:84] Creating Layer conv1_2
I0505 15:40:17.842236  5317 net.cpp:406] conv1_2 <- conv1_1
I0505 15:40:17.842260  5317 net.cpp:380] conv1_2 -> conv1_2
I0505 15:40:17.847681  5317 net.cpp:122] Setting up conv1_2
I0505 15:40:17.847723  5317 net.cpp:129] Top shape: 1 64 573 698 (25597056)
I0505 15:40:17.847745  5317 net.cpp:137] Memory required for data: 314664672
I0505 15:40:17.847779  5317 layer_factory.hpp:77] Creating layer relu1_2
I0505 15:40:17.847810  5317 net.cpp:84] Creating Layer relu1_2
I0505 15:40:17.847826  5317 net.cpp:406] relu1_2 <- conv1_2
I0505 15:40:17.847846  5317 net.cpp:367] relu1_2 -> conv1_2 (in-place)
I0505 15:40:17.849138  5317 net.cpp:122] Setting up relu1_2
I0505 15:40:17.849169  5317 net.cpp:129] Top shape: 1 64 573 698 (25597056)
I0505 15:40:17.849186  5317 net.cpp:137] Memory required for data: 417052896
I0505 15:40:17.849202  5317 layer_factory.hpp:77] Creating layer pool1
I0505 15:40:17.849226  5317 net.cpp:84] Creating Layer pool1
I0505 15:40:17.849241  5317 net.cpp:406] pool1 <- conv1_2
I0505 15:40:17.849261  5317 net.cpp:380] pool1 -> pool1
I0505 15:40:17.849390  5317 net.cpp:122] Setting up pool1
I0505 15:40:17.849406  5317 net.cpp:129] Top shape: 1 64 287 349 (6410432)
I0505 15:40:17.849421  5317 net.cpp:137] Memory required for data: 442694624
I0505 15:40:17.849434  5317 layer_factory.hpp:77] Creating layer conv2_1
I0505 15:40:17.849462  5317 net.cpp:84] Creating Layer conv2_1
I0505 15:40:17.849475  5317 net.cpp:406] conv2_1 <- pool1
I0505 15:40:17.849496  5317 net.cpp:380] conv2_1 -> conv2_1
I0505 15:40:17.853972  5317 net.cpp:122] Setting up conv2_1
I0505 15:40:17.854018  5317 net.cpp:129] Top shape: 1 128 287 349 (12820864)
I0505 15:40:17.854041  5317 net.cpp:137] Memory required for data: 493978080
I0505 15:40:17.854084  5317 layer_factory.hpp:77] Creating layer relu2_1
I0505 15:40:17.854116  5317 net.cpp:84] Creating Layer relu2_1
I0505 15:40:17.854133  5317 net.cpp:406] relu2_1 <- conv2_1
I0505 15:40:17.854153  5317 net.cpp:367] relu2_1 -> conv2_1 (in-place)
I0505 15:40:17.855494  5317 net.cpp:122] Setting up relu2_1
I0505 15:40:17.855537  5317 net.cpp:129] Top shape: 1 128 287 349 (12820864)
I0505 15:40:17.855561  5317 net.cpp:137] Memory required for data: 545261536
I0505 15:40:17.855579  5317 layer_factory.hpp:77] Creating layer conv2_2
I0505 15:40:17.855623  5317 net.cpp:84] Creating Layer conv2_2
I0505 15:40:17.855640  5317 net.cpp:406] conv2_2 <- conv2_1
I0505 15:40:17.855669  5317 net.cpp:380] conv2_2 -> conv2_2
I0505 15:40:17.861979  5317 net.cpp:122] Setting up conv2_2
I0505 15:40:17.862028  5317 net.cpp:129] Top shape: 1 128 287 349 (12820864)
I0505 15:40:17.862051  5317 net.cpp:137] Memory required for data: 596544992
I0505 15:40:17.862080  5317 layer_factory.hpp:77] Creating layer relu2_2
I0505 15:40:17.862112  5317 net.cpp:84] Creating Layer relu2_2
I0505 15:40:17.862129  5317 net.cpp:406] relu2_2 <- conv2_2
I0505 15:40:17.862150  5317 net.cpp:367] relu2_2 -> conv2_2 (in-place)
I0505 15:40:17.863425  5317 net.cpp:122] Setting up relu2_2
I0505 15:40:17.863457  5317 net.cpp:129] Top shape: 1 128 287 349 (12820864)
I0505 15:40:17.863476  5317 net.cpp:137] Memory required for data: 647828448
I0505 15:40:17.863492  5317 layer_factory.hpp:77] Creating layer pool2
I0505 15:40:17.863514  5317 net.cpp:84] Creating Layer pool2
I0505 15:40:17.863529  5317 net.cpp:406] pool2 <- conv2_2
I0505 15:40:17.863554  5317 net.cpp:380] pool2 -> pool2
I0505 15:40:17.863689  5317 net.cpp:122] Setting up pool2
I0505 15:40:17.863708  5317 net.cpp:129] Top shape: 1 128 144 175 (3225600)
I0505 15:40:17.863723  5317 net.cpp:137] Memory required for data: 660730848
I0505 15:40:17.863735  5317 layer_factory.hpp:77] Creating layer conv3_1
I0505 15:40:17.863765  5317 net.cpp:84] Creating Layer conv3_1
I0505 15:40:17.863780  5317 net.cpp:406] conv3_1 <- pool2
I0505 15:40:17.863798  5317 net.cpp:380] conv3_1 -> conv3_1
I0505 15:40:17.869124  5317 net.cpp:122] Setting up conv3_1
I0505 15:40:17.869174  5317 net.cpp:129] Top shape: 1 256 144 175 (6451200)
I0505 15:40:17.869195  5317 net.cpp:137] Memory required for data: 686535648
I0505 15:40:17.869246  5317 layer_factory.hpp:77] Creating layer relu3_1
I0505 15:40:17.869279  5317 net.cpp:84] Creating Layer relu3_1
I0505 15:40:17.869299  5317 net.cpp:406] relu3_1 <- conv3_1
I0505 15:40:17.869324  5317 net.cpp:367] relu3_1 -> conv3_1 (in-place)
I0505 15:40:17.870867  5317 net.cpp:122] Setting up relu3_1
I0505 15:40:17.870905  5317 net.cpp:129] Top shape: 1 256 144 175 (6451200)
I0505 15:40:17.870934  5317 net.cpp:137] Memory required for data: 712340448
I0505 15:40:17.870950  5317 layer_factory.hpp:77] Creating layer conv3_2
I0505 15:40:17.870985  5317 net.cpp:84] Creating Layer conv3_2
I0505 15:40:17.871001  5317 net.cpp:406] conv3_2 <- conv3_1
I0505 15:40:17.871023  5317 net.cpp:380] conv3_2 -> conv3_2
I0505 15:40:17.877616  5317 net.cpp:122] Setting up conv3_2
I0505 15:40:17.877665  5317 net.cpp:129] Top shape: 1 256 144 175 (6451200)
I0505 15:40:17.877687  5317 net.cpp:137] Memory required for data: 738145248
I0505 15:40:17.877713  5317 layer_factory.hpp:77] Creating layer relu3_2
I0505 15:40:17.877748  5317 net.cpp:84] Creating Layer relu3_2
I0505 15:40:17.877765  5317 net.cpp:406] relu3_2 <- conv3_2
I0505 15:40:17.877785  5317 net.cpp:367] relu3_2 -> conv3_2 (in-place)
I0505 15:40:17.879271  5317 net.cpp:122] Setting up relu3_2
I0505 15:40:17.879315  5317 net.cpp:129] Top shape: 1 256 144 175 (6451200)
I0505 15:40:17.879339  5317 net.cpp:137] Memory required for data: 763950048
I0505 15:40:17.879357  5317 layer_factory.hpp:77] Creating layer conv3_3
I0505 15:40:17.879406  5317 net.cpp:84] Creating Layer conv3_3
I0505 15:40:17.879428  5317 net.cpp:406] conv3_3 <- conv3_2
I0505 15:40:17.879454  5317 net.cpp:380] conv3_3 -> conv3_3
I0505 15:40:17.887154  5317 net.cpp:122] Setting up conv3_3
I0505 15:40:17.887202  5317 net.cpp:129] Top shape: 1 256 144 175 (6451200)
I0505 15:40:17.887228  5317 net.cpp:137] Memory required for data: 789754848
I0505 15:40:17.887259  5317 layer_factory.hpp:77] Creating layer relu3_3
I0505 15:40:17.887291  5317 net.cpp:84] Creating Layer relu3_3
I0505 15:40:17.887310  5317 net.cpp:406] relu3_3 <- conv3_3
I0505 15:40:17.887331  5317 net.cpp:367] relu3_3 -> conv3_3 (in-place)
I0505 15:40:17.888519  5317 net.cpp:122] Setting up relu3_3
I0505 15:40:17.888551  5317 net.cpp:129] Top shape: 1 256 144 175 (6451200)
I0505 15:40:17.888572  5317 net.cpp:137] Memory required for data: 815559648
I0505 15:40:17.888586  5317 layer_factory.hpp:77] Creating layer pool3
I0505 15:40:17.888613  5317 net.cpp:84] Creating Layer pool3
I0505 15:40:17.888628  5317 net.cpp:406] pool3 <- conv3_3
I0505 15:40:17.888649  5317 net.cpp:380] pool3 -> pool3
I0505 15:40:17.888797  5317 net.cpp:122] Setting up pool3
I0505 15:40:17.888816  5317 net.cpp:129] Top shape: 1 256 72 88 (1622016)
I0505 15:40:17.888833  5317 net.cpp:137] Memory required for data: 822047712
I0505 15:40:17.888845  5317 layer_factory.hpp:77] Creating layer conv4_1
I0505 15:40:17.888873  5317 net.cpp:84] Creating Layer conv4_1
I0505 15:40:17.888887  5317 net.cpp:406] conv4_1 <- pool3
I0505 15:40:17.888906  5317 net.cpp:380] conv4_1 -> conv4_1
I0505 15:40:17.897431  5317 net.cpp:122] Setting up conv4_1
I0505 15:40:17.897486  5317 net.cpp:129] Top shape: 1 512 72 88 (3244032)
I0505 15:40:17.897514  5317 net.cpp:137] Memory required for data: 835023840
I0505 15:40:17.897552  5317 layer_factory.hpp:77] Creating layer relu4_1
I0505 15:40:17.897588  5317 net.cpp:84] Creating Layer relu4_1
I0505 15:40:17.897605  5317 net.cpp:406] relu4_1 <- conv4_1
I0505 15:40:17.897629  5317 net.cpp:367] relu4_1 -> conv4_1 (in-place)
I0505 15:40:17.899286  5317 net.cpp:122] Setting up relu4_1
I0505 15:40:17.899335  5317 net.cpp:129] Top shape: 1 512 72 88 (3244032)
I0505 15:40:17.899360  5317 net.cpp:137] Memory required for data: 847999968
I0505 15:40:17.899381  5317 layer_factory.hpp:77] Creating layer conv4_2
I0505 15:40:17.899421  5317 net.cpp:84] Creating Layer conv4_2
I0505 15:40:17.899440  5317 net.cpp:406] conv4_2 <- conv4_1
I0505 15:40:17.899468  5317 net.cpp:380] conv4_2 -> conv4_2
I0505 15:40:17.911787  5317 net.cpp:122] Setting up conv4_2
I0505 15:40:17.911835  5317 net.cpp:129] Top shape: 1 512 72 88 (3244032)
I0505 15:40:17.911859  5317 net.cpp:137] Memory required for data: 860976096
I0505 15:40:17.911897  5317 layer_factory.hpp:77] Creating layer relu4_2
I0505 15:40:17.911931  5317 net.cpp:84] Creating Layer relu4_2
I0505 15:40:17.911952  5317 net.cpp:406] relu4_2 <- conv4_2
I0505 15:40:17.911975  5317 net.cpp:367] relu4_2 -> conv4_2 (in-place)
I0505 15:40:17.913126  5317 net.cpp:122] Setting up relu4_2
I0505 15:40:17.913168  5317 net.cpp:129] Top shape: 1 512 72 88 (3244032)
I0505 15:40:17.913190  5317 net.cpp:137] Memory required for data: 873952224
I0505 15:40:17.913206  5317 layer_factory.hpp:77] Creating layer conv4_3
I0505 15:40:17.913239  5317 net.cpp:84] Creating Layer conv4_3
I0505 15:40:17.913257  5317 net.cpp:406] conv4_3 <- conv4_2
I0505 15:40:17.913282  5317 net.cpp:380] conv4_3 -> conv4_3
I0505 15:40:17.925925  5317 net.cpp:122] Setting up conv4_3
I0505 15:40:17.925977  5317 net.cpp:129] Top shape: 1 512 72 88 (3244032)
I0505 15:40:17.926003  5317 net.cpp:137] Memory required for data: 886928352
I0505 15:40:17.926033  5317 layer_factory.hpp:
