**问题描述:**在一个子程序中添加了两个BN层,两层都用同一个bool类型变量赋给training参数,但BN层没有单独命名(没有传name参数),在变量复用时导致出错。
报错信息:
Variable model_definition/batch_normalization_5/gamma does not exist, or was not created with tf.get_variable(). Did you mean to set reuse=tf.AUTO_REUSE in VarScope?
程序:
# Builds one branch ("part") of the LA conv network: stacked 3-D conv layers,
# each followed by batch normalization and ReLU, then concatenation and a
# flatten/reshape of the result.
# NOTE(review): this is an excerpt from a forum post — indentation was lost in
# extraction, and the `...` / `(其他程序)` lines stand for omitted code.
def LA_conv_net(input_imgs, part, istraining):
# part is a string used as a scope/variable-name prefix; istraining is a bool
# passed to every batch_normalization(training=...) call.
input_dim = input_imgs.get_shape()[1].value
# Layer 1: conv, BN, ReLU, filters [3,3,128]
with tf.compat.v1.variable_scope(part+'_conv1') as scope:
conv1_w = truncated_normal_var(name=part+'_conv1_w',shape=[input_dim,3,3,1,128],
dtype=tf.float32, stddev=init_kaiming(input_imgs))
conv1_b = zeros_var(name=part+'_conv1_b', shape=[128],
dtype=tf.float32, b_value=0.0)
conv1 = tf.nn.bias_add(conv3d(input_imgs, conv1_w), conv1_b)
# This BN sits inside the part-specific variable_scope, so its variables get
# a unique, stable prefix even without an explicit name= argument.
BN_conv1 = tf.layers.batch_normalization(conv1, training=istraining)
relu_conv1 = tf.nn.relu(BN_conv1) # output [,1,9,9,128] per the author's note
# Layer 2: convolution
with tf.compat.v1.variable_scope(part+'_conv2') as scope:
...
(其他程序)
...
conv_concat = tf.concat([relu_conv3,relu_conv4,relu_conv5],axis=4)
# Second BN layer, added OUTSIDE any named variable scope: per the post's
# diagnosis, TF auto-numbers it (batch_normalization_N), and that counter can
# differ between the first (training) and second (reuse/eval) graph build,
# triggering the "Variable ... does not exist" error under reuse.
conv_concat_bn = tf.layers.batch_normalization(conv_concat, training=istraining)
# **The error is raised here**
...
(其他程序)
...
reshaped_output = tf.reshape(tf.squeeze(conv_concat_bn), [-1, num_flat])
return (reshaped_output)
训练和验证程序
# Build the training and evaluation graphs so that both share one set of
# variables inside the 'model_definition' scope (TF1-style weight sharing).
with tf.compat.v1.variable_scope('model_definition') as scope:
    # Declare the training network (creates the variables).
    train_output = DR_CNN_models(train_img_batch, batch_size, NUM_CLASS, istraining=True)
    # Reuse the same variables within this scope for the eval graph; every
    # layer must therefore resolve to the exact same variable names.
    scope.reuse_variables()
    # Fixed: the original line was missing its closing parenthesis.
    test_output = DR_CNN_models(test_imgs, batch_size, NUM_CLASS, istraining=False)
程序在执行验证程序的时候无法执行,报错为:
“Variable model_definition/batch_normalization_5/gamma does not exist, or was not created with
tf.get_variable(). Did you mean to set reuse=tf.AUTO_REUSE in VarScope?”
解决办法:
给第二个BN层显式指定name参数,使其变量名固定、不再依赖自动编号:
conv_concat_bn = tf.layers.batch_normalization(conv_concat, training=istraining, name=part+'_conv_concat_bn')
子程序LA_conv_net更改为:
# Corrected version of LA_conv_net: identical to the original except that the
# second batch-normalization layer is given an explicit, part-prefixed name,
# so its variable names are stable across the training and reuse/eval builds.
# NOTE(review): excerpt from a forum post — indentation was lost in extraction,
# and the `...` / `(其他程序)` lines stand for omitted code.
def LA_conv_net(input_imgs, part, istraining):
# part is a string used as a scope/variable-name prefix; istraining is a bool
# passed to every batch_normalization(training=...) call.
input_dim = input_imgs.get_shape()[1].value
# Layer 1: conv, BN, ReLU, filters [3,3,128]
with tf.compat.v1.variable_scope(part+'_conv1') as scope:
conv1_w = truncated_normal_var(name=part+'_conv1_w',shape=[input_dim,3,3,1,128],
dtype=tf.float32, stddev=init_kaiming(input_imgs))
conv1_b = zeros_var(name=part+'_conv1_b', shape=[128],
dtype=tf.float32, b_value=0.0)
conv1 = tf.nn.bias_add(conv3d(input_imgs, conv1_w), conv1_b)
# Already scoped by part+'_conv1', so no explicit name= is needed here.
BN_conv1 = tf.layers.batch_normalization(conv1, training=istraining)
relu_conv1 = tf.nn.relu(BN_conv1) # output [,1,9,9,128] per the author's note
# Layer 2: convolution
with tf.compat.v1.variable_scope(part+'_conv2') as scope:
...
(其他程序)
...
conv_concat = tf.concat([relu_conv3,relu_conv4,relu_conv5],axis=4)
# The fix: give this BN layer an explicit name so its variables are no longer
# auto-numbered (batch_normalization_N) and resolve identically on reuse.
conv_concat_bn = tf.layers.batch_normalization(conv_concat, training=istraining, name=part+'_conv_concat_bn') # explicit layer name added
...
(其他程序)
...
reshaped_output = tf.reshape(tf.squeeze(conv_concat_bn), [-1, num_flat])
return (reshaped_output)