Two ways to load a saved TensorFlow model for testing on Windows

1. Load a specific saved checkpoint instead of the latest one
# sess and charcnn are created inside the graph context below and reused here
def get_logits_with_value_by_input(start, end):
    """Run the prediction op on test_x[start:end] and return it together with the true labels."""
    x = test_x[start:end]
    logits = sess.run(charcnn.predictions,
                      feed_dict={charcnn.input_x: x, charcnn.dropout_keep_prob: 1.0})
    real_labels = test_y[start:end]
    real_labels_label = tf.argmax(real_labels, axis=1)     # one-hot labels -> class indices
    return logits, real_labels_label
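
If you also want an overall accuracy number rather than just the raw outputs, here is a minimal NumPy sketch (not part of the original code). It assumes test_y is one-hot encoded, and works whether charcnn.predictions returns class indices (shape [batch]) or per-class scores (shape [batch, num_classes]):

import numpy as np

def accuracy_from_predictions(pred, labels_onehot):
    pred = np.asarray(pred)
    predicted = pred if pred.ndim == 1 else np.argmax(pred, axis=1)   # class index per example
    actual = np.argmax(labels_onehot, axis=1)                         # true class per example
    return float(np.mean(predicted == actual))

# Example usage, once the model has been restored (see below):
# logits, _ = get_logits_with_value_by_input(0, -1)
# print(accuracy_from_predictions(logits, test_y))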



with tf.Graph().as_default():
    session_config = tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)
    with tf.Session(config=session_config) as sess:
        charcnn = charCNN(config.l0, config.num_classes, config.model.conv_layers, config.model.fc_layers,
                          l2_reg_lambda=0.0)
        saver = tf.train.Saver()
        # checkpoint_dir = os.path.abspath(os.path.join(out_dir, 'checkpoints'))
        if os.path.exists("./run/1513350504/checkpoints/checkpoint"):    # be sure to use forward slashes '/' in the path, even on Windows
            print("Restoring Variables from Checkpoint")
            saver = tf.train.import_meta_graph('./run/1513350504/checkpoints/model-1200.meta')   # import the graph of the checkpoint whose validation accuracy was best
            saver.restore(sess, './run/1513350504/checkpoints/model-1200')                       # restore the weights; note: no file extension here
        else:
            print("Can't find the checkpoint, going to stop")
        logits, real_labels_label = get_logits_with_value_by_input(0, -1)
        print(logits)
        print(real_labels_label.eval())
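
Method 1 assumes you already know which step to restore (model-1200 in the example above). If you are not sure which checkpoints are still on disk, a small sketch (not part of the original post) using tf.train.get_checkpoint_state lists everything recorded in the checkpoint file; the directory path is the same example path used above:

ckpt_state = tf.train.get_checkpoint_state('./run/1513350504/checkpoints/')
if ckpt_state is not None:
    print("latest:", ckpt_state.model_checkpoint_path)
    for path in ckpt_state.all_model_checkpoint_paths:
        print("available:", path)          # e.g. ./run/1513350504/checkpoints/model-1200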



2. Load the latest checkpoint directly (but sometimes the model has not converged well, and the latest checkpoint's accuracy is not the highest)
with tf.Graph().as_default():
    session_config = tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)
    with tf.Session(config=session_config) as sess:
        charcnn = charCNN(config.l0, config.num_classes, config.model.conv_layers, config.model.fc_layers,
                          l2_reg_lambda=0.0)
        saver = tf.train.Saver()
        # checkpoint_dir = os.path.abspath(os.path.join(out_dir, 'checkpoints'))
        if os.path.exists("checkpoints/checkpoint"):                                   # the index file in the same directory that latest_checkpoint reads
            print("Restoring Variables from Checkpoint")
            saver.restore(sess, tf.train.latest_checkpoint('checkpoints/'))            # again, use forward slashes in the path
        else:
            print("Can't find the checkpoint, going to stop")
        logits, real_labels_label = get_logits_with_value_by_input(0, -1)
        print(logits)
        print(real_labels_label.eval())
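
The caveat about the latest checkpoint is easier to live with if older checkpoints are still on disk. As a training-side sketch (not from the original post; `step` is an assumed global-step counter), raising max_to_keep on the Saver preserves earlier, possibly better-converged checkpoints so that method 1 can still restore them later:

# tf.train.Saver keeps only the 5 most recent checkpoints by default
saver = tf.train.Saver(max_to_keep=20)     # keep the 20 most recent checkpoints instead
# inside the training loop:
# saver.save(sess, './run/1513350504/checkpoints/model', global_step=step)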
