MNIST multilayer perceptron, with model saving and reloading

The previous post covered two ways of training on MNIST; this time I try building the network myself. The model contains just two hidden layers and one dropout layer.

The model trains quickly, and its training accuracy reaches over 98%.

In addition, the code also shows how to save the model and load it back in.

The full code is as follows:

# -*- coding: utf-8 -*-
"""
Created on Thu Aug 16 10:02:18 2018

@author: czx
"""

import tensorflow as tf 
from tensorflow.examples.tutorials.mnist import input_data 
import os

mnist = input_data.read_data_sets(r"E:\桌面上的文件\MNIST", one_hot=True)  # local directory holding the MNIST data files

sess = tf.InteractiveSession()

# Parameter initialization
# Draw initial values from a truncated normal distribution
def weight_variable(shape): 
    initial = tf.truncated_normal(shape, mean=0, stddev=0.1) 
    # truncated normal: shape is the tensor shape, mean the mean, stddev the standard deviation
    return tf.Variable(initial)

def bias_variable(shape): 
    initial = tf.constant(0.1, shape=shape) 
    return tf.Variable(initial)

in_unit = 784   # 28*28 input pixels
h1_unit = 300   # units in hidden layer 1
h2_unit = 150   # units in hidden layer 2

w1 = weight_variable([in_unit,h1_unit])
b1 = bias_variable([h1_unit])
w2 = weight_variable([h1_unit,h2_unit])
b2 = bias_variable([h2_unit])
w3 = tf.Variable(tf.zeros([h2_unit,10]))  # output layer parameters; zero init is fine for the final softmax layer
b3 = tf.Variable(tf.zeros([10]))

x = tf.placeholder(tf.float32,[None, in_unit])
keep_prob = tf.placeholder(tf.float32)  # keep probability for the dropout layer

# hidden layer 1
hidden1 = tf.nn.relu(tf.matmul(x, w1) + b1)

# hidden layer 2
hidden2 = tf.nn.relu(tf.matmul(hidden1, w2) + b2)

# dropout applied to the second hidden layer
hidden2_drop = tf.nn.dropout(hidden2, keep_prob)

# softmax output layer
y = tf.nn.softmax(tf.matmul(hidden2_drop, w3) + b3)

y_ = tf.placeholder(tf.float32, [None, 10])
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1])) 
train_step = tf.train.AdagradOptimizer(0.3).minimize(cross_entropy)

correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

# Training
tf.global_variables_initializer().run()
for i in range(3000):
    batch_xs, batch_ys = mnist.train.next_batch(150)
    train_step.run({x: batch_xs, y_: batch_ys, keep_prob: 0.75})
    if i % 10 == 0:
        train_accuracy = sess.run(accuracy, feed_dict={x: batch_xs, y_: batch_ys, keep_prob: 1.0})
        print("step", i, "train accuracy", train_accuracy)
    
print ("result accuracy:",accuracy.eval({x:mnist.test.images,y_:mnist.test.labels,keep_prob:1.0}))

save_path = "output"
save_dir = "model2_multilayers.ckpt"
saver = tf.train.Saver()
if not os.path.exists(save_path):
    os.makedirs(save_path)
save_path_full = os.path.join(save_path,save_dir)
saver.save(sess,save_path_full)

sess.close()
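
The call to saver.save writes several files into the output directory: model2_multilayers.ckpt.meta (the graph), .index and .data files (the variable values), and a small "checkpoint" bookkeeping file. As a quick sanity check that the save worked, you can list the variables stored in the checkpoint; the sketch below is a minimal example and assumes the same path used above:

import tensorflow as tf

ckpt_path = "output/model2_multilayers.ckpt"  # the path passed to saver.save above
for name, shape in tf.train.list_variables(ckpt_path):
    print(name, shape)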


#model restore test

"""
saver = tf.train.Saver() # 声明tf.train.Saver类用于保存模型
with tf.Session() as sess:
    saver.restore(sess, "./output/model2_multilayers.ckpt")
    print("Model Restored")
"""

 
