# Visualizing the training process with TensorBoard in TensorFlow

import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data

# Hyperparameters and paths for the MNIST/TensorBoard demo.
max_steps=1000 # number of training steps
learning_rate=0.001 # learning rate for the optimizer
dropout=0.9 # dropout keep probability (fraction of units retained)
data_dir='./MNIST_data' # directory where the MNIST data is stored
# MNIST dataset download link: https://pan.baidu.com/s/13M8TYuw77D_cH0tnLU4O4g  password: xa2w
log_dir='./' # directory where TensorBoard log files are written

# Helper functions used to build the network and its summaries.

# Weight initializer.
def weight_variable(shape):
  """Return a trainable weight tensor of the given shape.

  Values are drawn from a truncated normal (stddev 0.1): small random
  values break symmetry, whereas all-zero weights would leave the
  network stuck.
  """
  init_values = tf.truncated_normal(shape, stddev=0.1)
  return tf.Variable(init_values)

# Bias initializer.
def bias_variable(shape):
  """Return a trainable bias tensor of the given shape, filled with 0.1.

  A small positive constant is the conventional starting point here —
  presumably chosen to keep ReLU units active early on (TODO confirm
  against the layers that use it).
  """
  return tf.Variable(tf.constant(0.1, shape=shape))

# Record summary statistics of a tensor so TensorBoard can plot them.
def variable_summaries(var):
  """Attach mean/stddev/max/min scalar summaries and a histogram to `var`."""
  with tf.name_scope('summaries'):
    mu = tf.reduce_mean(var)
    tf.summary.scalar('mean', mu)  # scalar plot of the mean
    with tf.name_scope('stddev'):
      # standard deviation computed by hand: sqrt(E[(x - mean)^2])
      sigma = tf.sqrt(tf.reduce_mean(tf.square(var - mu)))
    tf.summary.scalar('stddev', sigma)  # scalar plot of the std deviation
    extrema = (('max', tf.reduce_max(var)), ('min', tf.reduce_min(var)))
    for tag, value in extrema:
      tf.summary.scalar(tag, value)  # scalar plots of the extremes
    tf.summary.histogram('histogram', var)  # full value distribution

def nn_layer(input_tensor, input_dim, output_dim, layer_name, act=tf.nn.relu):
  """R

# Tags: tensorflow, tensorboard, mnist, neural networks