用tf.Session()构建会话,要定义好所有的operation之后,才能构建会话
用tf.InteractiveSession()构建会话,可以先构建一个会话再定义operation,适用于交互式环境。
创建session时,对session进行参数设置
with tf.Session(config=tf.ConfigProto(...),...)
#tf.ConfigProto()的参数
log_device_placement=True #是否打印设备分配日志
allow_soft_placement=True #如果你指定的设备不存在,允许TF自动分配设备
tf.ConfigProto(log_device_placement=True,allow_soft_placement=True)
tf.get_variable() 用于创建或获取变量
(1)用于创建变量时,与tf.Variable()功能基本相同
# Creating a NEW variable: the two forms below are essentially equivalent.
v = tf.get_variable("v", shape=[1], initializer=tf.constant_initializer(1.0))  # fixed: was "initializer.constant(1.0)", which is not valid syntax
v = tf.Variable(tf.constant(1.0, shape=[1]), name="v")
(2)用于获取变量时
先通过tf.variable_scope()生成一个上下文管理器,并指明需求的变量在这个上下文管理器中
然后直接用tf.get_variable()获取已经生成的变量
#通过tf.variable_scope()控制tf.get_variable(),以获取或创建变量
#名为“zx”的上下文控制器
# Create a variable named "v" inside the variable scope "zx".
with tf.variable_scope("zx"):
    v = tf.get_variable("v", [1], initializer=tf.constant_initializer(1.0))  # defines variable "zx/v"
with tf.variable_scope("zx"):
    v = tf.get_variable("v", [1])  # FAILS: "zx/v" already exists and reuse is not set
如果上下文管理器中已经有了想要的变量,想要通过tf.get_variable()获取它,可通过设置参数reuse的值为True来获取:
# With reuse=True, get_variable FETCHES the existing variable instead of creating one.
with tf.variable_scope("zx", reuse=True):
    v1 = tf.get_variable("v", [1])  # fetches the existing "zx/v"
print(v == v1)  # True — both names refer to the same variable
(3)其他
返回一个用于初始化权重的初始化程序“Xavier”,用来保持每一层梯度大小基本相同
xavier_initializer(
uniform=True,
seed=None,
dtype=tf.float32
)
参数:uniform——True时使用均匀分布初始化,False时使用截断正态分布;seed——随机种子;dtype——数据类型(仅支持浮点类型)
返回值:一个初始化器(initializer),可作为tf.get_variable()的initializer参数使用
通式:tf.placeholder(dtype,shape=None,name=None)
可将这个函数中的值当作形参,在具体用到的时候再给它们赋值
参数:
# A placeholder is a "formal parameter": it holds no value at graph-build
# time and must be fed a concrete value via feed_dict when the graph runs.
x = tf.placeholder(tf.float32, shape=(1024, 1024))
y = tf.matmul(x, x)  # matrix multiplication
with tf.Session() as sess:
    # print(sess.run(y))  # ERROR: x has not been given a concrete value yet
    rand_array = np.random.rand(1024, 1024)
    print(sess.run(y, feed_dict={x: rand_array}))  # succeeds: x is fed via feed_dict
返回:tensor类型
tf.train.AdamOptimizer:实现了Adam算法的优化器
tf.nn.xw_plus_b(x,weights,biases)相当于tf.matmul(x,weights)+biases
import tensorflow as tf

# tf.nn.xw_plus_b(x, weights, biases) is equivalent to tf.matmul(x, weights) + biases.
x = [[1, 2, 3], [4, 5, 6]]
w = [[7, 8], [9, 10], [11, 12]]
b = [[3, 3], [3, 3]]
res1 = tf.nn.xw_plus_b(x, w, [3, 3])  # xw_plus_b takes a 1-D bias, broadcast over rows
res2 = tf.matmul(x, w) + b
init_op = tf.initialize_all_variables()  # fixed typo: was tf.initializer_all_variables(), which does not exist
with tf.Session() as sess:
    # Run the initialization op (a no-op here: this graph has no variables).
    sess.run(init_op)
    print(sess.run(res1))
    print(sess.run(res2))
输出:
[[ 61 67]
[142 157]]
[[ 61 67]
[142 157]]
给使用placeholder创建出来的tensor赋值。
tf.variable_scope() 可以使变量有相同的名字,包括tf.get_variable()和tf.Variable()
tf.name_scope() 也可以使变量有相同的名字,但是只能应用于tf.Variable()
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# tf.variable_scope prefixes the names of variables created by BOTH
# tf.get_variable and tf.Variable, so the same short name can be reused
# in different scopes.
with tf.variable_scope('V1'):
    a1 = tf.get_variable(name='a1', shape=[1], initializer=tf.constant_initializer(1))
    a2 = tf.Variable(tf.random_normal(shape=[2, 3], mean=0, stddev=1), name='a2')  # fixed typo: was tf.Varible
with tf.variable_scope('V2'):
    a3 = tf.get_variable(name='a1', shape=[1], initializer=tf.constant_initializer(1))
    a4 = tf.Variable(tf.random_normal(shape=[2, 3], mean=0, stddev=1), name='a2')  # fixed typo: was shape[2,3]
with tf.Session() as sess:
    sess.run(tf.initialize_all_variables())
    print(a1.name)  # V1/a1:0
    print(a2.name)  # V1/a2:0
    print(a3.name)  # V2/a1:0
    print(a4.name)  # V2/a2:0
输出:
V1/a1:0
V1/a2:0
V2/a1:0
V2/a2:0
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt  # fixed typo: was "as pltl"

# tf.name_scope only prefixes names created by tf.Variable;
# tf.get_variable IGNORES name_scope. Both get_variable calls below
# therefore try to create a top-level variable "a1", and the second
# one raises:
#   Variable a1 already exists, disallowed. Did you mean to set reuse=True in VarScope?
with tf.name_scope('V1'):
    a1 = tf.get_variable(name='a1', shape=[1], initializer=tf.constant_initializer(1))
    a2 = tf.Variable(tf.random_normal(shape=[2, 3], mean=0, stddev=1), name='a2')
with tf.name_scope('V2'):
    a3 = tf.get_variable(name='a1', shape=[1], initializer=tf.constant_initializer(1))  # raises here (intentional demo)
    a4 = tf.Variable(tf.random_normal(shape=[2, 3], mean=0, stddev=1), name='a2')
with tf.Session() as sess:
    sess.run(tf.initialize_all_variables())
    print(a1.name)
    print(a2.name)
    print(a3.name)
    print(a4.name)
输出:
报错:Variable a1 already exists, disallowed. Did you mean to set reuse=True in VarScope? Originally defined at:
正确写法:
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# Correct usage: inside tf.name_scope, create variables with tf.Variable,
# whose names DO receive the scope prefix — so the short name 'a1' can
# safely appear in both scopes.
with tf.name_scope('V1'):
    a1 = tf.Variable(tf.random_normal(shape=[2, 3], mean=0, stddev=1), name='a1')
with tf.name_scope('V2'):
    a2 = tf.Variable(tf.random_normal(shape=[2, 3], mean=0, stddev=1), name='a1')
with tf.Session() as sess:
    sess.run(tf.initialize_all_variables())
    print(a1.name)  # V1/a1:0
    print(a2.name)  # V2/a1:0
输出:
V1/a1:0
V2/a1:0
后接:https://blog.csdn.net/Dorothy_Xue/article/details/84975706