import torch
A = torch.arange(20).reshape(5, 4)
print(A)
print(A.T)  # the transpose of A
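# Added check (assert only, so the recorded output below is unchanged):
# transposing twice gives back the original matrix.
assert torch.equal(A.T.T, A)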
# not matrix multiplication
A = torch.arange(20, dtype=torch.float32).reshape(5, 4)
B = A.clone()
print(A * B)  # simple elementwise multiplication at each position, not matrix multiplication
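# Added check (assert only): since B is a copy of A, the elementwise product is just A squared.
assert torch.equal(A * B, A ** 2)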
A = torch.tensor([[1, 2, 3],
                  [1, 2, 1],
                  [4, 1, 7]], dtype=torch.float32)
print(A.shape)
A_sum_axis0 = A.sum(axis=0)  # sum along axis 0, removing that dimension
print(A_sum_axis0)
A_sum_axis1 = A.sum(axis=1)  # sum along axis 1, removing that dimension
print(A_sum_axis1)
A_sum_axis10 = A.sum(axis=[0, 1])  # sum over both axes, i.e. the total of all elements
print(A_sum_axis10)
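# Added check (assert only): summing over both axes is the same as summing everything.
assert A_sum_axis10 == A.sum()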
print(A.mean())  # the total sum divided by the number of elements
print(A.mean() == (A.sum() / A.numel()))
print(A.mean(axis=0))  # mean along axis 0, same idea: that dimension is removed
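# Added check (assert only): the mean along an axis is the sum along it divided by its length.
assert torch.allclose(A.mean(axis=0), A.sum(axis=0) / A.shape[0])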
sum_A = A.sum(axis=1, keepdims=True)
print(sum_A.shape)  # keepdims=True does not drop the reduced axis; it is kept with size 1, so sum_A can still be used in broadcasting arithmetic later
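# Added sketch (assert only): sum_A has shape (3, 1), so it broadcasts against the
# (3, 3) matrix A; dividing normalizes each row of A to sum to 1.
assert torch.allclose((A / sum_A).sum(axis=1), torch.ones(3))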
print(A.cumsum(axis=0))  # cumulative sum down the rows (along axis 0)
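# Added check (assert only): the last row of the running sum equals the column totals.
assert torch.equal(A.cumsum(axis=0)[-1], A.sum(axis=0))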
y = torch.ones(4, dtype=torch.float32)
x = torch.arange(4.)
print(torch.dot(x, y))  # dot product: the sum of elementwise products, equal to torch.sum(x * y)
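# Added check (assert only): the dot product really is the sum of elementwise products.
assert torch.dot(x, y) == torch.sum(x * y)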
MA = torch.tensor([[1, 2, 3],
                   [1, 2, 3],
                   [1, 2, 3],
                   [2, 3, 4]])
MB = torch.tensor([[1, 1, 1, 1],
                   [1, 1, 1, 1],
                   [1, 1, 1, 1]])
print(MA.shape, MB.shape)
print(torch.mm(MB, MA))  # matrix multiplication
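# Added check (assert only): (3, 4) @ (4, 3) gives a (3, 3) result; the @ operator
# performs the same matrix multiplication as torch.mm.
assert torch.equal(torch.mm(MB, MA), MB @ MA)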
MC = torch.tensor([1,1,1])
print(torch.mv(MA, MC))  # matrix-vector multiplication
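# Added check (assert only): the matrix-vector product equals a row-wise weighted sum.
assert torch.equal(torch.mv(MA, MC), (MA * MC).sum(axis=1))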
u = torch.tensor([3.0, -4.0])
print(torch.norm(u))  # L2 norm: square root of the sum of squares, here 5
print(torch.abs(u).sum())  # L1 norm: sum of absolute values
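# Added check (assert only): torch.norm with p=1 computes the same L1 norm.
assert torch.norm(u, p=1) == torch.abs(u).sum()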
print(torch.norm(torch.ones((4, 9))))  # the Frobenius norm of a matrix is the square root of the sum of its squared elements
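# Added sketch (assert only): the Frobenius norm is the L2 norm of the flattened matrix;
# M here is just a throwaway example tensor.
M = torch.ones((4, 9))
assert torch.allclose(torch.norm(M), torch.sqrt((M ** 2).sum()))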
# automatic differentiation
x = torch.arange(4.0)
print(x)
x.requires_grad_(True)
print(x.grad)  # None initially; the gradient is only populated after backward()
y = 2 * torch.dot(x, x)  # twice the inner product of x with itself
y.backward()
print(x.grad)  # 4x, since the gradient of the inner product x·x is 2x
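# Added check (assert only): autograd's result matches the analytic gradient 4x.
assert torch.equal(x.grad, 4 * x)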
x.grad.zero_()  # reset the gradient to zero so later backward() calls do not accumulate into it
y = x.sum()
y.backward()
print(x.grad)
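# Added check (assert only): the gradient of sum(x) is a vector of ones.
assert torch.equal(x.grad, torch.ones_like(x))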
x.grad.zero_()
y = x * x
y.sum().backward()
print(x.grad)
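# Added check (assert only): the gradient of sum(x * x) is 2x.
assert torch.equal(x.grad, 2 * x)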
x.grad.zero_()
y = x * x
u = y.detach()  # u holds the values of y but is treated as a constant, detached from the graph
z = u * x
z.sum().backward()
print(x.grad == u)
print(x.grad)
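# Added sketch (assert only): because u was detached, z = u * x differentiates as a
# constant times x, giving gradient u; without detach, x * x * x would give 3 * x**2 instead.
x.grad.zero_()
(x * x * x).sum().backward()
assert torch.equal(x.grad, 3 * x.detach() ** 2)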
# gradients through Python control flow: f is still (piecewise) linear in its input
def f(a):
    b = a * 2
    while b.norm() < 1000:
        b = b * 2
    if b.sum() > 0:
        c = b
    else:
        c = 100 * b
    return c
a = torch.randn(size=(), requires_grad=True)
d = f(a)
d.backward()
print(a.grad == d / a)  # f only scales a by a data-dependent constant, so the gradient is d / a
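# Added sketch (assert only): the same check with a fixed input instead of a random one;
# a2 is just a throwaway example value. f keeps doubling its input until the norm reaches
# 1000, so d2 is a constant multiple of a2 and the gradient is exactly d2 / a2.
a2 = torch.tensor(3.0, requires_grad=True)
d2 = f(a2)
d2.backward()
assert a2.grad == d2 / a2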
Output:
F:\python3\python.exe C:\study\project_1\3_21_xiandai.py
tensor([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11],
[12, 13, 14, 15],
[16, 17, 18, 19]])
tensor([[ 0, 4, 8, 12, 16],
[ 1, 5, 9, 13, 17],
[ 2, 6, 10, 14, 18],
[ 3, 7, 11, 15, 19]])
tensor([[ 0., 1., 4., 9.],
[ 16., 25., 36., 49.],
[ 64., 81., 100., 121.],
[144., 169., 196., 225.],
[256., 289., 324., 361.]])
torch.Size([3, 3])
tensor([ 6., 5., 11.])
tensor([ 6., 4., 12.])
tensor(22.)
tensor(2.4444)
tensor(True)
tensor([2.0000, 1.6667, 3.6667])
torch.Size([3, 1])
tensor([[ 1., 2., 3.],
[ 2., 4., 4.],
[ 6., 5., 11.]])
tensor(6.)
torch.Size([4, 3]) torch.Size([3, 4])
tensor([[ 5, 9, 13],
[ 5, 9, 13],
[ 5, 9, 13]])
tensor([6, 6, 6, 9])
tensor(5.)
tensor(7.)
tensor(6.)
tensor([0., 1., 2., 3.])
None
tensor([ 0., 4., 8., 12.])
tensor([1., 1., 1., 1.])
tensor([0., 2., 4., 6.])
tensor([True, True, True, True])
tensor([0., 1., 4., 9.])
tensor(True)
Process finished with exit code 0