标量:就是一个数,是0维的,只有大小,没有方向
向量:是1*n的一列数,是1维的,有大小,也有方向
张量:是任意维度的一组数;0维是标量,1维是向量,2维是矩阵,更高维统称张量(并非只能是n*n的2维)
a.size():输出a的某一维度中元素的个数,若未指定维度,则计算所有元素的个数
a.shape:输出a各维度的长度信息(注意shape是属性,不加括号调用),返回类似元组的torch.Size类型。
a.numel():输出a中元素的总个数(number of elements),不是占用内存的字节数
a.dim():输出a的维数
import torch
import numpy as np

if __name__ == '__main__':
    # torch.randn samples from the standard normal distribution N(0, 1).
    a = torch.randn(2, 3)
    print(f"a: {a}")
    # size() with no argument returns the full shape; size(i) is the length of dim i.
    print(f"a.size(): {a.size()}")
    print(f"a.size(0): {a.size(0)}")
    print(f"a.size(1): {a.size(1)}")
    # .shape is an attribute equivalent to size(); it supports indexing.
    print(f"a.shape[0]: {a.shape[0]}")
    print(f"a.shape[1]: {a.shape[1]}")
    print(f"a.shape: {a.shape}")
    # torch.Size is a tuple subclass, so it converts cleanly to a plain list.
    print(f"list(a.shape): {list(a.shape)}")
    # numel() is the total element count (2*3 = 6), not the memory footprint.
    print(f"a.numel(): {a.numel()}")
    # dim() is the number of dimensions (rank).
    print(f"a.dim(): {a.dim()}")
    print()

    # torch.rand samples uniformly from [0, 1); here a 3-D tensor.
    b = torch.rand(2, 3, 4)
    print(f"b: {b}")
    print(f"b.size(): {b.size()}")
    print(f"b.size(0): {b.size(0)}")
    print(f"b.size(1): {b.size(1)}")
    print(f"b.shape[0]: {b.shape[0]}")
    print(f"b.shape[1]: {b.shape[1]}")
    print(f"b.shape: {b.shape}")
    print(f"list(b.shape): {list(b.shape)}")
    # Total element count = 2*3*4 = 24.
    print(f"b.numel(): {b.numel()}")
    print(f"b.dim(): {b.dim()}")
    print()
a: tensor([[-0.2106, -2.1292, -0.8221],
[-1.5805, 0.2592, -1.1203]])
a.size(): torch.Size([2, 3])
a.size(0): 2
a.size(1): 3
a.shape[0]: 2
a.shape[1]: 3
a.shape: torch.Size([2, 3])
list(a.shape): [2, 3]
a.numel(): 6
a.dim(): 2
b: tensor([[[0.8126, 0.8908, 0.3507, 0.1554],
[0.8679, 0.5295, 0.5461, 0.5021],
[0.2570, 0.2250, 0.6310, 0.0662]],
[[0.1139, 0.9552, 0.5847, 0.5421],
[0.3589, 0.0090, 0.0324, 0.6984],
[0.9562, 0.4533, 0.4296, 0.4052]]])
b.size(): torch.Size([2, 3, 4])
b.size(0): 2
b.size(1): 3
b.shape[0]: 2
b.shape[1]: 3
b.shape: torch.Size([2, 3, 4])
list(b.shape): [2, 3, 4]
b.numel(): 24
b.dim(): 3
import torch

if __name__ == '__main__':
    # NOTE: torch.rand draws from the UNIFORM [0, 1) distribution (the original
    # comment claimed a normal distribution); scaling by 10 gives uniform [0, 10).
    a = 10*torch.rand(3,3)
    print(a,'\n')
    # rand_like: new uniform tensor with the same shape/dtype/device as `a`.
    a1=torch.rand_like(a)
    print('a1=torch.rand_like(a):\n',a1,'\n')
    # Random integers drawn from [1, 6) (upper bound exclusive), shape [3, 3].
    b=torch.randint(1,6,[3,3])
    print('torch.randint(1,6,[3,3]):\n',b,'\n')
    # 2x3 tensor with every entry set to the constant 3.92.
    c=torch.full((2, 3), 3.92)
    print('torch.full((2, 3), 3.92):\n',c,'\n')
    # arange: 0, 2, 4, 6, 8 -- the stop value 10 is excluded.
    d=torch.arange(0,10,step=2)
    print('d=torch.arange(0,10,step=2):\n',d,'\n')
    # linspace includes BOTH endpoints; `steps` is the number of points,
    # so steps=10 gives non-integer spacing while steps=11 gives 0,1,...,10.
    e=torch.linspace(0,10,steps=10)
    print('torch.linspace(0,10,steps=10):\n',e,'\n')
    e1=torch.linspace(0,10,steps=11)
    print('e1=torch.linspace(0,10,steps=11):\n',e1,'\n')
    # All-ones matrix.
    f=torch.ones(3,3)
    print('torch.ones(3,3):\n',f,'\n')
    # All-zeros matrix.
    f1=torch.zeros(3,3)
    print('torch.zeros(3,3):\n',f1,'\n')
    # Identity matrix.
    f2=torch.eye(3,3)
    print('torch.eye(3,3):\n',f2,'\n')

    # Indexing demo -- presumably (batch=4, channels=3, height=28, width=28).
    g=torch.rand(4,3,28,28)
    print('shape的基本使用:')
    print(g[0].shape)
    print(g[0,0].shape)
    print(g[0,0,2,4])
    print('\ntensor的切片使用:')
    # First two images along the batch dimension.
    print(g[:2].shape)
    # Images from index 2 onward, channels from index 1 onward.
    print(g[2:,1:].shape)
    # Strided sampling: rows every 7 pixels, columns every 14 (start:stop:step).
    print(g[0,0,0:28:7,::14])

    h=torch.randn(3,4)
    print('\n',h)
    # Boolean mask of entries >= 0.5 (not strictly ">"). Use the comparison
    # operator instead of calling the dunder h.__ge__(0.5) directly.
    mask = h >= 0.5
    print(mask)
    # masked_select returns a 1-D tensor of the elements where mask is True.
    print(torch.masked_select(h,mask))
tensor([[6.6247, 1.7639, 2.3681],
[1.4683, 7.0583, 6.3519],
[2.0854, 6.2536, 0.0829]])
a1=torch.rand_like(a):
tensor([[0.2688, 0.0892, 0.7759],
[0.4124, 0.1816, 0.1043],
[0.8010, 0.4711, 0.5239]])
torch.randint(1,6,[3,3]):
tensor([[5, 4, 3],
[1, 1, 3],
[2, 1, 3]])
torch.full((2, 3), 3.92):
tensor([[3.9200, 3.9200, 3.9200],
[3.9200, 3.9200, 3.9200]])
d=torch.arange(0,10,step=2):
tensor([0, 2, 4, 6, 8])
torch.linspace(0,10,steps=10):
tensor([ 0.0000, 1.1111, 2.2222, 3.3333, 4.4444, 5.5556, 6.6667, 7.7778,
8.8889, 10.0000])
e1=torch.linspace(0,10,steps=11):
tensor([ 0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.])
torch.ones(3,3):
tensor([[1., 1., 1.],
[1., 1., 1.],
[1., 1., 1.]])
torch.zeros(3,3):
tensor([[0., 0., 0.],
[0., 0., 0.],
[0., 0., 0.]])
torch.eye(3,3):
tensor([[1., 0., 0.],
[0., 1., 0.],
[0., 0., 1.]])
shape的基本使用:
torch.Size([3, 28, 28])
torch.Size([28, 28])
tensor(0.7568)
tensor的切片使用:
torch.Size([2, 3, 28, 28])
torch.Size([2, 2, 28, 28])
tensor([[0.4571, 0.3198],
[0.6540, 0.3359],
[0.2601, 0.8069],
[0.9713, 0.6876]])
tensor([[-2.4096, 1.1243, -1.0314, -1.4685],
[-2.5054, 0.7131, -0.0376, -0.2110],
[ 1.8922, 1.8989, 0.0459, -1.6457]])
tensor([[False, True, False, False],
[False, True, False, False],
[ True, True, False, False]])
tensor([1.1243, 0.7131, 1.8922, 1.8989])