# repeat materializes copies along each dim; expand broadcasts a size-1 dim
# as a zero-copy view. Both yield the same (2, 4, 5) values here.
x = torch.arange(0, 20).view(1, 4, 5)
print(x)
t = x.repeat(2, 1, 1)
print(t)
t1 = x.expand(2, -1, -1)  # -1 keeps the existing size of that dim
print(t1)
# Concatenate two random (2, 5) halves into a (4, 5) tensor, then
# chunk it back into 2 pieces along dim 0. x itself is unchanged.
x1 = torch.randn(2, 5)
x2 = torch.randn(2, 5)
x = torch.cat([x1, x2], dim=0)
print(x)
print(torch.chunk(x, 2, 0))
print(x)
# max with a dim argument returns a (values, indices) pair; the function
# form and the method form are interchangeable.
x1 = torch.max(x, dim=0)  # per-column max of x
x2 = x.max(dim=1)         # per-row max of x
print(x)
print(x1[0])  # just the max values, not the indices
print(x2)
# Basic 2-D indexing and strided slicing.
a = torch.arange(24).view(2, 12)
b = a[:, 0]  # first column -> tensor([0, 12])
c = a[:, 0::3]  # slice starting at index 0, taking every 3rd column
print(a)
print(b)
# Fix: the original had mojibake `print©` (© for `(c)`), a SyntaxError.
print(c)
# torch.gather demos on a (3, 5) tensor.
a = torch.arange(15).view(3, 5)
print(a)
# gather along dim 0: out[i][j] = a[index[i][j]][j]
b = a.gather(0, torch.LongTensor([[0, 1, 2, 1], [2, 0, 1, 0]]))
print(b)
b = a.gather(0, torch.LongTensor([[0], [1], [2]]))  # picks a[0][0], a[1][0], a[2][0]
print(b)
# gather along dim 1: out[i][j] = a[i][index[i][j]]
c = a.gather(1, torch.LongTensor([[0, 3, 4], [4, 3, 2]]))
# Fix: the original had mojibake `print©` (© for `(c)`), a SyntaxError.
print(c)
# split -> cat round-trip, then stack and squeeze.
# Fixes: the original separators were corrupted ("*"100, ""100, "“100, "”*100
# — all SyntaxErrors), labels used smart quotes (“a:”), and one print was
# mojibake `print©`. All restored to valid Python.
a = torch.arange(24).view(2, 3, 4)
print("*" * 100)
print("a:")
print(a)  # [2, 3, 4]
b = torch.split(a, 1, dim=1)  # NOTE: the 2nd arg is the chunk SIZE, not the number of chunks
print("*" * 100)
print("b:")
print(b)  # 3 pieces of [2, 1, 4]
c = torch.cat(b, dim=1)  # cat undoes the split exactly
print("*" * 100)
print("c:")
print(c)  # [2, 3, 4]
print(c.shape)
d = torch.stack(b)  # stacks the tuple b along a NEW dimension (dim 0 by default)
print("*" * 100)
print("d:")
print(d)
print(d.shape)  # [3, 2, 1, 4]
e = torch.squeeze(d)  # drops every size-1 dim
print("*" * 100)
print(e)
print(e.shape)  # [3, 2, 4]
# permute reorders all dims at once: (2, 3, 4) -> (4, 2, 3).
a = torch.arange(24).view(2, 3, 4)
print(a)
c = torch.permute(a, [2, 0, 1])
# Fix: the original had mojibake `print©` (© for `(c)`), a SyntaxError.
print(c)
# transpose vs reshape vs contiguous on a small 2-D tensor.
a = torch.arange(12).view(3, 4)
print(a)
d = a.transpose(1, 0)  # swaps two dims; for 2-D this is the matrix transpose
print(d)
e = a.reshape(4, 3)  # reshape re-fills the new shape in row-major element order
print(e)
f = a.contiguous()  # a is already contiguous, so this is effectively a no-op
print(f)
# A transposed tensor is non-contiguous; contiguous() copies it into
# row-major memory so that view() becomes legal.
a = torch.arange(12).view(3, 4)
print(a)
b = torch.transpose(a, 1, 0)  # (4, 3) non-contiguous view
print(b)
b = b.contiguous()
c = b.view(2, 6)
# Fix: the original had mojibake `print©` (© for `(c)`), a SyntaxError.
print(c)