【PyTorch】Learning the torch API (1)

torch API

Contents

Matrix multiplication: torch.mm

Matrix multiplication: torch.matmul

Embedding table lookup

torch.cat((a, b), dim=-1): concatenate along the last dimension


import torch

torch.__version__ # '1.3.1'


a = torch.randn(2,3)
b = torch.randn(3,2)


a
tensor([[ 0.5496,  0.6146, -0.4678],
        [ 0.2285, -0.8506, -0.7809]])

b
tensor([[ 1.6438, -0.7533],
        [-0.3179, -0.0761],
        [ 0.2346, -0.3628]])

Matrix multiplication: torch.mm

c = torch.mm(a,b)

print(c)
tensor([[ 0.5983, -0.2911],
        [ 0.4628,  0.1759]])


print(c.shape)
torch.Size([2, 2])
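
torch.mm does strict 2-D matrix multiplication: with a of shape (2, 3) and b of shape (3, 2), c[i][j] is the dot product of row i of a and column j of b. A minimal sketch verifying one entry by hand, reusing a, b, c from above:

# c[0, 0] should equal the dot product of a's row 0 and b's column 0
manual = (a[0] * b[:, 0]).sum()
print(torch.allclose(c[0, 0], manual)) # True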

Matrix multiplication: torch.matmul

d = torch.matmul(a,b)


print(d)
tensor([[ 0.5983, -0.2911],
        [ 0.4628,  0.1759]]) 


print(d.shape)
torch.Size([2, 2])
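
For 2-D inputs torch.matmul behaves exactly like torch.mm, but unlike mm it also supports 1-D vectors and batched inputs via broadcasting. A small sketch of the batched case (shapes chosen only for illustration):

batch = torch.randn(10, 2, 3) # a batch of ten (2, 3) matrices
w = torch.randn(3, 2)
out = torch.matmul(batch, w)  # w is broadcast against every matrix in the batch
print(out.shape)              # torch.Size([10, 2, 2])
# torch.mm(batch, w) would raise an error: mm only accepts 2-D tensors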

### Python basics

hidden_units = [256, 128, 64]

# from index 0 up to, but not including, the last element
hidden_units[:-1]
[256, 128]


# from index 1 through the last element
hidden_units[1:]
[128, 64]


hidden_units.insert(0, 13)
hidden_units
[13, 256, 128, 64]


# zip pairs up elements position by position
zip(hidden_units[:-1], hidden_units[1:])
# <zip object at 0x...>  (in Python 3, zip returns a lazy iterator;
# wrap it in list() to see the pairs)

# here this is equivalent to:
zip([256, 128], [128, 64])


for layer in list(zip(hidden_units[:-1], hidden_units[1:])):
    print("layer", layer)
    for linear in layer:
        print("linear", linear)

layer (256, 128)
linear 256
linear 128
layer (128, 64)
linear 128
linear 64

# zip was not obvious to me at first: it pairs each element with the one after it (see the sketch below)
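
In MLP code this zip pattern pairs every layer size with the next one, yielding the (in_features, out_features) of each linear layer. A minimal sketch, assuming the hidden_units list above (now [13, 256, 128, 64]):

import torch.nn as nn

# pairs: (13, 256), (256, 128), (128, 64)
layers = [nn.Linear(i, o) for i, o in zip(hidden_units[:-1], hidden_units[1:])]
mlp = nn.Sequential(*layers)
print(mlp)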

Embedding table lookup

Initialize the embedding matrix with shape=(5, 4).

Look up rows of the embedding matrix by index: [1, 2, 3] selects rows 1, 2 and 3.

import torch.nn as nn

embedding = nn.Embedding(5, 4) # assume a vocabulary of only 5 words, embedding dim 4
embedding
Embedding(5, 4)


word = [[1, 2, 3],
        [2, 3, 4]] # each number stands for a word, e.g. {'!': 0, 'how': 1, 'are': 2, 'you': 3, 'ok': 4}
                   # the indices must stay within 0~4, since the vocabulary defined above has only 5 words
torch.LongTensor(word)
tensor([[1, 2, 3],
        [2, 3, 4]])



embed = embedding(torch.LongTensor(word))
embed
tensor([[[ 0.2270, -1.3801,  0.4612, -0.0465],
         [-1.0791, -0.6831,  0.7300,  1.9376],
         [-0.2408, -0.8692,  1.4777,  0.9520]],

        [[-1.0791, -0.6831,  0.7300,  1.9376],
         [-0.2408, -0.8692,  1.4777,  0.9520],
         [ 0.7200,  0.2607, -0.0887,  1.6848]]], grad_fn=<EmbeddingBackward>)

# rows that look up the same index are identical: the 2nd row of the first block equals the 1st row of the second block (both are the embedding of index 2)


embed.shape # torch.Size([2, 3, 4])

embed.size() # torch.Size([2, 3, 4])
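
The lookup is just row selection from the weight matrix embedding.weight. A quick check, reusing embed and embedding from above:

# embed[0, 1] looked up index word[0][1] == 2, i.e. row 2 of the weight matrix
print(torch.equal(embed[0, 1], embedding.weight[2])) # True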

Looking up indices taken from a dict also works.

from torch.autograd import Variable

word_to_ix = {'hello': 0, 'world': 1}
embeds = nn.Embedding(2, 5) # 2 words, 5-d vectors
# print(embeds) # Embedding(2, 5)


hello_idx = torch.LongTensor([word_to_ix['hello']])
hello_idx = Variable(hello_idx) # Variable has been a no-op since PyTorch 0.4; this wrap could be dropped
# hello_idx # tensor([0])
# hello_idx # tensor([0])

# word -> index -> embedding lookup
hello_embed = embeds(hello_idx)
print(hello_embed)
tensor([[ 0.0702, -1.7894, -0.7901,  1.6338, -0.7428]],
       grad_fn=<EmbeddingBackward>)


hello_embed.shape # torch.Size([1, 5])
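
The same dict-based pattern extends to a whole sequence of words; a short sketch reusing word_to_ix and embeds:

sentence = ['hello', 'world', 'hello']
idxs = torch.LongTensor([word_to_ix[w] for w in sentence])
print(embeds(idxs).shape) # torch.Size([3, 5])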

torch.cat((a, b), dim=-1): concatenate along the last dimension

Observe how embs_a and embs_b are concatenated.

embs_a = torch.randn(2,3)
embs_a
tensor([[ 0.2293, -0.0313, -0.3125],
        [ 0.1254,  0.4505,  0.2046]])


embs_a.shape # torch.Size([2, 3])

embs_b = torch.randn(2,3)
embs_b
tensor([[-0.6700,  0.0621,  0.8191],
        [-0.8210,  1.6372, -0.4524]])



embs_a_cat = torch.cat((embs_a,embs_b), dim=-1)
print(embs_a_cat)
tensor([[ 0.2293, -0.0313, -0.3125, -0.6700,  0.0621,  0.8191],
        [ 0.1254,  0.4505,  0.2046, -0.8210,  1.6372, -0.4524]])

print(embs_a_cat.shape)
torch.Size([2, 6])
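
For these 2-D tensors dim=-1 is the same as dim=1 (concatenate columns). Compare with dim=0, which stacks along rows; a quick sketch:

cat_rows = torch.cat((embs_a, embs_b), dim=0) # stack the two (2, 3) tensors row-wise
print(cat_rows.shape) # torch.Size([4, 3])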
