Implementing text classification with BERT + BiLSTM + Attention + TextCNN

import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
# NOTE: these imports target the legacy torchtext API (torchtext < 0.9);
# Field and BucketIterator were later moved to torchtext.legacy and then removed.
from torchtext.datasets import text_classification
from torchtext.data import Field, BucketIterator

class TextCNN(nn.Module):
    def __init__(self, vocab_size, embedding_dim, output_dim, kernel_dim, kernel_sizes):
        super().__init__()

        # Embedding layer
        self.embedding = nn.Embedding(vocab_size, embedding_dim)

        # convolutional layers
        self.convs = nn.ModuleList([
            nn.Conv2d(1, kernel_dim, (k, embedding_dim))
            for k in kernel_sizes
        ])

        # dropout
        self.dropout = nn.Dropout(0.5)

        # final, fully-connected layer
        self.fc = nn.Linear(len(kernel_sizes) * kernel_dim, output_dim)

    def forward(self, text):
        # text = [batch_size, sent_length]

        embedded = self.embedding(text)
        # embedded = [batch_size, sent_length, embedding_dim]

        embedded = embedded.unsqueeze(1)
        # embedded = [batch_size, 1, sent_length, embedding_dim]

        conved = [F.relu(conv(embedded)).squeeze(3) for conv in self.convs]
        # each element = [batch_size, kernel_dim, sent_length - kernel_size + 1]

        pooled = [F.max_pool1d(conv, conv.shape[2]).squeeze(2) for conv in conved]
        # each element = [batch_size, kernel_dim]

        cat = self.dropout(torch.cat(pooled, dim=1))
        # cat = [batch_size, len(kernel_sizes) * kernel_dim]

        return self.fc(cat)
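For a quick sanity check, the TextCNN module above can be run on dummy data. The hyperparameters below (vocabulary size, embedding dimension, number of classes, kernel settings) are illustrative guesses, not values from the original article:

model = TextCNN(vocab_size=25000, embedding_dim=100, output_dim=4,
                kernel_dim=100, kernel_sizes=[3, 4, 5])
dummy_batch = torch.randint(0, 25000, (8, 50))   # [batch_size=8, sent_length=50]
logits = model(dummy_batch)                       # logits = [8, 4]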
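The title promises a BERT + BiLSTM + attention front end feeding a TextCNN head, but only the TextCNN part is shown above. Below is a minimal sketch of how those pieces could be wired together, assuming the HuggingFace transformers library; the class name BertBiLSTMAttnCNN, the 'bert-base-chinese' checkpoint, and all hyperparameters are assumptions for illustration, not the article's exact implementation.

import torch
import torch.nn as nn
import torch.nn.functional as F
from transformers import BertModel

class BertBiLSTMAttnCNN(nn.Module):
    def __init__(self, bert_name, hidden_dim, output_dim, kernel_dim, kernel_sizes):
        super().__init__()
        # pretrained BERT encoder (frozen or fine-tuned, depending on training setup)
        self.bert = BertModel.from_pretrained(bert_name)
        bert_dim = self.bert.config.hidden_size

        # BiLSTM over BERT token embeddings
        self.lstm = nn.LSTM(bert_dim, hidden_dim, batch_first=True, bidirectional=True)

        # additive attention scores over BiLSTM time steps
        self.attn = nn.Linear(2 * hidden_dim, 1)

        # TextCNN-style convolutions over the attention-weighted sequence
        self.convs = nn.ModuleList([
            nn.Conv2d(1, kernel_dim, (k, 2 * hidden_dim)) for k in kernel_sizes
        ])
        self.dropout = nn.Dropout(0.5)
        self.fc = nn.Linear(len(kernel_sizes) * kernel_dim, output_dim)

    def forward(self, input_ids, attention_mask):
        # bert_out = [batch_size, seq_len, bert_dim]
        bert_out = self.bert(input_ids, attention_mask=attention_mask).last_hidden_state

        # lstm_out = [batch_size, seq_len, 2 * hidden_dim]
        lstm_out, _ = self.lstm(bert_out)

        # attention weights over time steps, masked to real (non-padding) tokens
        scores = self.attn(lstm_out).squeeze(-1)                 # [batch_size, seq_len]
        scores = scores.masked_fill(attention_mask == 0, -1e9)
        weights = F.softmax(scores, dim=-1).unsqueeze(-1)        # [batch_size, seq_len, 1]
        weighted = lstm_out * weights                            # re-weighted sequence

        # TextCNN head over the weighted sequence
        x = weighted.unsqueeze(1)                                # [batch_size, 1, seq_len, 2*hidden_dim]
        conved = [F.relu(conv(x)).squeeze(3) for conv in self.convs]
        pooled = [F.max_pool1d(c, c.size(2)).squeeze(2) for c in conved]
        cat = self.dropout(torch.cat(pooled, dim=1))
        return self.fc(cat)

# example instantiation (hyperparameters are illustrative assumptions)
# model = BertBiLSTMAttnCNN('bert-base-chinese', hidden_dim=128, output_dim=4,
#                           kernel_dim=100, kernel_sizes=[3, 4, 5])

In this sketch the attention re-weights each BiLSTM time step before the convolutions, so the CNN filters operate on a sequence already biased toward informative tokens; an alternative design is to pool the attention output directly into a sentence vector and drop the CNN head.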