Mean pooling over a single BERT layer, or stacking several layers before pooling

import torch
import torch.nn as nn

class BertPooler(nn.Module):
	'''BERT pooling aggregates a sentence into one vector by taking the mean (or max) over all tokens along each hidden dimension.'''
	def __init__(self, bert):
		super().__init__()
		self.bert = bert  # a preloaded Hugging Face BertModel

	def forward(self, input_ids, attention_mask, token_type_ids, pooling_type):
		out = self.bert(input_ids, attention_mask, token_type_ids, output_hidden_states=True)
		if pooling_type == 'cls':
			return out.last_hidden_state[:, 0]  # [batch,768]
		if pooling_type == 'pooler':
			return out.pooler_output  # [batch,768]
		if pooling_type == 'last-avg':  # mean over tokens along each dimension
			# Equivalent: torch.mean(out.last_hidden_state, dim=1) -> [batch,768]; torch.mean squeezes the reduced dim=1
			last = out.last_hidden_state.transpose(1, 2)  # [batch,768,seq]
			return torch.avg_pool1d(last, kernel_size=last.shape[-1]).squeeze(-1)  # [batch,768,seq]->[batch,768,1]->[batch,768]
		if pooling_type == 'first-last-avg':
			# hidden_states[0] is the embedding output, so index 1 is the first encoder layer
			first = out.hidden_states[1].transpose(1, 2)  # [batch,768,seq]
			first_avg = torch.avg_pool1d(first, kernel_size=first.shape[-1]).squeeze(-1)  # [batch,768]
			last = out.hidden_states[-1].transpose(1, 2)  # [batch,768,seq]
			last_avg = torch.avg_pool1d(last, kernel_size=last.shape[-1]).squeeze(-1)  # [batch,768]
			first_last_cat = torch.cat([first_avg.unsqueeze(1), last_avg.unsqueeze(1)], dim=1).transpose(1, 2)  # [batch,768,2]
			return torch.avg_pool1d(first_last_cat, kernel_size=first_last_cat.shape[-1]).squeeze(-1)  # [batch,768]
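
For reference, a minimal usage sketch; the model name bert-base-chinese and the sample sentence are illustrative assumptions, not from the original post:

from transformers import BertModel, BertTokenizer

# 'bert-base-chinese' and the sample sentence are illustrative choices
tokenizer = BertTokenizer.from_pretrained('bert-base-chinese')
model = BertPooler(BertModel.from_pretrained('bert-base-chinese'))
model.eval()

enc = tokenizer('BERT sentence embeddings via pooling.', return_tensors='pt')
with torch.no_grad():
	emb = model(enc['input_ids'], enc['attention_mask'], enc['token_type_ids'], pooling_type='first-last-avg')
print(emb.shape)  # torch.Size([1, 768])

Note that the avg_pool1d mean runs over every position, padding included; with batched, padded inputs, a mean weighted by attention_mask is the usual refinement.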
