# Peft 库实战(一):LoRA 微调 BERT(文本情感分类)
# (Peft in practice, part 1: LoRA fine-tuning of BERT for text sentiment classification)
# File: peft_bert_demo.py

import argparse
import os

import torch
from torch.optim import AdamW
from torch.utils.data import DataLoader
from peft import (get_peft_config, get_peft_model, get_peft_model_state_dict, set_peft_model_state_dict, PeftType, PrefixTuningConfig, PromptEncoderConfig, PromptTuningConfig, LoraConfig)

import evaluate
from datasets import load_dataset
from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed
from tqdm import tqdm

import peft
print(peft.__version__)

# Download the corpus first (run once, e.g. in a notebook cell):
#!wget https://raw.githubusercontent.com/SophonPlus/ChineseNlpCorpus/master/datasets/ChnSentiCorp_htl_all/ChnSentiCorp_htl_all.csv

data_file = "./ChnSentiCorp_htl_all.csv"  # path to the data file; must be downloaded beforehand
# Load the CSV as a Hugging Face dataset and drop rows whose "review" field is missing.
dataset = load_dataset("csv", data_files=data_file)
dataset = dataset.filter(lambda x: x["review"] is not None)
# BUG FIX: the original line was `dataset["train"` — missing the closing bracket,
# which is a SyntaxError. Select the "train" split produced by the csv loader.
datasets = dataset["train"]

# Related topics: LLM 预训练&微调, bert, 深度学习, python