from transformers import AutoTokenizer, AutoModel
tokenizer = AutoTokenizer.from_pretrained("canwenxu/BERT-of-Theseus-MNLI")
model = AutoModel.from_pretrained("canwenxu/BERT-of-Theseus-MNLI")
Downloading: tokenizer and config files, then model weights (255M)
import numpy as np
import pandas as pd
import os
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
import torch
import transformers
import random
from tqdm import tqdm
device = "cuda" if torch.cuda.is_available() else "cpu"
torch.cuda.empty_cache()
import datetime

def printbar():
    nowtime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print("\n" + "==========" * 8 + "%s" % nowtime)

os.environ["KMP_DUPLICATE_LIB_OK"] = "TRUE"
def read_sst2_split(split_dir):
    # SST-2 TSV files have a header row: sentence<TAB>label
    split_data = pd.read_csv(split_dir, sep='\t')
    split_data.columns = ['text', 'label']
    texts = list(split_data.text)
    labels = list(split_data.label)
    return texts, labels
path = 'D:/Dataset/GLUE/SST/SST-2/'
train_texts, train_labels = read_sst2_split(path+'train.tsv')
test_texts, test_labels = read_sst2_split(path+'dev.tsv')
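A quick sanity check (a minimal sketch; the expected counts assume the standard GLUE SST-2 split) confirms the files parsed correctly:

print(len(train_texts), len(test_texts))   # ~67k train sentences, 872 dev sentences
print(train_texts[0], train_labels[0])     # a raw sentence and its 0/1 label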
Data preprocessing
# seed every RNG source for reproducibility (the distinct values are arbitrary)
random_seed = 0
numpy_seed = 1
torch_seed = 2
cuda_seed = 3
random.seed(random_seed)
np.random.seed(numpy_seed)
torch.manual_seed(torch_seed)
torch.cuda.manual_seed_all(cuda_seed)
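If preferred, the four calls collapse into one helper (a sketch; using a single shared seed is a simplification, not what the run above used):

def set_seed(seed=0):
    # seed Python, NumPy, torch CPU, and all CUDA devices in one call
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)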
train_texts, val_texts, train_labels, val_labels = train_test_split(
    train_texts, train_labels, test_size=0.4, random_state=random_seed)
model_path = "canwenxu/BERT-of-Theseus-MNLI"
from transformers import BertForSequenceClassification, BertTokenizer
tokenizer = BertTokenizer.from_pretrained(model_path)
maxlen = 20  # short cap to keep this demo fast; real SST-2 sentences are often longer
train_encodings = tokenizer(train_texts[:128],
                            padding='max_length',
                            truncation=True,
                            max_length=maxlen,
                            return_tensors='pt')
val_encodings = tokenizer(val_texts[:64],
                          padding='max_length',
                          truncation=True,
                          max_length=maxlen,
                          return_tensors='pt')
test_encodings = tokenizer(test_texts[:32],
                           padding='max_length',
                           truncation=True,
                           max_length=maxlen,
                           return_tensors='pt')
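The tokenizer returns a BatchEncoding of tensors; a quick look (shapes follow from the 128 training samples and maxlen=20 above):

print(train_encodings.keys())              # input_ids, token_type_ids, attention_mask
print(train_encodings['input_ids'].shape)  # torch.Size([128, 20])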
import gc
gc.collect()
1324
class SST2Dataset(torch.utils.data.Dataset):
    def __init__(self, encodings, labels):
        self.encodings = encodings
        self.labels = labels

    def __getitem__(self, idx):
        # the encodings are already tensors (return_tensors='pt'), so clone/detach
        # instead of torch.tensor() to avoid the copy-construct UserWarning
        item = {key: val[idx].clone().detach() for key, val in self.encodings.items()}
        item['labels'] = torch.tensor(self.labels[idx])
        return item

    def __len__(self):
        return len(self.labels)
train_dataset = SST2Dataset(train_encodings, train_labels[:128])
val_dataset = SST2Dataset(val_encodings, val_labels[:64])
test_dataset = SST2Dataset(test_encodings, test_labels[:32])
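Each item is a dict of tensors ready for the model (a minimal check):

sample = train_dataset[0]
print(sample['input_ids'].shape, sample['labels'])  # torch.Size([20]) and a 0/1 label tensor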
from torch.utils.data import DataLoader
train_data_loader = DataLoader(
    dataset=train_dataset,
    batch_size=32,
    shuffle=True
)
val_data_loader = DataLoader(
    dataset=val_dataset,
    batch_size=32,
    shuffle=False  # no need to shuffle for evaluation
)
test_data_loader = DataLoader(
    dataset=test_dataset,  # was val_dataset: a copy-paste bug
    batch_size=32,
    shuffle=False
)
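Pulling one batch verifies the pipeline end to end (a sketch; shapes follow from batch_size=32 and maxlen=20):

batch = next(iter(train_data_loader))
print(batch['input_ids'].shape, batch['attention_mask'].shape)  # both torch.Size([32, 20])
print(batch['labels'][:8])                                      # a few 0/1 labels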
model = BertForSequenceClassification.from_pretrained(model_path,num_labels=2)
Some weights of BertForSequenceClassification were not initialized from the model checkpoint at canwenxu/BERT-of-Theseus-MNLI and are newly initialized: ['classifier.bias', 'classifier.weight']
You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.
model
BertForSequenceClassification(
(bert): BertModel(
(embeddings): BertEmbeddings(
(word_embeddings): Embedding(30522, 768, padding_idx=0)
(position_embeddings): Embedding(512, 768)
(token_type_embeddings): Embedding(2, 768)
(LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
(dropout): Dropout(p=0.1, inplace=False)
)
(encoder): BertEncoder(
(layer): ModuleList(
(0): BertLayer(
(attention): BertAttention(
(self): BertSelfAttention(
(query): Linear(in_features=768, out_features=768, bias=True)
(key): Linear(in_features=768, out_features=768, bias=True)
(value): Linear(in_features=768, out_features=768, bias=True)
(dropout): Dropout(p=0.1, inplace=False)
)
(output): BertSelfOutput(
(dense): Linear(in_features=768, out_features=768, bias=True)
(LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
(dropout): Dropout(p=0.1, inplace=False)
)
)
(intermediate): BertIntermediate(
(dense): Linear(in_features=768, out_features=3072, bias=True)
)
(output): BertOutput(
(dense): Linear(in_features=3072, out_features=768, bias=True)
(LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
(dropout): Dropout(p=0.1, inplace=False)
)
)
        (1)-(5): five more BertLayer blocks, identical in structure to (0) above
)
)
(pooler): BertPooler(
(dense): Linear(in_features=768, out_features=768, bias=True)
(activation): Tanh()
)
)
(dropout): Dropout(p=0.1, inplace=False)
(classifier): Linear(in_features=768, out_features=2, bias=True)
)
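The compressed model keeps BERT-base widths but only six encoder layers; a quick count (a sketch, roughly 66M parameters versus ~110M for 12-layer BERT-base):

n_params = sum(p.numel() for p in model.parameters())
print("%.1fM parameters" % (n_params / 1e6))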
gc.collect()  # tidy up, but keep `model` — it is still needed by the optimizer below
2367
from sklearn.metrics import f1_score, precision_score, recall_score, accuracy_score
loss_func = torch.nn.CrossEntropyLoss()
# BERT fine-tuning usually needs a small learning rate (2e-5 to 5e-5);
# lr = 0.01 is far too large for a pretrained transformer
optimizer = torch.optim.Adam(params=model.parameters(), lr=2e-5)
metric_func = accuracy_score
metric_name = "accuracy"
def train_epoch(tmp_model, dataloader, tmp_op, epoch):
    tmp_model.train()
    tmp_model.zero_grad()
    total_loss = 0.0
    total_metric = 0.0
    preds = []
    labels = []
    for step, batch in enumerate(dataloader, 1):
        tmp_op.zero_grad()
        input_ids = batch['input_ids'].to(device)
        attention_mask = batch['attention_mask'].to(device)
        label = batch['labels'].to(device)
        outputs = tmp_model(input_ids, attention_mask=attention_mask, labels=label)
        loss = outputs[0]
        logits = outputs[1]
        total_loss += float(loss.item())
        pred = torch.argmax(logits, dim=-1)
        preds.extend(pred.cpu().tolist())
        labels.extend(batch['labels'].cpu().tolist())
        metric = accuracy_score(labels, preds)
        total_metric += float(metric)
        loss.backward()  # was missing: without it the optimizer step is a no-op
        tmp_op.step()
    return total_loss / step, total_metric / step
def evaluate(tmp_model, dataloader, split, post_evaluate_hook=None):
    preds = []
    labels = []
    tmp_model.eval()
    val_total_loss = 0.0
    val_total_metric = 0.0
    with torch.no_grad():
        for step, batch in enumerate(dataloader, 1):
            input_ids = batch['input_ids'].to(device)
            attention_mask = batch['attention_mask'].to(device)
            label = batch['labels'].to(device)
            outputs = tmp_model(input_ids, attention_mask=attention_mask, labels=label)
            loss = outputs[0]
            logits = outputs[1]
            pred = torch.argmax(logits, dim=-1)
            preds.extend(pred.cpu().tolist())
            labels.extend(batch['labels'].cpu().tolist())
            val_total_loss += loss.item()
            metric = accuracy_score(labels, preds)
            val_total_metric += float(metric)
    tmp_model.train()
    return val_total_loss / step, val_total_metric / step
model.device
device(type='cpu')
model=model.to(device)
model.device
device(type='cuda', index=0)
epochs = 10
dfhistory = pd.DataFrame(columns=["epoch", "loss", metric_name, "val_loss", "val_" + metric_name])

print("Start Training...")
printbar()
for epoch in range(1, epochs + 1):
    train_loss, train_metric = train_epoch(model, train_data_loader, optimizer, epoch)
    val_loss, val_metric = evaluate(model, val_data_loader, "Valid")
    info = (epoch, train_loss, train_metric, val_loss, val_metric)
    dfhistory.loc[epoch - 1] = info
    print(("\nEPOCH = %d, loss = %.4f, " + metric_name +
           " = %.4f, val_loss = %.4f, val_" + metric_name + " = %.4f") % info)
    printbar()
print('Finished Training...')
Start Training...
================================================================================2022-04-26 10:51:26
EPOCH = 1, loss = 0.6709,accuracy = 0.6237, val_loss = 0.6608, val_accuracy = 0.6641
================================================================================2022-04-26 10:51:30
EPOCH = 2, loss = 0.6715,accuracy = 0.4870, val_loss = 0.6608, val_accuracy = 0.6484
================================================================================2022-04-26 10:51:30
EPOCH = 3, loss = 0.6704,accuracy = 0.5827, val_loss = 0.6608, val_accuracy = 0.6641
================================================================================2022-04-26 10:51:30
EPOCH = 4, loss = 0.6777,accuracy = 0.5762, val_loss = 0.6608, val_accuracy = 0.6641
================================================================================2022-04-26 10:51:30
EPOCH = 5, loss = 0.6793,accuracy = 0.6530, val_loss = 0.6608, val_accuracy = 0.6953
================================================================================2022-04-26 10:51:30
EPOCH = 6, loss = 0.6817,accuracy = 0.5553, val_loss = 0.6608, val_accuracy = 0.7109
================================================================================2022-04-26 10:51:30
EPOCH = 7, loss = 0.6757,accuracy = 0.6016, val_loss = 0.6608, val_accuracy = 0.6484
================================================================================2022-04-26 10:51:30
EPOCH = 8, loss = 0.6775,accuracy = 0.5996, val_loss = 0.6608, val_accuracy = 0.6953
================================================================================2022-04-26 10:51:30
EPOCH = 9, loss = 0.6936,accuracy = 0.6035, val_loss = 0.6608, val_accuracy = 0.7266
================================================================================2022-04-26 10:51:30
EPOCH = 10, loss = 0.6795,accuracy = 0.6204, val_loss = 0.6608, val_accuracy = 0.6484
================================================================================2022-04-26 10:51:30
Finished Training...
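With training done, the test loader and the collected history support a final check (a minimal sketch; the plot labels are my own):

test_loss, test_acc = evaluate(model, test_data_loader, "Test")
print("test_loss = %.4f, test_accuracy = %.4f" % (test_loss, test_acc))

dfhistory.plot(x="epoch", y=["loss", "val_loss"])  # training vs. validation loss per epoch
plt.show()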