102,Do you want to add negative TCs?,negativetc
103,Add a negative TC,negativetc
104,negative TC,negativetc
105,Header File:dns-sd.h,headerinfo
106,Back,goback
107,I want to go back,goback
108,Feature name:,featureinfo
# Sample sentences used to sanity-check the tokenizer output.
text = ["this is a distil bert model.", "data is oil"]
# Encode the sample text (padded/truncated, returned as PyTorch tensors).
encoded_input = tokenizer(text, padding=True, truncation=True, return_tensors='pt')
# print(encoded_input)  # uncomment to inspect the encoded batch

# Whitespace-token lengths of the training sentences (useful for picking max_len).
# NOTE(review): assumes train_text is an iterable of strings — defined elsewhere.
seq_len = [len(i.split()) for i in train_text]

# Convert the pre-tokenized training split into tensors for the model.
# NOTE(review): tokens_train / train_labels come from earlier, unseen cells.
train_seq = torch.tensor(tokens_train['input_ids'])
train_mask = torch.tensor(tokens_train['attention_mask'])
train_y = torch.tensor(train_labels.tolist())

# Wrap the tensors in a shuffled DataLoader for training.
batch_size = 6
train_data = TensorDataset(train_seq, train_mask, train_y)
train_sampler = RandomSampler(train_data)
train_dataloader = DataLoader(train_data, sampler=train_sampler, batch_size=batch_size)
try:
class BERT_Arch(nn.Module):
# print(data)
# Persist the full trained model object (architecture + weights) to disk.
# NOTE(review): torch.save on the whole model pickles the class; loading
# requires the class definition to be importable — confirm that is intended
# (saving model.state_dict() is the more portable convention).
torch.save(model, 'modelweight.pth')
print("model saved")