xlnet_dataloaders.py

import yaml
import torch
from functools import partial
from transformers import XLNetTokenizer
from torch.utils.data import DataLoader

# Load experiment parameters from the project's params.yaml
with open('params.yaml', 'r') as f:
    PARAMS = yaml.safe_load(f)

# Use the GPU index from params if CUDA is available, otherwise fall back to CPU
if torch.cuda.is_available():
    DEVICE = torch.device('cuda', PARAMS.get('gpu', 0))
else:
    DEVICE = torch.device('cpu')


class DataFrameDataLoader(DataLoader):
    """DataLoader that tokenizes (review, sentiment) pairs from a DataFrame with an XLNet tokenizer."""

    def __init__(self, df, max_len, pretrained_model, do_lower_case, *args, **kwargs):
        # Each item is (text, label): the review string and its sentiment
        self._tokenizer = XLNetTokenizer.from_pretrained(pretrained_model, do_lower_case=do_lower_case)
        self._data_iter = list(zip(df['review'], df['sentiment']))
        collate_batch = partial(self.collate_batch, max_len=max_len)
        super().__init__(self._data_iter, collate_fn=collate_batch, *args, **kwargs)

    def collate_batch(self, batch, max_len):
        label_list, text_list = [], []
        attention_masks = []
        for _text, _label in batch:
            label_list.append(_label)
            # Tokenize, pad/truncate to max_len, and return input ids plus the attention mask.
            # padding='max_length' and truncation=True replace the deprecated pad_to_max_length=True.
            encoded_dict = self._tokenizer.encode_plus(
                _text, add_special_tokens=True, max_length=max_len,
                padding='max_length', truncation=True,
                return_attention_mask=True, return_tensors='pt'
            )
            text_list.append(encoded_dict['input_ids'])
            attention_masks.append(encoded_dict['attention_mask'])
        # Stack the per-example tensors into batch tensors and move them to the target device
        label_list = torch.tensor(label_list, dtype=torch.float32)
        text_list = torch.cat(text_list, dim=0)
        attention_masks = torch.cat(attention_masks, dim=0)
        return label_list.to(DEVICE), text_list.to(DEVICE), attention_masks.to(DEVICE)
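
The loader expects a pandas DataFrame with 'review' and 'sentiment' columns. Below is a minimal usage sketch; the CSV path, the params.yaml keys (max_len, pretrained_model, do_lower_case, batch_size), and the example values are assumptions for illustration, not part of this file.

# Hypothetical usage sketch; CSV path and params.yaml keys are assumed for illustration.
import pandas as pd
import yaml

from xlnet_dataloaders import DataFrameDataLoader

with open('params.yaml', 'r') as f:
    params = yaml.safe_load(f)

# Assumed columns: 'review' (str) and 'sentiment' (0/1)
df = pd.read_csv('data/reviews.csv')

train_loader = DataFrameDataLoader(
    df,
    max_len=params['max_len'],                    # e.g. 128
    pretrained_model=params['pretrained_model'],  # e.g. 'xlnet-base-cased'
    do_lower_case=params['do_lower_case'],
    batch_size=params['batch_size'],              # forwarded to torch's DataLoader
    shuffle=True,
)

for labels, input_ids, attention_masks in train_loader:
    # Tensors are already on DEVICE; shapes: (batch,), (batch, max_len), (batch, max_len)
    pass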