pytorch_loader.py
import os
import pickle
import numpy as np
from torch.utils.data import Dataset, DataLoader


def get_dataloader(data_path, max_sent, max_doc, mode, batch_size, num_workers):
    # Wrap the text dataset in a PyTorch DataLoader; shuffle only for training.
    dataset = text_dataloader(data_path, max_sent, max_doc, mode)
    data_loader = DataLoader(dataset=dataset,
                             batch_size=batch_size,
                             shuffle=(mode == 'train'),
                             num_workers=num_workers,
                             drop_last=True)
    return data_loader


class text_dataloader(Dataset):
    def __init__(self, data_path, max_sent, max_doc, mode):
        self.data_path = data_path
        self.max_sent = max_sent   # maximum number of tokens per sentence
        self.max_doc = max_doc     # maximum number of sentences per document
        self.mode = mode
        # Load the word vocabulary and the preprocessed data for the requested split (mode).
        with open(os.path.join(data_path, 'word_vocab.pkl'), 'rb') as f:
            self.word_vocab = pickle.load(f)
        with open(os.path.join(data_path, '{}_data.pkl'.format(mode)), 'rb') as f:
            data_ = pickle.load(f)
        self.x, self.y = data_['x'], data_['y']
        self.vocab_size = len(self.word_vocab)
        self.n_classes = len(np.unique(self.y))

    def __len__(self):
        return len(self.y)

    def __getitem__(self, idx):
        text = self.x[idx]
        label = self.y[idx] - 1  # labels are stored 1-indexed; shift to 0-indexed
        batch_x = np.zeros([self.max_doc, self.max_sent])
        sent_length = []
        # Keep only the last max_doc sentences of an over-long document.
        if len(text) > self.max_doc:
            text = text[-self.max_doc:]
        for si, sent in enumerate(text):
            # Keep only the last max_sent tokens of an over-long sentence.
            if len(sent) > self.max_sent:
                sent = sent[-self.max_sent:]
            batch_x[si][:len(sent)] = sent
            sent_length.append(len(sent))
        # Pad the per-sentence length list out to max_doc entries.
        sent_length = sent_length + [0] * (self.max_doc - len(sent_length))
        doc_length = [len(text)]
        return batch_x, np.array([label]), np.array(sent_length), np.array(doc_length)
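

# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the original file): it assumes the
# preprocessed pickles ('word_vocab.pkl' and 'train_data.pkl') already exist
# under './data'; the path and hyperparameter values below are placeholders.
if __name__ == '__main__':
    train_loader = get_dataloader(data_path='./data',
                                  max_sent=50,       # max tokens per sentence
                                  max_doc=30,        # max sentences per document
                                  mode='train',
                                  batch_size=32,
                                  num_workers=2)
    for batch_x, label, sent_length, doc_length in train_loader:
        # batch_x:     (batch_size, max_doc, max_sent) matrix of word ids
        # label:       (batch_size, 1) zero-indexed class labels
        # sent_length: (batch_size, max_doc) tokens per sentence (0 = padding)
        # doc_length:  (batch_size, 1) sentences per document
        print(batch_x.shape, label.shape, sent_length.shape, doc_length.shape)
        break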