dataset.py
import numpy as np
import torch
import torch.utils.data


class EventData(torch.utils.data.Dataset):
    """ Event stream dataset. """

    def __init__(self, data, t_max=None):
        """
        `data` should be a list of event streams; each event stream is a list of
        dictionaries, and each dictionary contains the keys `time_since_start`,
        `time_since_last_event`, and `type_event`.
        """
        self.has_intensity = False
        # Optionally rescale all timestamps by t_max (e.g. to map them into [0, 1]).
        if t_max is None:
            self.time = [[elem['time_since_start'] for elem in inst] for inst in data]
            self.time_gap = [[elem['time_since_last_event'] for elem in inst] for inst in data]
        else:
            self.time = [[elem['time_since_start'] / t_max for elem in inst] for inst in data]
            self.time_gap = [[elem['time_since_last_event'] / t_max for elem in inst] for inst in data]
        # Shift event types by +1: type 0 can occur in the data, but index 0 is
        # reserved for padding.
        self.event_type = [[elem['type_event'] + 1 for elem in inst] for inst in data]
        # Ground-truth intensities are optional; fall back to zeros when absent.
        if 'intensities' in data[0][0]:
            self.intensities = [[elem['intensities'][0] for elem in inst] for inst in data]
        else:
            self.intensities = [[0 for elem in inst] for inst in data]
        self.length = len(data)

    def __len__(self):
        return self.length

    def __getitem__(self, idx):
        """ Each returned element is a list, which represents an event stream. """
        return self.time[idx], self.time_gap[idx], self.event_type[idx], self.intensities[idx]
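
# Illustrative sketch (not from the original file): the smallest input that
# EventData accepts; the field values here are made up.
#
#   data = [
#       [  # one event stream with two events
#           {'time_since_start': 0.5, 'time_since_last_event': 0.5, 'type_event': 0},
#           {'time_since_start': 1.2, 'time_since_last_event': 0.7, 'type_event': 1},
#       ],
#   ]
#   ds = EventData(data, t_max=10.0)
#   ds[0]  # -> ([0.05, 0.12], [0.05, 0.07], [1, 2], [0, 0])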

def pad_time(insts):
    """ Pad each instance to the max sequence length in the batch. """
    max_len = max(len(inst) for inst in insts)
    batch_seq = np.array([
        inst + [0] * (max_len - len(inst))
        for inst in insts])
    return torch.tensor(batch_seq, dtype=torch.float32)


def pad_type(insts):
    """ Pad each instance to the max sequence length in the batch. """
    max_len = max(len(inst) for inst in insts)
    batch_seq = np.array([
        inst + [0] * (max_len - len(inst))
        for inst in insts])
    return torch.tensor(batch_seq, dtype=torch.long)


def collate_fn(insts):
    """ Collate function, as required by PyTorch. """
    time, time_gap, event_type, intensities = list(zip(*insts))
    time = pad_time(time)
    time_gap = pad_time(time_gap)
    event_type = pad_type(event_type)
    intensities = pad_time(intensities)
    return time, time_gap, event_type, intensities
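
# Illustrative sketch (toy numbers, not from the original file): collating two
# streams of lengths 3 and 2 pads every field to shape (2, 3) with zeros; the
# +1 shift on event types above is what keeps 0 free to mean "padding" here.
#
#   batch = [([0.1, 0.5, 0.9], [0.1, 0.4, 0.4], [1, 2, 1], [0, 0, 0]),
#            ([0.2, 0.3], [0.2, 0.1], [2, 1], [0, 0])]
#   time, time_gap, event_type, intensities = collate_fn(batch)
#   # time.shape == time_gap.shape == event_type.shape == torch.Size([2, 3])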

def get_dataloader(data, batch_size, shuffle=True, t_max=None):
    """ Prepare dataloader. """
    ds = EventData(data, t_max=t_max)
    dl = torch.utils.data.DataLoader(
        ds,
        num_workers=0,
        batch_size=batch_size,
        collate_fn=collate_fn,
        shuffle=shuffle,
    )
    return dl
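
# A minimal end-to-end sketch (illustrative, not part of the original file):
# build a dataloader over two synthetic streams and inspect one padded batch.
if __name__ == '__main__':
    data = [
        [{'time_since_start': 0.5, 'time_since_last_event': 0.5, 'type_event': 0},
         {'time_since_start': 1.2, 'time_since_last_event': 0.7, 'type_event': 1}],
        [{'time_since_start': 0.3, 'time_since_last_event': 0.3, 'type_event': 1}],
    ]
    dl = get_dataloader(data, batch_size=2, shuffle=False)
    for time, time_gap, event_type, intensities in dl:
        print(time)        # shape (2, 2); the shorter stream is zero-padded
        print(event_type)  # tensor([[1, 2], [2, 0]]); 0 marks padding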