Periodic signal and sinusoidal timeseries
nschaetti committed Jan 26, 2019
1 parent aedbf03 commit 5533459
Showing 3 changed files with 113 additions and 8 deletions.
96 changes: 96 additions & 0 deletions echotorch/datasets/PeriodicSignalDataset.py
@@ -0,0 +1,96 @@
# -*- coding: utf-8 -*-
#

# Imports
import torch
from torch.utils.data.dataset import Dataset
import numpy as np


# Periodic signal timeseries
class PeriodicSignalDataset(Dataset):
    """
    Create a simple periodic signal timeseries
    """

    # Constructor
    def __init__(self, sample_len, period, n_samples, start=0):
        """
        Constructor
        :param sample_len: Length of each sample (in time steps)
        :param period: Repeating pattern, given as a list, numpy array or torch tensor
        :param n_samples: Number of samples in the dataset
        :param start: Start offset into the repeated pattern (an int, or a list with one offset per sample)
        """
        # Properties
        self.sample_len = sample_len
        self.n_samples = n_samples
        self.period = period
        self.start = start

        # Period length
        if isinstance(period, list):
            self.period_length = len(period)
        elif isinstance(period, (np.ndarray, torch.Tensor)):
            self.period_length = period.shape[0]
        # end if

        # Generate data set
        self.outputs = self._generate()
    # end __init__

    #############################################
    # OVERRIDE
    #############################################

    # Length
    def __len__(self):
        """
        Length
        :return: Number of samples in the dataset
        """
        return self.n_samples
    # end __len__

    # Get item
    def __getitem__(self, idx):
        """
        Get item
        :param idx: Index of the sample to return
        :return: The sample at index idx
        """
        return self.outputs[idx]
    # end __getitem__

    ##############################################
    # PRIVATE
    ##############################################

    # Generate
    def _generate(self):
        """
        Generate dataset
        :return: List of generated samples
        """
        # List of samples
        samples = list()

        # For each sample
        for i in range(self.n_samples):
            # Repeat the period enough times to cover the sample length
            period_tensor = torch.FloatTensor(self.period)
            sample = period_tensor.repeat(int(self.sample_len // self.period_length) + 1)

            # Start offset for this sample
            if type(self.start) is list:
                start = self.start[i]
            else:
                start = self.start
            # end if

            # Cut out sample_len steps and add a feature dimension
            samples.append(sample[start:start+self.sample_len].unsqueeze(-1))
        # end for

        return samples
    # end _generate

# end PeriodicSignalDataset
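
A minimal usage sketch (not part of this commit; the example period values, batch size and DataLoader wrapping are illustrative assumptions):

# Usage sketch (assumed example): build a dataset from a repeating pattern
# and iterate over it with a standard PyTorch DataLoader.
from torch.utils.data import DataLoader
from echotorch.datasets import PeriodicSignalDataset

# 10 samples of length 100, each repeating the pattern [0, 1, 0, -1]
dataset = PeriodicSignalDataset(sample_len=100, period=[0.0, 1.0, 0.0, -1.0], n_samples=10)
loader = DataLoader(dataset, batch_size=2, shuffle=False)

for batch in loader:
    print(batch.shape)  # expected: torch.Size([2, 100, 1])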
22 changes: 15 additions & 7 deletions echotorch/datasets/SinusoidalTimeseries.py
@@ -18,7 +18,7 @@ class SinusoidalTimeseries(Dataset):
     """
 
     # Constructor
-    def __init__(self, sample_len, n_samples, w, a=1, seed=None):
+    def __init__(self, sample_len, n_samples, w, a=1, g=None, seed=None):
         """
         Constructor
         :param sample_len: Length of the time-series in time steps.
@@ -32,6 +32,7 @@ def __init__(self, sample_len, n_samples, w, a=1, seed=None):
         self.n_samples = n_samples
         self.w = w
         self.a = a
+        self.g = g
 
         # Seed
         if seed is not None:
@@ -107,12 +108,19 @@ def _generate(self):
             # Tensor
             sample = torch.zeros(self.sample_len, 1)
 
-            # Init
-            init_g = self.random_initial_points()
-
-            for t in range(0, self.sample_len):
-                sample[t, 0] = self.a * math.sin(self.w * t + init_g)
-            # end for
+            # Random start
+            if self.g is None:
+                # Init
+                init_g = self.random_initial_points()
+
+                for t in range(0, self.sample_len):
+                    sample[t, 0] = self.a * math.sin(self.w * t + init_g)
+                # end for
+            else:
+                for t in range(0, self.sample_len):
+                    sample[t, 0] = self.a * math.sin(self.w * t + self.g)
+                # end for
+            # end if
 
             # Append
             samples.append(sample)
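
A short sketch of how the new g parameter is used (assumed example, not part of the commit): leaving g as None keeps the previous behaviour of drawing a random initial phase per sample, while passing a value fixes the phase of every generated sinusoid.

# Usage sketch (assumed example): fixed phase versus random initial phase.
from echotorch.datasets import SinusoidalTimeseries

# Every sample follows a * sin(w * t + g) with the same fixed phase g = 0
fixed_phase = SinusoidalTimeseries(sample_len=200, n_samples=5, w=0.2, g=0.0)

# With g left to None, each sample draws its own random initial phase
random_phase = SinusoidalTimeseries(sample_len=200, n_samples=5, w=0.2, seed=1)

print(fixed_phase[0].shape)  # expected: torch.Size([200, 1])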
3 changes: 2 additions & 1 deletion echotorch/datasets/__init__.py
@@ -11,8 +11,9 @@
 from .NARMADataset import NARMADataset
 from .RosslerAttractor import RosslerAttractor
 from .SinusoidalTimeseries import SinusoidalTimeseries
+from .PeriodicSignalDataset import PeriodicSignalDataset
 
 __all__ = [
     'DatasetComposer', 'HenonAttractor', 'LogisticMapDataset', 'LorenzAttractor', 'MackeyGlassDataset',
-    'MemTestDataset', 'NARMADataset', 'RosslerAttractor', 'SinusoidalTimeseries'
+    'MemTestDataset', 'NARMADataset', 'RosslerAttractor', 'SinusoidalTimeseries', 'PeriodicSignalDataset'
 ]
