ensemble.SimpleEnsembleFTS
petroniocandido committed May 30, 2019
1 parent 9b2f286 commit ac71ee3
Showing 2 changed files with 45 additions and 27 deletions.
28 changes: 28 additions & 0 deletions pyFTS/models/ensemble/ensemble.py
@@ -12,6 +12,7 @@
from pyFTS.common import SortedCollection, fts, tree
from pyFTS.models import chen, cheng, hofts, hwang, ismailefendi, sadaei, song, yu
from pyFTS.probabilistic import ProbabilityDistribution
from pyFTS.partitioners import Grid
import scipy.stats as st
from itertools import product

@@ -288,6 +289,33 @@ def forecast_ahead_distribution(self, data, steps, **kwargs):
        return ret


class SimpleEnsembleFTS(EnsembleFTS):
    '''
    A homogeneous FTS method ensemble with variations on partitioning and order.
    '''
    def __init__(self, **kwargs):
        super(SimpleEnsembleFTS, self).__init__(**kwargs)
        self.method = kwargs.get('fts_method', hofts.WeightedHighOrderFTS)
        """FTS method class that will be used on the internal models"""
        self.partitioner_method = kwargs.get('partitioner_method', Grid.GridPartitioner)
        """UoD partitioner class that will be used on the internal models"""
        self.partitions = kwargs.get('partitions', np.arange(15, 35, 10))
        """Possible variations of the number of partitions on the internal models"""
        self.orders = kwargs.get('orders', [1, 2, 3])
        """Possible variations of order on the internal models"""

    def train(self, data, **kwargs):
        for k in self.partitions:
            fs = self.partitioner_method(data=data, npart=k)

            for order in self.orders:
                tmp = self.method(partitioner=fs, order=order)

                tmp.fit(data)

                self.append_model(tmp)


class AllMethodEnsembleFTS(EnsembleFTS):
    """
    Creates an EnsembleFTS with all point forecast methods, sharing the same partitioner
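For reference, a minimal usage sketch of the new SimpleEnsembleFTS class (not part of this commit), assuming the usual pyFTS fit/predict workflow; the TAIEX split and the explicit keyword values below are illustrative, and the keyword names (fts_method, partitions, orders) come from the constructor above.

from pyFTS.data import TAIEX
from pyFTS.models import hofts
from pyFTS.models.ensemble import ensemble

data = TAIEX.get_data()
train, test = data[:800], data[800:1000]

# One internal model per (number of partitions, order) combination,
# all sharing the same FTS method (here the default WeightedHighOrderFTS).
model = ensemble.SimpleEnsembleFTS(fts_method=hofts.WeightedHighOrderFTS,
                                   partitions=[15, 25], orders=[1, 2, 3])
model.fit(train)

# Point forecasts aggregated over the internal models.
forecasts = model.predict(test[:20])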
44 changes: 17 additions & 27 deletions pyFTS/tests/ensemble.py
@@ -19,48 +19,38 @@
from pyFTS.data import TAIEX

data = TAIEX.get_data()
train = data[:800]
test = data[800:1000]
'''
model = ensemble.EnsembleFTS()
for k in [15, 25, 35]:
    for order in [1, 2, 3]:
        fs = Grid.GridPartitioner(data=train, npart=k)
        tmp = hofts.WeightedHighOrderFTS(partitioner=fs, order=order)

        tmp.fit(train)

        model.append_model(tmp)
'''
#fig, ax = plt.subplots(nrows=1, ncols=1, figsize=[15, 5])

#ax.plot(data[:28], label='Original', color='black')
model1 = ensemble.SimpleEnsembleFTS()

from pyFTS.benchmarks import arima, quantreg, BSTS

#model = arima.ARIMA(order=(2,0,0))
#model = quantreg.QuantileRegression(order=1, dist=True)
model = BSTS.ARIMA(order=(2,0,0))
model.fit(train)
methods = [ensemble.SimpleEnsembleFTS, arima.ARIMA, quantreg.QuantileRegression, BSTS.ARIMA]
parameters = [{},{'order': (2,0,0)}, {'order': 1, 'dist': True}, {'order': (2,0,0)}]

horizon = 5

intervals = model.predict(test[:10], type='interval', alpha=.25, steps_ahead=horizon)
from pyFTS.benchmarks import Measures

distributions = model.predict(test[:10], type='distribution', smooth='histogram', steps_ahead=horizon, num_bins=100)
horizon = 5

#Util.plot_distribution2(forecasts, data[:28], start_at=20, order=3, ax=ax, cmap="Blues")
for ct, train, test in Util.sliding_window(data, 1000, 0.8, .5):
    print('data window {}'.format(ct))
    for ct, method in enumerate(methods):
        model = method(**parameters[ct])
        model.fit(train)
        start = model.order + 1
        end = start + horizon
        intervals = model.predict(test[:10], type='interval', alpha=.25, steps_ahead=horizon)
        distributions = model.predict(test[:10], type='distribution', smooth='histogram', steps_ahead=horizon, num_bins=100)
        print(model.name, Measures.get_interval_ahead_statistics(test[start:end], intervals))
        print(model.name, Measures.get_distribution_ahead_statistics(test[start:end], distributions))

print('end')

from pyFTS.benchmarks import Measures

start = model.order+1
end = start + horizon

print(Measures.get_interval_ahead_statistics(test[start:end], intervals))
print(Measures.get_distribution_ahead_statistics(test[start:end], distributions))




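As a condensed reading of the evaluation loop above (assuming train and test come from one Util.sliding_window split), the interval statistics are computed against the observations that follow the model's lag window, which is why the comparison starts at model.order + 1:

from pyFTS.benchmarks import Measures
from pyFTS.models.ensemble import ensemble

horizon = 5

model = ensemble.SimpleEnsembleFTS()
model.fit(train)

# Multi-step-ahead interval forecasts from the first points of the test window.
intervals = model.predict(test[:10], type='interval', alpha=.25, steps_ahead=horizon)

# Align the observed values with the forecast horizon as in the loop above:
# the comparison window starts right after the model's 'order' lag values.
start = model.order + 1
end = start + horizon
print(Measures.get_interval_ahead_statistics(test[start:end], intervals))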
