Fix/ensemble historical forecasts (#1616)
* add correct extreme_lags override and test

* add required extreme_lags override

* delete logging print

* change lag priorities

* add a test + use switch to tuple

* fix extreme lags from other PR

* make RegressionEnsembleModel work

* small unit test fix

---------

Co-authored-by: madtoinou <32447896+madtoinou@users.noreply.github.com>
Co-authored-by: Dennis Bader <dennis.bader@gmx.ch>
3 people committed Apr 10, 2023
1 parent ebb9eb6 commit a480b5e
Showing 10 changed files with 266 additions and 20 deletions.
4 changes: 0 additions & 4 deletions darts/models/forecasting/baselines.py
@@ -79,10 +79,6 @@ def predict(self, n: int, num_samples: int = 1, verbose: bool = False):
forecast = np.array([self.last_k_vals[i % self.K, :] for i in range(n)])
return self._build_forecast_series(forecast)

@property
def extreme_lags(self):
return -self.K, 0, None, None, None, None


class NaiveDrift(LocalForecastingModel):
def __init__(self):
28 changes: 27 additions & 1 deletion darts/models/forecasting/ensemble_model.py
@@ -4,7 +4,7 @@

from abc import abstractmethod
from functools import reduce
from typing import List, Optional, Sequence, Union
from typing import List, Optional, Sequence, Tuple, Union

from darts.logging import get_logger, raise_if, raise_if_not
from darts.models.forecasting.forecasting_model import (
@@ -201,5 +201,31 @@ def min_train_series_length(self) -> int:
def min_train_samples(self) -> int:
return max(model.min_train_samples for model in self.models)

@property
def extreme_lags(
self,
) -> Tuple[
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
]:
def find_max_lag_or_none(lag_id, aggregator) -> Optional[int]:
max_lag = None
for model in self.models:
curr_lag = model.extreme_lags[lag_id]
if max_lag is None:
max_lag = curr_lag
elif curr_lag is not None:
max_lag = aggregator(max_lag, curr_lag)
return max_lag

lag_aggregators = (min, max, min, max, min, max)
return tuple(
find_max_lag_or_none(i, agg) for i, agg in enumerate(lag_aggregators)
)

def _is_probabilistic(self) -> bool:
return all([model._is_probabilistic() for model in self.models])
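
For context (not part of the diff): the new EnsembleModel.extreme_lags aggregates each of the six lag positions across the sub-models, skipping None entries and combining the rest with min for the lower bounds and max for the upper bounds. A minimal standalone sketch of that logic, using hypothetical sub-model tuples:

lags_a = (-3, 0, -5, -1, None, None)  # hypothetical sub-model A
lags_b = (-5, 0, -6, -1, 6, 9)        # hypothetical sub-model B
# tuple order: (min_target_lag, max_target_lag, min_past_cov_lag,
#               max_past_cov_lag, min_future_cov_lag, max_future_cov_lag)

lag_aggregators = (min, max, min, max, min, max)


def combine_extreme_lags(lag_tuples, aggregators):
    """Combine per-position lags across models, ignoring None values."""
    combined = []
    for position, agg in enumerate(aggregators):
        values = [lags[position] for lags in lag_tuples if lags[position] is not None]
        combined.append(agg(values) if values else None)
    return tuple(combined)


print(combine_extreme_lags([lags_a, lags_b], lag_aggregators))
# (-5, 0, -6, -1, 6, 9)
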
42 changes: 36 additions & 6 deletions darts/models/forecasting/forecasting_model.py
@@ -281,6 +281,7 @@ def min_train_samples(self) -> int:
return 1

@property
@abstractmethod
def extreme_lags(
self,
) -> Tuple[
@@ -335,8 +336,7 @@ def extreme_lags(
>>> model.extreme_lags
(-10, 6, None, None, 4, 6)
"""

return -1, 0, None, None, None, None
pass

@property
def _training_sample_time_index_length(self) -> int:
@@ -1914,6 +1914,23 @@ def fit(self, series: TimeSeries) -> "LocalForecastingModel":
super().fit(series)
series._assert_deterministic()

@property
def extreme_lags(
self,
) -> Tuple[
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
]:
# TODO: LocalForecastingModels do not yet handle extreme lags properly, especially
# TransferableFutureCovariatesLocalForecastingModel, where fit and predict modes differ.
# In general, local models train on the entire series (input = output), unlike global
# models, which use an input to predict an output.
return -self.min_train_series_length, -1, None, None, None, None


class GlobalForecastingModel(ForecastingModel, ABC):
"""The base class for "global" forecasting models, handling several time series and optional covariates.
@@ -2315,6 +2332,23 @@ def _supress_generate_predict_encoding(self) -> bool:
"""Controls wether encodings should be generated in :func:`FutureCovariatesLocalForecastingModel.predict()``"""
return False

@property
def extreme_lags(
self,
) -> Tuple[
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
]:
# TODO: LocalForecastingModels do not yet handle extreme lags properly, especially
# TransferableFutureCovariatesLocalForecastingModel, where fit and predict modes differ.
# In general, local models train on the entire series (input = output), unlike global
# models, which use an input to predict an output.
return -self.min_train_series_length, -1, None, None, 0, 0


class TransferableFutureCovariatesLocalForecastingModel(
FutureCovariatesLocalForecastingModel, ABC
@@ -2492,7 +2526,3 @@ def _supports_non_retrainable_historical_forecasts(self) -> bool:
@property
def _supress_generate_predict_encoding(self) -> bool:
return True

@property
def extreme_lags(self):
return -1, 0, None, None, 0, 0
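
With the old base-class fallback (-1, 0, None, None, None, None) removed, local models now report a lag window covering their whole minimum training length, which is what lets historical forecasts line up correctly. A quick check (the first value depends on each model's min_train_series_length, so treat the numbers as indicative):

from darts.models import NaiveDrift

# LocalForecastingModel default: (-min_train_series_length, -1, None, None, None, None)
print(NaiveDrift().extreme_lags)  # (-3, -1, None, None, None, None)

# FutureCovariatesLocalForecastingModel (e.g. ARIMA) additionally reports (0, 0)
# in the future-covariate slots: (-min_train_series_length, -1, None, None, 0, 0)
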
14 changes: 14 additions & 0 deletions darts/models/forecasting/regression_ensemble_model.py
@@ -170,3 +170,17 @@ def ensemble(
for serie, prediction in zip(series, predictions)
]
return seq2series(ensembled) if is_single_series else ensembled

@property
def extreme_lags(
self,
) -> Tuple[
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
]:
extreme_lags_ = super().extreme_lags
return extreme_lags_[0] - self.train_n_points, *extreme_lags_[1:]
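
The RegressionEnsembleModel override only shifts the minimum target lag: train_n_points extra target points are needed to fit the ensemble regressor on top of the forecasting models' historical forecasts. A rough illustration with a hypothetical base tuple:

# Hypothetical tuple returned by super().extreme_lags (aggregated over the forecasting models)
base_lags = (-5, 0, -6, -1, 6, 9)
train_n_points = 50  # forecasted points reserved for fitting the ensemble regressor

shifted = (base_lags[0] - train_n_points, *base_lags[1:])
print(shifted)  # (-55, 0, -6, -1, 6, 9)
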
12 changes: 10 additions & 2 deletions darts/models/forecasting/regression_model.py
@@ -274,7 +274,16 @@ def _model_encoder_settings(
)

@property
def extreme_lags(self):
def extreme_lags(
self,
) -> Tuple[
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
]:
min_target_lag = self.lags.get("target")[0] if "target" in self.lags else None
max_target_lag = self.output_chunk_length - 1
min_past_cov_lag = self.lags.get("past")[0] if "past" in self.lags else None
@@ -285,7 +294,6 @@ def extreme_lags(self):
max_future_cov_lag = (
self.lags.get("future")[-1] if "future" in self.lags else None
)

return (
min_target_lag,
max_target_lag,
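
The regression-model property now carries the full type annotation and builds the tuple from the lags dict plus output_chunk_length. A sanity check mirroring model1 from the new unit test below; the commented output is inferred from that test's combined expectation, assuming the default output_chunk_length of 1:

from darts.models import LinearRegressionModel

model = LinearRegressionModel(
    lags=3, lags_past_covariates=[-3, -5], lags_future_covariates=[7, 8]
)
# lags=3                          -> target lags [-3, -2, -1], min_target_lag = -3
# output_chunk_length=1 (default) -> max_target_lag = 0
# lags_past_covariates=[-3, -5]   -> min -5, max -3
# lags_future_covariates=[7, 8]   -> min 7, max 8
print(model.extreme_lags)  # (-3, 0, -5, -3, 7, 8)
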
15 changes: 14 additions & 1 deletion darts/models/forecasting/theta.py
@@ -4,7 +4,7 @@
"""

import math
from typing import List, Optional
from typing import List, Optional, Tuple

import numpy as np
import statsmodels.tsa.holtwinters as hw
@@ -179,6 +179,19 @@ def min_train_series_length(self) -> int:
else:
return 3

@property
def extreme_lags(
self,
) -> Tuple[
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
]:
return -self.min_train_series_length, 0, None, None, None, None


class FourTheta(LocalForecastingModel):
def __init__(
55 changes: 50 additions & 5 deletions darts/models/forecasting/torch_forecasting_model.py
@@ -2097,7 +2097,16 @@ def _model_encoder_settings(
)

@property
def extreme_lags(self):
def extreme_lags(
self,
) -> Tuple[
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
]:
return (
-self.input_chunk_length,
self.output_chunk_length - 1,
@@ -2186,7 +2195,16 @@ def _model_encoder_settings(
)

@property
def extreme_lags(self):
def extreme_lags(
self,
) -> Tuple[
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
]:
return (
-self.input_chunk_length,
self.output_chunk_length - 1,
@@ -2266,7 +2284,16 @@ def _model_encoder_settings(
)

@property
def extreme_lags(self):
def extreme_lags(
self,
) -> Tuple[
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
]:
return (
-self.input_chunk_length,
self.output_chunk_length - 1,
@@ -2343,7 +2370,16 @@ def _model_encoder_settings(
)

@property
def extreme_lags(self):
def extreme_lags(
self,
) -> Tuple[
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
]:
return (
-self.input_chunk_length,
self.output_chunk_length - 1,
@@ -2421,7 +2457,16 @@ def _model_encoder_settings(
)

@property
def extreme_lags(self):
def extreme_lags(
self,
) -> Tuple[
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
]:
return (
-self.input_chunk_length,
self.output_chunk_length - 1,
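
For all torch-based models the property keeps its previous behaviour and only gains the explicit return annotation: the target entries span the input chunk and the forecast horizon. A brief check, only asserting the two entries visible in the hunks above (the covariate slots depend on the specific model class):

from darts.models import NBEATSModel

model = NBEATSModel(input_chunk_length=12, output_chunk_length=6)
lags = model.extreme_lags
# -input_chunk_length and output_chunk_length - 1
print(lags[0], lags[1])  # -12 5
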
30 changes: 30 additions & 0 deletions darts/tests/models/forecasting/test_ensemble_models.py
@@ -57,6 +57,30 @@ def test_untrained_models(self):
new_model = model_ens.untrained_model()
assert not new_model.models[0]._fit_called

def test_extreme_lag_inference(self):
ensemble = NaiveEnsembleModel([NaiveDrift()])
assert ensemble.extreme_lags == (
-3,
-1,
None,
None,
None,
None,
) # test if default is okay

model1 = LinearRegressionModel(
lags=3, lags_past_covariates=[-3, -5], lags_future_covariates=[7, 8]
)
model2 = LinearRegressionModel(
lags=5, lags_past_covariates=6, lags_future_covariates=[6, 9]
)

ensemble = NaiveEnsembleModel(
[model1, model2]
) # test that extreme lags are inferred from both models
expected = (-5, 0, -6, -1, 6, 9)
assert expected == ensemble.extreme_lags

def test_input_models_local_models(self):
with self.assertRaises(ValueError):
NaiveEnsembleModel([])
@@ -78,6 +102,12 @@ def test_call_predict_local_models(self):
pred1 = naive_ensemble.predict(5)
assert self.series1.components == pred1.components

def test_call_backtest_naive_ensemble_local_models(self):
ensemble = NaiveEnsembleModel([NaiveSeasonal(5), Theta(2, 5)])
ensemble.fit(self.series1)
assert ensemble.extreme_lags == (-10, 0, None, None, None, None)
ensemble.backtest(self.series1)

def test_predict_ensemble_local_models(self):
naive = NaiveSeasonal(K=5)
theta = Theta()
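
The expected tuple in test_extreme_lag_inference can be reconstructed by hand (a sketch of the reasoning, not part of the diff):

# model1: lags=3 -> target [-3..-1]; past [-5, -3]; future [7, 8]; output_chunk_length=1 -> max target lag 0
# model2: lags=5 -> target [-5..-1]; past [-6..-1]; future [6, 9];                          max target lag 0
#
# Position-wise aggregation with (min, max, min, max, min, max):
#   min target lag : min(-3, -5) = -5
#   max target lag : max( 0,  0) =  0
#   min past lag   : min(-5, -6) = -6
#   max past lag   : max(-3, -1) = -1
#   min future lag : min( 7,  6) =  6
#   max future lag : max( 8,  9) =  9
expected = (-5, 0, -6, -1, 6, 9)
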
