Fix merge conflicts
pranavvp16 committed Apr 8, 2024
2 parents 34ac498 + da47457 commit b33e822
Showing 3 changed files with 143 additions and 19 deletions.
85 changes: 66 additions & 19 deletions sktime/forecasting/base/adapters/_neuralforecast.py
@@ -2,25 +2,27 @@
"""Implements adapter for NeuralForecast models."""
import abc
import functools
import typing
from inspect import signature
from typing import List, Literal, Optional, Union

import numpy as np
import pandas

from sktime.forecasting.base import BaseForecaster, ForecastingHorizon
from sktime.utils.warnings import warn

__all__ = ["_NeuralForecastAdapter"]
__author__ = ["yarnabrina", "pranavvp16"]
__author__ = ["yarnabrina", "geetu040", "pranavvp16"]


class _NeuralForecastAdapter(BaseForecaster):
"""Base adapter class for NeuralForecast models.
Parameters
----------
freq : str (default="auto")
freq : Union[str, int] (default="auto")
frequency of the data, see available frequencies [1]_ from ``pandas``
use an integer freq when ``y`` is indexed by ``RangeIndex``
default ("auto") interprets freq from ForecastingHorizon in ``fit``
local_scaler_type : str (default=None)
@@ -70,11 +72,11 @@ class _NeuralForecastAdapter(BaseForecaster):

def __init__(
self: "_NeuralForecastAdapter",
freq: str = "auto",
local_scaler_type: typing.Optional[
typing.Literal["standard", "robust", "robust-iqr", "minmax", "boxcox"]
freq: Union[str, int] = "auto",
local_scaler_type: Optional[
Literal["standard", "robust", "robust-iqr", "minmax", "boxcox"]
] = None,
futr_exog_list: typing.Optional[typing.List[str]] = None,
futr_exog_list: Optional[List[str]] = None,
verbose_fit: bool = False,
verbose_predict: bool = False,
) -> None:
@@ -191,7 +193,7 @@ def _instantiate_model(self: "_NeuralForecastAdapter", fh: ForecastingHorizon):
def _fit(
self: "_NeuralForecastAdapter",
y: pandas.Series,
X: typing.Optional[pandas.DataFrame],
X: Optional[pandas.DataFrame],
fh: ForecastingHorizon,
) -> "_NeuralForecastAdapter":
"""Fit forecaster to training data.
@@ -224,15 +226,60 @@ def _fit(
if not fh.is_all_out_of_sample(cutoff=self.cutoff):
raise NotImplementedError("in-sample prediction is currently not supported")

if self.freq == "auto" and fh.freq is None:
# when freq cannot be interpreted from ForecastingHorizon
raise ValueError(
f"Error in {self.__class__.__name__}, "
f"could not interpret freq, "
f"try passing freq in model initialization"
)

self._freq = fh.freq if self.freq == "auto" else self.freq
# A. freq is given {use this}
# B. freq is auto
# B1. freq is inferred from fh {use this}
# B2. freq is not inferred from fh
# B2.1. y is date-like {raise exception}
# B2.2. y is not date-like
# B2.2.1 equispaced integers {use diff in time}
# B2.2.2 non-equispaced integers {raise exception}

# behavior of different indexes when freq="auto"
# | Indexes | behavior |
# | ----------------------- | --------- |
# | PeriodIndex | B1 |
# | PeriodIndex (Missing) | B1 |
# | DatetimeIndex | B1 |
# | DatetimeIndex (Missing) | B2.1 |
# | RangeIndex | B2.2.1 |
# | RangeIndex (Missing) | B2.2.2 |
# | Index | B2.2.1 |
# | Index (Missing) | B2.2.2 |
# | Other | unreached |

if self.freq != "auto":
# A
self._freq = self.freq
else:
# B
if fh.freq:
# B1
self._freq = fh.freq
else:
# B2
if isinstance(y.index, pandas.DatetimeIndex):
# B2.1
raise ValueError(
f"Error in {self.__class__.__name__}, "
"could not interpret freq, "
"try passing freq in model initialization "
"or use a valid offset in index"
)
else:
# B2.2
diffs = np.unique(np.diff(y.index))
if diffs.shape[0] > 1:
# B2.2.2 (non-equispaced integers)
raise ValueError(
f"Error in {self.__class__.__name__}, "
"could not interpret freq, "
"try passing integer freq in model initialization "
"or use a valid integer offset in index"
)
else:
# B2.2.1 (equispaced integers)
self._freq = int(diffs[-1]) # converts numpy.int64 to int

train_indices = y.index
if isinstance(train_indices, pandas.PeriodIndex):
@@ -262,8 +309,8 @@ def _fit(

def _predict(
self: "_NeuralForecastAdapter",
fh: typing.Optional[ForecastingHorizon],
X: typing.Optional[pandas.DataFrame],
fh: Optional[ForecastingHorizon],
X: Optional[pandas.DataFrame],
) -> pandas.Series:
"""Forecast time series at future horizon.
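
As a quick usage sketch of the freq resolution paths above (labels B1/B2.2.x refer to the comment block in ``_fit``; this assumes ``NeuralForecastRNN`` with default hyperparameters on small synthetic series, and is illustrative only, not part of the commit):

import pandas

from sktime.forecasting.neuralforecast import NeuralForecastRNN

# B1: date-like index, freq inferred via the ForecastingHorizon
y_period = pandas.Series(
    [float(i) for i in range(12)],
    index=pandas.period_range("2020-01", periods=12, freq="M"),
)
NeuralForecastRNN(freq="auto", max_steps=1, trainer_kwargs={"logger": False}).fit(
    y_period, fh=[1, 2]
)

# B2.2.1: equispaced integer index, freq taken from the index step (here 2)
y_range = pandas.Series(
    [float(i) for i in range(10)],
    index=pandas.RangeIndex(start=0, stop=20, step=2),
)
NeuralForecastRNN(freq="auto", max_steps=1, trainer_kwargs={"logger": False}).fit(
    y_range, fh=[1, 2]
)

# B2.2.2: non-equispaced integer index, freq cannot be interpreted
y_gappy = pandas.Series([0.0, 1.0, 2.0, 3.0], index=pandas.Index([0, 1, 3, 7]))
# the call below would raise the ValueError added in this commit:
# NeuralForecastRNN(freq="auto").fit(y_gappy, fh=[1, 2])
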
20 changes: 20 additions & 0 deletions sktime/forecasting/neuralforecast.py
@@ -96,6 +96,10 @@ class NeuralForecastRNN(_NeuralForecastAdapter):
whether ``TimeSeriesDataLoader`` drops last non-full batch
trainer_kwargs : dict (default=None)
keyword trainer arguments inherited from PyTorch Lightning's trainer [6]_
optimizer : pytorch optimizer (default=None)
optimizer to use for training; defaults to Adam if None
optimizer_kwargs : dict (default=None)
dict of parameters to pass to the user-defined optimizer

Notes
-----
@@ -194,6 +198,8 @@ def __init__(
num_workers_loader=0,
drop_last_loader=False,
trainer_kwargs: Optional[dict] = None,
optimizer=None,
optimizer_kwargs: Optional[dict] = None,
):
self.input_size = input_size
self.inference_input_size = inference_input_size
@@ -218,6 +224,8 @@ def __init__(
self.random_seed = random_seed
self.num_workers_loader = num_workers_loader
self.drop_last_loader = drop_last_loader
self.optimizer = optimizer
self.optimizer_kwargs = optimizer_kwargs
self.trainer_kwargs = trainer_kwargs

super().__init__(
@@ -297,6 +305,8 @@ def algorithm_parameters(self: "NeuralForecastRNN") -> dict:
"random_seed": self.random_seed,
"num_workers_loader": self.num_workers_loader,
"drop_last_loader": self.drop_last_loader,
"optimizer": self.optimizer,
"optimizer_kwargs": self.optimizer_kwargs,
**self._trainer_kwargs,
}

@@ -456,6 +466,10 @@ class NeuralForecastLSTM(_NeuralForecastAdapter):
whether ``TimeSeriesDataLoader`` drops last non-full batch
trainer_kwargs : dict (default=None)
keyword trainer arguments inherited from PyTorch Lightning's trainer [6]_
optimizer : pytorch optimizer (default=None)
optimizer to use for training; defaults to Adam if None
optimizer_kwargs : dict (default=None)
dict of parameters to pass to the user-defined optimizer

Notes
-----
@@ -549,6 +563,8 @@ def __init__(
num_workers_loader=0,
drop_last_loader=False,
trainer_kwargs: Optional[dict] = None,
optimizer=None,
optimizer_kwargs: Optional[dict] = None,
):
self.input_size = input_size
self.inference_input_size = inference_input_size
@@ -572,6 +588,8 @@ def __init__(
self.random_seed = random_seed
self.num_workers_loader = num_workers_loader
self.drop_last_loader = drop_last_loader
self.optimizer = optimizer
self.optimizer_kwargs = optimizer_kwargs
self.trainer_kwargs = trainer_kwargs

super().__init__(
@@ -649,6 +667,8 @@ def algorithm_parameters(self: "NeuralForecastLSTM") -> dict:
"random_seed": self.random_seed,
"num_workers_loader": self.num_workers_loader,
"drop_last_loader": self.drop_last_loader,
"optimizer": self.optimizer,
"optimizer_kwargs": self.optimizer_kwargs,
**self._trainer_kwargs,
}

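
For the new optimizer and optimizer_kwargs parameters, a minimal usage sketch (illustrative only; assumes torch is installed, and mirrors the yearly A-DEC setup used in the tests below):

import pandas

from torch.optim import SGD

from sktime.forecasting.neuralforecast import NeuralForecastLSTM

y = pandas.Series(
    [float(i) for i in range(20)],
    index=pandas.period_range("2000", periods=20, freq="A-DEC"),
)

model = NeuralForecastLSTM(
    freq="A-DEC",
    max_steps=5,
    optimizer=SGD,                  # any optimizer class from torch.optim
    optimizer_kwargs={"lr": 0.01},  # forwarded to the optimizer constructor
    trainer_kwargs={"logger": False},
)
model.fit(y, fh=[1, 2, 3])
y_pred = model.predict()

Note that the optimizer is passed as a class, not an instance, matching how the tests below pass Adam and Adagrad.
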
57 changes: 57 additions & 0 deletions sktime/forecasting/tests/test_neuralforecast.py
@@ -322,3 +322,60 @@ def test_neural_forecast_with_auto_freq_on_missing_date_like(
ValueError, match="(could not interpret freq).*(use a valid offset in index)"
):
model.fit(y, fh=[1, 2, 3])


@pytest.mark.parametrize("model_class", [NeuralForecastLSTM, NeuralForecastRNN])
@pytest.mark.skipif(
not run_test_for_class([NeuralForecastLSTM, NeuralForecastRNN]),
reason="run test only if softdeps are present and incrementally (if requested)",
)
def test_neural_forecast_with_non_default_optimizer(model_class) -> None:
"""Test with user defined optimizer."""
# import a pytorch optimizer to pass explicitly (Adam is also the default)
from torch.optim import Adam

# define model
model = model_class(
freq="A-DEC",
max_steps=5,
optimizer=Adam,
trainer_kwargs={"logger": False},
)

# train model
model.fit(X_train, fh=[1, 2, 3, 4])

# predict with trained model
X_pred = model.predict()

# check prediction index
pandas.testing.assert_index_equal(X_pred.index, X_test.index, check_names=False)


@pytest.mark.parametrize("model_class", [NeuralForecastLSTM, NeuralForecastRNN])
@pytest.mark.skipif(
not run_test_for_class([NeuralForecastLSTM, NeuralForecastRNN]),
reason="run test only if softdeps are present and incrementally (if requested)",
)
def test_neural_forecast_with_non_default_optimizer_with_kwargs(model_class) -> None:
"""Test with user defined optimizer and optimizer_kwargs."""
# import non-default pytorch optimizer
from torch.optim import Adagrad

# define model
model = model_class(
freq="A-DEC",
optimizer=Adagrad,
optimizer_kwargs={"lr": 0.1},
max_steps=5,
trainer_kwargs={"logger": False},
)

# train model
model.fit(X_train, fh=[1, 2, 3, 4])

# predict with trained model
X_pred = model.predict()

# check prediction index
pandas.testing.assert_index_equal(X_pred.index, X_test.index, check_names=False)

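To run just the two new optimizer tests (a sketch using pytest's programmatic entry point; the -k pattern matches both test names above):

import pytest

# select the two optimizer tests added in this commit
pytest.main(
    ["sktime/forecasting/tests/test_neuralforecast.py", "-k", "non_default_optimizer"]
)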