[ENH] Add optimizer param for neuralforecast models #6235

Merged
merged 13 commits on Apr 14, 2024
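For orientation, here is a minimal usage sketch of the feature this PR adds. It is not part of the diff: the `freq` argument, the toy series, and the chosen optimizer settings are illustrative assumptions; only the `optimizer`, `optimizer_kwargs`, and `trainer_kwargs` parameter names come from the changes below.

```python
# Hypothetical usage of the new optimizer arguments (not taken from the PR itself).
import pandas as pd
from torch.optim import SGD

from sktime.forecasting.neuralforecast import NeuralForecastRNN

# toy monthly series, purely for illustration
y = pd.Series(
    range(24), index=pd.period_range("2020-01", periods=24, freq="M"), dtype="float64"
)

forecaster = NeuralForecastRNN(
    freq="M",
    max_steps=5,
    trainer_kwargs={"logger": False},
    optimizer=SGD,  # a torch optimizer class, not an instance
    optimizer_kwargs={"lr": 0.01, "momentum": 0.9},
)
forecaster.fit(y, fh=[1, 2, 3])
y_pred = forecaster.predict()
```

Leaving `optimizer=None` keeps neuralforecast's default (Adam), matching the behaviour before this change.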
72 changes: 70 additions & 2 deletions sktime/forecasting/base/adapters/_neuralforecast.py
@@ -2,15 +2,18 @@
"""Implements adapter for NeuralForecast models."""
import abc
import functools
from copy import deepcopy
from inspect import signature
from typing import List, Literal, Optional, Union

import numpy as np
import pandas

from sktime.forecasting.base import BaseForecaster, ForecastingHorizon
from sktime.utils.warnings import warn

__all__ = ["_NeuralForecastAdapter"]
__author__ = ["yarnabrina", "geetu040"]
__author__ = ["yarnabrina", "geetu040", "pranavvp16"]


class _NeuralForecastAdapter(BaseForecaster):
@@ -134,16 +137,81 @@ def algorithm_parameters(self: "_NeuralForecastAdapter") -> dict:
        - custom model name (``alias``) - used from ``algorithm_name``
        """

    def _get_valid_parameters(self: "_NeuralForecastAdapter") -> dict:
        """Get valid parameters for the underlying NeuralForecast algorithm class.

        Returns
        -------
        dict
            dict of valid keyword arguments for the underlying algorithm class
        """
        from pytorch_lightning import Trainer

        # get valid init parameters from the model class and Trainer
        model_class = self.algorithm_class
        trainer_params = list(signature(Trainer.__init__).parameters.keys())
        valid_parameters = list(signature(model_class.__init__).parameters.keys())
        valid_parameters += trainer_params

        sktime_parameters = self.algorithm_parameters
        default_parameters = self.get_param_defaults()

        valid_parameters = set(valid_parameters)
        unsupported_parameters = set(sktime_parameters.keys()) - valid_parameters

        # iterate through neuralforecast parameters and check for non-default values
        user_parameters = [
            key
            for key in sktime_parameters
            if sktime_parameters[key] != default_parameters[key]
        ]

        # filter out unsupported parameters
        sktime_trainer_params = set(self.algorithm_parameters["trainer_kwargs"])
        invalid_trainer_params = sktime_trainer_params - set(trainer_params)

        filter_params = deepcopy(self.algorithm_parameters)
        for invalid_param in invalid_trainer_params:
            warn(
                f"Keyword argument '{invalid_param}' will be omitted as it is"
                f" not found in the __init__ method "
                f"from {Trainer}. "
                f"Check your pytorch_lightning version "
                f"to find out the right API parameters.",
                obj=self,
                stacklevel=2,
            )
            filter_params["trainer_kwargs"].pop(invalid_param)

        for unsupported_param in unsupported_parameters:
            if unsupported_param in user_parameters:
                warn(
                    f"Keyword argument '{unsupported_param}' will be omitted as it is"
                    f" not found in the __init__ method "
                    f"from {self.algorithm_class}. "
                    f"Check your neuralforecast version "
                    f"to find out the right API parameters.",
                    obj=self,
                    stacklevel=2,
                )
            filter_params.pop(unsupported_param)

        return filter_params
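The helper above relies on `inspect.signature` to discover which keyword arguments the installed `neuralforecast` model class and `pytorch_lightning.Trainer` actually accept, and drops the rest, warning when the user explicitly set them. Below is a stripped-down, standalone sketch of that filtering idea — an illustration only, not the adapter's exact code; `filter_kwargs` and `OldModel` are made-up names.

```python
# Standalone sketch of signature-based kwarg filtering (illustrative, not sktime code).
import warnings
from inspect import signature


def filter_kwargs(target_class, kwargs):
    """Keep only kwargs accepted by ``target_class.__init__``; warn about the rest."""
    accepted = set(signature(target_class.__init__).parameters)
    filtered = {}
    for key, value in kwargs.items():
        if key in accepted:
            filtered[key] = value
        else:
            warnings.warn(
                f"Keyword argument '{key}' will be omitted as it is not found "
                f"in the __init__ method of {target_class}."
            )
    return filtered


class OldModel:
    """Stand-in for a model class from an older neuralforecast release."""

    def __init__(self, max_steps=100, loss=None):
        self.max_steps = max_steps
        self.loss = loss


# 'optimizer' is dropped with a warning because OldModel.__init__ does not accept it
params = filter_kwargs(OldModel, {"max_steps": 4, "optimizer": None})
model = OldModel(**params)
```

This is why passing `optimizer` against a neuralforecast release that predates the argument degrades to a warning rather than a `TypeError`.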

    def _instantiate_model(self: "_NeuralForecastAdapter", fh: ForecastingHorizon):
        """Instantiate the model."""
        exogenous_parameters = (
            {"futr_exog_list": self.futr_exog_list} if self.needs_X else {}
        )

        # filter params according to neuralforecast version
        params = self._get_valid_parameters()
        trainer_kwargs = params.pop("trainer_kwargs")
        algorithm_instance = self.algorithm_class(
            fh,
            alias=self.algorithm_name,
            **self.algorithm_parameters,
            **params,
            **trainer_kwargs,
            **exogenous_parameters,
        )

38 changes: 35 additions & 3 deletions sktime/forecasting/neuralforecast.py
@@ -6,7 +6,7 @@
from sktime.forecasting.base.adapters._neuralforecast import _NeuralForecastAdapter
from sktime.utils.validation._dependencies import _check_soft_dependencies

__author__ = ["yarnabrina", "geetu040"]
__author__ = ["yarnabrina", "geetu040", "pranavvp16"]


class NeuralForecastRNN(_NeuralForecastAdapter):
@@ -96,6 +96,10 @@ class NeuralForecastRNN(_NeuralForecastAdapter):
whether ``TimeSeriesDataLoader`` drops last non-full batch
trainer_kwargs : dict (default=None)
keyword trainer arguments inherited from PyTorch Lightning's trainer [6]_
optimizer : pytorch optimizer (default=None) [7]_
optimizer to use for training; if None, defaults to Adam
optimizer_kwargs : dict (default=None) [8]_
dict of parameters to pass to the user-defined optimizer

Notes
-----
@@ -147,6 +151,8 @@ class NeuralForecastRNN(_NeuralForecastAdapter):
.. [5] https://nixtlaverse.nixtla.io/neuralforecast/losses.pytorch.html
.. [6]
https://lightning.ai/docs/pytorch/stable/api/pytorch_lightning.trainer.trainer.Trainer.html#lightning.pytorch.trainer.trainer.Trainer
.. [7] https://pytorch.org/docs/stable/optim.html
.. [8] https://pytorch.org/docs/stable/generated/torch.optim.Adam.html#torch.optim.Adam
""" # noqa: E501

_tags = {
@@ -194,6 +200,8 @@ def __init__(
num_workers_loader=0,
drop_last_loader=False,
trainer_kwargs: Optional[dict] = None,
optimizer=None,
optimizer_kwargs: Optional[dict] = None,
):
self.input_size = input_size
self.inference_input_size = inference_input_size
@@ -218,6 +226,8 @@
self.random_seed = random_seed
self.num_workers_loader = num_workers_loader
self.drop_last_loader = drop_last_loader
self.optimizer = optimizer
self.optimizer_kwargs = optimizer_kwargs
self.trainer_kwargs = trainer_kwargs

super().__init__(
@@ -297,7 +307,9 @@ def algorithm_parameters(self: "NeuralForecastRNN") -> dict:
"random_seed": self.random_seed,
"num_workers_loader": self.num_workers_loader,
"drop_last_loader": self.drop_last_loader,
**self._trainer_kwargs,
"optimizer": self.optimizer,
"optimizer_kwargs": self.optimizer_kwargs,
"trainer_kwargs": self._trainer_kwargs,
}

@classmethod
@@ -324,6 +336,7 @@ def get_test_params(cls, parameter_set="default"):

try:
_check_soft_dependencies("neuralforecast", severity="error")
_check_soft_dependencies("torch", severity="error")
except ModuleNotFoundError:
params = [
{
@@ -346,6 +359,7 @@ def get_test_params(cls, parameter_set="default"):
]
else:
from neuralforecast.losses.pytorch import SMAPE, QuantileLoss
from torch.optim import Adam

params = [
{
@@ -366,6 +380,8 @@ def get_test_params(cls, parameter_set="default"):
"max_steps": 4,
"val_check_steps": 2,
"trainer_kwargs": {"logger": False},
"optimizer": Adam,
"optimizer_kwargs": {"lr": 0.001},
},
]

@@ -456,6 +472,10 @@ class NeuralForecastLSTM(_NeuralForecastAdapter):
whether `TimeSeriesDataLoader` drops last non-full batch
trainer_kwargs : dict (default=None)
keyword trainer arguments inherited from PyTorch Lightning's trainer [6]_
optimizer : pytorch optimizer (default=None) [7]_
optimizer to use for training; if None, defaults to Adam
optimizer_kwargs : dict (default=None) [8]_
dict of parameters to pass to the user-defined optimizer

Notes
-----
@@ -503,6 +523,8 @@ class NeuralForecastLSTM(_NeuralForecastAdapter):
.. [4] https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases
.. [5] https://nixtlaverse.nixtla.io/neuralforecast/losses.pytorch.html
.. [6] https://lightning.ai/docs/pytorch/stable/api/pytorch_lightning.trainer.trainer.Trainer.html#lightning.pytorch.trainer.trainer.Trainer
.. [7] https://pytorch.org/docs/stable/optim.html
.. [8] https://pytorch.org/docs/stable/generated/torch.optim.Adam.html#torch.optim.Adam
""" # noqa: E501

_tags = {
@@ -549,6 +571,8 @@ def __init__(
num_workers_loader=0,
drop_last_loader=False,
trainer_kwargs: Optional[dict] = None,
optimizer=None,
optimizer_kwargs: Optional[dict] = None,
):
self.input_size = input_size
self.inference_input_size = inference_input_size
@@ -572,6 +596,8 @@
self.random_seed = random_seed
self.num_workers_loader = num_workers_loader
self.drop_last_loader = drop_last_loader
self.optimizer = optimizer
self.optimizer_kwargs = optimizer_kwargs
self.trainer_kwargs = trainer_kwargs

super().__init__(
@@ -649,7 +675,9 @@ def algorithm_parameters(self: "NeuralForecastLSTM") -> dict:
"random_seed": self.random_seed,
"num_workers_loader": self.num_workers_loader,
"drop_last_loader": self.drop_last_loader,
**self._trainer_kwargs,
"optimizer": self.optimizer,
"optimizer_kwargs": self.optimizer_kwargs,
"trainer_kwargs": self._trainer_kwargs,
}
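The mapping above forwards `optimizer` and `optimizer_kwargs` from the LSTM wrapper to the underlying neuralforecast model. For completeness, a usage sketch mirroring the RNN example at the top of this PR; as there, `freq`, the toy series, and the learning rate are illustrative assumptions rather than part of the diff.

```python
# Hypothetical usage for NeuralForecastLSTM (not taken from the PR itself).
import pandas as pd
from torch.optim import Adam

from sktime.forecasting.neuralforecast import NeuralForecastLSTM

y = pd.Series(
    range(36), index=pd.period_range("2021-01", periods=36, freq="M"), dtype="float64"
)

# override only the learning rate of the (default) Adam optimizer
forecaster = NeuralForecastLSTM(
    freq="M",
    max_steps=4,
    trainer_kwargs={"logger": False},
    optimizer=Adam,
    optimizer_kwargs={"lr": 1e-2},
)
forecaster.fit(y, fh=[1, 2])
y_pred = forecaster.predict()
```

If the installed neuralforecast release does not accept `optimizer`, the adapter's `_get_valid_parameters` drops the argument with a warning instead of failing.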

@classmethod
@@ -672,6 +700,7 @@ def get_test_params(cls, parameter_set="default"):

try:
_check_soft_dependencies("neuralforecast", severity="error")
_check_soft_dependencies("torch", severity="error")
except ModuleNotFoundError:
params = [
{
@@ -694,6 +723,7 @@ def get_test_params(cls, parameter_set="default"):
]
else:
from neuralforecast.losses.pytorch import SMAPE, QuantileLoss
from torch.optim import Adam

params = [
{
@@ -714,6 +744,8 @@ def get_test_params(cls, parameter_set="default"):
"max_steps": 4,
"val_check_steps": 2,
"trainer_kwargs": {"logger": False},
"optimizer": Adam,
"optimizer_kwargs": {"lr": 0.001},
},
]
