Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[ENH] Add optimizer param for neuralforecast models #6235

Merged
merged 13 commits into from Apr 14, 2024
14 changes: 6 additions & 8 deletions sktime/forecasting/neuralforecast.py
Expand Up @@ -3,8 +3,6 @@
import functools
import typing

from torch import optim

from sktime.forecasting.base.adapters._neuralforecast import _NeuralForecastAdapter
from sktime.utils.validation._dependencies import _check_soft_dependencies

Expand Down Expand Up @@ -95,7 +93,7 @@ class NeuralForecastRNN(_NeuralForecastAdapter):
workers to be used by ``TimeSeriesDataLoader``
drop_last_loader : bool (default=False)
whether ``TimeSeriesDataLoader`` drops last non-full batch
optimizer : torch.optim.Optimizer (default=None)
optimizer : pytorch optimizer (default=None)
optimizer to use for training, if passed with None defaults to Adam
pranavvp16 marked this conversation as resolved.
Show resolved Hide resolved
optimizer_kwargs : dict (default=None)
dict of parameters to pass to the user defined optimizer
Expand Down Expand Up @@ -163,7 +161,7 @@ class NeuralForecastRNN(_NeuralForecastAdapter):
# inherited from _NeuralForecastAdapter
# estimator type
# --------------
"python_dependencies": ["neuralforecast>=1.6.4"],
"python_dependencies": ["neuralforecast>=1.7.0"],
pranavvp16 marked this conversation as resolved.
Show resolved Hide resolved
}

def __init__(
Expand Down Expand Up @@ -198,7 +196,7 @@ def __init__(
random_seed=1,
num_workers_loader=0,
drop_last_loader=False,
optimizer: optim.Optimizer = None,
optimizer=None,
optimizer_kwargs: dict = None,
trainer_kwargs: typing.Optional[dict] = None,
):
Expand Down Expand Up @@ -464,7 +462,7 @@ class NeuralForecastLSTM(_NeuralForecastAdapter):
workers to be used by `TimeSeriesDataLoader`
drop_last_loader : bool (default=False)
whether `TimeSeriesDataLoader` drops last non-full batch
optimizer : torch.optim.Optimizer (default=None)
optimizer : pytorch optimizer (default=None)
optimizer to use for training, if passed with None defaults to Adam
optimizer_kwargs : dict (default=None)
dict of parameters to pass to the user defined optimizer
Expand Down Expand Up @@ -528,7 +526,7 @@ class NeuralForecastLSTM(_NeuralForecastAdapter):
# inherited from _NeuralForecastAdapter
# estimator type
# --------------
"python_dependencies": ["neuralforecast>=1.6.4"],
"python_dependencies": ["neuralforecast>=1.7.0"],
pranavvp16 marked this conversation as resolved.
Show resolved Hide resolved
}

def __init__(
Expand Down Expand Up @@ -562,7 +560,7 @@ def __init__(
random_seed=1,
num_workers_loader=0,
drop_last_loader=False,
optimizer: optim.Optimizer = None,
optimizer=None,
optimizer_kwargs: dict = None,
trainer_kwargs: typing.Optional[dict] = None,
):
Expand Down