
Remove deprecation warnings #1118

Merged
merged 7 commits on Feb 20, 2023
2 changes: 0 additions & 2 deletions etna/analysis/__init__.py
@@ -7,8 +7,6 @@
from etna.analysis.eda_utils import distribution_plot
from etna.analysis.eda_utils import prediction_actual_scatter_plot
from etna.analysis.eda_utils import qq_plot
from etna.analysis.eda_utils import sample_acf_plot
from etna.analysis.eda_utils import sample_pacf_plot
from etna.analysis.eda_utils import seasonal_plot
from etna.analysis.eda_utils import stl_plot
from etna.analysis.feature_relevance.relevance import ModelRelevanceTable
68 changes: 0 additions & 68 deletions etna/analysis/eda_utils.py
@@ -241,74 +241,6 @@ def acf_plot(
plt.show()


def sample_acf_plot(
ts: "TSDataset",
n_segments: int = 10,
lags: int = 21,
segments: Optional[List[str]] = None,
figsize: Tuple[int, int] = (10, 5),
):
"""
Autocorrelation plot for multiple timeseries.

Notes
-----
`Definition of autocorrelation <https://en.wikipedia.org/wiki/Autocorrelation>`_.

Parameters
----------
ts:
TSDataset with timeseries data
n_segments:
number of random segments to plot
lags:
number of timeseries shifts for cross-correlation
segments:
segments to plot
figsize:
size of the figure per subplot with one segment in inches
"""
acf_plot(ts=ts, n_segments=n_segments, lags=lags, segments=segments, figsize=figsize, partial=False)
warnings.warn(
"DeprecationWarning: This function is deprecated and will be removed in etna=2.0; Please use acf_plot instead.",
DeprecationWarning,
)


def sample_pacf_plot(
ts: "TSDataset",
n_segments: int = 10,
lags: int = 21,
segments: Optional[List[str]] = None,
figsize: Tuple[int, int] = (10, 5),
):
"""
Partial autocorrelation plot for multiple timeseries.

Notes
-----
`Definition of partial autocorrelation <https://en.wikipedia.org/wiki/Partial_autocorrelation_function>`_.

Parameters
----------
ts:
TSDataset with timeseries data
n_segments:
number of random segments to plot
lags:
number of timeseries shifts for cross-correlation
segments:
segments to plot
figsize:
size of the figure per subplot with one segment in inches
"""
acf_plot(ts=ts, n_segments=n_segments, lags=lags, segments=segments, figsize=figsize, partial=True)
warnings.warn(
"DeprecationWarning: This function is deprecated and will be removed in etna=2.0; Please use acf_plot instead.",
DeprecationWarning,
)


def distribution_plot(
ts: "TSDataset",
n_segments: int = 10,
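For users of the removed helpers, migration is just a flag on `acf_plot`: the deprecated wrappers above were thin calls into it with `partial=False` / `partial=True`. A minimal sketch, assuming etna is installed; `acf_plot` is imported from `etna.analysis.eda_utils`, where the hunk above shows it defined, and the toy dataset mirrors the docstring examples elsewhere in this PR:

```python
from etna.analysis.eda_utils import acf_plot
from etna.datasets import TSDataset, generate_periodic_df

# Toy dataset; any TSDataset works the same way.
classic_df = generate_periodic_df(periods=100, start_time="2020-01-01", n_segments=4, period=7, sigma=3)
ts = TSDataset(TSDataset.to_dataset(df=classic_df), freq="D")

# Before (removed in this PR):
#   sample_acf_plot(ts=ts, lags=21)
#   sample_pacf_plot(ts=ts, lags=21)
# After: the same plots via acf_plot with the partial flag.
acf_plot(ts=ts, lags=21, partial=False)  # autocorrelation
acf_plot(ts=ts, lags=21, partial=True)   # partial autocorrelation
```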
2 changes: 0 additions & 2 deletions etna/models/__init__.py
@@ -10,8 +10,6 @@
from etna.models.base import PredictionIntervalContextIgnorantAbstractModel
from etna.models.base import PredictionIntervalContextRequiredAbstractModel
from etna.models.base import PredictionIntervalModelType
from etna.models.catboost import CatBoostModelMultiSegment
from etna.models.catboost import CatBoostModelPerSegment
from etna.models.catboost import CatBoostMultiSegmentModel
from etna.models.catboost import CatBoostPerSegmentModel
from etna.models.deadline_ma import DeadlineMovingAverageModel
260 changes: 0 additions & 260 deletions etna/models/catboost.py
@@ -5,7 +5,6 @@
import pandas as pd
from catboost import CatBoostRegressor
from catboost import Pool
from deprecated import deprecated

from etna.models.base import BaseAdapter
from etna.models.base import NonPredictionIntervalContextIgnorantAbstractModel
@@ -360,262 +359,3 @@ def __init__(
**kwargs,
)
)


@deprecated(
reason="CatBoostModelPerSegment is deprecated; will be deleted in etna==2.0. Use CatBoostPerSegmentModel instead."
)
class CatBoostModelPerSegment(CatBoostPerSegmentModel):
"""Class for holding per segment Catboost model.

Warnings
--------
CatBoostModelPerSegment is deprecated; will be deleted in etna==2.0.
Use etna.models.CatBoostPerSegmentModel instead.

Examples
--------
>>> from etna.datasets import generate_periodic_df
>>> from etna.datasets import TSDataset
>>> from etna.models import CatBoostModelPerSegment
>>> from etna.transforms import LagTransform
>>> classic_df = generate_periodic_df(
... periods=100,
... start_time="2020-01-01",
... n_segments=4,
... period=7,
... sigma=3
... )
>>> df = TSDataset.to_dataset(df=classic_df)
>>> ts = TSDataset(df, freq="D")
>>> horizon = 7
>>> transforms = [
... LagTransform(in_column="target", lags=[horizon, horizon+1, horizon+2])
... ]
>>> ts.fit_transform(transforms=transforms)
>>> future = ts.make_future(horizon, transforms=transforms)
>>> model = CatBoostModelPerSegment()
>>> model.fit(ts=ts)
CatBoostModelPerSegment(iterations = None, depth = None, learning_rate = None,
logging_level = 'Silent', l2_leaf_reg = None, thread_count = None, )
>>> forecast = model.forecast(future)
>>> forecast.inverse_transform(transforms)
>>> pd.options.display.float_format = '{:,.2f}'.format
>>> forecast[:, :, "target"]
segment segment_0 segment_1 segment_2 segment_3
feature target target target target
timestamp
2020-04-10 9.00 9.00 4.00 6.00
2020-04-11 5.00 2.00 7.00 9.00
2020-04-12 0.00 4.00 7.00 9.00
2020-04-13 0.00 5.00 9.00 7.00
2020-04-14 1.00 2.00 1.00 6.00
2020-04-15 5.00 7.00 4.00 7.00
2020-04-16 8.00 6.00 2.00 0.00
"""

def __init__(
self,
iterations: Optional[int] = None,
depth: Optional[int] = None,
learning_rate: Optional[float] = None,
logging_level: Optional[str] = "Silent",
l2_leaf_reg: Optional[float] = None,
thread_count: Optional[int] = None,
**kwargs,
):
"""Create instance of CatBoostModelPerSegment with given parameters.

Parameters
----------
iterations:
The maximum number of trees that can be built when solving
machine learning problems. When using other parameters that
limit the number of iterations, the final number of trees
may be less than the number specified in this parameter.
depth:
Depth of the tree. The range of supported values depends
on the processing unit type and the type of the selected loss function:

* CPU — Any integer up to 16.

* GPU — Any integer up to 8 pairwise modes (YetiRank, PairLogitPairwise and
QueryCrossEntropy) and up to 16 for all other loss functions.
learning_rate:
The learning rate. Used for reducing the gradient step.
If None the value is defined automatically depending on the number of iterations.
logging_level:
The logging level to output to stdout.
Possible values:

* Silent — Do not output any logging information to stdout.

* Verbose — Output the following data to stdout:

* optimized metric

* elapsed time of training

* remaining time of training

* Info — Output additional information and the number of trees.

* Debug — Output debugging information.

l2_leaf_reg:
Coefficient at the L2 regularization term of the cost function.
Any positive value is allowed.
thread_count:
The number of threads to use during the training.

* For CPU. Optimizes the speed of execution. This parameter doesn't affect results.
* For GPU. The given value is used for reading the data from the hard drive and does
not affect the training.
During the training one main thread and one thread for each GPU are used.
"""
self.iterations = iterations
self.depth = depth
self.learning_rate = learning_rate
self.logging_level = logging_level
self.l2_leaf_reg = l2_leaf_reg
self.thread_count = thread_count
self.kwargs = kwargs
super().__init__(
iterations=iterations,
depth=depth,
learning_rate=learning_rate,
logging_level=logging_level,
thread_count=thread_count,
l2_leaf_reg=l2_leaf_reg,
**kwargs,
)


@deprecated(
reason="CatBoostModelMultiSegment is deprecated; will be deleted in etna==2.0. "
"Use CatBoostMultiSegmentModel instead."
)
class CatBoostModelMultiSegment(CatBoostMultiSegmentModel):
"""Class for holding Catboost model for all segments.

Warnings
--------
CatBoostModelMultiSegment is deprecated; will be deleted in etna==2.0.
Use etna.models.CatBoostMultiSegmentModel instead.

Examples
--------
>>> from etna.datasets import generate_periodic_df
>>> from etna.datasets import TSDataset
>>> from etna.models import CatBoostModelMultiSegment
>>> from etna.transforms import LagTransform
>>> classic_df = generate_periodic_df(
... periods=100,
... start_time="2020-01-01",
... n_segments=4,
... period=7,
... sigma=3
... )
>>> df = TSDataset.to_dataset(df=classic_df)
>>> ts = TSDataset(df, freq="D")
>>> horizon = 7
>>> transforms = [
... LagTransform(in_column="target", lags=[horizon, horizon+1, horizon+2])
... ]
>>> ts.fit_transform(transforms=transforms)
>>> future = ts.make_future(horizon, transforms=transforms)
>>> model = CatBoostModelMultiSegment()
>>> model.fit(ts=ts)
CatBoostModelMultiSegment(iterations = None, depth = None, learning_rate = None,
logging_level = 'Silent', l2_leaf_reg = None, thread_count = None, )
>>> forecast = model.forecast(future)
>>> forecast.inverse_transform(transforms)
>>> pd.options.display.float_format = '{:,.2f}'.format
>>> forecast[:, :, "target"].round()
segment segment_0 segment_1 segment_2 segment_3
feature target target target target
timestamp
2020-04-10 9.00 9.00 4.00 6.00
2020-04-11 5.00 2.00 7.00 9.00
2020-04-12 -0.00 4.00 7.00 9.00
2020-04-13 0.00 5.00 9.00 7.00
2020-04-14 1.00 2.00 1.00 6.00
2020-04-15 5.00 7.00 4.00 7.00
2020-04-16 8.00 6.00 2.00 0.00
"""

def __init__(
self,
iterations: Optional[int] = None,
depth: Optional[int] = None,
learning_rate: Optional[float] = None,
logging_level: Optional[str] = "Silent",
l2_leaf_reg: Optional[float] = None,
thread_count: Optional[int] = None,
**kwargs,
):
"""Create instance of CatBoostModelMultiSegment with given parameters.

Parameters
----------
iterations:
The maximum number of trees that can be built when solving
machine learning problems. When using other parameters that
limit the number of iterations, the final number of trees
may be less than the number specified in this parameter.
depth:
Depth of the tree. The range of supported values depends
on the processing unit type and the type of the selected loss function:

* CPU — Any integer up to 16.

* GPU — Any integer up to 8 pairwise modes (YetiRank, PairLogitPairwise and
QueryCrossEntropy) and up to 16 for all other loss functions.
learning_rate:
The learning rate. Used for reducing the gradient step.
If None the value is defined automatically depending on the number of iterations.
logging_level:
The logging level to output to stdout.
Possible values:

* Silent — Do not output any logging information to stdout.

* Verbose — Output the following data to stdout:

* optimized metric

* elapsed time of training

* remaining time of training

* Info — Output additional information and the number of trees.

* Debug — Output debugging information.

l2_leaf_reg:
Coefficient at the L2 regularization term of the cost function.
Any positive value is allowed.
thread_count:
The number of threads to use during the training.

* For CPU. Optimizes the speed of execution. This parameter doesn't affect results.
* For GPU. The given value is used for reading the data from the hard drive and does
not affect the training.
During the training one main thread and one thread for each GPU are used.
"""
self.iterations = iterations
self.depth = depth
self.learning_rate = learning_rate
self.logging_level = logging_level
self.l2_leaf_reg = l2_leaf_reg
self.thread_count = thread_count
self.kwargs = kwargs
super().__init__(
iterations=iterations,
depth=depth,
learning_rate=learning_rate,
logging_level=logging_level,
thread_count=thread_count,
l2_leaf_reg=l2_leaf_reg,
**kwargs,
)
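Since the removed classes subclassed the current ones with identical constructor parameters, migration is a rename. A minimal sketch, assuming etna is installed and reusing the setup from the removed docstring examples:

```python
from etna.datasets import TSDataset, generate_periodic_df
from etna.models import CatBoostPerSegmentModel  # replaces the removed CatBoostModelPerSegment
from etna.transforms import LagTransform

classic_df = generate_periodic_df(periods=100, start_time="2020-01-01", n_segments=4, period=7, sigma=3)
ts = TSDataset(TSDataset.to_dataset(df=classic_df), freq="D")

horizon = 7
transforms = [LagTransform(in_column="target", lags=[horizon, horizon + 1, horizon + 2])]
ts.fit_transform(transforms=transforms)
future = ts.make_future(horizon, transforms=transforms)

# Constructor parameters (iterations, depth, learning_rate, ...) are unchanged;
# CatBoostMultiSegmentModel replaces CatBoostModelMultiSegment the same way.
model = CatBoostPerSegmentModel()
model.fit(ts=ts)
forecast = model.forecast(future)
forecast.inverse_transform(transforms)
print(forecast[:, :, "target"])
```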
2 changes: 1 addition & 1 deletion etna/pipeline/autoregressive_pipeline.py
@@ -145,7 +145,7 @@ def _forecast(self) -> TSDataset:
self.model = cast(ContextIgnorantModelType, self.model)
current_ts_forecast = current_ts.make_future(future_steps=current_step, transforms=self.transforms)
current_ts_future = self.model.forecast(ts=current_ts_forecast)

current_ts_future.inverse_transform(self.transforms)
Contributor: Why are we going to make this change?

Contributor: Ok, understood.

prediction_df = prediction_df.combine_first(current_ts_future.to_pandas()[prediction_df.columns])

# construct dataset and add all features
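For context on the merge line in the hunk above, a tiny standalone pandas sketch of the `combine_first` semantics it relies on; the frames and column here are toy stand-ins, not etna objects:

```python
import pandas as pd

idx = pd.date_range("2020-04-10", periods=4, freq="D")
# Predictions accumulated from earlier autoregressive steps (NaN where still unknown).
prediction_df = pd.DataFrame({"target": [9.0, 5.0, None, None]}, index=idx)
# Forecast produced for the current step, already inverse-transformed in the patched code.
step_forecast = pd.DataFrame({"target": [9.1, 5.2, 0.0, 4.0]}, index=idx)

# combine_first keeps existing values and fills only the gaps, mirroring
# prediction_df.combine_first(current_ts_future.to_pandas()[prediction_df.columns]).
prediction_df = prediction_df.combine_first(step_forecast[prediction_df.columns])
print(prediction_df)
```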