
Commit

address review comments
StrikerRUS committed Aug 31, 2021
1 parent 59cb379 commit 07046ec
Showing 3 changed files with 19 additions and 13 deletions.
python-package/lightgbm/engine.py (14 changes: 7 additions & 7 deletions)
@@ -178,7 +178,7 @@ def train(
     params["num_iterations"] = num_boost_round
     # show deprecation warning only for early stop argument, setting early stop via global params should still be possible
     if early_stopping_rounds is not None and early_stopping_rounds > 0:
-        _log_warning("'early_stopping_rounds' argument is deprecated and will be removed in 4.0.0 release. "
+        _log_warning("'early_stopping_rounds' argument is deprecated and will be removed in a future release of LightGBM. "
                      "Pass 'early_stopping()' callback via 'callbacks' argument instead.")
     for alias in _ConfigAliases.get("early_stopping_round"):
         if alias in params:
@@ -237,9 +237,9 @@ def train(

     # Most of legacy advanced options becomes callbacks
     if verbose_eval != "warn":
-        _log_warning("'verbose_eval' argument is deprecated and will be removed in 4.0.0 release. "
+        _log_warning("'verbose_eval' argument is deprecated and will be removed in a future release of LightGBM. "
                      "Pass 'print_evaluation()' callback via 'callbacks' argument instead.")
-    if verbose_eval == "warn":
+    else:
         if callbacks:  # assume user has already specified print_evaluation callback
             verbose_eval = False
         else:
@@ -253,12 +253,12 @@ def train(
         callbacks.add(callback.early_stopping(early_stopping_rounds, first_metric_only, verbose=bool(verbose_eval)))

     if learning_rates is not None:
-        _log_warning("'learning_rates' argument is deprecated and will be removed in 4.0.0 release. "
+        _log_warning("'learning_rates' argument is deprecated and will be removed in a future release of LightGBM. "
                      "Pass 'reset_parameter()' callback via 'callbacks' argument instead.")
         callbacks.add(callback.reset_parameter(learning_rate=learning_rates))

     if evals_result is not None:
-        _log_warning("'evals_result' argument is deprecated and will be removed in 4.0.0 release. "
+        _log_warning("'evals_result' argument is deprecated and will be removed in a future release of LightGBM. "
                      "Pass 'record_evaluation()' callback via 'callbacks' argument instead.")
         callbacks.add(callback.record_evaluation(evals_result))

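For context, the callback-based calls that these deprecation warnings point users toward look roughly like the sketch below. The toy dataset, parameter values, and variable names are illustrative only and are not part of this commit.

import numpy as np
import lightgbm as lgb

# Illustrative data; any regression dataset would do.
X = np.random.rand(500, 10)
y = np.random.rand(500)
train_data = lgb.Dataset(X[:400], label=y[:400])
valid_data = lgb.Dataset(X[400:], label=y[400:], reference=train_data)

evals_result = {}  # filled by record_evaluation() instead of the 'evals_result' argument
booster = lgb.train(
    params={"objective": "regression", "learning_rate": 0.1},
    train_set=train_data,
    num_boost_round=100,
    valid_sets=[valid_data],
    callbacks=[
        lgb.early_stopping(stopping_rounds=10),                          # replaces early_stopping_rounds=10
        lgb.print_evaluation(period=20),                                 # replaces verbose_eval=20
        lgb.record_evaluation(evals_result),                             # replaces evals_result=evals_result
        lgb.reset_parameter(learning_rate=lambda i: 0.1 * (0.99 ** i)),  # replaces learning_rates=...
    ],
)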
@@ -575,7 +575,7 @@ def cv(params, train_set, num_boost_round=100,
             num_boost_round = params.pop(alias)
     params["num_iterations"] = num_boost_round
     if early_stopping_rounds is not None and early_stopping_rounds > 0:
-        _log_warning("'early_stopping_rounds' argument is deprecated and will be removed in 4.0.0 release. "
+        _log_warning("'early_stopping_rounds' argument is deprecated and will be removed in a future release of LightGBM. "
                      "Pass 'early_stopping()' callback via 'callbacks' argument instead.")
     for alias in _ConfigAliases.get("early_stopping_round"):
         if alias in params:
@@ -618,7 +618,7 @@ def cv(params, train_set, num_boost_round=100,
     if early_stopping_rounds is not None and early_stopping_rounds > 0:
         callbacks.add(callback.early_stopping(early_stopping_rounds, first_metric_only, verbose=False))
     if verbose_eval is not None:
-        _log_warning("'verbose_eval' argument is deprecated and will be removed in 4.0.0 release. "
+        _log_warning("'verbose_eval' argument is deprecated and will be removed in a future release of LightGBM. "
                      "Pass 'print_evaluation()' callback via 'callbacks' argument instead.")
     if verbose_eval is True:
         callbacks.add(callback.print_evaluation(show_stdv=show_stdv))
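The cv() hunks above follow the same pattern; a sketch of the equivalent callback-based call, again with illustrative data and values:

import numpy as np
import lightgbm as lgb

X = np.random.rand(500, 10)
y = np.random.rand(500)
train_data = lgb.Dataset(X, label=y)

cv_results = lgb.cv(
    params={"objective": "regression"},
    train_set=train_data,
    num_boost_round=100,
    nfold=5,
    stratified=False,  # continuous target, so no stratified folds
    callbacks=[
        lgb.early_stopping(stopping_rounds=10),  # replaces early_stopping_rounds=10
        lgb.print_evaluation(show_stdv=True),    # replaces verbose_eval=True
    ],
)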
python-package/lightgbm/sklearn.py (16 changes: 11 additions & 5 deletions)
@@ -711,7 +711,7 @@ def _get_meta_data(collection, name, i):
             init_model = init_model.booster_

         if early_stopping_rounds is not None and early_stopping_rounds > 0:
-            _log_warning("'early_stopping_rounds' argument is deprecated and will be removed in 4.0.0 release. "
+            _log_warning("'early_stopping_rounds' argument is deprecated and will be removed in a future release of LightGBM. "
                          "Pass 'early_stopping()' callback via 'callbacks' argument instead.")
             params['early_stopping_rounds'] = early_stopping_rounds

@@ -721,9 +721,9 @@ def _get_meta_data(collection, name, i):
             callbacks = copy.deepcopy(callbacks)

         if verbose != 'warn':
-            _log_warning("'verbose' argument is deprecated and will be removed in 4.0.0 release. "
+            _log_warning("'verbose' argument is deprecated and will be removed in a future release of LightGBM. "
                          "Pass 'print_evaluation()' callback via 'callbacks' argument instead.")
-        if verbose == 'warn':
+        else:
             if callbacks:  # assume user has already specified print_evaluation callback
                 verbose = False
             else:
@@ -748,8 +748,14 @@ def _get_meta_data(collection, name, i):

         if evals_result:
             self._evals_result = evals_result
+        else:  # reset after previous call to fit()
+            self._evals_result = None
+
+        if self._Booster.best_iteration != 0:
+            self._best_iteration = self._Booster.best_iteration
+        else:  # reset after previous call to fit()
+            self._best_iteration = None

-        self._best_iteration = self._Booster.best_iteration
         self._best_score = self._Booster.best_score

         self.fitted_ = True
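The net effect of this hunk on the scikit-learn interface is that _evals_result and _best_iteration are reset on every call to fit() instead of keeping values from a previous fit. A minimal sketch of the resulting behavior, assuming the semantics shown in the diff and in the test update below; the data and estimator settings are illustrative:

import numpy as np
import lightgbm as lgb

X = np.random.rand(500, 10)
y = np.random.rand(500)

model = lgb.LGBMRegressor(n_estimators=50)

# With the early_stopping() callback, Booster.best_iteration is non-zero,
# so best_iteration_ is an int.
model.fit(
    X[:400], y[:400],
    eval_set=[(X[400:], y[400:])],
    callbacks=[lgb.early_stopping(stopping_rounds=5)],
)
print(model.best_iteration_)  # an int, e.g. 37 (value depends on the data)

# Refitting the same estimator without early stopping resets the attribute.
model.fit(X[:400], y[:400])
print(model.best_iteration_)  # None after this change (previously a stale value or 0)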
@@ -817,7 +823,7 @@ def best_score_(self):

     @property
     def best_iteration_(self):
-        """:obj:`int`: The best iteration of fitted model if ``early_stopping()`` callback has been specified."""
+        """:obj:`int` or :obj:`None`: The best iteration of fitted model if ``early_stopping()`` callback has been specified."""
         if self._n_features is None:
             raise LGBMNotFittedError('No best_iteration found. Need to call fit with early_stopping callback beforehand.')
         return self._best_iteration
tests/python_package_test/test_dask.py (2 changes: 1 addition & 1 deletion)
@@ -916,7 +916,7 @@ def test_eval_set_no_early_stopping(task, output, eval_sizes, eval_names_prefix,

     # check that early stopping was not applied.
     assert dask_model.booster_.num_trees() == model_trees
-    assert dask_model.best_iteration_ == 0
+    assert dask_model.best_iteration_ is None

     # checks that evals_result_ and best_score_ contain expected data and eval_set names.
     evals_result = dask_model.evals_result_
