[python] pass params to _InnerPredictor in train and cv and verbose fix (#1628)

* pass params to _InnerPredictor in train and cv

* fixed verbosity param description

* treat silent param as Fatal log level

* create Dataset in refit method silently

* do not overwrite verbose param by silent argument
StrikerRUS committed Sep 6, 2018
1 parent 5442aa9 commit bd3889f
Showing 5 changed files with 14 additions and 12 deletions.
docs/Parameters.rst (2 changes: 1 addition & 1 deletion)

@@ -377,7 +377,7 @@ IO Parameters
 
 - controls the level of LightGBM's verbosity
 
-- ``< 0``: Fatal, ``= 0``: Error (Warn), ``> 0``: Info
+- ``< 0``: Fatal, ``= 0``: Error (Warning), ``= 1``: Info, ``> 1``: Debug
 
 - ``max_bin`` :raw-html:`<a id="max_bin" title="Permalink to this parameter" href="#max_bin">&#x1F517;&#xFE0E;</a>`, default = ``255``, type = int, constraints: ``max_bin > 1``
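For illustration, a minimal sketch of selecting a verbosity level through the Python package (the toy data is hypothetical; the level values follow the table above):

import numpy as np
import lightgbm as lgb

# hypothetical toy data, only to make the sketch runnable
X = np.random.rand(100, 5)
y = np.random.randint(0, 2, 100)

# < 0: Fatal only; = 0: Error (Warning); = 1: Info (the default); > 1: Debug
params = {'objective': 'binary', 'verbosity': -1}
booster = lgb.train(params, lgb.Dataset(X, y), num_boost_round=10)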
include/LightGBM/config.h (2 changes: 1 addition & 1 deletion)

@@ -380,7 +380,7 @@ struct Config {
 
   // alias = verbose
   // desc = controls the level of LightGBM's verbosity
-  // desc = ``< 0``: Fatal, ``= 0``: Error (Warn), ``> 0``: Info
+  // desc = ``< 0``: Fatal, ``= 0``: Error (Warning), ``= 1``: Info, ``> 1``: Debug
  int verbosity = 1;
 
  // check = >1
python-package/lightgbm/basic.py (12 changes: 7 additions & 5 deletions)

@@ -701,8 +701,9 @@ def _lazy_init(self, data, label=None, reference=None,
                 warnings.warn('{0} keyword has been found in `params` and will be ignored. '
                               'Please use {0} argument of the Dataset constructor to pass this parameter.'.format(key))
         self.predictor = predictor
-        if silent:
-            params["verbose"] = 0
+        # user can set verbose with params, it has higher priority
+        if not any(verbose_alias in params for verbose_alias in ('verbose', 'verbosity')) and silent:
+            params["verbose"] = -1
         # get categorical features
         if categorical_feature is not None:
             categorical_indices = set()

@@ -1431,8 +1432,9 @@ def __init__(self, params=None, train_set=None, model_file=None, silent=False):
         self.best_iteration = -1
         self.best_score = {}
         params = {} if params is None else params
-        if silent:
-            params["verbose"] = 0
+        # user can set verbose with params, it has higher priority
+        if not any(verbose_alias in params for verbose_alias in ('verbose', 'verbosity')) and silent:
+            params["verbose"] = -1
         if train_set is not None:
             # Training task
             if not isinstance(train_set, Dataset):

@@ -2118,7 +2120,7 @@ def refit(self, data, label, decay_rate=0.9, **kwargs):
         predictor = self._to_predictor()
         leaf_preds = predictor.predict(data, -1, pred_leaf=True, **kwargs)
         nrow, ncol = leaf_preds.shape
-        train_set = Dataset(data, label)
+        train_set = Dataset(data, label, silent=True)
         new_booster = Booster(self.params, train_set, silent=True)
         # Copy models
         _safe_call(_LIB.LGBM_BoosterMerge(
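A hedged sketch of the resulting Python-side behavior (the toy data is hypothetical): silent now maps to the Fatal level (verbose=-1), an explicit verbose/verbosity entry in params takes priority over it, and refit() builds its internal Dataset silently:

import numpy as np
import lightgbm as lgb

X = np.random.rand(100, 5)
y = np.random.rand(100)

# silent=True alone: the constructor injects verbose=-1 (Fatal log level)
quiet_ds = lgb.Dataset(X, y, silent=True)

# an explicit verbose in params wins; silent=True is not applied
chatty_ds = lgb.Dataset(X, y, params={'verbose': 1}, silent=True)

booster = lgb.train({'objective': 'regression', 'verbose': -1}, quiet_ds)
# refit() now constructs its internal Dataset with silent=True as well
new_booster = booster.refit(X, y, decay_rate=0.9)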
python-package/lightgbm/engine.py (8 changes: 4 additions & 4 deletions)

@@ -118,9 +118,9 @@ def train(params, train_set, num_boost_round=100,
     if num_boost_round <= 0:
         raise ValueError("num_boost_round should be greater than zero.")
     if isinstance(init_model, string_type):
-        predictor = _InnerPredictor(model_file=init_model)
+        predictor = _InnerPredictor(model_file=init_model, pred_parameter=params)
     elif isinstance(init_model, Booster):
-        predictor = init_model._to_predictor()
+        predictor = init_model._to_predictor(dict(init_model.params, **params))
     else:
         predictor = None
     init_iteration = predictor.num_total_iteration if predictor is not None else 0

@@ -415,9 +415,9 @@ def cv(params, train_set, num_boost_round=100,
     if num_boost_round <= 0:
         raise ValueError("num_boost_round should be greater than zero.")
     if isinstance(init_model, string_type):
-        predictor = _InnerPredictor(model_file=init_model)
+        predictor = _InnerPredictor(model_file=init_model, pred_parameter=params)
     elif isinstance(init_model, Booster):
-        predictor = init_model._to_predictor()
+        predictor = init_model._to_predictor(dict(init_model.params, **params))
     else:
         predictor = None
     train_set._update_params(params)._set_predictor(predictor).set_feature_name(feature_name).set_categorical_feature(categorical_feature)
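For context, a minimal continued-training sketch (the file name is hypothetical): with this change, the params given to train() and cv() are forwarded to the _InnerPredictor built from init_model instead of being dropped:

import numpy as np
import lightgbm as lgb

X = np.random.rand(200, 5)
y = np.random.rand(200)
params = {'objective': 'regression', 'verbose': -1}

booster = lgb.train(params, lgb.Dataset(X, y), num_boost_round=5)
booster.save_model('model.txt')  # hypothetical file name

# init_model as a file name: params now reach the predictor as pred_parameter
cont = lgb.train(params, lgb.Dataset(X, y), num_boost_round=5,
                 init_model='model.txt')

# init_model as a Booster: its stored params are merged with the new ones
cont2 = lgb.train(params, lgb.Dataset(X, y), num_boost_round=5,
                  init_model=booster)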
python-package/lightgbm/sklearn.py (2 changes: 1 addition & 1 deletion)

@@ -402,7 +402,7 @@ def fit(self, X, y,
         params = self.get_params()
         # user can set verbose with kwargs, it has higher priority
         if not any(verbose_alias in params for verbose_alias in ('verbose', 'verbosity')) and self.silent:
-            params['verbose'] = 0
+            params['verbose'] = -1
         params.pop('silent', None)
         params.pop('importance_type', None)
         params.pop('n_estimators', None)
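The same priority rule in the scikit-learn wrapper, as a short sketch (the toy data is hypothetical): silent=True now implies verbose=-1 unless verbose or verbosity is passed explicitly through the estimator's **kwargs:

import numpy as np
from lightgbm import LGBMRegressor

X = np.random.rand(100, 5)
y = np.random.rand(100)

quiet = LGBMRegressor(silent=True).fit(X, y)            # Fatal-only logging
loud = LGBMRegressor(silent=True, verbose=1).fit(X, y)  # explicit verbose wins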
