Skip to content

Commit

Permalink
Add warning for empty eval_metric for classification
Browse files Browse the repository at this point in the history
  • Loading branch information
hcho3 committed Sep 30, 2020
1 parent ee77ce7 commit 6954595
Show file tree
Hide file tree
Showing 2 changed files with 38 additions and 0 deletions.
12 changes: 12 additions & 0 deletions src/learner.cc
Expand Up @@ -1031,6 +1031,18 @@ class LearnerImpl : public LearnerIO {
// Build the "[iter]" prefix for the evaluation result line; fixed-point
// formatting applies to all metric values appended afterwards.
std::ostringstream os;
os << '[' << iter << ']' << std::setiosflags(std::ios::fixed);
// No metric configured by the user and the default metric is not disabled:
// fall back to the objective's default metric.  Starting with 1.3.0 that
// default changed for classification objectives, so warn the user first.
if (metrics_.size() == 0 && tparam_.disable_default_eval_metric <= 0) {
// Helper emitting a uniform deprecation-style warning naming the objective
// and the old/new default metric.
auto warn_default_eval_metric = [](const std::string& objective, const std::string& before,
const std::string& after) {
LOG(WARNING) << "Starting in XGBoost 1.3.0, the default evaluation metric used with the "
<< "objective '" << objective << "' was changed from '" << before
<< "' to '" << after << "'. Explicitly set eval_metric if you'd like to "
<< "restore the old behavior.";
};
// binary classification: default moved from classification error to logloss.
if (tparam_.objective == "binary:logistic") {
warn_default_eval_metric(tparam_.objective, "error", "logloss");
// multiclass (both softmax and softprob variants): merror -> mlogloss.
} else if ((tparam_.objective == "multi:softmax" || tparam_.objective == "multi:softprob")) {
warn_default_eval_metric(tparam_.objective, "merror", "mlogloss");
}
// Instantiate the objective's default metric and configure it with the
// currently cached parameters.
metrics_.emplace_back(Metric::Create(obj_->DefaultEvalMetric(), &generic_parameters_));
metrics_.back()->Configure({cfg_.begin(), cfg_.end()});
}
Expand Down
26 changes: 26 additions & 0 deletions tests/python/test_basic.py
Expand Up @@ -9,6 +9,7 @@
import json
from pathlib import Path
import tempfile
import contextlib

dpath = 'demo/data/'
rng = np.random.RandomState(1994)
Expand Down Expand Up @@ -39,6 +40,31 @@ def test_compat(self):
assert lazy_isinstance(a, 'numpy', 'ndarray')
assert not lazy_isinstance(a, 'numpy', 'dataframe')

def test_default_eval_metric_warning(self):
    """Training a classifier without an explicit eval_metric must emit the
    'default metric changed in 1.3.0' warning for binary and multiclass
    objectives."""
    dtrain = xgb.DMatrix(dpath + 'agaricus.txt.train')
    watchlist = [(dtrain, 'train')]
    num_round = 2

    def captured_output(params):
        # Train without eval_metric and return everything printed.  The
        # native LOG(WARNING) appears to be routed through the Python log
        # callback, so redirecting sys.stdout captures it.
        f = StringIO()
        with contextlib.redirect_stdout(f):
            xgb.train(params=params, dtrain=dtrain, num_boost_round=num_round,
                      evals=watchlist, verbose_eval=True)
        return f.getvalue()

    # Binary classification: default metric changed from 'error' to 'logloss'.
    output = captured_output(
        {'max_depth': 2, 'eta': 1, 'objective': 'binary:logistic'})
    assert ("Starting in XGBoost 1.3.0, the default evaluation metric used with the "
            "objective 'binary:logistic' was changed from 'error' to 'logloss'.") in output

    # Multiclass (both variants): default metric changed 'merror' -> 'mlogloss'.
    for objective in ['multi:softmax', 'multi:softprob']:
        output = captured_output(
            {'max_depth': 2, 'eta': 1, 'objective': objective, 'num_class': 2})
        assert ("Starting in XGBoost 1.3.0, the default evaluation metric used with the "
                f"objective '{objective}' was changed from 'merror' to 'mlogloss'.") in output

def test_basic(self):
dtrain = xgb.DMatrix(dpath + 'agaricus.txt.train')
dtest = xgb.DMatrix(dpath + 'agaricus.txt.test')
Expand Down

0 comments on commit 6954595

Please sign in to comment.