Fix missing base_score in xgboost regression (#3616)
* fix xgb regression for tweedie objective

* remove objective

* fix tokenizer test

* update xfail

* fix mistral test for CI

---------

Co-authored-by: connortann <71127464+connortann@users.noreply.github.com>
CloseChoice and connortann committed Apr 30, 2024
1 parent 3892619 commit e2a4a47
Showing 2 changed files with 15 additions and 0 deletions.
1 change: 1 addition & 0 deletions shap/explainers/_tree.py
@@ -1817,6 +1817,7 @@ def __init__(self, xgb_model) -> None:
         # Accounts for number of classes, targets, forest size.
         self.n_trees_per_iter = int(diff[0])
         self.n_targets = n_targets
+        self.base_score = float(learner_model_param["base_score"])
         assert self.n_trees_per_iter > 0

         self.name_obj = objective["name"]
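For context on the added line: the `learner_model_param["base_score"]` field that the loader reads above also appears under the same keys in the booster's saved configuration, where XGBoost records the model's global bias. A minimal sketch of where that field lives, assuming xgboost is installed; the toy model and data are illustrative and not part of the commit:

import json
import numpy as np
import xgboost

X, y = np.random.randn(100, 5), np.random.exponential(size=100)
model = xgboost.XGBRegressor(objective="reg:tweedie").fit(X, y)

# save_config() returns the booster configuration as a JSON string; base_score is
# typically stored as a string under learner -> learner_model_param.
config = json.loads(model.get_booster().save_config())
base_score = float(config["learner"]["learner_model_param"]["base_score"])
print(base_score)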
14 changes: 14 additions & 0 deletions tests/explainers/test_tree.py
@@ -1904,3 +1904,17 @@ def test_catboost_column_names_with_special_characters():
     )
     shap_values = explainer.shap_values(x_train)
     assert np.allclose(shap_values.sum(1) + explainer.expected_value, cb_best.predict_proba(x_train)[:, 1])
+
+
+def test_xgboost_tweedie_regression():
+    xgboost = pytest.importorskip("xgboost")
+
+    X, y = np.random.randn(100, 5), np.random.exponential(size=100)
+    model = xgboost.XGBRegressor(
+        objective="reg:tweedie",
+    )
+    model.fit(X, y)
+    explainer = shap.TreeExplainer(model)
+    shap_values = explainer.shap_values(X)
+
+    assert np.allclose(shap_values.sum(1) + explainer.expected_value, np.log(model.predict(X)), atol=1e-4)
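The `np.log` in the assertion reflects that `reg:tweedie` uses a log link: tree SHAP values are additive on the raw margin scale, and for a log link the margin is the log of the prediction. A sketch of an equivalent check using the margin directly, assuming XGBoost's sklearn-API `predict(..., output_margin=True)`; this snippet is illustrative and not part of the commit:

import numpy as np
import xgboost
import shap

X, y = np.random.randn(100, 5), np.random.exponential(size=100)
model = xgboost.XGBRegressor(objective="reg:tweedie").fit(X, y)

explainer = shap.TreeExplainer(model)
shap_values = explainer.shap_values(X)

# Raw margin (pre-inverse-link output); for reg:tweedie, predict() returns exp(margin).
margin = model.predict(X, output_margin=True)

assert np.allclose(shap_values.sum(1) + explainer.expected_value, margin, atol=1e-4)
assert np.allclose(margin, np.log(model.predict(X)), atol=1e-4)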
