Skip to content

Commit

Permalink
MAINT: Get tests passing with XGBoost 2.0 in test suite (#3257)
Browse files Browse the repository at this point in the history
* Use "Exact" XGBoost tree method

* Cast feature_names to list for XGBoost

* Use exact tree method

* Remove xgboost version pin

* Fix base_score parameter

* Define base_score in basic xgboost scenario

* Increase tolerance for xgboost margin test
  • Loading branch information
connortann committed Oct 5, 2023
1 parent 4a15b1c commit 8f41a7e
Show file tree
Hide file tree
Showing 4 changed files with 9 additions and 5 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ test = [
"pytest",
"pytest-mpl",
"pytest-cov",
"xgboost==1.7.6",
"xgboost",
"lightgbm",
"catboost",
"gpboost",
Expand Down
3 changes: 2 additions & 1 deletion tests/explainers/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,8 @@ def basic_xgboost_scenario(max_samples=None, dataset=shap.datasets.adult):
X = X.values

# train an XGBoost model (but any other model type would also work)
model = xgboost.XGBClassifier()
# Specify some hyperparameters for consistency between xgboost v1.X and v2.X
model = xgboost.XGBClassifier(tree_method="exact", base_score=0.5)
model.fit(X, y)

return model, X
Expand Down
7 changes: 5 additions & 2 deletions tests/explainers/test_tree.py
Original file line number Diff line number Diff line change
Expand Up @@ -463,7 +463,7 @@ def test_provided_background_tree_path_dependent():
xgboost = pytest.importorskip("xgboost")

X, y = shap.datasets.adult(n_points=100)
dtrain = xgboost.DMatrix(X, label=y, feature_names=X.columns)
dtrain = xgboost.DMatrix(X, label=y, feature_names=list(X.columns))

params = {
"booster": "gbtree",
Expand Down Expand Up @@ -1285,7 +1285,9 @@ def test_xgboost_classifier_independent_margin(self):
y = y + abs(min(y))
y = rs.binomial(n=1, p=y / max(y))

model = xgboost.XGBClassifier(n_estimators=10, max_depth=5, random_state=random_seed)
model = xgboost.XGBClassifier(
n_estimators=10, max_depth=5, random_state=random_seed, tree_method="exact"
)
model.fit(X, y)
predicted = model.predict(X, output_margin=True)

Expand All @@ -1305,6 +1307,7 @@ def test_xgboost_classifier_independent_margin(self):
assert np.allclose(
explanation.values.sum(1) + explanation.base_values,
predicted,
atol=1e-7,
)

def test_xgboost_classifier_independent_probability(self, random_seed):
Expand Down
2 changes: 1 addition & 1 deletion tests/plots/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def explainer():
y = y[:100]

# train an XGBoost model (but any other model type would also work)
model = xgboost.XGBClassifier(random_state=0).fit(X, y)
model = xgboost.XGBClassifier(random_state=0, tree_method="exact", base_score=0.5).fit(X, y)

# build an Exact explainer and explain the model predictions on the given dataset
return shap.TreeExplainer(model, X)

0 comments on commit 8f41a7e

Please sign in to comment.