
[python-package] Expose ObjectiveFunction class #6586

Open · wants to merge 18 commits into base: master
Refactor tests
Atanas Dimitrov committed Aug 16, 2024
commit 4d296cf9a4af4e9b797728c4e3a68b5ed38c58dc
43 changes: 19 additions & 24 deletions tests/python_package_test/test_engine.py
@@ -4396,53 +4396,48 @@ def test_quantized_training():


 @pytest.mark.parametrize("use_weight", [False, True])
+@pytest.mark.parametrize("num_boost_round", [5, 15])
 @pytest.mark.parametrize(
-    "test_data",
+    "custom_objective, objective_name, df, num_class",
     [
-        {
-            "custom_objective": mse_obj,
-            "objective_name": "regression",
-            "df": make_synthetic_regression(),
-            "num_class": 1,
-        },
-        {
-            "custom_objective": multiclass_custom_objective,
-            "objective_name": "multiclass",
-            "df": make_blobs(n_samples=100, centers=[[-4, -4], [4, 4], [-4, 4]], random_state=42),
-            "num_class": 3,
-        },
+        (mse_obj, "regression", make_synthetic_regression(), 1),
+        (
+            multiclass_custom_objective,
+            "multiclass",
+            make_blobs(n_samples=100, centers=[[-4, -4], [4, 4], [-4, 4]], random_state=42),
+            3,
+        ),
     ],
 )
-@pytest.mark.parametrize("num_boost_round", [5, 15])
 @pytest.mark.skipif(getenv("TASK", "") == "cuda", reason="Skip due to ObjectiveFunction not exposed for cuda devices.")
Collaborator: Why couldn't this also be exposed for the CUDA implementation?

Contributor Author: It segfaults on the CI tests, and I cannot build the CUDA version on macOS.

Collaborator: Where exactly does it segfault? 🤔 It seems like this should work 😅

-def test_objective_function_class(use_weight, test_data, num_boost_round):
-    X, y = test_data["df"]
+def test_objective_function_class(use_weight, num_boost_round, custom_objective, objective_name, df, num_class):
+    X, y = df
     rng = np.random.default_rng()
     weight = rng.choice([1, 2], y.shape) if use_weight else None
-    lgb_train = lgb.Dataset(X, y, weight=weight, init_score=np.zeros((len(y), test_data["num_class"])))
+    lgb_train = lgb.Dataset(X, y, weight=weight, init_score=np.zeros((len(y), num_class)))

     params = {
         "verbose": -1,
-        "objective": test_data["objective_name"],
-        "num_class": test_data["num_class"],
+        "objective": objective_name,
+        "num_class": num_class,
         "device": "cpu",
     }
-    builtin_loss = builtin_objective(test_data["objective_name"], copy.deepcopy(params))
-    builtin_convert_outputs = lgb.ObjectiveFunction(test_data["objective_name"], copy.deepcopy(params)).convert_outputs
+    builtin_loss = builtin_objective(objective_name, copy.deepcopy(params))
+    builtin_convert_outputs = lgb.ObjectiveFunction(objective_name, copy.deepcopy(params)).convert_outputs

     params["objective"] = builtin_loss
     booster_exposed = lgb.train(params, lgb_train, num_boost_round=num_boost_round)

-    params["objective"] = test_data["objective_name"]
+    params["objective"] = objective_name
     booster = lgb.train(params, lgb_train, num_boost_round=num_boost_round)

-    params["objective"] = test_data["custom_objective"]
+    params["objective"] = custom_objective
     booster_custom = lgb.train(params, lgb_train, num_boost_round=num_boost_round)

     np.testing.assert_allclose(booster_exposed.predict(X), booster.predict(X, raw_score=True))
     np.testing.assert_allclose(booster_exposed.predict(X), booster_custom.predict(X))

     y_pred = np.zeros_like(booster.predict(X, raw_score=True))
-    np.testing.assert_allclose(builtin_loss(y_pred, lgb_train), test_data["custom_objective"](y_pred, lgb_train))
+    np.testing.assert_allclose(builtin_loss(y_pred, lgb_train), custom_objective(y_pred, lgb_train))

     np.testing.assert_allclose(builtin_convert_outputs(booster_exposed.predict(X)), booster.predict(X))
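
For orientation, below is a minimal sketch of the workflow the refactored test exercises, reduced to the regression case. It assumes make_synthetic_regression and builtin_objective are the existing helpers in tests/python_package_test (builtin_objective presumably wraps the exposed objective into a (preds, dataset) -> (grad, hess) callable); only the lgb.ObjectiveFunction(name, params) constructor and its convert_outputs method come from the diff itself, so treat the rest as illustrative rather than the package's documented API.

import copy

import numpy as np
import lightgbm as lgb

# Test helpers assumed from tests/python_package_test (not defined here).
X, y = make_synthetic_regression()
params = {"verbose": -1, "objective": "regression", "num_class": 1, "device": "cpu"}
lgb_train = lgb.Dataset(X, y, init_score=np.zeros((len(y), 1)))

# builtin_objective is assumed to wrap the built-in objective so it can be passed
# wherever a custom objective is expected, i.e. params["objective"] = builtin_loss.
builtin_loss = builtin_objective("regression", copy.deepcopy(params))
convert_outputs = lgb.ObjectiveFunction("regression", copy.deepcopy(params)).convert_outputs

params["objective"] = builtin_loss
booster_exposed = lgb.train(params, lgb_train, num_boost_round=5)

# Training through the exposed objective yields raw scores; convert_outputs maps
# them back to the objective's output scale, matching predict(X) from a booster
# trained with the built-in "regression" objective (see the asserts in the test).
preds = convert_outputs(booster_exposed.predict(X))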