Skip to content

Commit

Permalink
♻️ gdbt -> gbdt
Browse files (browse the repository at this point in the history)
  • Loading branch information
nissy-dev committed Oct 14, 2020
1 parent 84a135d commit 72ab577
Show file tree
Hide file tree
Showing 6 changed files with 18 additions and 18 deletions.
4 changes: 2 additions & 2 deletions deepchem/models/__init__.py
Expand Up @@ -26,7 +26,7 @@

# scikit-learn model
from deepchem.models.sklearn_models import SklearnModel
from deepchem.models.gdbt_models import GDBTModel
from deepchem.models.gbdt_models import GBDTModel

# PyTorch models
try:
Expand All @@ -40,7 +40,7 @@
# Compatibility imports for renamed XGBoost models. Remove below with DeepChem 3.0.
#####################################################################################

from deepchem.models.gdbt_models.gdbt_model import XGBoostModel
from deepchem.models.gbdt_models.gbdt_model import XGBoostModel

########################################################################################
# Compatibility imports for renamed TensorGraph models. Remove below with DeepChem 3.0.
Expand Down
2 changes: 2 additions & 0 deletions deepchem/models/gbdt_models/__init__.py
@@ -0,0 +1,2 @@
# flake8: noqa
from deepchem.models.gbdt_models.gbdt_model import GBDTModel
@@ -1,5 +1,5 @@
"""
Gradient boosting wrapper interface
Gradient Boosting Decision Tree wrapper interface
"""

import os
Expand All @@ -18,8 +18,8 @@
logger = logging.getLogger(__name__)


class GDBTModel(SklearnModel):
"""Wrapper class that wraps GDBT models as DeepChem models.
class GBDTModel(SklearnModel):
"""Wrapper class that wraps GBDT models as DeepChem models.
This class supports LightGBM/XGBoost models.
"""
Expand Down Expand Up @@ -145,10 +145,10 @@ def fit_with_eval(self, train_dataset: Dataset, valid_dataset: Dataset):
#########################################


class XGBoostModel(GDBTModel):
class XGBoostModel(GBDTModel):

def __init__(self, *args, **kwargs):
warnings.warn(
"XGBoostModel is deprecated and has been renamed to GDBTModel.",
"XGBoostModel is deprecated and has been renamed to GBDTModel.",
FutureWarning)
super(XGBoostModel, self).__init__(*args, **kwargs)
2 changes: 0 additions & 2 deletions deepchem/models/gdbt_models/__init__.py

This file was deleted.

Expand Up @@ -29,7 +29,7 @@ def test_xgboost_regression():

xgb_model = xgboost.XGBRegressor(
n_estimators=50, random_state=123, verbose=False)
model = dc.models.GDBTModel(xgb_model, **esr)
model = dc.models.GBDTModel(xgb_model, **esr)

# Fit trained model
model.fit(train_dataset)
Expand Down Expand Up @@ -62,7 +62,7 @@ def test_xgboost_multitask_regression():

def model_builder(model_dir):
xgb_model = xgboost.XGBRegressor(n_estimators=50, seed=123, verbose=False)
return dc.models.GDBTModel(xgb_model, model_dir, **esr)
return dc.models.GBDTModel(xgb_model, model_dir, **esr)

model = dc.models.SingletaskToMultitask(tasks, model_builder)

Expand Down Expand Up @@ -93,7 +93,7 @@ def test_xgboost_classification():
classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score)
esr = {'early_stopping_rounds': 50}
xgb_model = xgboost.XGBClassifier(n_estimators=50, seed=123, verbose=False)
model = dc.models.GDBTModel(xgb_model, **esr)
model = dc.models.GBDTModel(xgb_model, **esr)

# Fit trained model
model.fit(train_dataset)
Expand Down Expand Up @@ -123,7 +123,7 @@ def test_lightgbm_regression():

lgbm_model = lightgbm.LGBMRegressor(
n_estimators=50, random_state=123, silent=True)
model = dc.models.GDBTModel(lgbm_model, **esr)
model = dc.models.GBDTModel(lgbm_model, **esr)

# Fit trained model
model.fit(train_dataset)
Expand Down Expand Up @@ -156,7 +156,7 @@ def test_lightgbm_multitask_regression():

def model_builder(model_dir):
lgbm_model = lightgbm.LGBMRegressor(n_estimators=50, seed=123, silent=True)
return dc.models.GDBTModel(lgbm_model, model_dir, **esr)
return dc.models.GBDTModel(lgbm_model, model_dir, **esr)

model = dc.models.SingletaskToMultitask(tasks, model_builder)

Expand Down Expand Up @@ -187,7 +187,7 @@ def test_lightgbm_classification():
classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score)
esr = {'early_stopping_rounds': 50}
lgbm_model = lightgbm.LGBMClassifier(n_estimators=50, seed=123, silent=True)
model = dc.models.GDBTModel(lgbm_model, **esr)
model = dc.models.GBDTModel(lgbm_model, **esr)

# Fit trained model
model.fit(train_dataset)
Expand Down
6 changes: 3 additions & 3 deletions docs/models.rst
Expand Up @@ -12,7 +12,7 @@ Model Cheatsheet
If you're just getting started with DeepChem, you're probably interested in the
basics. The place to get started is this "model cheatsheet" that lists various
types of custom DeepChem models. Note that some wrappers, like :code:`SklearnModel`
and :code:`GBDTModel`, which wrap external machine learning libraries, are excluded,
but this table is otherwise complete.

As a note about how to read this table, each row describes what's needed to
Expand Down Expand Up @@ -151,10 +151,10 @@ Gradient Boosting Models

Gradient Boosting Models (LightGBM and XGBoost) can be wrapped so they can interact with DeepChem.

GDBTModel
GBDTModel
------------

.. autoclass:: deepchem.models.GDBTModel
.. autoclass:: deepchem.models.GBDTModel
:members:


Expand Down

0 comments on commit 72ab577

Please sign in to comment.