-
Notifications
You must be signed in to change notification settings - Fork 105
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* add tests for cvsetting presets (test_cvsetting) * add xgboost to install requirements * add tests for learnersetting * add tests for metricsetting
- Loading branch information
1 parent
f0be5ef
commit 9f8f696
Showing
5 changed files
with
358 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,101 @@ | ||
from __future__ import absolute_import, print_function, division, unicode_literals | ||
import unittest | ||
from sklearn.datasets import load_iris | ||
from xcessiv import functions | ||
from xcessiv.presets import cvsetting | ||
|
||
|
||
class TestKFold(unittest.TestCase):
    """Validate the ``k_fold`` CV preset: its source must compile into a
    module exposing ``return_splits_iterable`` that yields splits."""

    def setUp(self):
        # Small multiclass dataset shared by the assertions below.
        self.X, self.y = load_iris(return_X_y=True)

    def test_k_fold_source(self):
        mod = functions.import_string_code_as_module(cvsetting.k_fold['source'])
        assert hasattr(mod, 'return_splits_iterable')

        # Exhaust the generator to prove splits are actually produced.
        for _ in mod.return_splits_iterable(self.X, self.y):
            pass

        del mod
|
||
|
||
class TestStratifiedKFold(unittest.TestCase):
    """Validate the ``stratified_k_fold`` CV preset source."""

    def setUp(self):
        self.X, self.y = load_iris(return_X_y=True)

    def test_source(self):
        mod = functions.import_string_code_as_module(
            cvsetting.stratified_k_fold['source']
        )
        assert hasattr(mod, 'return_splits_iterable')

        # Materialize every split; any error in the preset surfaces here.
        for _ in mod.return_splits_iterable(self.X, self.y):
            pass

        del mod
|
||
|
||
class TestShuffleSplit(unittest.TestCase):
    """Validate the ``shuffle_split`` CV preset source."""

    def setUp(self):
        self.X, self.y = load_iris(return_X_y=True)

    def test_source(self):
        mod = functions.import_string_code_as_module(
            cvsetting.shuffle_split['source']
        )
        assert hasattr(mod, 'return_splits_iterable')

        # Consume the iterable to verify the splitter runs end to end.
        for _ in mod.return_splits_iterable(self.X, self.y):
            pass

        del mod
|
||
|
||
class TestStratifiedShuffleSplit(unittest.TestCase):
    """Validate the ``stratified_shuffle_split`` CV preset source."""

    def setUp(self):
        self.X, self.y = load_iris(return_X_y=True)

    def test_source(self):
        mod = functions.import_string_code_as_module(
            cvsetting.stratified_shuffle_split['source']
        )
        assert hasattr(mod, 'return_splits_iterable')

        # Drain the generator so a broken preset fails loudly.
        for _ in mod.return_splits_iterable(self.X, self.y):
            pass

        del mod
|
||
|
||
class TestLeaveOneOut(unittest.TestCase):
    """Validate the ``leave_one_out`` CV preset source."""

    def setUp(self):
        self.X, self.y = load_iris(return_X_y=True)

    def test_source(self):
        mod = functions.import_string_code_as_module(
            cvsetting.leave_one_out['source']
        )
        assert hasattr(mod, 'return_splits_iterable')

        # Only index splits are generated here — no model fitting — so
        # exhausting all 150 leave-one-out splits stays cheap.
        for _ in mod.return_splits_iterable(self.X, self.y):
            pass

        del mod
|
||
|
||
class TestGroupKFold(unittest.TestCase):
    """Validate the ``group_k_fold`` CV preset source."""

    def setUp(self):
        self.X, self.y = load_iris(return_X_y=True)

    def test_source(self):
        mod = functions.import_string_code_as_module(
            cvsetting.group_k_fold['source']
        )
        assert hasattr(mod, 'return_splits_iterable')

        # GroupKFold needs a ``groups`` array; the preset does not supply
        # one, so materializing the splits is expected to raise ValueError.
        gen = mod.return_splits_iterable(self.X, self.y)
        with self.assertRaises(ValueError):
            list(gen)

        del mod
|
||
|
||
class TestTimeSeriesSplit(unittest.TestCase):
    """Validate the ``time_series_split`` CV preset source.

    Bug fix: this test previously imported ``cvsetting.leave_one_out``
    (a copy-paste of TestLeaveOneOut), so the time-series preset was
    never exercised at all. It now loads ``cvsetting.time_series_split``.
    """

    def setUp(self):
        self.X, self.y = load_iris(return_X_y=True)

    def test_source(self):
        module = functions.import_string_code_as_module(
            cvsetting.time_series_split['source']
        )
        assert hasattr(module, 'return_splits_iterable')

        # Materialize the splits to confirm the preset executes cleanly.
        list(module.return_splits_iterable(self.X, self.y))

        del module
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,96 @@ | ||
from __future__ import absolute_import, print_function, division, unicode_literals | ||
import unittest | ||
from sklearn.datasets import load_iris, load_boston | ||
from xcessiv import functions | ||
from xcessiv.presets import learnersetting | ||
|
||
|
||
class TestClassifiers(unittest.TestCase):
    """Every classifier preset must compile into a module whose
    ``base_learner`` exposes the scikit-learn estimator interface and
    fits on the iris dataset."""

    def setUp(self):
        self.X, self.y = load_iris(return_X_y=True)
        self.classifier_settings = [
            'sklearn_random_forest_classifier',
            'sklearn_extra_trees_classifier',
            'sklearn_logistic_regression',
            'sklearn_knn_classifier',
            'sklearn_svm_classifier',
            'sklearn_gaussian_nb',
            'sklearn_adaboost_classifier',
            'xgboost_classifier',
        ]

    def test_learner_settings(self):
        for key in self.classifier_settings:
            setting = getattr(learnersetting, key)
            mod = functions.import_string_code_as_module(setting['source'])

            learner = mod.base_learner
            # Required estimator surface plus the preset's declared
            # meta-feature generator (e.g. predict_proba).
            for attr in ('get_params', 'set_params', 'fit',
                         setting['meta_feature_generator']):
                assert hasattr(learner, attr)

            learner.fit(self.X, self.y)

            del mod
|
||
|
||
class TestRegressors(unittest.TestCase):
    """Every regressor preset must compile into a module whose
    ``base_learner`` exposes the scikit-learn estimator interface and
    fits on the boston housing dataset."""

    def setUp(self):
        # NOTE(review): load_boston was removed in scikit-learn 1.2 —
        # confirm the pinned sklearn version before relying on this.
        self.X, self.y = load_boston(return_X_y=True)
        self.regressor_settings = [
            'sklearn_random_forest_regressor',
            'sklearn_extra_trees_regressor',
            'sklearn_bagging_regressor',
            'sklearn_GP_regressor',
            'sklearn_ridge_regressor',
            'sklearn_lasso_regressor',
            'sklearn_kernel_ridge_regressor',
            'sklearn_knn_regressor',
            'sklearn_svr_regressor',
            'sklearn_decision_tree_regressor',
            'sklearn_linear_regression',
            'sklearn_adaboost_regressor',
            'xgboost_regressor',
        ]

    def test_learner_settings(self):
        for key in self.regressor_settings:
            setting = getattr(learnersetting, key)
            mod = functions.import_string_code_as_module(setting['source'])

            learner = mod.base_learner
            for attr in ('get_params', 'set_params', 'fit',
                         setting['meta_feature_generator']):
                assert hasattr(learner, attr)

            learner.fit(self.X, self.y)

            del mod
|
||
|
||
class TestTransformers(unittest.TestCase):
    """Transformer presets get the same estimator-interface checks as
    the learners: params access, fit, and a meta-feature generator."""

    def setUp(self):
        self.X, self.y = load_boston(return_X_y=True)
        self.transformer_settings = [
            'identity_transformer',
        ]

    def test_learner_settings(self):
        for key in self.transformer_settings:
            setting = getattr(learnersetting, key)
            mod = functions.import_string_code_as_module(setting['source'])

            learner = mod.base_learner
            for attr in ('get_params', 'set_params', 'fit',
                         setting['meta_feature_generator']):
                assert hasattr(learner, attr)

            learner.fit(self.X, self.y)

            del mod
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,160 @@ | ||
from __future__ import absolute_import, print_function, division, unicode_literals | ||
import unittest | ||
import numpy as np | ||
from sklearn.datasets import load_iris, load_breast_cancer, load_boston | ||
from sklearn.linear_model import LogisticRegression, LinearRegression | ||
from sklearn.model_selection import cross_val_predict | ||
from xcessiv import functions | ||
from xcessiv.presets import metricsetting | ||
|
||
|
||
# Shared fixtures: out-of-fold predictions are computed once at import
# time so the individual metric tests below stay cheap. The *_y,
# *_preds and *_probas names are read by every test class in this file.
classifier = LogisticRegression(random_state=8)

# Multiclass problem (iris).
multiclass_X, multiclass_y = load_iris(return_X_y=True)
multiclass_preds = cross_val_predict(classifier, multiclass_X, multiclass_y,
                                     method='predict')
multiclass_probas = cross_val_predict(classifier, multiclass_X, multiclass_y,
                                      method='predict_proba')

# Binary problem (breast cancer).
binary_X, binary_y = load_breast_cancer(return_X_y=True)
binary_preds = cross_val_predict(classifier, binary_X, binary_y,
                                 method='predict')
binary_probas = cross_val_predict(classifier, binary_X, binary_y,
                                  method='predict_proba')

# Regression problem.
# NOTE(review): load_boston was removed in scikit-learn 1.2 — confirm
# the pinned sklearn version before relying on this fixture.
regression_X, regression_y = load_boston(return_X_y=True)
regressor = LinearRegression()
regression_preds = cross_val_predict(regressor, regression_X, regression_y,
                                     method='predict')
|
||
|
||
class TestAccuracyFromScores(unittest.TestCase):
    """``accuracy_from_scores`` must reproduce the known accuracy of the
    shared out-of-fold probability fixtures."""

    def test_source(self):
        mod = functions.import_string_code_as_module(
            metricsetting.accuracy_from_scores['source'])
        metric = mod.metric_generator

        assert np.round(metric(binary_y, binary_probas), 2) == 0.95
        assert np.round(metric(multiclass_y, multiclass_probas), 2) == 0.95

        del mod
|
||
|
||
class TestAccuracyFromPreds(unittest.TestCase):
    """``accuracy_from_preds`` must reproduce the known accuracy of the
    shared out-of-fold class-label fixtures."""

    def test_source(self):
        mod = functions.import_string_code_as_module(
            metricsetting.accuracy_from_preds['source'])
        metric = mod.metric_generator

        assert np.round(metric(binary_y, binary_preds), 2) == 0.95
        assert np.round(metric(multiclass_y, multiclass_preds), 2) == 0.95

        del mod
|
||
|
||
class TestRecallFromScores(unittest.TestCase):
    """``recall_from_scores`` checked against the probability fixtures."""

    def test_source(self):
        mod = functions.import_string_code_as_module(
            metricsetting.recall_from_scores['source'])
        metric = mod.metric_generator

        assert np.round(metric(binary_y, binary_probas), 2) == 0.97
        assert np.round(metric(multiclass_y, multiclass_probas), 2) == 0.95

        del mod
|
||
|
||
class TestRecallFromPreds(unittest.TestCase):
    """``recall_from_preds`` checked against the class-label fixtures."""

    def test_source(self):
        mod = functions.import_string_code_as_module(
            metricsetting.recall_from_preds['source'])
        metric = mod.metric_generator

        assert np.round(metric(binary_y, binary_preds), 2) == 0.97
        assert np.round(metric(multiclass_y, multiclass_preds), 2) == 0.95

        del mod
|
||
|
||
class TestPrecisionFromScores(unittest.TestCase):
    """``precision_from_scores`` checked against the probability fixtures."""

    def test_source(self):
        mod = functions.import_string_code_as_module(
            metricsetting.precision_from_scores['source'])
        metric = mod.metric_generator

        assert np.round(metric(binary_y, binary_probas), 2) == 0.95
        assert np.round(metric(multiclass_y, multiclass_probas), 2) == 0.95

        del mod
|
||
|
||
class TestPrecisionFromPreds(unittest.TestCase):
    """``precision_from_preds`` checked against the class-label fixtures."""

    def test_source(self):
        mod = functions.import_string_code_as_module(
            metricsetting.precision_from_preds['source'])
        metric = mod.metric_generator

        assert np.round(metric(binary_y, binary_preds), 2) == 0.95
        assert np.round(metric(multiclass_y, multiclass_preds), 2) == 0.95

        del mod
|
||
|
||
class TestF1ScoreFromScores(unittest.TestCase):
    """``f1_score_from_scores`` checked against the probability fixtures."""

    def test_source(self):
        mod = functions.import_string_code_as_module(
            metricsetting.f1_score_from_scores['source'])
        metric = mod.metric_generator

        assert np.round(metric(binary_y, binary_probas), 2) == 0.96
        assert np.round(metric(multiclass_y, multiclass_probas), 2) == 0.95

        del mod
|
||
|
||
class TestF1ScoreFromPreds(unittest.TestCase):
    """``f1_score_from_preds`` checked against the class-label fixtures."""

    def test_source(self):
        mod = functions.import_string_code_as_module(
            metricsetting.f1_score_from_preds['source'])
        metric = mod.metric_generator

        assert np.round(metric(binary_y, binary_preds), 2) == 0.96
        assert np.round(metric(multiclass_y, multiclass_preds), 2) == 0.95

        del mod
|
||
|
||
class TestROCAUCFromScores(unittest.TestCase):
    """``roc_auc_score_from_scores`` checked against the probability
    fixtures for both the binary and the multiclass problems."""

    def test_source(self):
        mod = functions.import_string_code_as_module(
            metricsetting.roc_auc_score_from_scores['source'])
        metric = mod.metric_generator

        assert np.round(metric(binary_y, binary_probas), 2) == 0.99
        assert np.round(metric(multiclass_y, multiclass_probas), 2) == 0.99

        del mod
|
||
|
||
class TestMAE(unittest.TestCase):
    """``mae`` preset checked against the regression fixture."""

    def test_source(self):
        mod = functions.import_string_code_as_module(metricsetting.mae['source'])

        assert np.round(
            mod.metric_generator(regression_y, regression_preds), 2) == 6.99

        del mod
|
||
|
||
class TestMSE(unittest.TestCase):
    """``mse`` preset checked against the regression fixture."""

    def test_source(self):
        mod = functions.import_string_code_as_module(metricsetting.mse['source'])

        assert np.round(
            mod.metric_generator(regression_y, regression_preds), 2) == 168.09

        del mod
|
||
|
||
class TestMedianAbsoluteError(unittest.TestCase):
    """``median_absolute_error`` preset checked against the regression
    fixture."""

    def test_source(self):
        mod = functions.import_string_code_as_module(
            metricsetting.median_absolute_error['source'])

        assert np.round(
            mod.metric_generator(regression_y, regression_preds), 2) == 3.72

        del mod
|
||
|
||
class TestR2Score(unittest.TestCase):
    """``r2_score`` preset checked against the regression fixture.

    The expected value is negative because the out-of-fold linear
    regression fits the fixture poorly — that is the pinned behavior.
    """

    def test_source(self):
        mod = functions.import_string_code_as_module(
            metricsetting.r2_score['source'])

        assert np.round(
            mod.metric_generator(regression_y, regression_preds), 2) == -0.99

        del mod
|
||
|
||
class TestExplainedVarianceScore(unittest.TestCase):
    """``explained_variance_score`` preset checked against the regression
    fixture."""

    def test_source(self):
        mod = functions.import_string_code_as_module(
            metricsetting.explained_variance_score['source'])

        assert np.round(
            mod.metric_generator(regression_y, regression_preds), 2) == -0.89

        del mod