Commit

Check if numpy/sklearn error is resolved

mghasemi committed Feb 21, 2019
1 parent 2c17807 commit 0c6ce68
Showing 4 changed files with 21 additions and 15 deletions.
6 changes: 5 additions & 1 deletion SKSurrogate/NpyProximation.py
@@ -348,7 +348,11 @@ def fit(self):
         return aprx
 
 
-from sklearn.base import BaseEstimator, RegressorMixin
+try:
+    from sklearn.base import BaseEstimator, RegressorMixin
+except:
+    BaseEstimator = type('BaseEstimator', (object,), dict())
+    RegressorMixin = type('RegressorMixin', (object,), dict())
 
 
 class HilbertRegressor(BaseEstimator, RegressorMixin):
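The pattern above keeps the module importable when sklearn is missing: the three-argument form of type() builds an ordinary empty class at runtime, so HilbertRegressor still resolves its bases. A minimal sketch of that behavior (illustration only, not repository code):

# type(name, bases, namespace) creates a class object dynamically;
# this line is equivalent to:  class BaseEstimator(object): pass
BaseEstimator = type('BaseEstimator', (object,), dict())


class HilbertRegressor(BaseEstimator):  # base class resolves without sklearn
    pass


print(isinstance(HilbertRegressor(), BaseEstimator))  # True

The trade-off is that the placeholder supplies none of sklearn's fitting, scoring, or get_params machinery; the class merely defines and instantiates, and any sklearn-dependent call will still fail.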
6 changes: 5 additions & 1 deletion SKSurrogate/aml.py
@@ -3,7 +3,11 @@
 ===============================
 """
 
-from sklearn.base import BaseEstimator, TransformerMixin
+try:
+    from sklearn.base import BaseEstimator, TransformerMixin
+except:
+    BaseEstimator = type('BaseEstimator', (object,), dict())
+    TransformerMixin = type('TransformerMixin', (object,), dict())
 
 
 class StackingEstimator(BaseEstimator, TransformerMixin):
6 changes: 5 additions & 1 deletion SKSurrogate/sensapprx.py
@@ -6,7 +6,11 @@
 The core functionality is provided by `SALib <https://github.com/SALib/SALib>`_ .
 """
 
-from sklearn.base import BaseEstimator, TransformerMixin
+try:
+    from sklearn.base import BaseEstimator, TransformerMixin
+except:
+    BaseEstimator = type('BaseEstimator', (object,), dict())
+    TransformerMixin = type('TransformerMixin', (object,), dict())
 
 
 class SensAprx(BaseEstimator, TransformerMixin):
18 changes: 6 additions & 12 deletions SKSurrogate/structsearch.py
@@ -598,21 +598,11 @@ def __init__(self, a, b, **kwargs):


 try:
-    from sklearn.base import clone, is_classifier
-    from sklearn.metrics.scorer import check_scoring
-    from sklearn.model_selection._search import BaseSearchCV, check_cv
-    from sklearn.model_selection._validation import _fit_and_score
+    from sklearn.model_selection._search import BaseSearchCV
     from Optimithon import NumericDiff
 except:
     NumericDiff = type('NumericDiff', (object,), dict(Simple=lambda: 0., ))
-    clone = type('clone', (object,), dict())
-    is_classifier = type('is_classifier', (object,), dict())
-    check_scoring = type('check_scoring', (object,), dict())
     BaseSearchCV = type('BaseSearchCV', (object,), dict())
-    check_cv = type('check_cv', (object,), dict())
-    _fit_and_score = type('_fit_and_score', (object,), dict())
 
-from numpy import inf
-
 
 class SurrogateRandomCV(BaseSearchCV):
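Unlike the plain placeholders, the NumericDiff fallback carries a Simple attribute, since callers reference NumericDiff.Simple. A minimal sketch of what that stub provides (illustration only, not repository code):

# The namespace dict passed to type() becomes class attributes, so the
# stub exposes a callable NumericDiff.Simple that just returns 0.0,
# enough for attribute access and calls not to raise without Optimithon.
NumericDiff = type('NumericDiff', (object,), dict(Simple=lambda: 0.,))

print(NumericDiff.Simple())  # 0.0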
@@ -686,7 +676,7 @@ def __init__(self, estimator, params, scoring=None, fit_params=None, n_jobs=1, i
                  verbose=0, pre_dispatch='2*n_jobs', error_score='raise', return_train_score=True,
                  max_iter=50, min_evals=25, regressor=None, sampling=CompactSample, radius=None, contraction=.95,
                  search_sphere=False, optimizer='scipy', scipy_solver='SLSQP', task_name='optim_task', warm_start=True,
-                 Continue=False, max_itr_no_prog=inf, ineqs=(), init=None,
+                 Continue=False, max_itr_no_prog=10000, ineqs=(), init=None,
                  # Optimithon specific options
                  optimithon_t_method='Cauchy_x', optimithon_dd_method='BFGS', optimithon_ls_method='Backtrack',
                  optimithon_ls_bt_method='Armijo', optimithon_br_func='Carrol', optimithon_penalty=1.e6,
@@ -740,6 +730,10 @@ def fit(self, X, y=None, groups=None, **fit_params):
"""
from random import uniform
from numpy import array, unique, sqrt
from sklearn.base import clone, is_classifier
from sklearn.metrics.scorer import check_scoring
from sklearn.model_selection._search import check_cv
from sklearn.model_selection._validation import _fit_and_score
radius_list = []
self.scorer_ = check_scoring(self.estimator, scoring=self.scoring)
target_classes = []
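This hunk is the second half of the structsearch.py change: clone, is_classifier, check_scoring, check_cv, and _fit_and_score no longer need module-level fallback stubs because they are now imported inside fit(), where sklearn must be present anyway. A minimal sketch of that deferred-import pattern (illustration only; heavylib is a hypothetical optional dependency):

class LazySearch:
    """Importing this module never requires the optional dependency."""

    def fit(self, X, y=None):
        # Deferred import: resolved only when fit() actually runs, so a
        # missing dependency surfaces here rather than at import time.
        from heavylib import helper  # hypothetical optional dependency
        return helper(X, y)

This sidesteps the placeholder classes entirely for names that are only used inside methods.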
