Skip to content

Commit

Permalink
Merge 2b696b8 into 9a8b3c9
Browse files Browse the repository at this point in the history
  • Loading branch information
pizzooid committed May 8, 2019
2 parents 9a8b3c9 + 2b696b8 commit 1474b92
Show file tree
Hide file tree
Showing 4 changed files with 27 additions and 7 deletions.
1 change: 1 addition & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ before_script:

script:
- coverage${PYVERSION} run --source ffx tests/x_square_test.py
- python tests/test_sklearn_api.py

after_success:
- coveralls
23 changes: 18 additions & 5 deletions ffx/api.py
Original file line number Diff line number Diff line change
@@ -1,27 +1,40 @@
from sklearn.base import BaseEstimator, RegressorMixin
from sklearn.utils import check_array, check_X_y
from sklearn.utils.validation import check_is_fitted
from . import core

"""api.py defines user interfaces to FFX. run() runs the complete method.
FFXRegressor is a Scikit-learn style regressor."""
""" api.py defines user interfaces to FFX. run() runs the complete method.
FFXRegressor is a Scikit-learn style regressor.
"""


def run(train_X, train_y, test_X, test_y, varnames=None, verbose=False):
    """Run the complete FFX method and return the list of fitted models.

    Parameters
    ----------
    train_X, train_y : training inputs and targets.
    test_X, test_y : held-out inputs and targets used for model selection.
    varnames : optional list of variable names; None lets the factory
        derive them (e.g. from a DataFrame's columns).
    verbose : if True, the factory prints progress information.

    Returns
    -------
    The models built by ``core.MultiFFXModelFactory``.
    """
    # Thin convenience wrapper: all of the real work happens in core.
    return core.MultiFFXModelFactory().build(train_X, train_y, test_X, test_y,
                                             varnames, verbose)


class FFXRegressor(BaseEstimator, RegressorMixin):
    """Scikit-learn style estimator wrapping the FFX method.

    ``fit`` builds the family of FFX models on (X, y) and keeps the
    last (most complex) one as the prediction model.
    """

    def __init__(self):
        # No hyperparameters; present so sklearn's clone/get_params work.
        pass

    def fit(self, X, y):
        """Fit FFX on (X, y) and store the selected model on self.

        Returns self, per the sklearn estimator contract.
        """
        X, y = check_X_y(X, y, y_numeric=True, multi_output=False)
        # If X is a Pandas DataFrame, we don't have to pass in varnames;
        # otherwise we make up placeholders, one per feature column.
        # NOTE(review): check_X_y returns a numpy array, so the
        # hasattr(X, 'columns') branch is presumably never taken — verify.
        if hasattr(X, 'columns'):
            varnames = None
        else:
            # BUG FIX: len(X) counts rows; varnames must have one entry
            # per feature COLUMN, i.e. X.shape[1].
            varnames = ["X%d" % i for i in range(X.shape[1])]
        # FFX selects among candidate models using a test split; here the
        # training data is reused as the test split.
        self._models = run(X, y, X, y, varnames=varnames)
        self._model = self._models[-1]
        return self

    def predict(self, X):
        """Predict targets for X with the fitted model."""
        check_is_fitted(self, "_model")
        X = check_array(X, accept_sparse=False)
        return self._model.simulate(X)

    def complexity(self):
        # Complexity score of the selected model (FFX-specific extension
        # to the sklearn API).
        return self._model.complexity()

6 changes: 4 additions & 2 deletions ffx/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -702,8 +702,10 @@ def build(self, X, y, ss, varnames=None, verbose=False):
minx, maxx = min(X[:, var_i]), max(X[:, var_i])
rangex = maxx - minx
stepx = 0.8 * rangex / float(ss.num_thrs_per_var + 1)
thrs = numpy.arange(
minx + 0.2 * rangex, maxx - 0.2 * rangex + 0.1 * rangex, stepx)
thrs = [maxx]
if rangex > 0:
thrs = numpy.arange(
minx + 0.2 * rangex, maxx - 0.2 * rangex + 0.1 * rangex, stepx)
for threshold_op in ss.thresholdOps():
for thr in thrs:
nonsimple_base = OperatorBase(
Expand Down
4 changes: 4 additions & 0 deletions tests/test_sklearn_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import numpy as np
import ffx
from sklearn.utils.estimator_checks import check_estimator

# This creates a dataset of 2 predictors
X = np.random.random((20, 2))
Expand All @@ -16,3 +17,6 @@
print("Score:", FFX.score(test_X, test_y))
print("Complexity:", FFX.complexity())
print("Model:", FFX._model)

# This tests the regressor using sklearn's test facilities
check_estimator(ffx.FFXRegressor)

0 comments on commit 1474b92

Please sign in to comment.