
Commit a9d6fd7

fix lint errors
chris-smith-zocdoc committed Nov 12, 2019
1 parent 01e5fda commit a9d6fd7
Showing 2 changed files with 24 additions and 11 deletions.
25 changes: 17 additions & 8 deletions tests/e2e/test_e2e.py
@@ -48,6 +48,7 @@ def classification_binary(model):
CLASSIFICATION,
)


def regression_random(model):
return (
model,
@@ -71,6 +72,7 @@ def classification_binary_random(model):
CLASSIFICATION,
)


# Absolute tolerance. Used in np.isclose to compare 2 values.
# We compare 6 decimal digits.
ATOL = 1.e-6
@@ -84,9 +86,10 @@ def classification_binary_random(model):
SVC_PARAMS = dict(random_state=RANDOM_SEED, decision_function_shape="ovo")

XGBOOST_PARAMS_LARGE = dict(base_score=0.6, n_estimators=100, max_depth=12,
random_state=RANDOM_SEED)
random_state=RANDOM_SEED)
LIGHT_GBM_PARAMS_LARGE = dict(n_estimators=100, num_leaves=100, max_depth=64,
random_state=RANDOM_SEED)
random_state=RANDOM_SEED)


@utils.cartesian_e2e_params(
# These are the languages which support all models specified in the
@@ -108,19 +111,25 @@ def classification_binary_random(model):
classification_binary(lightgbm.LGBMClassifier(**LIGHT_GBM_PARAMS)),
# LightGBM (Large Trees)
regression_random(lightgbm.LGBMRegressor(**LIGHT_GBM_PARAMS_LARGE)),
classification_random(lightgbm.LGBMClassifier(**LIGHT_GBM_PARAMS_LARGE)),
classification_binary_random(lightgbm.LGBMClassifier(**LIGHT_GBM_PARAMS_LARGE)),
regression_random(
lightgbm.LGBMRegressor(**LIGHT_GBM_PARAMS_LARGE)),
classification_random(
lightgbm.LGBMClassifier(**LIGHT_GBM_PARAMS_LARGE)),
classification_binary_random(
lightgbm.LGBMClassifier(**LIGHT_GBM_PARAMS_LARGE)),
# XGBoost
regression(xgboost.XGBRegressor(**XGBOOST_PARAMS)),
classification(xgboost.XGBClassifier(**XGBOOST_PARAMS)),
classification_binary(xgboost.XGBClassifier(**XGBOOST_PARAMS)),
# XGBoost (Large Trees)
regression_random(xgboost.XGBRegressor(**XGBOOST_PARAMS_LARGE)),
classification_random(xgboost.XGBClassifier(**XGBOOST_PARAMS_LARGE)),
classification_binary_random(xgboost.XGBClassifier(**XGBOOST_PARAMS_LARGE)),
regression_random(
xgboost.XGBRegressor(**XGBOOST_PARAMS_LARGE)),
classification_random(
xgboost.XGBClassifier(**XGBOOST_PARAMS_LARGE)),
classification_binary_random(
xgboost.XGBClassifier(**XGBOOST_PARAMS_LARGE)),
# Linear SVM
regression(svm.LinearSVR(random_state=RANDOM_SEED)),
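Note: the test_e2e.py changes above are the kind of routine PEP 8 / flake8 cleanups the commit message points to: a second blank line is added before top-level definitions (E302, "expected 2 blank lines"), and parametrization entries that exceed the line-length limit are wrapped onto a continuation line (E501, "line too long"). A minimal, self-contained sketch of both patterns follows; the names echo the diff, but the values and function body are illustrative, not the repository's own.

# Illustrative sketch only; values here are made up, not from the repo.
PARAMS_LARGE = dict(base_score=0.6, n_estimators=100, max_depth=12,
                    random_state=13)  # continuation kept under 79 chars


def regression_random(model):
    # The two blank lines above satisfy the "expected 2 blank lines"
    # check for top-level definitions.
    return (model, PARAMS_LARGE, "REGRESSION")


print(regression_random("some_model"))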
10 changes: 7 additions & 3 deletions tests/utils.py
@@ -74,6 +74,7 @@ def train_model_classification_binary(estimator, test_fraction=0.1):
return _train_model(estimator, datasets.load_breast_cancer(),
test_fraction)


def train_model_regression_random_data(estimator, test_fraction=0.01):
np.random.seed(seed=7)
N = 1000
@@ -93,7 +94,8 @@ def train_model_classification_random_data(estimator, test_fraction=0.01):
return _train_model(estimator, (data, target), test_fraction)


def train_model_classification_binary_random_data(estimator, test_fraction=0.01):
def train_model_classification_binary_random_data(estimator,
test_fraction=0.01):
np.random.seed(seed=7)
N = 1000

@@ -151,10 +153,12 @@ def verify_python_model_is_expected(model_code, input, expected_output):


def predict_from_commandline(exec_args):
result = subprocess.Popen(exec_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
result = subprocess.Popen(exec_args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = result.communicate()
if result.returncode is not 0:
raise Exception("bad exit code ({}) stderr: {}".format(result.returncode, stderr.decode("utf-8")))
raise Exception("bad exit code ({}) stderr: {}".format(
result.returncode, stderr.decode("utf-8")))

items = stdout.decode("utf-8").strip().split(" ")

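Note: the predict_from_commandline hunk above keeps the pre-existing "if result.returncode is not 0:" check. That identity test only works because CPython caches small integers; the intended comparison is equality, conventionally written "!= 0" (many linters flag identity comparisons against literals, and CPython 3.8+ emits a SyntaxWarning for them). A hypothetical variant of the helper written with the equality check, not part of this commit:

import subprocess


def run_and_check(exec_args):
    # Hypothetical sketch mirroring predict_from_commandline's error
    # handling, but comparing the exit status with "!= 0" rather than
    # the identity test "is not 0".
    result = subprocess.Popen(exec_args, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
    stdout, stderr = result.communicate()
    if result.returncode != 0:
        raise Exception("bad exit code ({}) stderr: {}".format(
            result.returncode, stderr.decode("utf-8")))
    return stdout.decode("utf-8").strip()


# Example usage (assumes an "echo" binary on PATH):
# print(run_and_check(["echo", "1 0"]))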
