
Commit

added test
StrikerRUS committed Mar 17, 2020
1 parent 749304d commit 355d858
Showing 3 changed files with 103 additions and 17 deletions.
10 changes: 5 additions & 5 deletions .travis.yml
@@ -3,14 +3,14 @@ dist: xenial
language: python
python:
- 3.5
- 3.6
- 3.7
# - 3.6
# - 3.7

env:
- TEST=API
- TEST=E2E LANG="c_lang or python or java or go_lang or javascript or php"
- TEST=E2E LANG="c_sharp or visual_basic or powershell"
- TEST=E2E LANG="r_lang or dart"
# - TEST=E2E LANG="c_lang or python or java or go_lang or javascript or php"
# - TEST=E2E LANG="c_sharp or visual_basic or powershell"
# - TEST=E2E LANG="r_lang or dart"

before_install:
- bash .travis/setup.sh
86 changes: 86 additions & 0 deletions tests/assemblers/test_linear.py
@@ -1,6 +1,7 @@
import pytest
import numpy as np
import statsmodels.api as sm
from statsmodels.regression.process_regression import ProcessMLE
from lightning.regression import AdaGradRegressor
from lightning.classification import AdaGradClassifier
from sklearn import linear_model
@@ -321,6 +322,91 @@ def test_statsmodels_unknown_constant_position():
        assembler.assemble()


def test_statsmodels_processmle():
    estimator = utils.StatsmodelsSklearnLikeWrapper(
        ProcessMLE,
        dict(init=dict(exog_scale=np.ones(
            (len(utils.get_regression_model_trainer().y_train), 2)),
            exog_smooth=np.ones(
                (len(utils.get_regression_model_trainer().y_train), 2)),
            exog_noise=np.ones(
                (len(utils.get_regression_model_trainer().y_train), 2)),
            time=np.kron(
                np.ones(len(utils.get_regression_model_trainer().y_train) // 3),
                np.arange(3)),
            groups=np.kron(
                np.arange(len(utils.get_regression_model_trainer().y_train) // 3),
                np.ones(3))),
            fit=dict(maxiter=1)))
    _, __, estimator = utils.get_regression_model_trainer()(estimator)

    assembler = assemblers.ProcessMLEModelAssembler(estimator)
    actual = assembler.assemble()

    feature_weight_mul = [
        ast.BinNumExpr(
            ast.FeatureRef(0),
            ast.NumVal(-0.0932673973),
            ast.BinNumOpType.MUL),
        ast.BinNumExpr(
            ast.FeatureRef(1),
            ast.NumVal(0.0480819091),
            ast.BinNumOpType.MUL),
        ast.BinNumExpr(
            ast.FeatureRef(2),
            ast.NumVal(-0.0063734439),
            ast.BinNumOpType.MUL),
        ast.BinNumExpr(
            ast.FeatureRef(3),
            ast.NumVal(2.7510656855),
            ast.BinNumOpType.MUL),
        ast.BinNumExpr(
            ast.FeatureRef(4),
            ast.NumVal(-3.0836268637),
            ast.BinNumOpType.MUL),
        ast.BinNumExpr(
            ast.FeatureRef(5),
            ast.NumVal(5.9605290000),
            ast.BinNumOpType.MUL),
        ast.BinNumExpr(
            ast.FeatureRef(6),
            ast.NumVal(-0.0077880716),
            ast.BinNumOpType.MUL),
        ast.BinNumExpr(
            ast.FeatureRef(7),
            ast.NumVal(-0.9685365627),
            ast.BinNumOpType.MUL),
        ast.BinNumExpr(
            ast.FeatureRef(8),
            ast.NumVal(0.1688777882),
            ast.BinNumOpType.MUL),
        ast.BinNumExpr(
            ast.FeatureRef(9),
            ast.NumVal(-0.0092446419),
            ast.BinNumOpType.MUL),
        ast.BinNumExpr(
            ast.FeatureRef(10),
            ast.NumVal(-0.3924930042),
            ast.BinNumOpType.MUL),
        ast.BinNumExpr(
            ast.FeatureRef(11),
            ast.NumVal(0.01506511708295605),
            ast.BinNumOpType.MUL),
        ast.BinNumExpr(
            ast.FeatureRef(12),
            ast.NumVal(-0.4177000096),
            ast.BinNumOpType.MUL),
    ]

    expected = assemblers.utils.apply_op_to_expressions(
        ast.BinNumOpType.ADD,
        ast.NumVal(0.0),
        *feature_weight_mul)
    print(actual)
    print(expected)
    assert utils.cmp_exprs(actual, expected)


def test_lightning_regression():
    estimator = AdaGradRegressor(random_state=1)
    utils.get_regression_model_trainer()(estimator)
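For readers who do not work with m2cgen's AST every day: the expected tree assembled above is simply a linear predictor over the 13 mean-structure coefficients of the fitted ProcessMLE model. A minimal illustrative sketch of the equivalent hand-written scoring function (coefficients copied from the expected tree above; the COEFS and score names are illustrative only and not part of the commit):

COEFS = [
    -0.0932673973, 0.0480819091, -0.0063734439, 2.7510656855,
    -3.0836268637, 5.9605290000, -0.0077880716, -0.9685365627,
    0.1688777882, -0.0092446419, -0.3924930042, 0.01506511708295605,
    -0.4177000096,
]


def score(x):
    # Evaluate the linear predictor 0.0 + sum_i(COEFS[i] * x[i])
    # for a single 13-element feature vector x.
    return 0.0 + sum(coef * value for coef, value in zip(COEFS, x))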
24 changes: 12 additions & 12 deletions tests/e2e/test_e2e.py
@@ -280,18 +280,6 @@ def classification_binary_random(model):
    regression(utils.StatsmodelsSklearnLikeWrapper(
        sm.OLS,
        dict(fit_regularized=STATSMODELS_LINEAR_REGULARIZED_PARAMS))),
    regression(utils.StatsmodelsSklearnLikeWrapper(
        sm.QuantReg,
        dict(init=dict(fit_intercept=True)))),
    regression(utils.StatsmodelsSklearnLikeWrapper(
        sm.WLS,
        dict(init=dict(fit_intercept=True, weights=np.arange(
            len(utils.get_regression_model_trainer().y_train)))))),
    regression(utils.StatsmodelsSklearnLikeWrapper(
        sm.WLS,
        dict(init=dict(fit_intercept=True, weights=np.arange(
            len(utils.get_regression_model_trainer().y_train))),
            fit_regularized=STATSMODELS_LINEAR_REGULARIZED_PARAMS))),
    regression(utils.StatsmodelsSklearnLikeWrapper(
        ProcessMLE,
        dict(init=dict(exog_scale=np.ones(
@@ -309,6 +297,18 @@ def classification_binary_random(model):
                len(utils.get_regression_model_trainer().y_train) // 3),
            np.ones(3))),
            fit=dict(maxiter=2)))),
    regression(utils.StatsmodelsSklearnLikeWrapper(
        sm.QuantReg,
        dict(init=dict(fit_intercept=True)))),
    regression(utils.StatsmodelsSklearnLikeWrapper(
        sm.WLS,
        dict(init=dict(fit_intercept=True, weights=np.arange(
            len(utils.get_regression_model_trainer().y_train)))))),
    regression(utils.StatsmodelsSklearnLikeWrapper(
        sm.WLS,
        dict(init=dict(fit_intercept=True, weights=np.arange(
            len(utils.get_regression_model_trainer().y_train))),
            fit_regularized=STATSMODELS_LINEAR_REGULARIZED_PARAMS))),
    # Lightning Linear Regression
    regression(light_reg.AdaGradRegressor(random_state=RANDOM_SEED)),
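Both test files build ProcessMLE's time and groups arguments with the same np.kron pattern. A small worked example (assuming a hypothetical n = 6 training rows split into consecutive groups of 3, not the actual dataset size) shows what those calls produce:

import numpy as np

n = 6
# Repeats 0, 1, 2 within each group: array([0., 1., 2., 0., 1., 2.])
time = np.kron(np.ones(n // 3), np.arange(3))
# Assigns a constant group id to each block of 3: array([0., 0., 0., 1., 1., 1.])
groups = np.kron(np.arange(n // 3), np.ones(3))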
