[MRG] Fix LassoCV cross validation split() call #8973

Merged: 3 commits on Jun 7, 2017
sklearn/linear_model/coordinate_descent.py (2 changes: 1 addition & 1 deletion)
@@ -1150,7 +1150,7 @@ def fit(self, X, y):
         cv = check_cv(self.cv)
 
         # Compute path for all folds and compute MSE to get the best alpha
-        folds = list(cv.split(X))
+        folds = list(cv.split(X, y))
         best_mse = np.inf
 
         # We do a double for loop folded in one, in order to be able to
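For context, the one-line change above forwards the target to the splitter. A minimal sketch (not part of this PR, using synthetic illustration data) of why that matters: splitters that stratify on the target, such as StratifiedKFold, require y in split(), while KFold simply ignores it, so passing y is safe either way.

```python
# Minimal sketch (not part of this PR): why cv.split needs the target.
import numpy as np
from sklearn.model_selection import KFold, StratifiedKFold

X = np.random.rand(20, 3)       # synthetic data for illustration only
y = np.repeat([0, 1], 10)       # two classes, 10 samples each

cv = StratifiedKFold(n_splits=5)
# list(cv.split(X))             # fails: StratifiedKFold.split requires y
folds = list(cv.split(X, y))    # works once y is forwarded, as in this fix
assert len(folds) == 5

# KFold accepts (and ignores) y, so the extra argument is harmless for it:
assert len(list(KFold(n_splits=5).split(X, y))) == 5
```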
sklearn/linear_model/tests/test_coordinate_descent.py (18 changes: 18 additions & 0 deletions)
@@ -175,6 +175,24 @@ def test_lasso_cv():
     assert_greater(clf.score(X_test, y_test), 0.99)
 
 
+def test_lasso_cv_with_some_model_selection():
+    from sklearn.pipeline import make_pipeline
+    from sklearn.preprocessing import StandardScaler
+    from sklearn.model_selection import StratifiedKFold
+    from sklearn import datasets
+    from sklearn.linear_model import LassoCV
+
+    diabetes = datasets.load_diabetes()
+    X = diabetes.data
+    y = diabetes.target
+
+    pipe = make_pipeline(
+        StandardScaler(),
+        LassoCV(cv=StratifiedKFold(n_splits=5))
+    )
+    pipe.fit(X, y)
+
+
 def test_lasso_cv_positive_constraint():
     X, y, X_test, y_test = build_dataset()
     max_iter = 500