New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
[MRG+1] Fixes #10393 Fixed error when fitting RidgeCV with integers #10397
Changes from all commits
9919e65
06b07ef
0160dbf
8b70b85
5bedf02
6c2cc71
063cf37
6e48d91
54a6d00
7ce9ba4
34b1ebe
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -778,6 +778,7 @@ class RidgeClassifier(LinearClassifierMixin, _BaseRidge): | |
a one-versus-all approach. Concretely, this is implemented by taking | ||
advantage of the multi-variate response support in Ridge. | ||
""" | ||
|
||
def __init__(self, alpha=1.0, fit_intercept=True, normalize=False, | ||
copy_X=True, max_iter=None, tol=1e-3, class_weight=None, | ||
solver="auto", random_state=None): | ||
|
@@ -1041,11 +1042,16 @@ def fit(self, X, y, sample_weight=None): | |
scorer = check_scoring(self, scoring=self.scoring, allow_none=True) | ||
error = scorer is None | ||
|
||
if np.any(self.alphas < 0): | ||
raise ValueError("alphas cannot be negative. " | ||
"Got {} containing some " | ||
"negative value instead.".format(self.alphas)) | ||
|
||
for i, alpha in enumerate(self.alphas): | ||
if error: | ||
out, c = _errors(alpha, y, v, Q, QT_y) | ||
out, c = _errors(float(alpha), y, v, Q, QT_y) | ||
else: | ||
out, c = _values(alpha, y, v, Q, QT_y) | ||
out, c = _values(float(alpha), y, v, Q, QT_y) | ||
cv_values[:, i] = out.ravel() | ||
C.append(c) | ||
|
||
|
@@ -1085,7 +1091,7 @@ def __init__(self, alphas=(0.1, 1.0, 10.0), | |
fit_intercept=True, normalize=False, scoring=None, | ||
cv=None, gcv_mode=None, | ||
store_cv_values=False): | ||
self.alphas = alphas | ||
self.alphas = np.asarray(alphas) | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Why do it this way? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. It was suggested a few lines above that I make those changes: to add a conversion in the |
||
self.fit_intercept = fit_intercept | ||
self.normalize = normalize | ||
self.scoring = scoring | ||
|
@@ -1328,6 +1334,7 @@ class RidgeClassifierCV(LinearClassifierMixin, _BaseRidgeCV): | |
a one-versus-all approach. Concretely, this is implemented by taking | ||
advantage of the multi-variate response support in Ridge. | ||
""" | ||
|
||
def __init__(self, alphas=(0.1, 1.0, 10.0), fit_intercept=True, | ||
normalize=False, scoring=None, cv=None, class_weight=None): | ||
super(RidgeClassifierCV, self).__init__( | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Try to get rid of this strange diff.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I don't know; the file in master already has that line. Should I remove it?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
If you actually don't change anything and find it hard to get rid of it, you might just keep it. (Hopefully nothing strange will happen when merging.)