@@ -69,26 +69,25 @@ class KernelPCovC(LinearClassifierMixin, _BaseKPCov):
69
69
If randomized :
70
70
run randomized SVD by the method of Halko et al.
71
71
72
- classifier: {`LogisticRegression`, `LogisticRegressionCV`, `LinearSVC`, `LinearDiscriminantAnalysis`,
73
- `RidgeClassifier`, `RidgeClassifierCV`, `SGDClassifier`, `Perceptron`, `precomputed`}, default=None
72
+ classifier : {instance of `sklearn.svm.SVC`, None}, default=None
74
73
The classifier to use for computing
75
74
the evidence :math:`{\mathbf{Z}}`.
76
75
A pre-fitted classifier may be provided.
76
+ If the classifier is not `None`, its kernel parameters
77
+ (`kernel`, `gamma`, `degree`, and `coef0`)
78
+ must be identical to those passed directly to `KernelPCovC`.
77
79
78
- If None, ``sklearn.linear_model.LogisticRegression()``
79
- is used as the classifier.
80
-
81
- kernel : {"linear", "poly", "rbf", "sigmoid", "cosine", "precomputed"}, default="linear
80
+ kernel : {'linear', 'poly', 'rbf', 'sigmoid', 'precomputed'} or callable, default='rbf'
82
81
Kernel.
83
82
84
- gamma : {'scale', 'auto'} or float, default=None
83
+ gamma : {'scale', 'auto'} or float, default='scale'
85
84
Kernel coefficient for rbf, poly and sigmoid kernels. Ignored by other
86
85
kernels.
87
86
88
87
degree : int, default=3
89
88
Degree for poly kernels. Ignored by other kernels.
90
89
91
- coef0 : float, default=1
90
+ coef0 : float, default=0.0
92
91
Independent term in poly and sigmoid kernels.
93
92
Ignored by other kernels.
94
93
@@ -223,6 +222,27 @@ def __init__(
223
222
self .classifier = classifier
224
223
225
224
def fit (self , X , Y ):
225
+ r"""Fit the model with X and Y.
226
+
227
+ Parameters
228
+ ----------
229
+ X : numpy.ndarray, shape (n_samples, n_features)
230
+ Training data, where n_samples is the number of samples and
231
+ n_features is the number of features.
232
+
233
+ It is suggested that :math:`\mathbf{X}` be centered by its column-
234
+ means and scaled. If features are related, the matrix should be scaled
235
+ to have unit variance, otherwise :math:`\mathbf{X}` should be
236
+ scaled so that each feature has a variance of 1 / n_features.
237
+
238
+ Y : numpy.ndarray, shape (n_samples,)
239
+ Training targets, where n_samples is the number of samples.
240
+
241
+ Returns
242
+ -------
243
+ self : object
244
+ Returns the instance itself.
245
+ """
226
246
X , Y = validate_data (self , X , Y , y_numeric = False )
227
247
check_classification_targets (Y )
228
248
self .classes_ = np .unique (Y )
@@ -347,7 +367,7 @@ def transform(self, X):
347
367
"""Apply dimensionality reduction to X.
348
368
349
369
``X`` is projected on the first principal components as determined by the
350
- modified Kernel PCovR distances.
370
+ modified Kernel PCovC distances.
351
371
352
372
Parameters
353
373
----------
@@ -382,7 +402,31 @@ def inverse_transform(self, T):
382
402
return super ().inverse_transform (T )
383
403
384
404
def decision_function (self , X = None , T = None ):
385
- """Predicts confidence scores from X or T."""
405
+ r"""Predict confidence scores from X or T.
406
+
407
+ .. math::
408
+ \mathbf{Z} = \mathbf{T} \mathbf{P}_{TZ}
409
+ = \mathbf{K} \mathbf{P}_{KT} \mathbf{P}_{TZ}
410
+ = \mathbf{K} \mathbf{P}_{KZ}
411
+
412
+ Parameters
413
+ ----------
414
+ X : ndarray, shape (n_samples, n_features)
415
+ Original data for which we want to get confidence scores,
416
+ where n_samples is the number of samples and n_features is the
417
+ number of features.
418
+
419
+ T : ndarray, shape (n_samples, n_components)
420
+ Projected data for which we want to get confidence scores,
421
+ where n_samples is the number of samples and n_components is the
422
+ number of components.
423
+
424
+ Returns
425
+ -------
426
+ Z : numpy.ndarray, shape (n_samples,) or (n_samples, n_classes)
427
+ Confidence scores. For binary classification, has shape `(n_samples,)`,
428
+ for multiclass classification, has shape `(n_samples, n_classes)`.
429
+ """
386
430
check_is_fitted (self , attributes = ["pkz_" , "ptz_" ])
387
431
388
432
if X is None and T is None :
0 commit comments