)
from sklearn.linear_model._base import LinearClassifierMixin

-from sklearn.base import MultiOutputMixin
from sklearn.multioutput import MultiOutputClassifier
from sklearn.svm import LinearSVC
from sklearn.utils import check_array
@@ -36,8 +35,8 @@ class PCovC(LinearClassifierMixin, _BasePCov):
(1 - \alpha) \mathbf{Z}\mathbf{Z}^T

where :math:`\alpha` is a mixing parameter, :math:`\mathbf{X}` is an input matrix of shape
-:math:`(n_{samples}, n_{features})`, and :math:`\mathbf{Z}` is a matrix of class confidence scores
-of shape :math:`(n_{samples}, n_{classes})`. For :math:`(n_{samples} < n_{features})`,
+:math:`(n_{samples}, n_{features})`, and :math:`\mathbf{Z}` is a tensor of class confidence scores
+of shape :math:`(n_{samples}, n_{classes}, n_{labels})`. For :math:`(n_{samples} < n_{features})`,
this can be more efficiently computed using the eigendecomposition of a modified covariance matrix
:math:`\mathbf{\tilde{C}}`

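For context, a minimal NumPy sketch of the mixing described above, in its sample-space form: ``alpha`` trades the Gram matrix of ``X`` against the class-evidence term ``Z @ Z.T``. Names, shapes, and the eigendecomposition step are illustrative assumptions, not the scikit-matter implementation.

import numpy as np

def mixed_gram(X, Z, alpha=0.5):
    # alpha * X X^T keeps structure from the inputs;
    # (1 - alpha) * Z Z^T keeps structure from the class confidence scores.
    return alpha * X @ X.T + (1 - alpha) * Z @ Z.T

rng = np.random.default_rng(0)
X = rng.normal(size=(10, 4))        # (n_samples, n_features)
Z = rng.normal(size=(10, 3))        # (n_samples, n_classes) confidence scores
K = mixed_gram(X, Z, alpha=0.5)     # (n_samples, n_samples)

# A latent projection T can be built from the leading eigenvectors of this matrix.
evals, evecs = np.linalg.eigh(K)
T = evecs[:, -2:] * np.sqrt(np.clip(evals[-2:], 0.0, None))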
@@ -112,10 +111,10 @@ class PCovC(LinearClassifierMixin, _BasePCov):
- ``sklearn.linear_model.LogisticRegressionCV()``
- ``sklearn.svm.LinearSVC()``
- ``sklearn.discriminant_analysis.LinearDiscriminantAnalysis()``
- - ``sklearn.multioutput.MultiOutputClassifier()``
+ - ``sklearn.linear_model.Perceptron()``
- ``sklearn.linear_model.RidgeClassifier()``
- ``sklearn.linear_model.RidgeClassifierCV()``
- - ``sklearn.linear_model.Perceptron()``
+ - ``sklearn.multioutput.MultiOutputClassifier()``

If a pre-fitted classifier
is provided, it is used to compute :math:`{\mathbf{Z}}`.
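A hedged usage sketch of the pre-fitted case described above, assuming the constructor arguments ``mixing``, ``n_components``, and ``classifier`` keep their current names (the toy data is invented for illustration):

import numpy as np
from sklearn.svm import LinearSVC
from skmatter.decomposition import PCovC

rng = np.random.default_rng(0)
X = rng.normal(size=(60, 5))
y = rng.integers(0, 3, size=60)

# Fit the classifier ourselves, then hand it to PCovC; its coef_ supplies W,
# and the class confidence scores are computed as Z = X @ W.
svc = LinearSVC().fit(X, y)
pcovc = PCovC(mixing=0.5, n_components=2, classifier=svc).fit(X, y)
T = pcovc.transform(X)              # (60, 2) latent-space projection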
@@ -175,11 +174,15 @@ class PCovC(LinearClassifierMixin, _BasePCov):
the projector, or weights, from the input space :math:`\mathbf{X}`
to the latent-space projection :math:`\mathbf{T}`

-pxz_ : ndarray of size :math:`({n_{features}, })`, :math:`({n_{features}, n_{classes}})`
+pxz_ : ndarray of size :math:`(n_{features}, n_{classes})`, or list of
+ndarrays of size :math:`(n_{features}, n_{classes_i})` for a multi-label
+dataset, where :math:`n_{classes_i}` is the number of classes of the :math:`i`-th label.
the projector, or weights, from the input space :math:`\mathbf{X}`
to the class confidence scores :math:`\mathbf{Z}`.

-ptz_ : ndarray of size :math:`({n_{components}, })`, :math:`({n_{components}, n_{classes}})`
+ptz_ : ndarray of size :math:`(n_{components}, n_{classes})`, or list of
+ndarrays of size :math:`(n_{components}, n_{classes_i})` for a multi-label
+dataset, where :math:`n_{classes_i}` is the number of classes of the :math:`i`-th label.
the projector, or weights, from the latent-space projection
:math:`\mathbf{T}` to the class confidence scores :math:`\mathbf{Z}`.

@@ -267,7 +270,7 @@ def fit(self, X, Y, W=None):
Classification weights, optional when classifier is ``precomputed``. If
not passed, it is assumed that the weights will be taken from a
linear classifier fit between :math:`\mathbf{X}` and :math:`\mathbf{Y}`.
-In the multioutput case,
+In the multioutput case, use
``W = np.hstack([est_.coef_.T for est_ in classifier.estimators_])``.
"""
X, Y = validate_data(self, X, Y, multi_output=True, y_numeric=False)
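A small sketch of the multioutput ``W`` recipe quoted in the docstring above, using a pre-fitted ``MultiOutputClassifier``; the data and shapes are invented only to show the stacking.

import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.multioutput import MultiOutputClassifier

rng = np.random.default_rng(0)
X = rng.normal(size=(50, 8))
Y = rng.integers(0, 3, size=(50, 2))      # two labels, three classes each

clf = MultiOutputClassifier(LogisticRegression()).fit(X, Y)

# Each est_.coef_ has shape (n_classes_i, n_features); stacking the transposes
# gives a single W of shape (n_features, sum_i n_classes_i).
W = np.hstack([est_.coef_.T for est_ in clf.estimators_])
print(W.shape)                            # (8, 6)

# With classifier="precomputed", a W built this way could then be passed to fit,
# e.g. PCovC(mixing=0.5, classifier="precomputed").fit(X, Y, W=W)  (hypothetical call).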
@@ -329,15 +332,15 @@ def fit(self, X, Y, W=None):
                W = np.hstack([_.coef_.T for _ in _.estimators_])
            else:
                W = _.coef_.T
-        else:
+        elif W is None:
            self.z_classifier_ = check_cl_fit(classifier, X, Y)
            if multioutput:
                W = np.hstack([est_.coef_.T for est_ in self.z_classifier_.estimators_])
            else:
                W = self.z_classifier_.coef_.T

        Z = X @ W
-
+
        if self.space_ == "feature":
            self._fit_feature_space(X, Y, Z)
        else:
@@ -348,19 +351,12 @@ def fit(self, X, Y, W=None):
        self.classifier_ = clone(classifier).fit(X @ self.pxt_, Y)

        if multioutput:
-            self.ptz_ = np.hstack(
-                [est_.coef_.T for est_ in self.classifier_.estimators_]
-            )
-            # print(f"pxt {self.pxt_.shape}")
-            # print(f"ptz {self.ptz_.shape}")
-            self.pxz_ = self.pxt_ @ self.ptz_
-            # print(f"pxz {self.pxz_.shape}")
+            self.ptz_ = [est_.coef_.T for est_ in self.classifier_.estimators_]
+            self.pxz_ = [self.pxt_ @ ptz for ptz in self.ptz_]
        else:
            self.ptz_ = self.classifier_.coef_.T
-            # print(self.ptz_.shape)
            self.pxz_ = self.pxt_ @ self.ptz_

-        # print(self.ptz_.shape)
        if not multioutput and type_of_target(Y) == "binary":
            self.pxz_ = self.pxz_.reshape(
                X.shape[1],
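A quick NumPy check of the list-of-projectors bookkeeping used in the multioutput branch above; the shapes below are assumptions chosen only to show how ``ptz_`` and ``pxz_`` compose, not values from the test suite.

import numpy as np

n_features, n_components = 8, 3
rng = np.random.default_rng(1)
pxt = rng.normal(size=(n_features, n_components))

# One (n_components, n_classes_i) block per label, mirroring
#   self.ptz_ = [est_.coef_.T for est_ in self.classifier_.estimators_]
ptz = [rng.normal(size=(n_components, 3)), rng.normal(size=(n_components, 2))]

# Mirrors self.pxz_ = [self.pxt_ @ ptz for ptz in self.ptz_]
pxz = [pxt @ block for block in ptz]
print([block.shape for block in pxz])     # [(8, 3), (8, 2)]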
@@ -531,3 +527,4 @@ def score(self, X, y, sample_weight=None):

    # Inherit the docstring from scikit-learn
    score.__doc__ = LinearClassifierMixin.score.__doc__
+