@@ -65,18 +65,18 @@ def predict(self, X):
 
         Returns
         -------
-        Ypred : array, shape = (n_samples, n_outputs)
+        Y_pred : array, shape = (n_samples, n_outputs)
             Predicted outputs per sample.
         """
         # predict multiple outputs
-        Ypred = self._decision_function(X)
+        Y_pred = self._decision_function(X)
         for i in range(self.n_outputs):
             if self.output_types[i] == 'binary':
                 # binarize classification results
-                labels = np.zeros(Ypred[:, i].shape)
-                labels[Ypred[:, i] >= 0.5] = 1
-                Ypred[:, i] = labels
-        return Ypred
+                labels = np.zeros(Y_pred[:, i].shape)
+                labels[Y_pred[:, i] >= 0.5] = 1
+                Y_pred[:, i] = labels
+        return Y_pred
 
     def score(self, X, Y):
         """Returns accuracy for each outputs.
@@ -89,20 +89,20 @@ def score(self, X, Y):
         X : array-like, shape = (n_samples, n_features)
             Sample matrix.
 
-        y : array-like, shape = (n_samples) or (n_samples, n_outputs)
+        Y : array-like, shape = (n_samples, n_outputs)
             True labels for X.
 
         Returns
         -------
         score : list of float, shape (n_outputs,)
-            Mean accuracy of self.predict(X) wrt. Y.
+            Accuracy of self.predict(X) wrt. Y.
         """
-        Ypred = self.predict(X)
+        Y_pred = self.predict(X)
         scores = np.empty((self.n_outputs))
         for i in range(self.n_outputs):
             # accuracy_score for classification
             if self.output_types[i] == 'binary':
-                scores[i] = accuracy_score(Y[:, i], Ypred[:, i])
+                scores[i] = accuracy_score(Y[:, i], Y_pred[:, i])
             # r2_score for regression
             elif self.output_types[i] == 'continuous':
                 scores[i] = r2_score(Y[:, i], Y_pred[:, i])
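
For context, a self-contained sketch of what the per-output loop in score computes: classification accuracy for 'binary' columns and R^2 for 'continuous' ones, one value per output. The data and the hardcoded output_types list below are made up for illustration; only accuracy_score and r2_score come from scikit-learn.

import numpy as np
from sklearn.metrics import accuracy_score, r2_score

# made-up ground truth / predictions: column 0 binary, column 1 continuous
Y_true = np.array([[1, 2.0], [0, 1.5], [1, 0.0], [1, 2.5]])
Y_pred = np.array([[1, 1.8], [0, 1.4], [0, 0.3], [1, 2.6]])
output_types = ['binary', 'continuous']   # stand-in for self.output_types

scores = np.empty(len(output_types))
for i in range(len(output_types)):
    if output_types[i] == 'binary':
        # fraction of exactly matching labels
        scores[i] = accuracy_score(Y_true[:, i], Y_pred[:, i])
    elif output_types[i] == 'continuous':
        # coefficient of determination
        scores[i] = r2_score(Y_true[:, i], Y_pred[:, i])

print(scores)   # one score per output: accuracy ~0.75, R^2 ~0.96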