evaluate.py
import numpy as np
from sklearn.metrics import confusion_matrix


def precision(matrix):
    """Macro-averaged precision: mean of per-class TP / (TP + FP)."""
    num_classes = matrix.shape[0]  # 16 classes in this project's data
    avg_precision = 0
    for i in range(num_classes):
        tp = matrix[i][i]
        predicted_positive = np.sum(matrix[:, i])  # column sum = TP + FP
        if predicted_positive != 0:  # skip classes that are never predicted
            avg_precision += tp / predicted_positive
    return avg_precision / num_classes


def recall(matrix):
    """Macro-averaged recall: mean of per-class TP / (TP + FN)."""
    num_classes = matrix.shape[0]
    avg_recall = 0
    for i in range(num_classes):
        tp = matrix[i][i]
        actual_positive = np.sum(matrix[i, :])  # row sum = TP + FN
        if actual_positive != 0:  # guard against division by zero for empty classes
            avg_recall += tp / actual_positive
    return avg_recall / num_classes


def f1_score(precision, recall):
    """Harmonic mean of the macro-averaged precision and recall."""
    return (2 * precision * recall) / (precision + recall)
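
To see what the helpers above compute, here is a minimal worked example on a hypothetical 2x2 confusion matrix (rows are true labels, columns are predictions; the values are made up purely for illustration):

import numpy as np

# Hypothetical confusion matrix, illustration only.
toy = np.array([[5, 1],
                [2, 4]])

# Class 0: precision = 5/7, recall = 5/6; class 1: precision = 4/5, recall = 4/6.
print(precision(toy))  # (5/7 + 4/5) / 2 ≈ 0.757
print(recall(toy))     # (5/6 + 4/6) / 2 = 0.75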
if __name__ == "__main__":
    # One-hot ground truth and model output vectors.
    y_true = np.load("prediction_files/Y_test.npy")
    y_pred = np.load("prediction_files/Y_pred_inception_resnet.npy")

    # Collapse one-hot / probability vectors to class indices in one shot.
    ground_truth = np.argmax(y_true, axis=1)
    preds = np.argmax(y_pred, axis=1)

    cm = confusion_matrix(ground_truth, preds)
    # print(cm)

    prec = precision(cm)
    rec = recall(cm)
    f1 = f1_score(prec, rec)
    print(prec, rec, f1)
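
As a sanity check, the same macro-averaged numbers can be reproduced with sklearn's built-in scorers (a minimal sketch, assuming the same .npy files as above). One caveat: sklearn's macro F1 averages the per-class F1 scores, which generally differs slightly from the harmonic mean of the macro precision and macro recall that f1_score above computes.

import numpy as np
from sklearn import metrics

y_true = np.argmax(np.load("prediction_files/Y_test.npy"), axis=1)
y_pred = np.argmax(np.load("prediction_files/Y_pred_inception_resnet.npy"), axis=1)

# average="macro" is the unweighted per-class mean used above;
# zero_division=0 mirrors the guard for classes that are never predicted.
print(metrics.precision_score(y_true, y_pred, average="macro", zero_division=0))
print(metrics.recall_score(y_true, y_pred, average="macro", zero_division=0))
# Macro F1 here averages per-class F1, not the harmonic mean of the two
# macro averages, so it can differ slightly from the script's f1 value.
print(metrics.f1_score(y_true, y_pred, average="macro", zero_division=0))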