import numpy as np
from pycocotools import mask as mutils
import sys
- # sys.path.append('add your workspace here ')
+ # sys.path.append('/your/alignshift/path')
import os
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
# from dataset import DeepLesionDataset
# from deeplesion.evaluation.visualize import draw_bounding_boxes_on_image_array
from deeplesion.evaluation.evaluation_metrics import sens_at_FP

+ from deeplesion.evaluation.evaluation_metrics import sens_at_FP, IOU
+ from skimage.measure import regionprops
+ from pycocotools.mask import decode
+
def parse_args():
    parser = argparse.ArgumentParser(description='eval deeplesion')
    # parser.add_argument('config', help='train config file path')
-     parser.add_argument('--config', help='config path')
+     parser.add_argument('--config', default=None, help='config path')
    parser.add_argument('--checkpoint', help='checkpoint path')
    args = parser.parse_args()

@@ -67,23 +71,52 @@ def get_model(cfg_path):
    model.CLASSES = dataset.CLASSES
    return model, data_loadertest

+ # def single_gpu_test(model, data_loader):
+ #     model.eval()
+ #     results = []
+ #     # dataset = data_loader.dataset
+ #     prog_bar = mmcv.ProgressBar(len(data_loader))
+ #     with torch.no_grad():
+ #         for i, data in enumerate(data_loader):
+
+ #             gt_boxes = data.pop('gt_bboxes')
+ #             r = model(return_loss=False, rescale=False, **data)
+ #             # inference_time.append(time.time() - start_time)
+ #             data['gt_boxes'] = gt_boxes
+ #             data['bboxes'] = r[0]
+ #             data['segs'] = r[1]
+ #             # data['img'] = data['img'].data[0][data['img'].data[0].shape[0]//2]
+ #             data.pop('img')
+ #             results.append(data)
+ #             prog_bar.update()
+ #     return results
+
def single_gpu_test(model, data_loader):
    model.eval()
    results = []
-     # dataset = data_loader.dataset
    prog_bar = mmcv.ProgressBar(len(data_loader))
    with torch.no_grad():
+         ann = data_loader.dataset.ann
        for i, data in enumerate(data_loader):
-
            gt_boxes = data.pop('gt_bboxes')
            r = model(return_loss=False, rescale=False, **data)
-             # inference_time.append(time.time() - start_time)
-             data['gt_boxes'] = gt_boxes
-             data['bboxes'] = r[0]
-             data['segs'] = r[1]
-             # data['img'] = data['img'].data[0][data['img'].data[0].shape[0]//2]
-             data.pop('img')
-             results.append(data)
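+             # Assumes a test batch size of 1: unpack only the first sample (j = 0)
+             # of the batch into a per-image result dict, together with its diameter,
+             # spacing, RECIST and slice-interval annotations for later metrics.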
+             j = 0
+             res_dict = {}
+             res_dict['gt_boxes'] = gt_boxes.data[0][j].numpy()
+             res_dict['bboxes'] = r[0][j]
+             res_dict['segs'] = r[1][j]
+             # res_dict['img'] = data['img'].data[j][0][0]
+             dd = ann[i + j]['ann']['diameters']
+             res_dict['diameters'] = dd
+             res_dict['img_meta'] = data['img_meta'].data[0][j]
+             # data.pop('img')
+             res_dict['gt_masks'] = data['gt_masks'].data[0][j]
+             res_dict['spacing'] = ann[i]['ann']['spacing']
+             res_dict['recists'] = ann[i]['ann']['recists']
+             res_dict['thickness'] = ann[i]['ann']['slice_intv']
+             # res_dict['diameter_erro'], res_dict['pred_mask'], res_dict['gt_mask_my'], res_dict['pred_mask_index'] = mask_matrics(res_dict)
+             results.append(res_dict)
+
            prog_bar.update()
    return results
@@ -96,35 +129,83 @@ def write_metrics(outputs, log_path, epoch):
    s5_gt = []
    so_box = []
    so_gt = []
+     so_seg_erro = []
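+     # Group results by slice interval (<= 2 mm, 5 mm, other) so sensitivity at the
+     # chosen average-FP points can be reported per group and overall.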
    for d in outputs:
-         if d['thickness'].data[0] <= 2.:
+         if d['thickness'] <= 2.:
            s1_box.append(np.vstack(d['bboxes']))
-             s1_gt.append(d['gt_boxes'].data[0][0].numpy())
-         elif d['thickness'].data[0] == 5.:
+             s1_gt.append(d['gt_boxes'])
+         elif d['thickness'] == 5.:
            s5_box.append(np.vstack(d['bboxes']))
-             s5_gt.append(d['gt_boxes'].data[0][0].numpy())
+             s5_gt.append(d['gt_boxes'])
        else:
            so_box.append(np.vstack(d['bboxes']))
-             so_gt.append(d['gt_boxes'].data[0][0].numpy())
-
+             so_gt.append(d['gt_boxes'])
+             # so_seg_erro.extend(d['diameter_erro'])
    sens1 = sens_at_FP(s1_box, s1_gt, avgFP, iou_th)
    sens2 = sens_at_FP(s5_box, s5_gt, avgFP, iou_th)
    sens = sens_at_FP(s1_box + s5_box + so_box, s1_gt + s5_gt + so_gt, avgFP, iou_th)
-     s = str(epoch) + ':\t' + str(sens) + '\t' + str(sens1) + '\t' + str(sens2) + '\t align srl\n'
+
+     # so_seg_erro = np.array(so_seg_erro)
+     # diameter_erro = so_seg_erro[so_seg_erro>-1].mean()
+     s = str(epoch) + ':\t' + str(sens) + '\t' + str(sens1) + '\t' + str(sens2) + f'\t\n'  # diameter_erro:{diameter_erro}\n'
    print(s)
-     with open(log_path, 'a+') as f:
-         f.write(s)
+     # with open(log_path,'a+') as f:
+     #     f.write(s)
    return s

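+ # For each GT lesion, take the highest-IoU predicted box; if it clears iou_thresh,
+ # decode the predicted RLE mask and compare its major/minor axis lengths (from
+ # skimage regionprops) with the RECIST long/short diameters, scaled to mm by the
+ # pixel spacing.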
+ def mask_matrics(output, iou_thresh=0.5):
+     erro = [-1] * len(output['gt_boxes'])
+     pred_mask = []
+     gt_mask = []
+     pred_mask_index = []
+     pred_mask_contours = []
+     for i, box in enumerate(output['gt_boxes']):
+         iou1 = IOU(box, output['bboxes'])
+         if len(iou1) == 0:
+             continue
+         indx = iou1.argmax()
+         try:
+             if iou1[indx] > iou_thresh:
+                 d_seg = decode(output['segs'][indx])
+                 pred_mask.append(output['segs'][indx])
+                 l_seg = output['gt_masks'][i].astype(np.uint8)
+                 gt_mask.append(l_seg)
+                 # _, cnts = cv2.findContours(l_seg, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
+                 pred_mask_index.append(indx)
+                 prop = regionprops(d_seg)[0]
+                 diameter = np.sqrt(prop.major_axis_length ** 2 + prop.minor_axis_length ** 2) / 2
+                 recists = output['recists'][i].reshape((4, 2))
+                 l = np.linalg.norm(recists[0] - recists[1])
+                 s = np.linalg.norm(recists[2] - recists[3])
+                 diameter1 = np.sqrt(l ** 2 + s ** 2) / 2
+
+                 erro[i] = (abs(prop.major_axis_length - max(l, s)) + abs(prop.minor_axis_length - min(l, s))) * output['spacing']
+                 # erro[i] = np.abs(diameter1 - diameter) * output['spacing']
+         except Exception as e:
+             print(e)
+             print(len(output['segs']), indx, i, output['bboxes'].shape, decode(output['segs'][indx]).sum())
+
+     return erro, pred_mask, gt_mask, pred_mask_index
+
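+ # Build the model and data loader from the config, load the checkpoint, run
+ # single-GPU inference, and append the sensitivity summary to the log files.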
+ def main(checkpoint, cfg_path=None):
+     if cfg_path is None:
+         cfg_path = generate_cfg(checkpoint)
+     print(cfg_path)
+     model, dl = get_model(cfg_path)
+     log_path = './log/metrix_log.txt'
+     load_checkpoint(model, checkpoint, map_location='cpu', strict=True)
+     outputs = single_gpu_test(model, dl)
+     r = write_metrics(outputs, log_path, 'N/A')
+     # save_output(outputs, os.path.basename(os.path.dirname(checkpoint))+'.pkl')
+     with open(log_path, 'a+') as f:
+         f.write(checkpoint + ':\n' + r)
+     with open(os.path.join(os.path.dirname(checkpoint), 'metrics_log.txt'), 'a+') as f:
+         f.write(r)
+     print(r)
+
if __name__ == '__main__':
    # checkpoint_path = f'/mnt/data3/deeplesion/dl/work_dirs/densenet_3d_acs_r2/latest.pth'
    args = parse_args()
    checkpoint = args.checkpoint
    cfg_path = args.config  # generate_cfg(checkpoint)
-     model, dl = get_model(cfg_path)
-     # log_path = checkpoint.replace('latest.pth', 'metrix_log.txt')
-     log_path = '/mnt/data3/alignconv/logs/metrix_log.txt'
-     load_checkpoint(model, checkpoint, map_location='cpu', strict=True)
-     outputs = single_gpu_test(model, dl)
-     r = write_metrics(outputs, log_path, 'N/A')
-     print(r)
+     main(checkpoint, cfg_path)
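+     # Example invocation (script name and paths are placeholders):
+     #   python eval.py --config configs/deeplesion_cfg.py --checkpoint work_dirs/latest.pth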