@@ -13,6 +13,8 @@ def findLocalMax(corrMap, score_threshold=0.6):
    '''
    Get coordinates of the local maximas with values above a threshold in the image of the correlation map
    '''
+    # Get back an array if UMat provided
+    if isinstance(corrMap, cv2.UMat): corrMap = corrMap.get()

    # IF depending on the shape of the correlation map
    if corrMap.shape == (1,1): ## Template size = Image size -> Correlation map is a single digit')
@@ -64,7 +66,49 @@ def computeScoreMap(template, image, method=cv2.TM_CCOEFF_NORMED):
    return cv2.matchTemplate(template, image, method)


-def findMatches(listTemplates, image, method=cv2.TM_CCOEFF_NORMED, N_object=float("inf"), score_threshold=0.5, searchBox=None):
+def checkTypes(listTemplates, image, useOpencl=False):
+    '''
+    Check that the templates and the image have the same bit depth, and that it is 8 or 32-bit'''
+    templatesType = list( set( [template[1].dtype for template in listTemplates] ) ) # get a list of unique template types
+
+    if (image.dtype == "float64") or ("float64" in templatesType):
+        raise ValueError("64-bit not supported, max 32-bit")
+
+    all8 = image.dtype == "uint8" and templatesType == ["uint8"]
+    all32 = image.dtype == "float32" and templatesType == ["float32"]
+
+    if all8 or all32:
+
+        if useOpencl:
+            listTemplates = [ (template[0], cv2.UMat(template[1])) for template in listTemplates ]
+            image = cv2.UMat(image)
+
+        else:
+            pass # images are either all 8-bit or all 32-bit and no need to convert to UMat
+
+
+    else:
+        # Create a lambda function for conversion
+        if useOpencl:
+            convert32 = lambda array: cv2.UMat( np.float32(array) )
+        else:
+            convert32 = lambda array: np.float32(array) # cast to 32-bit only, no UMat without OpenCL
+
+        # convert to 32-bit + UMat if necessary
+        listTemplates = [ (template[0], convert32(template[1])) for template in listTemplates ]
+        image = convert32(image)
+
+    return listTemplates, image
+
+
+
+def findMatches(listTemplates,
+                image,
+                method=cv2.TM_CCOEFF_NORMED,
+                N_object=float("inf"),
+                score_threshold=0.5,
+                searchBox=None,
+                useOpencl=False):
    '''
    Find all possible templates locations provided a list of template to search and an image
    Parameters
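For illustration only, here is a minimal sketch (not part of the commit) of what the new checkTypes helper is meant to do when the inputs have mixed bit depths; the template label and array sizes below are made-up test data:

    import numpy as np
    import cv2

    # hypothetical inputs: an 8-bit template searched inside a 32-bit image
    template = np.random.randint(0, 255, (16, 16), dtype=np.uint8)
    image    = np.random.rand(128, 128).astype(np.float32)

    listTemplates, image = checkTypes([("templateA", template)], image, useOpencl=False)
    print(listTemplates[0][1].dtype, image.dtype)  # both float32 after harmonisation

With useOpencl=True the same call would additionally wrap both arrays in cv2.UMat, so that the subsequent cv2.matchTemplate calls can go through OpenCV's OpenCL path.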
@@ -98,17 +142,20 @@ def findMatches(listTemplates, image, method=cv2.TM_CCOEFF_NORMED, N_object=floa
        image = image[yOffset:yOffset+searchHeight, xOffset:xOffset+searchWidth]
    else:
        xOffset = yOffset = 0
+
+    listTemplates, image = checkTypes(listTemplates, image, useOpencl) # also convert to UMat if using opencl

    listHit = []
    for templateName, template in listTemplates:

        #print('\nSearch with template : ',templateName)

-        corrMap = computeScoreMap(template, image, method)
+        corrMap = cv2.matchTemplate(template, image, method) # automatically run with opencl if provided a UMat

        ## Find possible location of the object
        if N_object == 1: # Detect global Min/Max
            minVal, maxVal, minLoc, maxLoc = cv2.minMaxLoc(corrMap)
+            if isinstance(corrMap, cv2.UMat): corrMap = corrMap.get()

            if method == 1:
                Peaks = [minLoc[::-1]] # opposite sorting than in the multiple detection
@@ -117,7 +164,9 @@ def findMatches(listTemplates, image, method=cv2.TM_CCOEFF_NORMED, N_object=floa
                Peaks = [maxLoc[::-1]]


-        else:# Detect local max or min
+        else: # Detect local max or min
+            if isinstance(corrMap, cv2.UMat): corrMap = corrMap.get()
+
            if method == 1: # Difference => look for local minima
                Peaks = findLocalMin(corrMap, score_threshold)

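A brief note on the .get() calls introduced above: when findMatches is given UMat inputs, cv2.matchTemplate returns the correlation map as a cv2.UMat (backed by OpenCL memory when available), while the local-extrema search works on a plain NumPy array, so the map is downloaded first. A standalone sketch of that round trip, with arbitrary array sizes:

    import numpy as np
    import cv2

    image = cv2.UMat(np.random.rand(64, 64).astype(np.float32))
    tmpl  = cv2.UMat(np.random.rand(8, 8).astype(np.float32))

    corrMap = cv2.matchTemplate(image, tmpl, cv2.TM_CCOEFF_NORMED)  # comes back as a cv2.UMat
    if isinstance(corrMap, cv2.UMat):
        corrMap = corrMap.get()  # download to a numpy array before peak detection
    print(type(corrMap), corrMap.shape)  # <class 'numpy.ndarray'> (57, 57)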
@@ -130,7 +179,7 @@ def findMatches(listTemplates, image, method=cv2.TM_CCOEFF_NORMED, N_object=floa

        # Once every peak was detected for this given template
        ## Create a dictionnary for each hit with {'TemplateName':, 'BBox': (x,y,Width, Height), 'Score':coeff}
-
+        if isinstance(template, cv2.UMat): template = template.get() # get back to array from UMat
        height, width = template.shape[0:2] # slicing make sure it works for RGB too

        for peak in Peaks:
@@ -143,7 +192,14 @@ def findMatches(listTemplates, image, method=cv2.TM_CCOEFF_NORMED, N_object=floa
    return pd.DataFrame(listHit) # All possible hits before Non-Maxima Supression


-def matchTemplates(listTemplates, image, method=cv2.TM_CCOEFF_NORMED, N_object=float("inf"), score_threshold=0.5, maxOverlap=0.25, searchBox=None):
+def matchTemplates(listTemplates,
+                   image,
+                   method=cv2.TM_CCOEFF_NORMED,
+                   N_object=float("inf"),
+                   score_threshold=0.5,
+                   maxOverlap=0.25,
+                   searchBox=None,
+                   useOpencl=False):
    '''
    Search each template in the image, and return the best N_object location which offer the best score and which do not overlap
    Parameters
@@ -174,7 +230,13 @@ def matchTemplates(listTemplates, image, method=cv2.TM_CCOEFF_NORMED, N_object=f
    if maxOverlap < 0 or maxOverlap > 1:
        raise ValueError("Maximal overlap between bounding box is in range [0-1]")

-    tableHit = findMatches(listTemplates, image, method, N_object, score_threshold, searchBox)
+    tableHit = findMatches(listTemplates,
+                           image,
+                           method,
+                           N_object,
+                           score_threshold,
+                           searchBox,
+                           useOpencl)

    if method == 1: bestHits = NMS(tableHit, N_object=N_object, maxOverlap=maxOverlap, sortAscending=True)

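To close, a hedged end-to-end usage sketch of the new useOpencl flag; the file names, the "objectA" label and the parameter values are illustrative assumptions only, not defined by this commit:

    import cv2

    image    = cv2.imread("image.png",    cv2.IMREAD_GRAYSCALE)
    template = cv2.imread("template.png", cv2.IMREAD_GRAYSCALE)

    hits = matchTemplates([("objectA", template)],
                          image,
                          method=cv2.TM_CCOEFF_NORMED,
                          N_object=5,
                          score_threshold=0.6,
                          maxOverlap=0.25,
                          useOpencl=True)  # inputs get wrapped in cv2.UMat by checkTypes
    print(hits)  # expected: a DataFrame of hits with TemplateName, BBox and Score

When useOpencl is False the behaviour is unchanged from the previous version, apart from the new 8/32-bit type check performed by checkTypes.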