@@ -16,10 +16,10 @@ def compare_histograms(source, capture):
16
16
17
17
source_hist = cv2 .calcHist ([source ], [0 , 1 , 2 ], None , [8 , 8 , 8 ], [0 , 256 , 0 , 256 , 0 , 256 ])
18
18
capture_hist = cv2 .calcHist ([capture ], [0 , 1 , 2 ], None , [8 , 8 , 8 ], [0 , 256 , 0 , 256 , 0 , 256 ])
19
-
19
+
20
20
cv2 .normalize (source_hist , source_hist )
21
21
cv2 .normalize (capture_hist , capture_hist )
22
-
22
+
23
23
return 1 - cv2 .compareHist (source_hist , capture_hist , cv2 .HISTCMP_BHATTACHARYYA )
24
24
25
25
def compare_histograms_masked (source , capture , mask ):
@@ -34,10 +34,10 @@ def compare_histograms_masked(source, capture, mask):
34
34
"""
35
35
source_hist = cv2 .calcHist ([source ], [0 , 1 , 2 ], mask , [8 , 8 , 8 ], [0 , 256 , 0 , 256 , 0 , 256 ])
36
36
capture_hist = cv2 .calcHist ([capture ], [0 , 1 , 2 ], mask , [8 , 8 , 8 ], [0 , 256 , 0 , 256 , 0 , 256 ])
37
-
37
+
38
38
cv2 .normalize (source_hist , source_hist )
39
39
cv2 .normalize (capture_hist , capture_hist )
40
-
40
+
41
41
return 1 - cv2 .compareHist (source_hist , capture_hist , cv2 .HISTCMP_BHATTACHARYYA )
42
42
43
43
def compare_l2_norm (source , capture ):
@@ -51,10 +51,10 @@ def compare_l2_norm(source, capture):
51
51
"""
52
52
53
53
error = cv2 .norm (source , capture , cv2 .NORM_L2 )
54
-
54
+
55
55
# The L2 Error is summed across all pixels, so this normalizes
56
56
max_error = (source .size ** 0.5 ) * 255
57
-
57
+
58
58
return 1 - (error / max_error )
59
59
60
60
def compare_l2_norm_masked (source , capture , mask ):
@@ -73,6 +73,8 @@ def compare_l2_norm_masked(source, capture, mask):
73
73
# The L2 Error is summed across all pixels, so this normalizes
74
74
max_error = (3 * numpy .count_nonzero (mask ) * 255 * 255 ) ** 0.5
75
75
76
+ if not max_error :
77
+ return 0
76
78
return 1 - (error / max_error )
77
79
78
80
def compare_template (source , capture ):
@@ -117,7 +119,7 @@ def compare_phash(source, capture):
117
119
"""
118
120
Compares the pHash of the two given images and returns the similarity between
119
121
the two.
120
-
122
+
121
123
@param source: Image of any given shape as a numpy array
122
124
@param capture: Image of any given shape as a numpy array
123
125
@return: The similarity between the hashes of the image as a number 0 to 1.
@@ -135,7 +137,7 @@ def compare_phash_masked(source, capture, mask):
135
137
"""
136
138
Compares the pHash of the two given images and returns the similarity between
137
139
the two.
138
-
140
+
139
141
@param source: Image of any given shape as a numpy array
140
142
@param capture: Image of any given shape as a numpy array
141
143
@param mask: An image matching the dimensions of the source, but 1 channel grayscale
@@ -149,11 +151,27 @@ def compare_phash_masked(source, capture, mask):
149
151
# the same
150
152
source = cv2 .bitwise_and (source , source , mask = mask )
151
153
capture = cv2 .bitwise_and (capture , capture , mask = mask )
152
-
154
+
153
155
source = Image .fromarray (source )
154
156
capture = Image .fromarray (capture )
155
157
156
158
source_hash = imagehash .phash (source )
157
159
capture_hash = imagehash .phash (capture )
158
160
159
- return 1 - ((source_hash - capture_hash )/ 64.0 )
161
+ if not source_hash + capture_hash :
162
+ return 0
163
+ return 1 - ((source_hash - capture_hash ) / 64.0 )
164
+
165
+
166
def checkIfImageHasTransparency(image):
    """
    Check whether the given image has a usable transparency (alpha) channel.

    @param image: Image as a numpy array, expected shape (height, width, channels)
    @return: True if the image has a 4th (alpha) channel and at least one pixel
             is not fully opaque (alpha < 255), False otherwise
    """
    # Without a 4th channel there is no alpha information at all
    if image.shape[2] != 4:
        return False
    # Mean of the alpha channel: 255 means every pixel is fully opaque.
    # Using the ndarray method avoids depending on how numpy was imported.
    mean = image[:, :, 3].mean()
    if mean == 255:
        # Fully opaque image: the non-transparent code path is usually
        # faster and simpler, so report no transparency
        return False
    # TODO error message if all pixels are transparent (mean == 0)
    # (the image appears as all black in windows, so it's not obvious for the user what they did wrong)
    return True
0 commit comments