From 6968d3d0574e346ae5d14b16d0a644ff1894659f Mon Sep 17 00:00:00 2001
From: SpeedProg <speedprog@googlemail.com>
Date: Fri, 06 Sep 2019 17:31:44 +0000
Subject: [PATCH] Compute the set hash with a fixed hash_size of 64 instead of reusing the full-card hash size
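
The set-symbol hash is now computed with imagehash's whash() at a fixed
hash_size of 64 and stored in the single 'set_hash_64' column, independent of
the per-size card hashes, and the set-symbol crop is tightened to
card_img[595:635, 600:690].

Minimal sketch of the idea (not code from this patch; the file name
'card.jpg' and the second hash below are placeholders assumed purely for
illustration):

    # Sketch: fixed-size wavelet hash of the set-symbol crop, plus the
    # Hamming-distance comparison used to rank candidate sets.
    import cv2
    import imagehash as ih
    from PIL import Image

    SET_HASH_SIZE = 64  # fixed, independent of the per-card hash sizes

    card_img = cv2.imread('card.jpg')        # full card image (BGR); placeholder path
    set_img = card_img[595:635, 600:690]     # set-symbol region of the card
    set_hash = ih.whash(Image.fromarray(set_img), hash_size=SET_HASH_SIZE)

    # imagehash overloads '-' as the Hamming distance between two hashes,
    # i.e. numpy.count_nonzero(a.hash != b.hash), which mirrors the
    # np.count_nonzero(x != set_img_hash) comparison in the lookup code below.
    other_hash = ih.whash(Image.fromarray(set_img), hash_size=SET_HASH_SIZE)
    print(set_hash - other_hash)             # 0 for identical crops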
---
opencv_dnn.py | 28 +++++++++++++++++++++-------
 1 file changed, 21 insertions(+), 7 deletions(-)
diff --git a/opencv_dnn.py b/opencv_dnn.py
index 64a9067..e1591f5 100644
--- a/opencv_dnn.py
+++ b/opencv_dnn.py
@@ -28,7 +28,7 @@
new_pool = pd.DataFrame(columns=list(card_pool.columns.values))
for hs in hash_size:
new_pool['card_hash_%d' % hs] = np.NaN
- new_pool['set_hash_%d' % hs] = np.NaN
+ new_pool['set_hash_%d' % 64] = np.NaN
#new_pool['art_hash_%d' % hs] = np.NaN
for ind, card_info in card_pool.iterrows():
if ind % 100 == 0:
@@ -68,8 +68,22 @@
if card_img is None:
print('WARNING: card %s is not found!' % img_name)
continue
-
- set_img = card_img[575:638, 567:700]
+ """
+ img_cc = cv2.cvtColor(card_img, cv2.COLOR_BGR2GRAY)
+ img_thresh = cv2.adaptiveThreshold(img_cc, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY_INV, 11, 5)
+ # Dilute the image, then erode them to remove minor noises
+ kernel = np.ones((3, 3), np.uint8)
+ img_dilate = cv2.dilate(img_thresh, kernel, iterations=1)
+ img_erode = cv2.erode(img_dilate, kernel, iterations=1)
+ cnts, hier = cv2.findContours(img_erode, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
+ cnts2 = sorted(cnts, key=cv2.contourArea, reverse=True)
+ cnts2 = cnts2[:10]
+ if True:
+ cv2.drawContours(img_cc, cnts2, -1, (0, 255, 0), 3)
+ #cv2.imshow('Contours', card_img)
+ #cv2.waitKey(10000)
+ """
+ set_img = card_img[595:635, 600:690]
#cv2.imshow(card_info['name'], set_img)
# Compute value of the card's perceptual hash, then store it to the database
#img_art = Image.fromarray(card_img[121:580, 63:685]) # For 745*1040 size card image
@@ -77,9 +91,9 @@
img_set = Image.fromarray(set_img)
for hs in hash_size:
card_hash = ih.phash(img_card, hash_size=hs)
- set_hash = ih.whash(img_set, hash_size=hs)
+ set_hash = ih.whash(img_set, hash_size=64)
card_info['card_hash_%d' % hs] = card_hash
- card_info['set_hash_%d' % hs] = set_hash
+ card_info['set_hash_%d' % 64] = set_hash
#print('Setting set_hash_%d' % hs)
#art_hash = ih.phash(img_art, hash_size=hs)
#card_info['art_hash_%d' % hs] = art_hash
@@ -448,7 +462,7 @@
print('Idx:', ix, 'Name:', cd['name'], 'Set:', cd['set'], 'Diff:', top_matches[ix])
- cd_data['set_hash_diff'] = cd_data['set_hash_%d' % hash_size]
+ cd_data['set_hash_diff'] = cd_data['set_hash_%d' % 64]
cd_data['set_hash_diff'] = cd_data['set_hash_diff'].apply(lambda x: np.count_nonzero(x != set_img_hash))
conf = sorted(cd_data['set_hash_diff'])
print('Confs:', conf)
@@ -640,7 +654,7 @@
card_pool.drop('Unnamed: 0', axis=1, inplace=True, errors='ignore')
card_pool = calc_image_hashes(card_pool, save_to=pck_path, hash_size=hash_sizes)
ch_key = 'card_hash_%d' % args.hash_size
- set_key = 'set_hash_%d' % args.hash_size
+ set_key = 'set_hash_%d' % 64
if ch_key not in card_pool.columns:
# we did not generate this hash_size yet
print('We need to add hash_size=%d' % (args.hash_size,))
--
Gitblit v1.10.0