added FAISS Searcher

commit 7484f767b3
parent 2761ccbe95
@@ -2,7 +2,7 @@ from pathlib import Path
 import tqdm
 
 import LFUtilities
-import BEBLIDExtractor as lf
+import BEBLIDExtractorQ as lf
 import argparse
 import os
 
@@ -2,7 +2,7 @@ from pathlib import Path
 import tqdm
 
 import LFUtilities
-import BEBLIDExtractor as lf
+import BEBLIDExtractorQ as lf
 import argparse
 import os
 
@@ -7,12 +7,12 @@ import LFUtilities
 
 import BEBLIDParameters as params
 
-detector = cv2.ORB_create(params.KEYPOINTS)
+detector = cv2.ORB_create(params.KEYPOINTS_D)
 descriptor = cv2.xfeatures2d.BEBLID_create(0.75, 101)
 
 
 def extract(img_path):
-    img = LFUtilities.resize(params.IMG_SIZE, cv2.imread(img_path))
+    img = LFUtilities.resize(params.IMG_SIZE_D, cv2.imread(img_path))
     kp = detector.detect(img, None)
     kp, des = descriptor.compute(img, kp)
     return (kp, des)
@@ -0,0 +1,19 @@
+import cv2
+from pathlib import Path
+import tqdm
+import pickle
+import os
+import LFUtilities
+
+import BEBLIDParameters as params
+
+detector = cv2.ORB_create(params.KEYPOINTS_Q)
+descriptor = cv2.xfeatures2d.BEBLID_create(0.75, 101)
+
+
+def extract(img_path):
+    img = LFUtilities.resize(params.IMG_SIZE_Q, cv2.imread(img_path))
+    kp = detector.detect(img, None)
+    kp, des = descriptor.compute(img, kp)
+    return (kp, des)
+
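The commit splits local-feature extraction into a database-side module (tuned by KEYPOINTS_D / IMG_SIZE_D) and the new query-side module above (KEYPOINTS_Q / IMG_SIZE_Q). A hypothetical usage sketch, not part of the commit, assuming BEBLIDExtractorD mirrors the query module imported elsewhere in this diff; the image paths are placeholders:

    import BEBLIDExtractorD as lfd   # database images: KEYPOINTS_D = 250 keypoints
    import BEBLIDExtractorQ as lfq   # query images: KEYPOINTS_Q = 800 keypoints

    db_kp, db_des = lfd.extract('/path/to/dataset/image.jpg')
    q_kp, q_des = lfq.extract('/path/to/query.jpg')
    print(len(db_kp), len(q_kp))     # keypoint counts are capped by the respective settings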
@@ -1,5 +1,8 @@
 NN_MATCH_RATIO = 0.8
-MIN_GOOD_MATCHES = 22
+MIN_GOOD_MATCHES = 20
 MIN_INLIERS = 15
-KEYPOINTS = 800
-IMG_SIZE = 500
+KEYPOINTS_D = 250
+IMG_SIZE_D = 500
+KEYPOINTS_Q = 800
+IMG_SIZE_Q = 500
+K_REORDERING = 1000
@@ -4,7 +4,7 @@ import numpy as np
 import LFUtilities
 import BEBLIDParameters
 import ImageRecognitionSettings as settings
+from line_profiler_pycharm import profile
 
 
 class BEBLIDRescorer:
 
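The rescorer (and, below, LFUtilities and Searcher) now imports the profile decorator from line_profiler_pycharm for line-by-line timing from PyCharm. A brief sketch, under the assumption that outside a profiling run the decorator acts as a plain pass-through, so decorated code still executes normally:

    from line_profiler_pycharm import profile

    @profile
    def hot_path(values):
        # timed line by line when run under PyCharm's line profiler;
        # in an ordinary run the decorator is assumed to just forward the call
        return sorted(v * v for v in values)

    print(hot_path([3, 1, 2]))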
@@ -19,24 +19,27 @@ class BEBLIDRescorer:
         query = LFUtilities.load_img_lf(settings.DATASET_LF_FOLDER, query_id)
         return self.rescore_by_img(query, resultset)
 
+    @profile
     def rescore_by_img(self, query, resultset):
         max_inliers = -1
         res = []
         counter = 0
-        if len(query[0]) > 0:
+        if len(query[0]) > BEBLIDParameters.MIN_GOOD_MATCHES:
             for data_id, _ in resultset:
                 try:
-                    data_el = LFUtilities.load_img_lf(settings.DATASET_LF_FOLDER, data_id)
+                    #data_el = LFUtilities.loadz_img_lf(settings.DATASET_LF_FOLDER, data_id)
+                    data_el = LFUtilities.unpickle_img_lf(settings.DATASET_LF_FOLDER, data_id)
 
-                    if len(data_el[1]) > 0:
+                    if len(data_el[1]) > BEBLIDParameters.MIN_GOOD_MATCHES:
                         nn_matches = self.bf.knnMatch(query[1], data_el[1], 2)
                         good = [m for m, n in nn_matches if m.distance < BEBLIDParameters.NN_MATCH_RATIO * n.distance]
 
                         if len(good) > BEBLIDParameters.MIN_GOOD_MATCHES:
                             src_pts = np.float32([query[0][m.queryIdx].pt for m in good]).reshape(-1, 1, 2)
-                            dst_pts = np.float32([data_el[0][m.trainIdx].pt for m in good]).reshape(-1, 1, 2)
+                            #dst_pts = np.float32([data_el[0][m.trainIdx].pt for m in good]).reshape(-1, 1, 2)
+                            dst_pts = data_el[0][[m.trainIdx for m in good]].reshape(-1, 1, 2)
 
-                            M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 3.0)
+                            M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
                             matches_mask = mask.ravel().tolist()
                             # print(len(good))
                             inliers = np.count_nonzero(matches_mask)
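Two of these changes go together: the stored local features are no longer lists of cv2.KeyPoint but the plain coordinate arrays written by LFUtilities.serialize(), so dst_pts is now gathered by NumPy fancy indexing instead of attribute access. A self-contained illustration with synthetic data (shapes only, not the project's real features):

    import numpy as np

    # what serialize() stores per image: an (N, 2) float32 array of keypoint positions
    pts = np.float32([[10.0, 20.0], [30.5, 40.25], [55.0, 60.0]])
    train_idx = [2, 0]                     # indices of the matched database keypoints

    dst_pts = pts[train_idx].reshape(-1, 1, 2)
    print(dst_pts.shape)                   # (2, 1, 2), the layout cv2.findHomography expects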
@@ -43,7 +43,7 @@ if __name__ == '__main__':
     for path in tqdm.tqdm(paths_list):
         key = path.name
         exprected_id = groundtruth[key]
-        print(exprected_id)
+        # print(exprected_id)
         try:
             img_file = {'image': (
                 'query', open(os.path.join(path.parent, path.name), 'rb'))}
@@ -53,7 +53,7 @@ if __name__ == '__main__':
             res = r.json()
 
             for i in range (0, len(res)):
-                print(res[i][0])
+                #print(res[i][0])
                 if res[i][0] in exprected_id:
                     precision_at[i] = precision_at[i] + 1
         except Exception as e:
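In this evaluation script precision_at[i] simply counts how many queries returned the expected id at result position i. A hypothetical post-processing sketch (all values synthetic) showing how such counters are usually turned into success rates per rank:

    num_queries = 50
    precision_at = [42, 3, 2, 1, 0, 0, 0, 0, 0, 0]   # hits observed at each rank

    cumulative = 0
    for rank, hits in enumerate(precision_at, start=1):
        cumulative += hits
        print('success@%d: %.2f' % (rank, cumulative / num_queries))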
@@ -2,7 +2,7 @@ from pathlib import Path
 import tqdm
 
 import LFUtilities
-import BEBLIDExtractor as lf
+import BEBLIDExtractorQ as lf
 import argparse
 import os
 
@@ -2,7 +2,7 @@ from pathlib import Path
 import tqdm
 
 import LFUtilities
-import BEBLIDExtractor as lf
+import BEBLIDExtractorD as lf
 import argparse
 import os
 
@@ -24,9 +24,12 @@ if __name__ == '__main__':
         try:
             kp, des = lf.extract(os.path.join(path.parent, path.name))
             filename = os.path.splitext(path.name)[0]
-            LFUtilities.save_img_lf(dest, filename, kp, des)
-        except:
+            #LFUtilities.save_img_lf(dest, filename, kp, des)
+            #LFUtilities.savez_img_lf(dest, filename, kp, des)
+            LFUtilities.pickle_img_lf(dest, filename, kp, des)
+        except Exception as e:
             print("cannot process '%s'" % path)
+            print(e)
             pass
 
     print('lf extracted.')
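The extraction script now persists features with LFUtilities.pickle_img_lf, which shards files into a subfolder named after the first three characters of the image id. A round-trip sketch with placeholder paths (the destination folder is assumed to exist already, since pickle_img_lf only creates the shard subfolder):

    import LFUtilities
    import BEBLIDExtractorD as lf     # assumed database-side extractor

    dest = '/tmp/lf_store'            # placeholder, must already exist
    img_id = 'IMG0001'

    kp, des = lf.extract('/path/to/IMG0001.jpg')
    LFUtilities.pickle_img_lf(dest, img_id, kp, des)       # writes /tmp/lf_store/IMG/IMG0001.dat
    pts, des2 = LFUtilities.unpickle_img_lf(dest, img_id)  # pts is an (N, 2) float32 array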
@@ -2,7 +2,7 @@ from pathlib import Path
 import tqdm
 
 import LFUtilities
-import BEBLIDExtractor as lf
+import BEBLIDExtractorQ as lf
 import argparse
 import os
 from LFDB import LFDB
@@ -2,7 +2,7 @@ import cv2
 import numpy as np
 import pickle as pickle
 import os
+from line_profiler_pycharm import profile
 
 
 def resize(max_side, img):
     if img.shape[1] > img.shape[0]:
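Only the signature and the first branch of resize() are visible in this hunk. For readers of the diff, a minimal sketch of a conventional max-side resize, written as an assumption about the behaviour rather than the project's actual body:

    import cv2

    def resize_max_side(max_side, img):
        # scale the image so that its longer side equals max_side, keeping the aspect ratio
        h, w = img.shape[:2]
        scale = max_side / float(max(h, w))
        return cv2.resize(img, (int(w * scale), int(h * scale)))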
@@ -36,9 +36,101 @@ def deserialize_object(serialized_obj):
     return pickle.loads(serialized_obj)
 
 
+def serializeV1(keypoints, descriptors):
+    temp_array = []
+    for point in keypoints:
+        kp = [point.pt, point.size, point.angle, point.response, point.octave, point.class_id]
+        temp_array.append(kp)
+    return temp_array, descriptors
+
+
+def serialize(keypoints, descriptors):
+    pts = np.float32([keypoints[i].pt for i in range(0, len(keypoints))])
+    return pts, descriptors
+
+def deserialize(ser_kp, ser_des):
+    keypoints = []
+    #data_list = array.tolist()
+    for point in ser_kp:
+        temp_feature = cv2.KeyPoint(x=point[0][0],y=point[0][1], size=point[1], angle=point[2], response=point[3], octave=point[4], class_id=point[5])
+        keypoints.append(temp_feature)
+    return keypoints, ser_des
+
+
+def deserializev1(ser_kp, ser_des):
+    keypoints = []
+    #data_list = array.tolist()
+    for point in ser_kp:
+        temp_feature = cv2.KeyPoint(x=point[0][0],y=point[0][1], size=point[1], angle=point[2], response=point[3], octave=point[4], class_id=point[5])
+        keypoints.append(temp_feature)
+    return keypoints, ser_des
+
+def pickle_img_lf(dest, id, keypoints, descriptors):
+    dest_folder_name = id[0:3]
+    filename = id + '.dat'
+    dest_folder_path = os.path.join(dest, dest_folder_name)
+    if (not os.path.exists(dest_folder_path)):
+        os.mkdir(dest_folder_path)
+    dest_path = os.path.join(dest_folder_path, filename)
+    kps, des = serialize(keypoints, descriptors)
+    pickle.dump([kps, des], open(dest_path, 'wb'))
+
+@profile
+def unpickle_img_lf(lf_path, id):
+    dest_folder_name = id[0:3]
+    filename = id + '.dat'
+    dest_folder_path = os.path.join(lf_path, dest_folder_name)
+    dest_path = os.path.join(dest_folder_path, filename)
+    kps, des = pickle.load((open(dest_path, "rb")))
+    return kps, des
+
+
+@profile
+def loadz_img_lf(lf_path, id):
+    dest_folder_name = id[0:3]
+    filename = id + '.dat.npz'
+    dest_folder_path = os.path.join(lf_path, dest_folder_name)
+    dest_path = os.path.join(dest_folder_path, filename)
+    data = np.load(dest_path, allow_pickle=False)
+    kps = data.f.kps
+    des = data.f.des
+    #kps = data['kps']
+    #des = data['des']
+    #kp, desc = deserialize(data['kps'], data['des'])
+    return kps, des
+
+
+def savez_img_lf(dest, id, keypoints, descriptors):
+    dest_folder_name = id[0:3]
+    filename = id + '.dat'
+    dest_folder_path = os.path.join(dest, dest_folder_name)
+    if (not os.path.exists(dest_folder_path)):
+        os.mkdir(dest_folder_path)
+    dest_path = os.path.join(dest_folder_path, filename)
+    kps, des = serialize(keypoints, descriptors)
+    #np.savez(dest_path, data)
+    np.savez(dest_path, kps=kps, des=des)
+
+
+@profile
+def loadz_img_lf(lf_path, id):
+    dest_folder_name = id[0:3]
+    filename = id + '.dat.npz'
+    dest_folder_path = os.path.join(lf_path, dest_folder_name)
+    dest_path = os.path.join(dest_folder_path, filename)
+    data = np.load(dest_path, allow_pickle=False)
+    kps = data.f.kps
+    des = data.f.des
+    #kps = data['kps']
+    #des = data['des']
+    #kp, desc = deserialize(data['kps'], data['des'])
+    return kps, des
+
+
 def unpickle_keypoints(array):
     keypoints = []
     descriptors = []
+    data_list = array.tolist()
     for point in array:
         temp_feature = cv2.KeyPoint(x=point[0][0],y=point[0][1], size=point[1], angle=point[2], response=point[3], octave=point[4], class_id=point[5])
         temp_descriptor = point[6]
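serialize()/pickle_img_lf keep only keypoint coordinates plus descriptors, while serializeV1()/deserializev1() preserve the full per-keypoint attributes. A small self-contained sketch of the slimmer format (descriptor contents are placeholders):

    import cv2
    import numpy as np

    kps = [cv2.KeyPoint(10.0, 20.0, 31.0), cv2.KeyPoint(5.5, 7.25, 31.0)]
    des = np.zeros((2, 32), dtype=np.uint8)          # placeholder binary descriptors

    pts = np.float32([kp.pt for kp in kps])          # same result as LFUtilities.serialize(kps, des)[0]
    print(pts.shape)                                 # (2, 2): only x, y survive serialization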
@@ -4,22 +4,24 @@ import pickle as pickle
 
 import LFUtilities
 import ImageRecognitionSettings as settings
-from BEBLIDRescorerDB import BEBLIDRescorerDB
-#from BEBLIDRescorerGPU import BEBLIDRescorerGPU
+#from BEBLIDRescorerDB import BEBLIDRescorerDB
+from BEBLIDRescorer import BEBLIDRescorer
+import BEBLIDParameters
 
 from FAISSSearchEngine import FAISSSearchEngine
 import FeatureExtractor as fe
-import BEBLIDExtractor as lf
+import BEBLIDExtractorQ as lf
+from line_profiler_pycharm import profile
 
 
 class Searcher:
-    K_REORDERING = 1000
 
     def __init__(self):
         # self.dataset = h5py.File(settings.dataset_file, 'r')['rmac'][...]
 
         # np.save('/media/Data/data/beni_culturali/deploy/dataset', self.dataset)
         self.search_engine = FAISSSearchEngine()
-        self.rescorer = BEBLIDRescorerDB()
+        self.rescorer = BEBLIDRescorer()
 
     def get_id(self, idx):
         return self.search_engine.get_id(idx)
@@ -45,17 +47,18 @@ class Searcher:
     def search_by_id(self, query_id, k=10, rescorer=False):
         kq = k
         if rescorer:
-            kq = self.K_REORDERING
+            kq = BEBLIDParameters.K_REORDERING
         res = self.search_engine.search_by_id(query_id, kq)
         if rescorer:
             res_lf = self.rescorer.rescore_by_id(query_id, res)
             res = res_lf if res_lf else res[:k]
         return res
 
+    @profile
     def search_by_img(self, query_img, k=10, rescorer=False):
         kq = k
         if rescorer:
-            kq = self.K_REORDERING
+            kq = BEBLIDParameters.K_REORDERING
         query_desc = fe.extract(query_img)
         res = self.search_engine.search_by_img(query_desc, kq)
         if rescorer:
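Taken together, the Searcher changes route rescoring through the new pickle-based BEBLIDRescorer: with rescorer=True it asks FAISS for BEBLIDParameters.K_REORDERING = 1000 candidates, re-ranks them with local features, and falls back to the top-k FAISS results if rescoring yields nothing. A hypothetical usage sketch (module name, paths, and the decoded-image argument are assumptions):

    import cv2
    from Searcher import Searcher      # assumed module name for the class above

    searcher = Searcher()
    query_img = cv2.imread('/path/to/query.jpg')     # assumed: search_by_img takes a decoded image

    results = searcher.search_by_img(query_img, k=10, rescorer=True)
    for doc_id, score in results:                    # resultset rows are (id, score) pairs
        print(doc_id, score)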