Commit f7082cdb authored by Oleh Astappiev's avatar Oleh Astappiev
Browse files

feat: more export options

parent e0bd938e
......@@ -17,31 +17,31 @@ cifar10_images = np.concatenate([train_images, test_images])
cifar10_labels = np.concatenate([train_labels, test_labels])
cifar10_vds = tf.data.Dataset.from_tensor_slices((cifar10_images, cifar10_labels))
def export_hsv(bin0=256, bin1=256, bin2=256):
    """Export per-image HSV color histograms of the CIFAR-10 set to a CSV file.

    Each row is ``ID;Label;HSV vector`` where the vector is the flattened,
    comma-separated histogram produced by ``extract_hsv``.

    Args:
        bin0: number of histogram bins for the H channel.
        bin1: number of histogram bins for the S channel.
        bin2: number of histogram bins for the V channel.
    """
    header = ['ID', 'Label', 'HSV vector']
    # BUG fix: the original filename used str(features), but `features` is not
    # defined in this function's scope (NameError). Encode the bin counts,
    # which is what actually distinguishes one export from another.
    filename = '../data/hsv_{}_{}_{}.csv'.format(bin0, bin1, bin2)
    with open(filename, 'w', encoding='UTF8', newline='') as f:
        writer = csv.writer(f, delimiter=";")
        # write the header
        writer.writerow(header)
        for i, (image, label) in enumerate(cifar10_vds):
            img = process_images(image).numpy()
            a, b, c, hist_array = extract_hsv(img, bin0, bin1, bin2)
            label_str = ','.join(map(str, label.numpy()))
            value_str = ','.join(map(str, hist_array))
            writer.writerow([i, label_str, value_str])
def export_sift(features=8):
    """Export per-image SIFT descriptors of the CIFAR-10 set to a CSV file.

    Each row is ``ID;Label;SIFT descriptors`` where the descriptors are the
    flattened, comma-separated descriptor matrix for the image.

    Args:
        features: maximum number of SIFT keypoints to detect per image
            (passed through to ``extract_sift``).
    """
    header = ['ID', 'Label', 'SIFT descriptors']
    with open('../data/sift_' + str(features) + '.csv', 'w', encoding='UTF8', newline='') as f:
        writer = csv.writer(f, delimiter=";")
        # write the header
        writer.writerow(header)
        for i, (image, label) in enumerate(cifar10_vds):
            img = process_images(image).numpy()
            # BUG fix: the original did `keypoints, features = extract_sift(img, features)`,
            # overwriting the `features` parameter with the descriptor array, so
            # every iteration after the first passed descriptors — not the
            # keypoint count — into extract_sift. Use a distinct local name.
            keypoints, descriptors = extract_sift(img, features)
            label_str = ','.join(map(str, label.numpy()))
            if descriptors is not None:
                value_str = ','.join(map(str, descriptors.flatten()))
                # NOTE(review): the diff hunk is truncated here; the full file
                # presumably writes the row next — confirm against the source.
......@@ -70,7 +70,8 @@ def export_embeddings():
value_str = ','.join(map(str, embeddings[i]))
writer.writerow([i, label_str, value_str])
# Previously used HSV bin counts per channel: 170, 171, 171 (H, S, V)
# Candidate total feature-vector sizes to export: 512, 1024, 2048, 4096
# export_hsv()
# export_sift()
export_embeddings()
......
......@@ -37,6 +37,7 @@ print_resized(cifar10_vds)
# print('test SIFT')
# plot_sift(cifar10_vds)
# smaller
# 906, 1692, 1711, 2610, 3259, 3418, 3789, 4277, 4975, 5010, 5255, 5867, 5988, 6406, 7089, 7365, 8072
# 8443, 8998, 9008, 9323, 9664, 9881, 9903, 9985, 10095, 11650, 13043, 13075, 13841, 14698, 15443
# 16004, 16733, 16888, 18948, 19378, 20015, 20233, 20467, 20621, 20696, 20778, 22672, 22804, 22904
......@@ -47,4 +48,7 @@ print_resized(cifar10_vds)
# 53429, 53444, 53660, 53759, 53952, 54957, 55164, 55189, 55762, 56549, 56574, 57105, 57171, 58485
# 58572, 58826, 59318, 59970
# bigger
# 6452, 7365, 7811, 9592, 12075, 15443, 16888, 17623, 22576, 23654, 25931, 33862, 35877, 41902, 44226, 45110, 45801, 48884, 53759, 59318
print('done')
......@@ -5,14 +5,14 @@ import cv2
from src.utils.common import *
def extract_hsv(image):
def extract_hsv(image, bin0=256, bin1=256, bin2=256):
"""Extract a 3 color channels histogram from the HSV"""
hsv = cv2.cvtColor(image, cv2.COLOR_RGB2HSV)
# The ranges of the 3 HSV channels in opencv are 0-180, 0-256, 0-256 respectively
# The bin counts are now configurable per channel; each picture is represented
# by a (bin0 + bin1 + bin2)-dimensional vector (768 with the defaults of 256 each).
histh = cv2.calcHist([hsv], [0], None, [170], [0, 180])
hists = cv2.calcHist([hsv], [1], None, [171], [0, 256])
histv = cv2.calcHist([hsv], [2], None, [171], [0, 256])
histh = cv2.calcHist([hsv], [0], None, [bin0], [0, 180])
hists = cv2.calcHist([hsv], [1], None, [bin1], [0, 256])
histv = cv2.calcHist([hsv], [2], None, [bin2], [0, 256])
# normalize the histogram
histh /= histh.sum()
hists /= hists.sum()
......
......@@ -4,8 +4,9 @@ import cv2
from src.utils.common import *
def extract_sift(image, features=500):
    """Detect SIFT keypoints in ``image`` and compute their descriptors.

    Args:
        image: input image in a format accepted by ``cv2.SIFT.detectAndCompute``.
        features: maximum number of keypoints to retain (``nfeatures``).
            The resulting number of feature values is number of keypoints * 128.

    Returns:
        A ``(keypoints, descriptors)`` pair. ``descriptors`` may be ``None``
        when no keypoints are found (callers check for this).
    """
    sift = cv2.SIFT_create(features)
    # Calculate the keypoints and each point's description of the image.
    # Use a distinct name so the `features` parameter is not shadowed by the
    # descriptor array (the shadowing caused a real bug in export_sift).
    keypoints, descriptors = sift.detectAndCompute(image, None)
    return keypoints, descriptors
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment