Commit 2608c81b authored by Oleh Astappiev's avatar Oleh Astappiev
Browse files

feat: some work from office

parent 72f55184
...@@ -20,11 +20,16 @@ emb_model = model.get_embedding_model() ...@@ -20,11 +20,16 @@ emb_model = model.get_embedding_model()
# Embed the whole dataset once; the same embeddings feed every grid-search run.
emb_vectors, emb_labels = get_embeddings_of(emb_model, dataset)
emb_ds = SiameseModel.prepare_dataset(emb_vectors, emb_labels)

# Grid search over the contrastive-loss margin and the number of training
# epochs; each combination trains a fresh siamese head on the fixed embeddings.
for margin in [2, 1.5, 1, 0.75, 0.5, 0.1]:
    print("Calculating for margin", margin)
    for epochs in [1, 3, 5, 10, 30, 50]:
        print("Calculating for epochs", epochs)
        siamese = SiameseModel(emb_model, image_vector_dimensions=512, loss_margin=margin, fit_epochs=epochs)
        siamese.compile()
        siamese.fit(emb_ds, num_classes=dataset.num_classes)

        # Project the base embeddings through the trained head, then persist
        # and visualize the resulting vectors for this (margin, epochs) pair.
        projection_vectors = siamese.projection_model.predict(emb_vectors)
        save_vectors(projection_vectors, emb_labels, dataset.name + '_' + siamese.name + '_vectors')
        project_embeddings(projection_vectors, emb_labels, siamese.name + '_' + dataset.name)

print('Done!')
...@@ -35,7 +35,7 @@ class SiameseModel(Model): ...@@ -35,7 +35,7 @@ class SiameseModel(Model):
# layers.Dense(image_vector_dimensions, activation=ACTIVATION_FN, input_shape=(embedding_vector_dimension,)) # layers.Dense(image_vector_dimensions, activation=ACTIVATION_FN, input_shape=(embedding_vector_dimension,))
layers.Dense(128, activation='relu', input_shape=(embedding_vector_dimension,)), layers.Dense(128, activation='relu', input_shape=(embedding_vector_dimension,)),
layers.Dense(image_vector_dimensions, activation=None), layers.Dense(image_vector_dimensions, activation=None),
layers.Lambda(lambda x: tf.keras.backend.l2_normalize(x, axis=1)), layers.Lambda(lambda x: tf.keras.backend.l2_normalize(x, axis=1)), # TODO: remove normalization when play with distance formula
], name='siamese_projection') ], name='siamese_projection')
v1 = projection_model(emb_input_1) v1 = projection_model(emb_input_1)
......
...@@ -4,17 +4,17 @@ import cv2 ...@@ -4,17 +4,17 @@ import cv2
def sift_features(image, nfeatures=None):
    """Detect SIFT keypoints in *image* and compute their descriptors.

    nfeatures limits how many keypoints the detector keeps
    (None lets OpenCV decide). Returns (keypoints, descriptors).
    """
    detector = cv2.SIFT_create(nfeatures)
    # mask=None -> search the whole image; descriptors is one 128-float row per keypoint
    kp, desc = detector.detectAndCompute(image, None)
    return kp, desc
def extract_sift(image, features=512):
    """Extract a fixed-length SIFT feature vector from *image*.

    Each SIFT descriptor is 128-dimensional, so the number of keypoints
    requested is ``features // 128``. Returns the flattened descriptor
    array, or None when the image does not yield enough keypoints.
    """
    # Floor division is the idiomatic integer form; int(features / 128)
    # took a float round-trip that loses precision for very large ints.
    nfeatures = features // 128
    keypoints, descriptors = sift_features(image, nfeatures)
    if descriptors is None or len(descriptors) < nfeatures:
        # Too few keypoints detected to fill the requested vector length.
        return None
    elif len(descriptors) > nfeatures:
        # OpenCV may return more keypoints than requested; keep the first n
        # so the output length is exactly nfeatures * 128.
        descriptors = descriptors[:nfeatures]
    return descriptors.flatten()
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment