Commit 04f1733a authored by Oleh Astappiev's avatar Oleh Astappiev
Browse files

chore: rename map function

parent 6f761cb4
......@@ -11,7 +11,7 @@ from src.model.siamese import SiameseModel
model_name = 'imagenette_alexnet'
embeddings_name = model_name + '_embeddings'
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, preprocess_fn=AlexNetModel.preprocess_input)
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, map_fn=AlexNetModel.preprocess_input)
comb_ds = train_ds.concatenate(val_ds).concatenate(test_ds)
# create model
......
......@@ -11,7 +11,7 @@ from src.model.siamese import SiameseModel
model_name = 'cifar10_alexnet'
embeddings_name = model_name + '_embeddings'
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, preprocess_fn=AlexNetModel.preprocess_input)
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, map_fn=AlexNetModel.preprocess_input)
comb_ds = train_ds.concatenate(val_ds).concatenate(test_ds)
# create model
......
......@@ -7,7 +7,7 @@ NUM_CLASSES = 10
CLASS_NAMES = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
def load_dataset(image_size=IMAGE_SIZE, batch_size=BATCH_SIZE, preprocess_fn=None):
def load_dataset(image_size=IMAGE_SIZE, batch_size=BATCH_SIZE, map_fn=None):
train_ds = tf.keras.utils.image_dataset_from_directory(
directory='../datasets/cifar10/train/',
labels='inferred',
......@@ -27,15 +27,15 @@ def load_dataset(image_size=IMAGE_SIZE, batch_size=BATCH_SIZE, preprocess_fn=Non
interpolation='nearest'
)
if preprocess_fn is not None:
train_ds = train_ds.map(preprocess_fn).prefetch(tf.data.AUTOTUNE)
test_ds = test_ds.map(preprocess_fn).prefetch(tf.data.AUTOTUNE)
if map_fn is not None:
train_ds = train_ds.map(map_fn).prefetch(tf.data.AUTOTUNE)
test_ds = test_ds.map(map_fn).prefetch(tf.data.AUTOTUNE)
return train_ds, test_ds
def load_dataset3(image_size=IMAGE_SIZE, batch_size=BATCH_SIZE, preprocess_fn=None):
train_ds, test_ds = load_dataset(image_size=image_size, batch_size=batch_size, preprocess_fn=preprocess_fn)
def load_dataset3(image_size=IMAGE_SIZE, batch_size=BATCH_SIZE, map_fn=None):
train_ds, test_ds = load_dataset(image_size=image_size, batch_size=batch_size, map_fn=map_fn)
train_ds_size = tf.data.experimental.cardinality(train_ds).numpy()
train_ds = train_ds.skip(train_ds_size / 10)
......
......@@ -7,7 +7,7 @@ NUM_CLASSES = 10
CLASS_NAMES = ['fish', 'dog', 'player', 'saw', 'building', 'music', 'truck', 'gas', 'ball', 'parachute']
def load_dataset(image_size=IMAGE_SIZE, batch_size=BATCH_SIZE, preprocess_fn=None):
def load_dataset(image_size=IMAGE_SIZE, batch_size=BATCH_SIZE, map_fn=None):
train_ds = tf.keras.utils.image_dataset_from_directory(
directory='../datasets/imagenette2/train/',
labels='inferred',
......@@ -27,15 +27,15 @@ def load_dataset(image_size=IMAGE_SIZE, batch_size=BATCH_SIZE, preprocess_fn=Non
interpolation='nearest'
)
if preprocess_fn is not None:
train_ds = train_ds.map(preprocess_fn).prefetch(tf.data.AUTOTUNE)
test_ds = test_ds.map(preprocess_fn).prefetch(tf.data.AUTOTUNE)
if map_fn is not None:
train_ds = train_ds.map(map_fn).prefetch(tf.data.AUTOTUNE)
test_ds = test_ds.map(map_fn).prefetch(tf.data.AUTOTUNE)
return train_ds, test_ds
def load_dataset3(image_size=IMAGE_SIZE, batch_size=BATCH_SIZE, preprocess_fn=None):
train_ds, test_ds = load_dataset(image_size=image_size, batch_size=batch_size, preprocess_fn=preprocess_fn)
def load_dataset3(image_size=IMAGE_SIZE, batch_size=BATCH_SIZE, map_fn=None):
train_ds, test_ds = load_dataset(image_size=image_size, batch_size=batch_size, map_fn=map_fn)
test_ds_size = tf.data.experimental.cardinality(test_ds).numpy()
val_ds = test_ds.take(test_ds_size / 2)
......
......@@ -7,7 +7,7 @@ NUM_CLASSES = 3
CLASS_NAMES = ['building', 'dog', 'player']
def load_dataset(image_size=IMAGE_SIZE, batch_size=BATCH_SIZE, preprocess_fn=None):
def load_dataset(image_size=IMAGE_SIZE, batch_size=BATCH_SIZE, map_fn=None):
ds = tf.keras.utils.image_dataset_from_directory(
directory='../datasets/simple3/',
labels='inferred',
......@@ -17,14 +17,14 @@ def load_dataset(image_size=IMAGE_SIZE, batch_size=BATCH_SIZE, preprocess_fn=Non
interpolation='nearest'
)
if preprocess_fn is not None:
ds = ds.map(preprocess_fn).prefetch(tf.data.AUTOTUNE)
if map_fn is not None:
ds = ds.map(map_fn).prefetch(tf.data.AUTOTUNE)
return ds
def load_dataset3(image_size=IMAGE_SIZE, batch_size=BATCH_SIZE, preprocess_fn=None):
ds = load_dataset(image_size=image_size, batch_size=batch_size, preprocess_fn=preprocess_fn)
def load_dataset3(image_size=IMAGE_SIZE, batch_size=BATCH_SIZE, map_fn=None):
ds = load_dataset(image_size=image_size, batch_size=batch_size, map_fn=map_fn)
ds_size = tf.data.experimental.cardinality(ds).numpy()
train_ds = ds.take(ds_size * 0.6)
......
......@@ -10,7 +10,7 @@ from src.model.siamese import SiameseModel
model_name = 'cifar10_efficientnet'
embeddings_name = model_name + '_embeddings'
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, batch_size=BATCH_SIZE, preprocess_fn=EfficientNetModel.preprocess_input)
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, batch_size=BATCH_SIZE, map_fn=EfficientNetModel.preprocess_input)
comb_ds = train_ds.concatenate(val_ds).concatenate(test_ds)
model = EfficientNetModel()
......
......@@ -8,24 +8,23 @@ from src.utils.common import get_modeldir
from src.model.mobilenet import MobileNetModel, PRETRAIN_EPOCHS, TARGET_SHAPE, EMBEDDING_VECTOR_DIMENSION
from src.model.siamese import SiameseModel
model_name = 'imagenet_mobilenet'
model_name = 'imagenette_mobilenet'
embeddings_name = model_name + '_embeddings'
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, preprocess_fn=MobileNetModel.preprocess_input)
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, map_fn=MobileNetModel.preprocess_input)
comb_ds = train_ds.concatenate(val_ds).concatenate(test_ds)
PRETRAIN_TOTAL_STEPS = PRETRAIN_EPOCHS * len(train_ds)
# create model
model = MobileNetModel()
model.compile(optimizer=tf.keras.optimizers.RMSprop(tf.keras.optimizers.schedules.CosineDecay(1e-3, PRETRAIN_TOTAL_STEPS)))
model.summary()
# load weights
model.load_weights(get_modeldir(model_name + '.h5'))
# model.load_weights(get_modeldir(model_name + '.h5'))
# train & save model
# model.fit(train_ds, epochs=PRETRAIN_EPOCHS, validation_data=val_ds)
# model.save_weights(get_modeldir(model_name + '.h5'))
model.fit(train_ds, epochs=PRETRAIN_EPOCHS, validation_data=val_ds)
model.save_weights(get_modeldir(model_name + '.h5'))
# evaluate
print('evaluating...')
......
......@@ -11,7 +11,7 @@ from src.model.siamese import SiameseModel
model_name = 'cifar10_mobilenet'
embeddings_name = model_name + '_embeddings'
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, preprocess_fn=MobileNetModel.preprocess_input)
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, map_fn=MobileNetModel.preprocess_input)
comb_ds = train_ds.concatenate(val_ds).concatenate(test_ds)
PRETRAIN_TOTAL_STEPS = PRETRAIN_EPOCHS * len(train_ds)
......@@ -55,6 +55,6 @@ inference_model.save(get_modeldir(model_name + '_inference.tf'))
print('visualization')
# compute embeddings of the images and their labels, store them in a tsv file for visualization
# compute vectors of the images and their labels, store them in a tsv file for visualization
siamese_vectors, siamese_labels = calc_vectors(comb_ds, inference_model)
project_embeddings(siamese_vectors, siamese_labels, model_name + '_siamese')
import time
import tensorflow as tf
from os import path, curdir
import time
def normalize_image(image):
......
......@@ -11,7 +11,7 @@ from src.model.siamese import SiameseModel
model_name = 'imagenet_vgg16'
embeddings_name = model_name + '_embeddings'
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, preprocess_fn=VGG16Model.preprocess_input)
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, map_fn=VGG16Model.preprocess_input)
comb_ds = train_ds.concatenate(val_ds).concatenate(test_ds)
PRETRAIN_TOTAL_STEPS = PRETRAIN_EPOCHS * len(train_ds)
......
......@@ -13,7 +13,7 @@ embeddings_name = model_name + '_embeddings'
TARGET_SHAPE = (32, 32)
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, preprocess_fn=VGG16Model.preprocess_input)
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, map_fn=VGG16Model.preprocess_input)
comb_ds = train_ds.concatenate(val_ds).concatenate(test_ds)
PRETRAIN_TOTAL_STEPS = PRETRAIN_EPOCHS * len(train_ds)
......
......@@ -13,7 +13,7 @@ embeddings_name = model_name + '_embeddings'
TARGET_SHAPE = (32, 32)
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, preprocess_fn=VGG16Model.preprocess_input)
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, map_fn=VGG16Model.preprocess_input)
comb_ds = train_ds.concatenate(val_ds).concatenate(test_ds)
PRETRAIN_TOTAL_STEPS = PRETRAIN_EPOCHS * len(train_ds)
......
......@@ -11,7 +11,7 @@ from src.model.siamese import SiameseModel
model_name = 'cifar10_vit'
embeddings_name = model_name + '_embeddings'
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, batch_size=BATCH_SIZE, preprocess_fn=VitModel.preprocess_input)
train_ds, val_ds, test_ds = load_dataset3(image_size=TARGET_SHAPE, batch_size=BATCH_SIZE, map_fn=VitModel.preprocess_input)
comb_ds = train_ds.concatenate(val_ds).concatenate(test_ds)
model = VitModel()
......@@ -24,7 +24,7 @@ save_embeddings(emb_vectors, emb_labels, embeddings_name)
# emb_vectors, emb_labels = load_embeddings(embeddings_name)
# siamese is the model we train
siamese = SiameseModel(embedding_vector_dimension=384, image_vector_dimensions=512)
siamese = SiameseModel(embedding_vector_dimension=384, image_vector_dimensions=3)
siamese.compile(loss_margin=0.05, optimizer=tf.keras.optimizers.Adam(learning_rate=0.001))
siamese.summary()
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment