Skip to content

Instantly share code, notes, and snippets.

@allenday allenday/
Last active Aug 19, 2019

What would you like to do?
tokenize bottlenecks
#docker run -d -p 8080:8080 -v /root:/data
import os
import sys
import tempfile
import shutil
import numpy as np
from keras.preprocessing.image import ImageDataGenerator, img_to_array, load_img
from keras.models import Sequential
from keras.layers import Dropout, Flatten, Dense
from keras import applications
from keras.utils.np_utils import to_categorical
import matplotlib.pyplot as plt
import math
import cv2
# dimensions of our images.
# 224x224 is a size commonly used for ImageNet-pretrained backbones such as
# Xception (instantiated below) — TODO confirm against the target network.
img_width, img_height = 224, 224
# Path for saved top-model weights; NOTE(review): not referenced anywhere in
# the visible script — possibly leftover from the tutorial this was based on.
top_model_weights_path = 'bottleneck_fc_model.h5'
def save_bottlebeck_features(model, train_data_dir):
    """Run `train_data_dir` through `model` and return the bottleneck features.

    Parameters
    ----------
    model : keras.Model
        A headless (include_top=False) feature-extraction network.
    train_data_dir : str
        Directory laid out for `flow_from_directory`: it must contain at
        least one class subdirectory holding the image files.

    Returns
    -------
    numpy.ndarray
        The model's output features for one batch drawn from the directory.
    """
    # No rescaling: the original author deliberately disabled the usual
    # `rescale=1. / 255` (kept here as a record of that choice).
    datagen = ImageDataGenerator()  # rescale=1. / 255
    # NOTE(review): the original scrape lost the call's arguments past
    # `target_size`; the directory argument and the kwargs below are a
    # minimal reconstruction — shuffle=False / class_mode=None is the
    # standard setup for bottleneck extraction. TODO confirm batch size.
    generator = datagen.flow_from_directory(
        train_data_dir,
        target_size=(img_width, img_height),
        batch_size=1,
        class_mode=None,
        shuffle=False)
    # One step only: the caller feeds a directory containing a single image.
    bottleneck_features_train = model.predict_generator(generator, 1)
    return bottleneck_features_train
# Headless Xception pretrained on ImageNet: outputs bottleneck features,
# not class predictions.
model = applications.xception.Xception(include_top=False, weights='imagenet')

# Usage: script.py <directory-of-images>
d = sys.argv[1]
for f in os.listdir(d):
    # flow_from_directory requires class subdirectories, so stage each image
    # alone inside a throwaway tree: <tmp>/1/<image>.
    path0 = tempfile.mkdtemp()
    path1 = "%s/%s" % (path0, "1")
    # NOTE(review): the scrape lost the lines between building `path1` and
    # calling the extractor; creating the class dir and copying the image in
    # is the minimal reconstruction that makes `path1` meaningful.
    os.mkdir(path1)
    shutil.copy(os.path.join(d, f), path1)
    try:
        a = save_bottlebeck_features(model, path0)
        # Quantize activations to the 0..8 range so each feature position can
        # be repeated `x` times as a token below.
        aflat = np.multiply(np.divide(a, np.amax(a)), 8).astype('uint8').flatten()
        # One output line per image: filename, TAB, then token 'X<hex index>'
        # repeated once per quantized activation unit.
        sys.stdout.write("%s\t" % f)
        i = 0
        for x in np.nditer(aflat):
            if x > 0:
                for k in range(0, x):
                    sys.stdout.write('X%X ' % i)
            i = i + 1
        sys.stdout.write("\n")
    finally:
        # Always remove the staging tree, even if extraction fails.
        # NOTE(review): the original line was garbled together with the
        # residue of a deleted `np.save('bottleneck_features_train.npy', a)`;
        # the save is intentionally not restored (stdout is the output here).
        shutil.rmtree(path0)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.