Skip to content

Instantly share code, notes, and snippets.

@val314159
Last active April 26, 2020 04:54
Show Gist options
  • Star 1 You must be signed in to star a gist
  • Fork 1 You must be signed in to fork a gist
  • Save val314159/bc616c64fc7590b425bd1f08f5c5f340 to your computer and use it in GitHub Desktop.
Save val314159/bc616c64fc7590b425bd1f08f5c5f340 to your computer and use it in GitHub Desktop.
tensorflow example
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os.path
import re
import sys
import tarfile
import numpy as np
from six.moves import urllib
import tensorflow as tf
FLAGS = None
# pylint: disable=line-too-long
DATA_URL = 'http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz'
# pylint: enable=line-too-long
class NodeLookup(object):
    """Converts integer node ID's to human readable labels."""

    def __init__(self, label_lookup_path=None, uid_lookup_path=None):
        # Fall back to the standard lookup files shipped with the model.
        if not label_lookup_path:
            label_lookup_path = os.path.join(
                FLAGS.model_dir, 'imagenet_2012_challenge_label_map_proto.pbtxt')
        if not uid_lookup_path:
            uid_lookup_path = os.path.join(
                FLAGS.model_dir, 'imagenet_synset_to_human_label_map.txt')
        self.node_lookup = self.load(label_lookup_path, uid_lookup_path)

    def load(self, label_lookup_path, uid_lookup_path):
        """Loads a human readable English name for each softmax node.

        Args:
          label_lookup_path: string UID to integer node ID.
          uid_lookup_path: string UID to human-readable string.

        Returns:
          dict from integer node ID to human-readable string.
        """
        if not tf.gfile.Exists(uid_lookup_path):
            tf.logging.fatal('File does not exist %s', uid_lookup_path)
        if not tf.gfile.Exists(label_lookup_path):
            tf.logging.fatal('File does not exist %s', label_lookup_path)

        # First map: string UID -> human-readable string.
        uid_to_human = {}
        pattern = re.compile(r'[n\d]*[ \S,]*')
        for raw_line in tf.gfile.GFile(uid_lookup_path).readlines():
            pieces = pattern.findall(raw_line)
            uid_to_human[pieces[0]] = pieces[2]

        # Second map: integer node ID -> string UID, parsed from the
        # label-map proto text (target_class precedes target_class_string).
        node_id_to_uid = {}
        for raw_line in tf.gfile.GFile(label_lookup_path).readlines():
            if raw_line.startswith(' target_class:'):
                target_class = int(raw_line.split(': ')[1])
            if raw_line.startswith(' target_class_string:'):
                # Slice strips the surrounding quote characters.
                node_id_to_uid[target_class] = raw_line.split(': ')[1][1:-2]

        # Compose the two maps: integer node ID -> human-readable string.
        node_id_to_name = {}
        for node_id, uid in node_id_to_uid.items():
            if uid not in uid_to_human:
                tf.logging.fatal('Failed to locate: %s', uid)
            node_id_to_name[node_id] = uid_to_human[uid]
        return node_id_to_name

    def id_to_string(self, node_id):
        """Returns the human-readable label for node_id, or '' if unknown."""
        if node_id not in self.node_lookup:
            return ''
        return self.node_lookup[node_id]
def create_graph():
    """Creates a graph from saved GraphDef file and returns a saver."""
    # Parse the serialized GraphDef and import it into the default graph.
    graph_path = os.path.join(FLAGS.model_dir, 'classify_image_graph_def.pb')
    with tf.gfile.FastGFile(graph_path, 'rb') as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
        _ = tf.import_graph_def(graph_def, name='')
def run_inference_on_image(image):
    """Runs inference on an image.

    Args:
      image: Image file name.

    Returns:
      Nothing
    """
    if not tf.gfile.Exists(image):
        tf.logging.fatal('File does not exist %s', image)
    image_data = tf.gfile.FastGFile(image, 'rb').read()

    # Build the Inception graph from the saved GraphDef.
    create_graph()

    with tf.Session() as sess:
        # Useful tensors in this graph:
        #   'softmax:0'             - normalized predictions across 1000 labels.
        #   'pool_3:0'              - next-to-last layer, 2048-float description.
        #   'DecodeJpeg/contents:0' - string tensor holding the JPEG bytes.
        softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
        predictions = np.squeeze(
            sess.run(softmax_tensor, {'DecodeJpeg/contents:0': image_data}))

        # Translate node IDs to English and print the best guesses.
        node_lookup = NodeLookup()
        top_k = predictions.argsort()[-FLAGS.num_top_predictions:][::-1]
        for node_id in top_k:
            human_string = node_lookup.id_to_string(node_id)
            score = predictions[node_id]
            print('%s (score = %.5f)' % (human_string, score))
def maybe_download_and_extract():
    """Download and extract model tar file.

    Downloads DATA_URL into FLAGS.model_dir (skipped when the archive is
    already present) and extracts it in place.
    """
    dest_directory = FLAGS.model_dir
    if not os.path.exists(dest_directory):
        os.makedirs(dest_directory)
    filename = DATA_URL.split('/')[-1]
    filepath = os.path.join(dest_directory, filename)
    if not os.path.exists(filepath):
        def _progress(count, block_size, total_size):
            # Carriage-return progress meter on stdout.
            sys.stdout.write('\r>> Downloading %s %.1f%%' % (
                filename, float(count * block_size) / float(total_size) * 100.0))
            sys.stdout.flush()
        filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress)
        print()
        statinfo = os.stat(filepath)
        print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')
    # Fix: use a context manager so the archive handle is always closed;
    # the original leaked the TarFile object returned by tarfile.open().
    with tarfile.open(filepath, 'r:gz') as tar:
        tar.extractall(dest_directory)
def main(_):
    """Entry point: fetch the model if needed, then classify one image."""
    maybe_download_and_extract()
    # Default to the bundled panda image when --image_file was not given.
    if FLAGS.image_file:
        image = FLAGS.image_file
    else:
        image = os.path.join(FLAGS.model_dir, 'cropped_panda.jpg')
    run_inference_on_image(image)
if __name__ == '__main__':
    # Files expected under --model_dir:
    #   classify_image_graph_def.pb:
    #     Binary representation of the GraphDef protocol buffer.
    #   imagenet_synset_to_human_label_map.txt:
    #     Map from synset ID to a human readable string.
    #   imagenet_2012_challenge_label_map_proto.pbtxt:
    #     Text representation of a protocol buffer mapping a label to synset ID.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        '--model_dir',
        type=str,
        default='/tmp/imagenet',
        help="""\
Path to classify_image_graph_def.pb,
imagenet_synset_to_human_label_map.txt, and
imagenet_2012_challenge_label_map_proto.pbtxt.\
""")
    arg_parser.add_argument(
        '--image_file',
        type=str,
        default='',
        help='Absolute path to image file.')
    arg_parser.add_argument(
        '--num_top_predictions',
        type=int,
        default=5,
        help='Display this many predictions.')
    FLAGS, unparsed = arg_parser.parse_known_args()
    tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)

Tensorflow example

cd $HOME
mkdir workspace
cd workspace
git clone https://github.com/tensorflow/tensorflow
git clone https://github.com/tensorflow/models
cd tensorflow/tensorflow/models/image/imagenet
python classify_image.py

Grab the flowers data set

mkdir -p $HOME
cd $HOME
curl -O http://download.tensorflow.org/example_images/flower_photos.tgz
tar xzf flower_photos.tgz

Grab the cars data set

mkdir -p $HOME/cars
cd $HOME/cars
wget http://imagenet.stanford.edu/internal/car196/cars_train.tgz
wget http://imagenet.stanford.edu/internal/car196/cars_test.tgz
wget http://ai.stanford.edu/~jkrause/cars/car_devkit.tgz
wget http://imagenet.stanford.edu/internal/car196/cars_annos.mat
wget http://imagenet.stanford.edu/internal/car196/cars_test_annos_withlabels.mat
tar xvf cars_train.tgz
tar xvf cars_test.tgz
tar xvf car_devkit.tgz
pip install scipy
pip3 install scipy

Example 2 - let's pick some flowers

Grab the flowers data set

mkdir $HOME/p
cd $HOME/p
curl -O http://download.tensorflow.org/example_images/flower_photos.tgz
tar xzf flower_photos.tgz

make the c++ image retrainer

cd ~/workspace/tensorflow
bazel build -c opt --copt=-mavx tensorflow/examples/image_retraining:retrain

do the retraining!

  • what outputs does this have?
bazel-bin/tensorflow/examples/image_retraining/retrain --image_dir ~/p/flower_photos

Artifacts are:

  • /tmp/bottleneck
  • /tmp/retrain_logs
bazel build tensorflow/examples/label_image:label_image && \
 bazel-bin/tensorflow/examples/label_image/label_image \
--graph=/tmp/output_graph.pb --labels=/tmp/output_labels.txt \
--output_layer=final_result \
--image=$HOME/p/flower_photos/daisy/21652746_cc379e0eea_m.jpg

Build the C++ retrainer:

bazel build -c opt --copt=-mavx tensorflow/examples/image_retraining:retrain

Build the C++ labeller:

bazel build tensorflow/examples/label_image:label_image

bazel build -c opt --copt=-mavx tensorflow/examples/label_image:label_image

For C++ retraining (flowers):

PFX=/tmp/flower
bazel-bin/tensorflow/examples/image_retraining/retrain \
    --image_dir /home/ubuntu/flower_photos \
    --summaries_dir=$PFX/retrain_logs \
    --bottleneck_dir=$PFX/bottlenecks \
    --output_graph=$PFX/output_graph.pb \
    --output_labels=$PFX/output_labels.txt \
    --final_tensor_name=final_result

For C++ labelling (flowers):

PFX=/tmp/flower
bazel-bin/tensorflow/examples/label_image/label_image \
    --graph=/path/output_graph.pb --labels=/path/output_labels.txt \
    --output_layer=final_result \
    --image=/path/to/test/image

Let's try it all in python

training for flowers

PFX=/tmp/flower
python tensorflow/examples/image_retraining/retrain.py --image_dir /home/ubuntu/flower_photos --summaries_dir=$PFX/retrain_logs --bottleneck_dir=$PFX/bottlenecks --output_graph=$PFX
/output_graph.pb --output_labels=$PFX/output_labels.txt --final_tensor_name=final_result

classify for flowers

PFX=/tmp/flower
python label_image.py $1 $PFX/output_graph.pb $PFX/output_labels.txt

training for cars

PFX=/tmp/cars
echo SOMEHOW CREATE THE ~/car_photos directory here
python tensorflow/examples/image_retraining/retrain.py --image_dir /home/ubuntu/car_photos \
  --summaries_dir=$PFX/retrain_logs --bottleneck_dir=$PFX/bottlenecks \
  --output_graph=$PFX/output_graph.pb --output_labels=$PFX/output_labels.txt \
  --final_tensor_name=final_result

classify for cars

PFX=/tmp/cars
python label_image.py $1 $PFX/output_graph.pb $PFX/output_labels.txt
#!/usr/bin/env python3
import os, sys
from scipy.io import loadmat
def makedirs(x):
    """Create directory x (and any missing parents), ignoring pre-existing dirs.

    Fix: the original used a bare ``except: pass``, which silently swallowed
    every error (permissions, bad path, even KeyboardInterrupt).  With
    ``exist_ok=True`` only the expected "already exists" case is ignored and
    real failures surface.  (Python 3 only, matching the script's shebang.)
    """
    os.makedirs(x, exist_ok=True)
def main():
    """Hard-link Stanford Cars images into per-class train/ and test/ dirs."""
    print("EXTRACT")
    makedirs('train')
    makedirs('test')

    # Annotation .mat files from the devkit / dataset download.
    meta = loadmat('devkit/cars_meta.mat')
    loadmat('devkit/cars_test_annos.mat')  # read for parity; result was unused
    train_annos = loadmat('devkit/cars_train_annos.mat')
    test_annos = loadmat('cars_test_annos_withlabels.mat')

    # Flatten the MATLAB cell array of class names into plain strings.
    class_names = [cell[0] for cell in meta['class_names'][0]]
    class_dict = {}
    for idx, name in enumerate(class_names):
        class_dict[name] = idx
        makedirs('train/' + name)
        makedirs('test/' + name)

    with open('class_names.txt', 'w') as f:
        f.write('\n'.join(class_names))

    # Each annotation entry: field [4] = 1-based class number, [5] = file name.
    tests = [(a[4][0][0], a[5][0]) for a in test_annos['annotations'][0]]
    trains = [(a[4][0][0], a[5][0]) for a in train_annos['annotations'][0]]

    for class_no, short_name in tests:
        os.link('cars_test/' + short_name,
                'test/' + class_names[class_no - 1] + '/' + short_name)
    for class_no, short_name in trains:
        os.link('cars_train/' + short_name,
                'train/' + class_names[class_no - 1] + '/' + short_name)

if __name__ == '__main__': main()
import os, sys
import numpy as np
import tensorflow as tf
class NodeLookup:
    """Maps integer node IDs to label strings read from a labels file."""

    def __init__(self, labelsFullPath):
        """Reads one label per line from labelsFullPath.

        Fix: the original opened the file in binary mode and applied str()
        to bytes objects, which under Python 3 produces labels like
        "b'daisy\\n'" — the replace("\\n", "") then failed to remove the
        escaped newline inside the repr.  Reading in text mode and stripping
        the trailing newline yields clean label strings.
        """
        with open(labelsFullPath) as f:
            self.lines = f.readlines()
        self.labels = [line.rstrip('\n') for line in self.lines]

    def id_to_string(self, node_id):
        """Returns the label at index node_id."""
        return self.labels[node_id]
def run_inference_on_image(
        image='/home/ubuntu/flower_photos/daisy/15029936576_8d6f96c72c_n.jpg',
        modelFullPath='/tmp/flower/output_graph.pb',
        labelsFullPath='/tmp/flower/output_labels.txt',
        finalLayer='final_result'):
    """Classifies `image` with a retrained graph and prints the top-5 labels."""
    if not tf.gfile.Exists(image):
        tf.logging.fatal('File does not exist %s', image)
        return
    image_data = tf.gfile.FastGFile(image, 'rb').read()

    # Import the retrained GraphDef into the default graph.
    with tf.gfile.FastGFile(modelFullPath, 'rb') as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
        _ = tf.import_graph_def(graph_def, name='')

    with tf.Session() as sess:
        output_tensor = sess.graph.get_tensor_by_name(finalLayer + ':0')
        predictions = np.squeeze(
            sess.run(output_tensor, {'DecodeJpeg/contents:0': image_data}))

        # Translate indices to labels and report the five best scores.
        lookup = NodeLookup(labelsFullPath)
        for node_id in predictions.argsort()[-5:][::-1]:
            human_string = lookup.id_to_string(node_id)
            score = predictions[node_id]
            print('%s (score = %.5f)' % (human_string, score))

if __name__ == '__main__': run_inference_on_image(*sys.argv[1:])
class NodeLookup:
    """Maps integer node IDs to label strings read from a labels file."""

    def __init__(self, labelsFullPath):
        """Reads one label per line from labelsFullPath.

        Fix: the original opened the file in binary mode and applied str()
        to bytes objects, which under Python 3 produces labels like
        "b'daisy\\n'" — the replace("\\n", "") then failed to remove the
        escaped newline inside the repr.  Reading in text mode and stripping
        the trailing newline yields clean label strings.
        """
        with open(labelsFullPath) as f:
            self.lines = f.readlines()
        self.labels = [line.rstrip('\n') for line in self.lines]

    def id_to_string(self, node_id):
        """Returns the label at index node_id."""
        return self.labels[node_id]
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os, sys, re, tensorflow as tf
class NodeLookup(object):
    """Converts integer node ID's to human readable labels."""

    def __init__(self, label_lookup_path=None, uid_lookup_path=None):
        # Fall back to the standard lookup files under FLAGS.model_dir.
        # NOTE(review): FLAGS is not defined in this snippet — calling the
        # constructor without explicit paths would raise NameError; confirm
        # against the full script this was copied from.
        if not label_lookup_path:
            label_lookup_path = os.path.join(
                FLAGS.model_dir, 'imagenet_2012_challenge_label_map_proto.pbtxt')
        if not uid_lookup_path:
            uid_lookup_path = os.path.join(
                FLAGS.model_dir, 'imagenet_synset_to_human_label_map.txt')
        self.node_lookup = self.load(label_lookup_path, uid_lookup_path)

    def load(self, label_lookup_path, uid_lookup_path):
        """Loads a human readable English name for each softmax node.

        Args:
          label_lookup_path: string UID to integer node ID.
          uid_lookup_path: string UID to human-readable string.

        Returns:
          dict from integer node ID to human-readable string.
        """
        if not tf.gfile.Exists(uid_lookup_path):
            tf.logging.fatal('File does not exist %s', uid_lookup_path)
        if not tf.gfile.Exists(label_lookup_path):
            tf.logging.fatal('File does not exist %s', label_lookup_path)

        # First map: string UID -> human-readable string.
        uid_to_human = {}
        pattern = re.compile(r'[n\d]*[ \S,]*')
        for raw_line in tf.gfile.GFile(uid_lookup_path).readlines():
            pieces = pattern.findall(raw_line)
            uid_to_human[pieces[0]] = pieces[2]

        # Second map: integer node ID -> string UID, parsed from the
        # label-map proto text (target_class precedes target_class_string).
        node_id_to_uid = {}
        for raw_line in tf.gfile.GFile(label_lookup_path).readlines():
            if raw_line.startswith(' target_class:'):
                target_class = int(raw_line.split(': ')[1])
            if raw_line.startswith(' target_class_string:'):
                # Slice strips the surrounding quote characters.
                node_id_to_uid[target_class] = raw_line.split(': ')[1][1:-2]

        # Compose the two maps: integer node ID -> human-readable string.
        node_id_to_name = {}
        for node_id, uid in node_id_to_uid.items():
            if uid not in uid_to_human:
                tf.logging.fatal('Failed to locate: %s', uid)
            node_id_to_name[node_id] = uid_to_human[uid]
        return node_id_to_name

    def id_to_string(self, node_id):
        """Returns the human-readable label for node_id, or '' if unknown."""
        if node_id not in self.node_lookup:
            return ''
        return self.node_lookup[node_id]
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment