{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0.9.0\n"
]
}
],
"source": [
"from __future__ import print_function\n",
"# boilerplate code\n",
"import sys\n",
"\n",
"import os\n",
"from io import BytesIO\n",
"import numpy as np\n",
"from functools import partial\n",
"import PIL.Image as PILImage\n",
"from IPython.display import clear_output, Image, display, HTML\n",
"\n",
"import tensorflow as tf\n",
"print(tf.__version__)\n",
"#from showgraph import show_graph, rename_nodes\n",
"\n",
"qops = tf.load_op_library('/home/auviz23/Repos/tensorflow/bazel-bin/tensorflow/contrib/quantization/_quantized_ops.so')\n",
"qkernelops = tf.load_op_library('/home/auviz23/Repos/tensorflow/bazel-bin/tensorflow/contrib/quantization/kernels/_quantized_kernels.so')\n",
"\n"
]
},
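{
"cell_type": "markdown",
"metadata": {},
"source": [
"The two `tf.load_op_library` calls above load the 8-bit quantized op definitions and kernels built with Bazel from `tensorflow/contrib/quantization`. As a small added sanity check (not part of the original run), the next cell lists the Python wrappers each loaded module exposes; the ops library should expose wrappers for ops such as `QuantizedConv2D`, while the kernels library only registers kernels and may expose nothing."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Sketch only: inspect the modules returned by tf.load_op_library above to\n",
"# confirm the quantized ops were picked up by this TensorFlow build.\n",
"print([name for name in dir(qops) if not name.startswith('_')])\n",
"print([name for name in dir(qkernelops) if not name.startswith('_')])"
]
},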
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"import/data Placeholder\n",
"import/Pad/paddings Const\n",
"import/Pad Pad\n",
"import/Weights1_quint8_const Const\n",
"import/Weights1_min Const\n",
"import/Weights1_max Const\n",
"import/Conv2D_eightbit_reshape_dims Const\n",
"import/Conv2D_eightbit_reduction_dims Const\n",
"import/Conv2D_eightbit_reshape_Pad Reshape\n",
"import/Conv2D_eightbit_min_Pad Min\n",
"import/Conv2D_eightbit_max_Pad Max\n",
"import/Conv2D_eightbit_quantize_Pad QuantizeV2\n",
"import/Conv2D_eightbit_quantized_conv QuantizedConv2D\n",
"import/Conv2D_eightbit_quantize_down QuantizeDownAndShrinkRange\n",
"import/Conv2D Dequantize\n",
"import/Const_4_quint8_const Const\n",
"import/Const_4_min Const\n",
"import/Const_4_max Const\n",
"import/Const_4 Dequantize\n",
"import/add Add\n",
"import/Relu_eightbit_reshape_dims Const\n",
"import/Relu_eightbit_reduction_dims Const\n",
"import/Relu_eightbit_reshape_add Reshape\n",
"import/Relu_eightbit_min_add Min\n",
"import/Relu_eightbit_max_add Max\n",
"import/Relu_eightbit_quantize_add QuantizeV2\n",
"import/Relu_eightbit_quantized QuantizedRelu\n",
"import/MaxPool_eightbit_quantized QuantizedMaxPool\n",
"import/MaxPool Dequantize\n",
"import/Pad_1/paddings Const\n",
"import/Pad_1 Pad\n",
"import/Const_quint8_const Const\n",
"import/Const_min Const\n",
"import/Const_max Const\n",
"import/Conv2D_1_eightbit_reshape_dims Const\n",
"import/Conv2D_1_eightbit_reduction_dims Const\n",
"import/Conv2D_1_eightbit_reshape_Pad_1 Reshape\n",
"import/Conv2D_1_eightbit_min_Pad_1 Min\n",
"import/Conv2D_1_eightbit_max_Pad_1 Max\n",
"import/Conv2D_1_eightbit_quantize_Pad_1 QuantizeV2\n",
"import/Conv2D_1_eightbit_quantized_conv QuantizedConv2D\n",
"import/Conv2D_1_eightbit_quantize_down QuantizeDownAndShrinkRange\n",
"import/Conv2D_1 Dequantize\n",
"import/Const_5_quint8_const Const\n",
"import/Const_5_min Const\n",
"import/Const_5_max Const\n",
"import/Const_5 Dequantize\n",
"import/add_1 Add\n",
"import/Relu_1_eightbit_reshape_dims Const\n",
"import/Relu_1_eightbit_reduction_dims Const\n",
"import/Relu_1_eightbit_reshape_add_1 Reshape\n",
"import/Relu_1_eightbit_min_add_1 Min\n",
"import/Relu_1_eightbit_max_add_1 Max\n",
"import/Relu_1_eightbit_quantize_add_1 QuantizeV2\n",
"import/Relu_1_eightbit_quantized QuantizedRelu\n",
"import/MaxPool_1_eightbit_quantized QuantizedMaxPool\n",
"import/MaxPool_1 Dequantize\n",
"import/Pad_2/paddings Const\n",
"import/Pad_2 Pad\n",
"import/Const_1_quint8_const Const\n",
"import/Const_1_min Const\n",
"import/Const_1_max Const\n",
"import/Conv2D_2_eightbit_reshape_dims Const\n",
"import/Conv2D_2_eightbit_reduction_dims Const\n",
"import/Conv2D_2_eightbit_reshape_Pad_2 Reshape\n",
"import/Conv2D_2_eightbit_min_Pad_2 Min\n",
"import/Conv2D_2_eightbit_max_Pad_2 Max\n",
"import/Conv2D_2_eightbit_quantize_Pad_2 QuantizeV2\n",
"import/Conv2D_2_eightbit_quantized_conv QuantizedConv2D\n",
"import/Conv2D_2_eightbit_quantize_down QuantizeDownAndShrinkRange\n",
"import/Conv2D_2 Dequantize\n",
"import/Const_6_quint8_const Const\n",
"import/Const_6_min Const\n",
"import/Const_6_max Const\n",
"import/Const_6 Dequantize\n",
"import/add_2 Add\n",
"import/Relu_2_eightbit_reshape_dims Const\n",
"import/Relu_2_eightbit_reduction_dims Const\n",
"import/Relu_2_eightbit_reshape_add_2 Reshape\n",
"import/Relu_2_eightbit_min_add_2 Min\n",
"import/Relu_2_eightbit_max_add_2 Max\n",
"import/Relu_2_eightbit_quantize_add_2 QuantizeV2\n",
"import/Relu_2_eightbit_quantized QuantizedRelu\n",
"import/MaxPool_2_eightbit_quantized QuantizedMaxPool\n",
"import/MaxPool_2 Dequantize\n",
"import/transpose/perm Const\n",
"import/transpose Transpose\n",
"import/Reshape/shape Const\n",
"import/Reshape Reshape\n",
"import/Const_2_quint8_const Const\n",
"import/Const_2_min Const\n",
"import/Const_2_max Const\n",
"import/MatMul_eightbit_reshape_dims Const\n",
"import/MatMul_eightbit_reduction_dims Const\n",
"import/MatMul_eightbit_reshape_Reshape Reshape\n",
"import/MatMul_eightbit_min_Reshape Min\n",
"import/MatMul_eightbit_max_Reshape Max\n",
"import/MatMul_eightbit_quantize_Reshape QuantizeV2\n",
"import/MatMul_eightbit_quantized_bias_add QuantizedMatMul\n",
"import/MatMul_eightbit_quantize_down QuantizeDownAndShrinkRange\n",
"import/MatMul Dequantize\n",
"import/Const_7_quint8_const Const\n",
"import/Const_7_min Const\n",
"import/Const_7_max Const\n",
"import/Const_7 Dequantize\n",
"import/add_3 Add\n",
"import/Const_3_quint8_const Const\n",
"import/Const_3_min Const\n",
"import/Const_3_max Const\n",
"import/MatMul_1_eightbit_reshape_dims Const\n",
"import/MatMul_1_eightbit_reduction_dims Const\n",
"import/MatMul_1_eightbit_reshape_add_3 Reshape\n",
"import/MatMul_1_eightbit_min_add_3 Min\n",
"import/MatMul_1_eightbit_max_add_3 Max\n",
"import/MatMul_1_eightbit_quantize_add_3 QuantizeV2\n",
"import/MatMul_1_eightbit_quantized_bias_add QuantizedMatMul\n",
"import/MatMul_1_eightbit_quantize_down QuantizeDownAndShrinkRange\n",
"import/MatMul_1 Dequantize\n",
"import/Const_8_quint8_const Const\n",
"import/Const_8_min Const\n",
"import/Const_8_max Const\n",
"import/Const_8 Dequantize\n",
"import/add_4 Add\n",
"import/ArgMax/dimension Const\n",
"import/ArgMax ArgMax\n"
]
}
],
"source": [
"# Load the Quantized Cifar Graph\n",
"model_fn = 'qcifar.pb'\n",
"# read the serialized GraphDef and import it into the default graph\n",
"tf.reset_default_graph()\n",
"with tf.gfile.FastGFile(model_fn, 'rb') as f:\n",
"    graph_def = tf.GraphDef()\n",
"    graph_def.ParseFromString(f.read())\n",
"\n",
"tf.import_graph_def(graph_def)\n",
"graph = tf.get_default_graph()\n",
"\n",
"# list every node; the *_eightbit_* names mark ops inserted by the quantization rewrite\n",
"for op in graph.get_operations():\n",
"    print(op.name, op.type)"
]
},
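{
"cell_type": "markdown",
"metadata": {},
"source": [
"The listing above shows the structure the eight-bit rewrite produces: each original float op is bracketed by a `QuantizeV2` on its input and a `Dequantize` on its output, with `QuantizedConv2D` and `QuantizedMatMul` additionally followed by `QuantizeDownAndShrinkRange` to map their 32-bit accumulators back to `quint8`. The next cell is an added sketch (not in the original notebook) that tallies the graph's nodes by op type to summarize that pattern."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Sketch: count the imported graph's nodes by op type to see how many are\n",
"# quantized variants versus plain Const/Reshape/Min/Max/Dequantize glue.\n",
"from collections import Counter\n",
"\n",
"op_counts = Counter(op.type for op in graph.get_operations())\n",
"for op_type, count in sorted(op_counts.items()):\n",
"    print(op_type, count)"
]
},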
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[3] [[ -8.94129181 -1.37928915 -6.49476147 19.4162178 -3.2697897\n",
" 8.85165596 -2.38014269 5.07065392 -4.82667255 7.2947731 ]]\n"
]
}
],
"source": [
"with tf.Session() as sess:\n",
"    # load a CIFAR-10 test image (class 3) and reverse the channel order (RGB -> BGR)\n",
"    img = PILImage.open('/home/auviz23/workspace/modelZoo/cifar/cifar10_test_images/3/00000.png')\n",
"    im = np.array(img, dtype=np.float32)\n",
"    im = im[:, :, ::-1]\n",
"    # subtract the per-pixel mean, stored channel-first, so reshape and transpose to HWC\n",
"    meanFile = np.loadtxt('/home/auviz23/Downloads/Cifar10_256P_Demo/DataCifarMAX/mean_file.txt')\n",
"    mean = np.transpose(meanFile.reshape((3, 32, 32)), (1, 2, 0))\n",
"    im -= mean\n",
"    # add the batch dimension expected by the 'import/data' placeholder\n",
"    im = im[np.newaxis, ...]\n",
"    #im[...] = 0.0\n",
"    # fetch the predicted class (ArgMax) and the final fully-connected output (MatMul_1)\n",
"    tf_out = graph.get_tensor_by_name('import/ArgMax:0')\n",
"    tf_in = graph.get_tensor_by_name('import/data:0')\n",
"    tf_ip2_out = graph.get_tensor_by_name('import/MatMul_1:0')\n",
"    out, ip2_out = sess.run([tf_out, tf_ip2_out], feed_dict={tf_in: im})\n",
"    print(out, ip2_out)\n"
]
}
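,
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Added sketch (not in the original notebook): `import/MatMul_1:0` holds the raw scores of the final fully-connected layer for the 10 CIFAR-10 classes, and the graph's `ArgMax` picks the predicted class from `add_4` (those scores plus a bias). A numerically stable softmax over the fetched scores gives a rough sense of the prediction's confidence; the bias term is ignored here."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Sketch: turn the raw per-class scores fetched above into approximate\n",
"# probabilities with a numerically stable softmax (pure NumPy, no session).\n",
"scores = ip2_out[0]\n",
"probs = np.exp(scores - np.max(scores))\n",
"probs /= probs.sum()\n",
"print('predicted class:', int(out[0]))\n",
"print('softmax over MatMul_1 scores:', np.round(probs, 4))"
]
}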
],
"metadata": {
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.12"
}
},
"nbformat": 4,
"nbformat_minor": 0
}