Skip to content

Instantly share code, notes, and snippets.

@aoikonomop
Last active October 5, 2017 16:36
Show Gist options
  • Save aoikonomop/9c907dc7b27b0ca97f3eb5bbccfdcf8e to your computer and use it in GitHub Desktop.
Save aoikonomop/9c907dc7b27b0ca97f3eb5bbccfdcf8e to your computer and use it in GitHub Desktop.
queuerunner vs dataset api
Display the source blob
Display the rendered blob
Raw
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:root:Test\n"
]
}
],
"source": [
"import os\n",
"import numpy as np\n",
"import time\n",
"import logging\n",
"import tensorflow as tf\n",
"\n",
"from hudl_beatrix.dataset import BrainWashDataset\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import matplotlib.patches as patches\n",
"%matplotlib inline\n",
"\n",
"logging.info('Test')\n",
"logger = logging.getLogger()\n",
"logger.setLevel(logging.INFO)"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"dataset = BrainWashDataset(image_method='imageio')"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"ERROR:root:Directory already exists, no data will be converted! Did you mean to set the overwrite flag to True?\n"
]
}
],
"source": [
"dataset.convert(dataset_path='./brainwash_tf_records_imageio')"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"train_dir = './brainwash_tf_records_imageio/train'\n",
"records = [os.path.join(train_dir, record) for record in os.listdir(train_dir)]"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:root:Queue time : 13.206629753112793 sec\n",
"INFO:root:API time : 13.020855903625488 sec\n"
]
}
],
"source": [
"n_threads = 4\n",
"batch_size = 1\n",
"min_after_dequeue = 8\n",
"\n",
"input_fn_queue = dataset.input_fn(records, \n",
" batch_size=batch_size,\n",
" n_threads=n_threads,\n",
" min_after_dequeue=min_after_dequeue,\n",
" method='queuerunner')\n",
"\n",
"input_fn_api = dataset.input_fn(records, \n",
" batch_size=batch_size,\n",
" n_threads=n_threads,\n",
" min_after_dequeue=min_after_dequeue,\n",
" method='api')\n",
"\n",
"record_queue = input_fn_queue()\n",
"record_api = input_fn_api()\n",
"\n",
"elapsed_queue = 0\n",
"elapsed_api = 0\n",
"\n",
"with tf.Session() as sess:\n",
" sess.run(tf.global_variables_initializer())\n",
"\n",
" coord = tf.train.Coordinator()\n",
" threads = tf.train.start_queue_runners(coord=coord)\n",
"\n",
" for i in range(len(records)):\n",
" \n",
" start_time = time.time()\n",
" img, bx = sess.run(record_queue)\n",
" elapsed_queue += time.time() - start_time\n",
" \n",
" start_time = time.time()\n",
" img, bx = sess.run(record_api)\n",
" elapsed_api += time.time() - start_time\n",
"\n",
" coord.request_stop()\n",
" coord.join(threads)\n",
"\n",
"logger.info('Queue time : {} sec'.format(elapsed_queue))\n",
"logger.info('API time : {} sec'.format(elapsed_api))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment