
@jeanpat
Created April 20, 2017 17:30
Try to load a dataset in tf_unet
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# First try for training an neural network for semantic segmentation:\n",
"\n",
"semantic segmentation framework : \n",
"\n",
"\n",
" * https://github.com/jakeret/tf_unet\n",
" * https://tf-unet.readthedocs.io/en/latest/index.html\n",
" \n",
"dataset :\n",
"\n",
" * https://github.com/chromosome-seg/DeepFISH/tree/master/dataset/LowRes\n",
" \n",
" "
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"from __future__ import division, print_function\n",
"%matplotlib inline\n",
"import matplotlib.pyplot as plt\n",
"import matplotlib\n",
"import numpy as np\n",
"import os"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"from tf_unet import image_gen\n",
"from tf_unet import unet\n",
"from tf_unet import util\n",
"from tf_unet.util import Image"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"from tf_unet import image_util"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"from skimage import io"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Original data are stored as:\n",
"\n",
"train\n",
"\n",
" |-- grey\n",
" grey0000001.png\n",
" grey0000002.png\n",
" ....\n",
" |--groundtruth\n",
" gtruth0000001.png\n",
" gtruth0000002.png\n",
"some pair of images were gathered in the same directory to comply **image_util.ImageDataProvider()** as follow:\n",
"\n",
"train\n",
"\n",
" |-- UnetData\n",
" 0000001.tif\n",
" 0000001_mask.tif\n",
" 0000002.tif\n",
" 0000002_mask.tif\n",
" ...\n",
" \n",
" "
]
},
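{
"cell_type": "markdown",
"metadata": {},
"source": [
"The regrouping can be scripted; below is a minimal sketch (an assumption, not the original preprocessing code: it presumes the seven-digit numbering shown above and uses scikit-image to rewrite the PNGs as TIFFs):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"## Sketch: convert each grey/groundtruth PNG pair into the flat\n",
"## '<number>.tif' / '<number>_mask.tif' layout expected by ImageDataProvider\n",
"#base = os.path.join('..','DeepFISH','dataset','LowRes','train')\n",
"#for name in sorted(os.listdir(os.path.join(base, 'grey'))):\n",
"#    num = name[len('grey'):-len('.png')]  # e.g. '0000001'\n",
"#    io.imsave(os.path.join(base, 'UnetData', num + '.tif'),\n",
"#              io.imread(os.path.join(base, 'grey', name)))\n",
"#    io.imsave(os.path.join(base, 'UnetData', num + '_mask.tif'),\n",
"#              io.imread(os.path.join(base, 'groundtruth', 'gtruth' + num + '.png')))"
]
},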
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Images come by pairs of greyscaled (input) and label uses for training:"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"## make a dir if not exists\n",
"#os.mkdir(os.path.join('..','DeepFISH','dataset','LowRes','train','UnetData'))\n",
"#os.mkdir(os.path.join('..','DeepFISH','dataset','LowRes','train','UnetData','model'))\n",
"images_list = os.listdir(os.path.join('..','DeepFISH','dataset','LowRes','train','UnetData'))\n",
"first = images_list[0]\n",
"grey_name = first[0:7]+'.tif'\n",
"groundtruth_name = first[0:7]+'_mask.tif'\n",
"\n",
"grey = io.imread(os.path.join('..','DeepFISH','dataset','LowRes','train','UnetData',grey_name))\n",
"groundtruth = io.imread(os.path.join('..','DeepFISH','dataset','LowRes','train','UnetData',groundtruth_name))"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"text/plain": [
"<matplotlib.image.AxesImage at 0x7f9c4f7fe898>"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAADDCAYAAACS2+oqAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAGWJJREFUeJzt3X2UXXV97/H3JzMkk2TyCDjmgVsSnhSwSJsleIO9VKVV\nVKBLl1dFhS5o9Fp7aWuXgveuQlrbJbbVSm1dRriUq1YU5AINeLmWCqlLSgWlhRAIgQSTkEcgDzN5\nTr73j71/5+w5zmTOPJyH2fm81jrrnP10zm/2/OY73/Pdv723IgIzMxv/JrS6AWZmNjYc0M3MSsIB\n3cysJBzQzcxKwgHdzKwkHNDNzErCAd0AkPRZSTcXpn9L0npJvZLObWXbzKw+DuiDkDRb0v+R1Cfp\nRUkfKiz7UD6vT9LdkmbXbPsBSavy5c9Lekth2RRJfydpu6SdklYUlknSjZJezh83SlJheeTv2Zs/\nigH4+4X5vZIOSHpykJ/tQkkbivMi4s8j4urCrL8EPhkR3RHxs5HsQ7N2I+lKST8axfYrJV2Yv5ak\nWyW9KunfxqyRo+CAPri/BQ4APcDlwFclnSXpLOBrwEfyZXuAv0sbSboIuBH4bWAa8GvAC4X3XQbM\nBl6fP/9BYdkS4DLgHOCXgfcAH6tp1zl5kO0uBuCIeGdhfjfwY+COUfz8vwSsHMX2NkbKnFy0M0l/\nL+lzxXkRcVZEPJRPXgBcBMyPiDc1u30Digg/ah7AVLJgfnph3jeAzwN/DvxDYf4p+brT8ukfA1cN\n8r6vA3YB0wdZ/mNgSWH6KuBfC9MBnFpH+08GDgMnD/Kz7QWOAL35Yy5wA/BNYFI+L4A+4PlW/z6O\n9QfwbeA7QDdZENkJnJU/dpMlDd3APwC3F7a7CHgROJ8seZsHzCss/yZwO3Ai0AH8amHZx4Bngfn5\ndk8DHx9uX8zXfQj441bvx7wtVwI/qnPdvwc+d5TlH673vZr287W6Ae34AM4F9tTM+yPgH4F7gM/U\nLOsFfjX/ozgAXAusATYAXwEm5+t9FHgS+BKwPX/93sL77ATOK0wvAnYXpgN4CdgM3DVQwM7X+2Pg\noaP8fBcCG2rm3QB8s+az6vqD9aOhfbG0ycUA6wbwCeA5sn9Uf5r/TD/O2/pdYGK+7ixgObANeDV/\nPb/wXleSfTPeDawFLi/M/1Fhvb8AfgTMqGnLEuBgvj97gX/M568D3p7vj335z9YLLG11X4kIl1wG\n0U3WgYp2kpVQuvPXAy3rAY4D3ge8BXgj2T+H/5mvNx84O19/LvBJ4DZJry98bvG9dwLdha+6/4Xs\nD+R1ZIF9uaTOAdr/UbLswsa/04FDEbG6MO/fqWbo/55mRsTz5MFfUgdZQnCipDWSNkj6iqTJ+epv\nIsvel+YllyclvbfwGf3eu/CZRSskbZZ0l6STB2n/R4F/iYh1df68v0mWHJ0PfJqsRPlh4CSyv50P\n5utNAG4lKw3+J7JvnV8BkDQVuAl4Z0RMA/4z8ETxQyRNkPR1stLmb0REv7/piFgGfAv4QmRlzPfU\nLL8F+DjwSL78+jp/voZyQB9YLzC9Zt50sv/2R1u2N5/+m4jYFBHbgS8CF+fz95L91/9cRByIiIeB\nHwK/McjnTgd6I6UvESvy7XYA1wALyGrxFZIuAF4L3Dnsn9ra0bGWXHwhInZFxErgKeD/RcQLecD9\nfv4zEBEvR8T3ImJPROwG/ixvU3IEOFvS5PxvsXg86DiyMtZs4D0RsWcY7WtrDugDWw10SjqtMO8c\nsoOEK/PXAEhaSFZ3Xh0Rr5KVWYqXsCy+/o8BPqu4vN97Fz5zMAGoZt4VwF0R0TvEdjY+HGvJxZbC\n670DTHfn7z1F0tfyA8K7gBXATEkdEdEH/FeyDHqTpPskva7wPqcCl5KVSQ4Mo21tzwF9AHmHuAv4\nE0lTJS0m6wDfIPsa9h5Jb8m/2v0JWQDdnW9+K/B7kl4jaRbZKJbl+bIVwM+B6yR15u/768AD+fL/\nDfyhpHmS5gKfIs9u8hE2b5TUIakb+CtgI7AqtTv/Ov1+hs6ItgDHS5oxkv1jTVX25GKkPgWcQXbM\naTrZgWFSGyLigYi4CJgDPAN8vbDtKrJRaN+XdMZRPmPcJT4O6IP7BDAZ2Er29ey/RcTK/Kvbx8kC\n+1ayr7efKGz3p8BPyP4QVwE/I/s6SEQcJPvHcDHZV9ivAx+NiGfybb9GduD1SbKvm/fl8yD7Cv0d\nsq/fL5B93X13/p7JZcAOskyrn3z87OV5O57Jf6YXJO3I/3lYGzoGkouRmkaWse/Ih2pWatiSeiRd\nmu+T/WTfNo4UN46IbwOfBf5J0imDfMYWYGEjGt8wrT4q64cffhz9QVbrvZtsGOnPgQ8Vln0on9dH\nNgJrdmHZcWTnSOwgGxl1E9BVWH4W8Ei+7dPAbxWWCfgC8Er++AKgfNlbyYY09pElNXcDp9W0+YNk\nB101jJ+z38gZstEnVxamPwfcnL+eSzYcspcsefpYvn0nWVb+MFnStCNf78x8uyvpP8rld/J2nkx2\nrKG3sOw0soOpO4C783nrgLcP9F7t8Ei/IDMzG+dccjEzK4lRBXRJ75D0bD7O9dqxapRZq7lvj728\n1t870KPVbSuLEZdc8hMXVpOdXryB7EDgByPi6bFrnlnzuW/beDXQiQD1ehOwJiJeAJB0O9nR90E7\nvSQX7K2hIqJ26NxIDKtvS1MCZo7Bx5oNZtP2iDhxqLVGE9DnAesL0xuA82pXkrSE7LoIZuPFkH27\nf7+egbu4NdbSF+tZq+EHRSNiWUQsiohFjf4ss2bp36+ntLo5ZsDoAvpGsgvmJPPzeWbjnfu2jUuj\nCeg/AU6TtEDSROADwL1j0yyzlnLftnFpxDX0iDgk6ZNkpwp3AP8r+l/RzGxcct+28Wo0B0WJiPuB\n+8eoLWZtw33bxiOfKWpmVhIO6GZmJeGAbmZWEg7oZmYl4YBuZlYSDuhmZiXhgG5mVhIO6GZmJeGA\nbmZWEg7oZmYl4YBuZlYSDuhmZiXhgG5mVhIO6GZmJeGAbmZWEg7oZmYl4YBuZlYSDuhmZiXhgG5m\nVhIO6GZmJeGAbmZWEg7oZmYl4YBuZlYSDuhmZiXhgG5mVhIO6GZmJeGAbmZWEg7oZmYlMWRAl3SS\npB9KelrSSknX5PNnS/qBpOfy51mNb67Z2HHftrKpJ0M/BHwqIs4Ezgd+V9KZwLXAgxFxGvBgPm02\nnrhvW6kMGdAjYlNE/DR/vRtYBcwDLgVuy1e7DbisUY20/iQhqdXNGPfct9tLbFxKbFza6maMa53D\nWVnSycC5wKNAT0RsyhdtBnoG2WYJsGTkTTRrvOH27f79ekYzmmg2pLoDuqRu4HvA70fErmKGGBEh\nKQbaLiKWAcvy9xhwHTu6E044AYDJkycDcODAAQB6e3sB6Ovra03DSmIkfbt/v57rfj0Cg2Xjab7m\nXd/M5pRCXaNcJB1H1uG/FRF35bO3SJq
TL58DbG1ME80ax33bymTIDF1ZunILsCoivlhYdC9wBfD5\n/PmehrTwGHb++ecD1Qz9lFNOAWD16tUAbNu2DYDt27cDsG7duia3cHxz326NeuvkztSHr56Sy2Lg\nI8CTkp7I532WrLN/V9JVwIvA+xvTRLOGcd+2UlFE88p/rqHX74ILLmDhwoVANVO/+uqrAVixYgUA\nN998MwDd3d0A7Ny5E4A77rijqW1tJxHR9OE/WQ3dx/3rMZpRLMd2pr708YhYNNRaPlPUzKwkhjVs\n0RrvjDPOAKCnp4c5c+YAMHXqVAA2btwIwJQpUwA4/fTTAVi8eDEA999/f1PbalYvjy9vDmfoZmYl\n4Qy9TfT0ZOeupLp5d3c3nZ3ZryeNO1++fDlQHd2yf/9+AO6++24AfvrTnwIwa9YsXn311Sa13Gxw\nY5mZx8alx3gdfWgO6C02YUL2JWnatGlAtZwyZcoUDh8+DMD69euBamA/dOhQv+mXX3653/yIqFwa\noJkHvc2SRpVYPJTx6FxyMTMrCWfoLZZOGpo/fz5QPQDa1dVVya5379494LZ79+4FqqWXdCmA3t5e\nZ+ZWSs7Mj84ZuplZSThDb5FU4+7q6gKqJwd1dHRU1kk18vScHDx4EIAdO3YAsGvXrn7PqfZu1mwe\nnthaztDNzErCGXqLzJiRXUN77ty5QPXSuClj7+joqGTiR44cAaojVtL8VDPfsmULAK+88kq/9czK\nxPXzoTlDNzMrCWfoTZZq5ykjnz59OlCtoafnrq6uSi28WFcH2LNnD1C9GFc6iShl7mbN5tp5e3CG\nbmZWEs7Qmyxl5vPmzQNg5syZQDVTT8sBJk6c2O85jTtPmXka5ZIy9FRrNysT187r5wzdzKwknKE3\nWcq207VbUmaenlO9/NChQ5VsPdXG03Mab54y9XQNFzM7tjmgN0k6GDpp0iSg/yn+UA3kacjhxIkT\nKxfu2rdvX7/ndFA0XRLAAd1apZEHQ11qGT6XXMzMSsIZepOka5unUstxxx0HVDPzdEAzlVWKB0fT\nwdB0EDQ9p4tymZmBM3Qzs9Jwht4k6WDo7NmzgWqGnurktaf579+/v5K9pww9DU/cvn078IsX7TKz\nY5szdDOzknCG3gSSKhl67UW4kjSCpbhNGs1Se3nctK4vwmWt1KgRLh7dMnLO0M3MSsIZepOkenga\n7VJ7wa1UO0+ZfPHyucVby4HHnZvZwJyhm5mVhDP0JklniKYMPKm9XVzKviPiF27+nMaf+zK5ZjaQ\nujN0SR2SfiZpeT69QNKjktZI+o6kiUO9h1m7cb+2MhlOyeUaYFVh+kbgSxFxKvAqcNVYNqxMJNHR\n0UFHRwednZ10dnbS1dVFV1cXkpDEkSNHOHLkCIcPH+bw4cPs27ePvXv3snfvXg4cONDvEREe4TJ2\n3K+tNOoK6JLmA+8Cbs6nBbwVuDNf5TbgskY0sCwmTJhQOYkIstJK8ZECeQrafX19lce+ffvYt2+f\nA/oYc79uL5p3vYcsjlK9GfpfA58G0h0Ujgd2REQabrEBmDfQhpKWSHpM0mOjaqnZ2Bujfr2n8S01\nq8OQB0UlvRvYGhGPS7pwuB8QEcuAZfl7HZNp5YQJEyrDFWuHHKbhiungaMq8Dx48WDnlP51gVHsA\n1UZubPv13GOyX98w4L+6mnU2Nr4dVlXPKJfFwCWSLga6gOnAl4GZkjrzbGY+4F+djSfu11Y6Qwb0\niLgOuA4gz2T+KCIul3QH8D7gduAK4J4GtnNcmjJlCpANWXzta18LwEknnQTAiSee2G/dlIUXT+t3\nht447tej8NQN2fPZQ5/6P1AW76y9cUZzYtFngD+UtIas9njL2DTJrKXcr23cGtaJRRHxEPBQ/voF\n4E1j36TyeNe73lV5PWvWLADe8IY3ADBz5sx+66bbyaVL4xZHsaSs3af8N4b7dXPVZu1L8ciWseJT\n/83MSsKn/jfAokWLALj88ssBePjhhys3tjjnnHMA6OnpAaqZ+NatWwHYsGEDkJ2M9MorrwCwbds2\noHrKf7rRhVlTpdr5GLuerBbvTH30nKGbmZWEM/QGSBfgevjhh4FsrPkpp5wCwIIFCwA44YQTgOrl\ndFONPdXWJ02aVKmdp4t0pZtjvPzyy/2eU/3dbFyr/QZw9g0DrWVH4QzdzKwknKGPoZR1L168GIBL\nLrkEgBUrVnD88ccD1ZtD197wIo1ZT+/R29vLwoULgeo49LRuytTTe2SXIKmu59EwNqYaVDsf9uc6\nYx+SM3Qzs5Jwhj4GUoacMuY0CuWRRx4BsrHlBw4cAKr17rRuuvFFmp/ODu3o6GDGjBkAvOY1rwF+\n8cYW6Tow6fPTDTB27NjhLN1Gr1WZ+WCeusFZ+hAc0MdAGnqYhhk+8MADADzxxBMAnHrqqaxbtw6o\nBueuri6gGpRTAE6X2O3o6KgcXE3lmO7uboBK+SZtm7ZJ77F//376+vr6rWM2bCl4tlNgr1x24IZW\ntqJtueRiZlYSztDHUCqJpJOEUpmlq6uLNWvWADBnzhygmnVPnz4dqB7wTNl2moZfvChXOiiaMvX0\nDSENcyxm5R7SaKNWk6lff7Za1hQ7OmfoZmYl4Qy9AdKJQMULbqWLbm3cmF07NGXoKctOz6luPnHi\nxMoQx5SBF2vkUM3iU209XV5g3759lXXSt4Q0bTZi7VS3di19QM7QzcxKwhl6A6Wa+sGDBysjYF56\n6SWgmlX39vYC1eGKaRRMRFRGwqRhiWmd9Jwy+FR3nzZtGlDN9qGa1adM3TeXttFKN6io5xZ0DedM\nvR9n6GZmJeEMvYFShr53795KPT1dfCtl6Fu2bAGqtfPzzjsPgLVr11ZGy+zatQuo1sHTqJfa56lT\np1bWT++XTlxKWb4zdBstzcsucxsbq7ega4ts3Zyhm5mVhTP0JnjppZcqI1BS/Xv16tWVZVA9yzTd\nAGPy5MmVm13U3uAiSdl2qpmnSwVIqmT3qYbuM0ZtrGne9ZUsfTR19aVPjeJbo2vn/ThDNzMrCWfo\nTZJGs9x3331AtbadRqikbHzTpk2VbVLWnkaopLp4Gt2SxqGnC309//zzAGzevLlyLZd09qhZM6RM\nvTJ9lIx9VJm5DcgBvUnSxbnSwdEU0JMUnFetWgVk5ZTauxmlywSk0ko6wJpOUkoHPnfv3l0J6OmA\nqlkjDHSAtGigUsyYBHKXWgbkkouZWUk4Q2+ydB/QlH0PNpzw0KFDlcw8HUhNB0XTuldffXW/bW66\n6SYgK++41GLNVG+mntazxnCGbmZWEs7QW6SeOwqlOx+l55SZp4OiK1euBKp3Klq7dm1lOm1j1kx1\nZeC19e/h3EDDtfOjcoZuZlYSztDHkfXr1w/4nE75T5m6R7bYuDKajN36qStDlzRT0p2SnpG0StKb\nJc2W9ANJz+XPsxrdWLOx5r5tZaJ6LtYk6TbgXyLiZkkTgSnAZ4FXIuLzkq4FZkXEZ4Z4H59J0ACp
\npp4u0nUsn+YfEcO6P9pY9G1pbsCSUbXbBlCbqR/T9fOlj0fEoqHWGjJDlzQD+DXgFoCIOBARO4BL\ngdvy1W4DLht5Y82az33byqaeGvoCYBtwq6RzgMeBa4CeiEjnqW8GehrTRBtK7UW7rG7u2+3smM7I\nR6aeGnon8CvAVyPiXKAPuLa4QmR1mwHLKZKWSHpM0mOjbazZGBtx3+7fr/c0pbFmQ6knoG8ANkTE\no/n0nWR/BFskzQHIn7cOtHFELIuIRfXUf8yabMR9u3+/ntK0BpsdzZABPSI2A+slnZHPehvwNHAv\ncEU+7wrgnoa00KxB3LetbOodh/57wLfyUQAvAL9N9s/gu5KuAl4E3t+YJpo1lPu2lUZdAT0ingAG\nKpm8bWybY9Zc7ttWJj7138ysJBzQzcxKwgHdzKwkHNDNzErCAd3MrCQc0M3MSsIB3cysJBzQzcxK\nwgHdzKwkHNDNzErCAd3MrCQc0M3MSsIB3cysJBzQzcxKwgHdzKwkHNDNzErCAd3MrCQc0M3MSsIB\n3cysJBzQzcxKwgHdzKwkHNDNzErCAd3MrCQc0M3MSsIB3cysJBzQzcxKwgHdzKwkHNDNzErCAd3M\nrCQc0M3MSsIB3cysJBQRzfswaRvQB2xv2ofW5wTcpnq0e5t+KSJObHYD3K+HrR3b1e5tqqtvNzWg\nA0h6LCIWNfVDh+A21cdtav92FLVjm6A921WWNrnkYmZWEg7oZmYl0YqAvqwFnzkUt6k+btPg2qUd\nRe3YJmjPdpWiTU2voZuZWWO45GJmVhIO6GZmJdG0gC7pHZKelbRG0rXN+tyaNpwk6YeSnpa0UtI1\n+fzZkn4g6bn8eVYL2tYh6WeSlufTCyQ9mu+v70ia2II2zZR0p6RnJK2S9OZW7ytJf5D/7p6S9G1J\nXa3eV+7bQ7atrfp2mft1UwK6pA7gb4F3AmcCH5R0ZjM+u8Yh4FMRcSZwPvC7eTuuBR6MiNOAB/Pp\nZrsGWFWYvhH4UkScCrwKXNWCNn0Z+L8R8TrgnLx9LdtXkuYB/x1YFBFnAx3AB2jhvnLfrku79e3y\n9uuIaPgDeDPwQGH6OuC6Znz2EO26B7gIeBaYk8+bAzzb5HbMJ+tEbwWWAyI7Q6xzoP3XpDbNANaS\nHzgvzG/ZvgLmAeuB2UBnvq9+s5X7yn17yHa0Vd8ue79uVsklNTjZkM9rGUknA+cCjwI9EbEpX7QZ\n6Glyc/4a+DRwJJ8+HtgREYfy6VbsrwXANuDW/OvyzZKm0sJ9FREbgb8Efg5sAnYCj9PafeW+fXTt\n1rdL3a+PyYOikrqB7wG/HxG7issi+3fYtLGckt4NbI2Ix5v1mXXqBH4F+GpEnEt2rZJ+X0NbsK9m\nAZeS/VHOBaYC72jW548H7ttDKnW/blZA3wicVJien89rOknHkXX4b0XEXfnsLZLm5MvnAFub2KTF\nwCWS1gG3k301/TIwU1Jnvk4r9tcGYENEPJpP30n2h9DKffV2YG1EbIuIg8BdZPuvlfvKfXtw7di3\nS92vmxXQfwKclh+1nUhW8L+3SZ9dIUnALcCqiPhiYdG9wBX56yvI6o9NERHXRcT8iDiZbL/8c0Rc\nDvwQeF8r2pS3azOwXtIZ+ay3AU/Twn1F9pX0fElT8t9lalMr95X79iDasW+Xvl83sfB/MbAaeB74\nH8363Jo2XED2Veo/gCfyx8Vkdb0HgeeAfwJmt6h9FwLL89cLgX8D1gB3AJNa0J43Ao/l++tuYFar\n9xWwFHgGeAr4BjCp1fvKfbuu9rVN3y5zv/ap/2ZmJXFMHhQ1MysjB3Qzs5JwQDczKwkHdDOzknBA\nNzMrCQd0M7OScEA3MyuJ/w9cPAn0OM40nAAAAABJRU5ErkJggg==\n",
"text/plain": [
"<matplotlib.figure.Figure at 0x7f9c4f924c18>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"plt.subplot(121)\n",
"plt.title(grey_name)\n",
"plt.imshow(grey, cmap=plt.cm.gray)\n",
"plt.subplot(122)\n",
"plt.title(groundtruth_name)\n",
"plt.imshow(groundtruth, cmap=plt.cm.jet)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Try to load images"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Number of files used: 200\n"
]
}
],
"source": [
"search = os.path.join('..','DeepFISH','dataset','LowRes','train','UnetData','*.tif')\n",
"data_provider = image_util.ImageDataProvider(search)#, data_suffix= '.png', mask_suffix='_mask.png')\n",
"#setup & training"
]
},
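{
"cell_type": "markdown",
"metadata": {},
"source": [
"The provider is callable: a quick look at one batch shows the image and label shapes (the label's last axis is the number of classes the provider one-hot encoded):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# data_provider(n) returns a batch of n images, shape (n, H, W, channels),\n",
"# and the matching one-hot labels, shape (n, H, W, n_class)\n",
"x_batch, y_batch = data_provider(4)\n",
"print(x_batch.shape, y_batch.shape)"
]
},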
{
"cell_type": "code",
"execution_count": 8,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"2017-04-20 11:45:50,674 Layers 3, features 64, filter size 3x3, pool size: 2x2\n"
]
}
],
"source": [
"#setup & training\n",
"## grey: greyscaled -> one channel\n",
"## groundtruth (greyscaled image):4 classes\n",
"### 0 background;\n",
"### 1,2: objects\n",
"### 3 : objects overlapp\n",
"net = unet.Unet(layers=3, features_root=64, channels=1, n_class=4)\n",
"trainer = unet.Trainer(net)"
]
},
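{
"cell_type": "markdown",
"metadata": {},
"source": [
"A sanity check (a sketch, assuming the mask pixel values are the class indices themselves) that the groundtruth really contains the four classes the network was built for:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# The distinct pixel values of a mask should match n_class=4:\n",
"# 0 background, 1 and 2 the two chromosomes, 3 their overlap\n",
"print('classes in', groundtruth_name, ':', np.unique(groundtruth))"
]
},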
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"2017-04-20 11:45:56,963 Removing '/home/jeanpat/DeepFISH-Github_projects/tf_unet/prediction'\n",
"2017-04-20 11:45:56,964 Removing '/home/jeanpat/DeepFISH-Github_projects/DeepFISH/dataset/LowRes/train/UnetData/model'\n",
"2017-04-20 11:45:56,965 Allocating '/home/jeanpat/DeepFISH-Github_projects/tf_unet/prediction'\n",
"2017-04-20 11:45:56,966 Allocating '/home/jeanpat/DeepFISH-Github_projects/DeepFISH/dataset/LowRes/train/UnetData/model'\n"
]
},
{
"ename": "ValueError",
"evalue": "Cannot feed value of shape (4, 80, 82, 2) for Tensor 'Placeholder_1:0', which has shape '(?, ?, ?, 4)'",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-9-bccf75a9b4d8>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0moutput_path\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mos\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpath\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjoin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'..'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'DeepFISH'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'dataset'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'LowRes'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'train'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'UnetData'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'model'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mpath\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtrainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdata_provider\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moutput_path\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtraining_iters\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m10\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mepochs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;32m/home/jeanpat/DeepFISH-Github_projects/tf_unet/tf_unet/unet.py\u001b[0m in \u001b[0;36mtrain\u001b[0;34m(self, data_provider, output_path, training_iters, epochs, dropout, display_step, restore)\u001b[0m\n\u001b[1;32m 398\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 399\u001b[0m \u001b[0mtest_x\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtest_y\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdata_provider\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mverification_batch_size\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 400\u001b[0;31m \u001b[0mpred_shape\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstore_prediction\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msess\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtest_x\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtest_y\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"_init\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 401\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 402\u001b[0m \u001b[0msummary_writer\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msummary\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mFileWriter\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0moutput_path\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgraph\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msess\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgraph\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jeanpat/DeepFISH-Github_projects/tf_unet/tf_unet/unet.py\u001b[0m in \u001b[0;36mstore_prediction\u001b[0;34m(self, sess, batch_x, batch_y, name)\u001b[0m\n\u001b[1;32m 439\u001b[0m prediction = sess.run(self.net.predicter, feed_dict={self.net.x: batch_x, \n\u001b[1;32m 440\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnet\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0my\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mbatch_y\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 441\u001b[0;31m self.net.keep_prob: 1.})\n\u001b[0m\u001b[1;32m 442\u001b[0m \u001b[0mpred_shape\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mprediction\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 443\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jeanpat/VirtualEnv/venv3/lib/python3.5/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 765\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 766\u001b[0m result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[0;32m--> 767\u001b[0;31m run_metadata_ptr)\n\u001b[0m\u001b[1;32m 768\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 769\u001b[0m \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jeanpat/VirtualEnv/venv3/lib/python3.5/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_run\u001b[0;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 942\u001b[0m \u001b[0;34m'Cannot feed value of shape %r for Tensor %r, '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 943\u001b[0m \u001b[0;34m'which has shape %r'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 944\u001b[0;31m % (np_val.shape, subfeed_t.name, str(subfeed_t.get_shape())))\n\u001b[0m\u001b[1;32m 945\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgraph\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mis_feedable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msubfeed_t\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 946\u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Tensor %s may not be fed.'\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0msubfeed_t\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mValueError\u001b[0m: Cannot feed value of shape (4, 80, 82, 2) for Tensor 'Placeholder_1:0', which has shape '(?, ?, ?, 4)'"
]
}
],
"source": [
"output_path = os.path.join('..','DeepFISH','dataset','LowRes','train','UnetData','model')\n",
"path = trainer.train(data_provider, output_path, training_iters=10, epochs=5)"
]
},
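{
"cell_type": "markdown",
"metadata": {},
"source": [
"The labels fed to the placeholder have shape (4, 80, 82, 2): **ImageDataProvider** one-hot encoded the masks with its default of 2 classes, while the network expects 4. A hypothetical fix is sketched below; the `n_class` argument is an assumption (it exists in later tf_unet versions), otherwise the provider would have to be subclassed to one-hot encode 4 classes:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"## Sketch: make the provider and the network agree on the class count\n",
"#data_provider = image_util.ImageDataProvider(search, n_class=4)\n",
"#net = unet.Unet(layers=3, features_root=64, channels=1, n_class=4)\n",
"#trainer = unet.Trainer(net)\n",
"#path = trainer.train(data_provider, output_path, training_iters=10, epochs=5)"
]
},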
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"#verification\n",
"...\n",
"\n",
"prediction = net.predict(path, data)\n",
"\n",
"unet.error_rate(prediction, util.crop_to_shape(label, prediction.shape))\n",
"\n",
"img = util.combine_img_prediction(data, label, prediction)\n",
"util.save_image(img, \"prediction.jpg\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "IPython (Python 3)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
}
},
"nbformat": 4,
"nbformat_minor": 1
}
@JunMa11 commented May 6, 2017

Thank you for sharing.
I have a training dataset with the following shapes:
Train_image.shape = (512, 512, 200) and the corresponding label.shape = (512, 512, 200). Both are of type int16.

Would it be possible for you to tell me how to gather them into a variable (named Train_data) that can be used with the trainer.train function?
path = trainer.train(Train_data, output_path, training_iters=10, epochs=5)
