@takotab
Last active February 18, 2019 13:43
{
"cells": [
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"%matplotlib inline\n",
"%reload_ext autoreload\n",
"%autoreload 2"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"from fastai import *\n",
"from fastai.vision import *"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"size = 224\n",
"bs = 4"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"dataDir = \"/home/tako/devtools/furry-geras/coco\"\n",
"dataType = \"val2017\"\n",
"annFile = \"{}/instances_{}.json\".format(dataDir, dataType)\n",
"\n",
"images, lbl_bbox = get_annotations(annFile)\n",
"img2bbox = dict(zip(images, lbl_bbox))\n"
]
},
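{
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick, illustrative peek at what `get_annotations` returns (this cell is not from the original run): `images` is a list of file names, and `lbl_bbox` holds, per image, a list of bounding boxes plus the matching class labels."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Illustrative only: inspect the first parsed annotation entry.\n",
"# `images` and `lbl_bbox` come from get_annotations(annFile) above.\n",
"print(images[0])\n",
"print(lbl_bbox[0])"
]
},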
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[[[249.08, 96.54, 445.64, 269.15000000000003], [226.35, 417.67, 328.07, 517.57], [265.36, 254.83, 337.95000000000005, 282.93]], ['horse', 'person', 'person']]\n",
"[None, None]\n"
]
}
],
"source": [
"get_y_func = lambda o:img2bbox[o.name] if o.name in img2bbox else [None,None]\n",
"print(get_y_func(Path(\"coco/val2017/000000140286.jpg\")))\n",
"print(get_y_func(Path(\"coco/val2017/000000200152.jpg\")))"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[[[226.35, 417.67, 328.07, 517.57], [265.36, 254.83, 337.95000000000005, 282.93]], ['person', 'person']]\n",
"[None, None]\n"
]
}
],
"source": [
"def get_y_func(o):\n",
" result = [[],[]]\n",
" if o.name in img2bbox:\n",
" lbl_bbox = img2bbox[o.name] \n",
" else:\n",
" return [None,None]\n",
" for i, obj in enumerate(lbl_bbox[1]):\n",
" if obj =='person':\n",
" result[1].append(obj)\n",
" result[0].append(lbl_bbox[0][i])\n",
" return result\n",
"\n",
"print(get_y_func(Path(\"coco/val2017/000000140286.jpg\")))\n",
"print(get_y_func(Path(\"coco/val2017/000000200152.jpg\")))"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"True\n",
"False\n",
"True\n"
]
}
],
"source": [
"def remove_non_human(f):\n",
" classes = get_y_func(f)[1]\n",
" if classes is None:\n",
" return False\n",
" return 'person' in classes\n",
"\n",
"print(remove_non_human(Path(\"coco/val2017/000000140286.jpg\")))\n",
"print(remove_non_human(Path(\"coco/val2017/000000200152.jpg\")))"
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"LabelLists;\n",
"\n",
"Train: LabelList\n",
"y: ObjectCategoryList (748 items)\n",
"[ImageBBox (521, 640), ImageBBox (393, 640), ImageBBox (424, 640), ImageBBox (640, 428), ImageBBox (426, 640)]...\n",
"Path: coco/val2017\n",
"x: ObjectItemList (748 items)\n",
"[Image (3, 521, 640), Image (3, 393, 640), Image (3, 424, 640), Image (3, 640, 428), Image (3, 426, 640)]...\n",
"Path: coco/val2017;\n",
"\n",
"Valid: LabelList\n",
"y: ObjectCategoryList (186 items)\n",
"[ImageBBox (512, 640), ImageBBox (480, 640), ImageBBox (427, 640), ImageBBox (428, 640), ImageBBox (640, 480)]...\n",
"Path: coco/val2017\n",
"x: ObjectItemList (186 items)\n",
"[Image (3, 512, 640), Image (3, 480, 640), Image (3, 427, 640), Image (3, 428, 640), Image (3, 640, 480)]...\n",
"Path: coco/val2017;\n",
"\n",
"Test: None"
]
},
"execution_count": 18,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"src = (ObjectItemList.from_folder('coco/val2017')\n",
" .filter_by_func(remove_non_human)\n",
" #Where are the images?\n",
" .random_split_by_pct() \n",
" #How to split in train/valid? -> randomly with the default 20% in valid\n",
" .label_from_func(get_y_func)\n",
" #How to find the labels? -> use get_y_func\n",
" )\n",
"src"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Note the size differences. In other areas this does not seem to be a problem. However here it seems it does."
]
},
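{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal sanity check of the raw sizes (illustrative, not from the original run); each fastai `Image` reports its `(channels, height, width)` shape:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Illustrative: print the shapes of a few training images to see the size spread.\n",
"for i in range(3):\n",
"    print(src.train.x[i].shape)"
]
},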
{
"cell_type": "code",
"execution_count": 19,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"LabelLists;\n",
"\n",
"Train: LabelList\n",
"y: ObjectCategoryList (748 items)\n",
"[ImageBBox (521, 640), ImageBBox (393, 640), ImageBBox (424, 640), ImageBBox (640, 428), ImageBBox (426, 640)]...\n",
"Path: coco/val2017\n",
"x: ObjectItemList (748 items)\n",
"[Image (3, 521, 640), Image (3, 393, 640), Image (3, 424, 640), Image (3, 640, 428), Image (3, 426, 640)]...\n",
"Path: coco/val2017;\n",
"\n",
"Valid: LabelList\n",
"y: ObjectCategoryList (186 items)\n",
"[ImageBBox (512, 640), ImageBBox (480, 640), ImageBBox (427, 640), ImageBBox (428, 640), ImageBBox (640, 480)]...\n",
"Path: coco/val2017\n",
"x: ObjectItemList (186 items)\n",
"[Image (3, 512, 640), Image (3, 480, 640), Image (3, 427, 640), Image (3, 428, 640), Image (3, 640, 480)]...\n",
"Path: coco/val2017;\n",
"\n",
"Test: None"
]
},
"execution_count": 19,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"tfms = get_transforms() # or tfms=None if none are needed\n",
"data = src.transform(tfms=tfms, tfm_y=True, size=128)\n",
"data"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"You can deactivate this warning by passing `no_check=True`.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/fastai/basic_data.py:236: UserWarning: It's not possible to collate samples of your dataset together in a batch.\n",
" warn(message)\n"
]
},
{
"ename": "RuntimeError",
"evalue": "Traceback (most recent call last):\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 138, in _worker_loop\n samples = collate_fn([dataset[i] for i in batch_indices])\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/fastai/torch_core.py\", line 110, in data_collate\n return torch.utils.data.dataloader.default_collate(to_data(batch))\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 232, in default_collate\n return [default_collate(samples) for samples in transposed]\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 232, in <listcomp>\n return [default_collate(samples) for samples in transposed]\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 232, in default_collate\n return [default_collate(samples) for samples in transposed]\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 232, in <listcomp>\n return [default_collate(samples) for samples in transposed]\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 209, in default_collate\n return torch.stack(batch, 0, out=out)\nRuntimeError: invalid argument 0: Sizes of tensors must match except in dimension 0. Got 4 and 1 in dimension 1 at /pytorch/aten/src/TH/generic/THTensorMoreMath.cpp:1333\n",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-20-8b6fa74e7604>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdata\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdatabunch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mbs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnum_workers\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m4\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnormalize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;32m~/devtools/find_human/env-find-human/lib/python3.7/site-packages/fastai/vision/data.py\u001b[0m in \u001b[0;36mnormalize\u001b[0;34m(self, stats, do_x, do_y)\u001b[0m\n\u001b[1;32m 177\u001b[0m \u001b[0;34m\"Add normalize transform using `stats` (defaults to `DataBunch.batch_stats`)\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 178\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mgetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'norm'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mException\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Can not call normalize twice'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 179\u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0mstats\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstats\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbatch_stats\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 180\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstats\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mstats\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 181\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnorm\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdenorm\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnormalize_funcs\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstats\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdo_x\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mdo_x\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdo_y\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mdo_y\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/devtools/find_human/env-find-human/lib/python3.7/site-packages/fastai/vision/data.py\u001b[0m in \u001b[0;36mbatch_stats\u001b[0;34m(self, funcs)\u001b[0m\n\u001b[1;32m 171\u001b[0m \u001b[0;34m\"Grab a batch of data and call reduction function `func` per channel\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 172\u001b[0m \u001b[0mfuncs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mifnone\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfuncs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmean\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstd\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 173\u001b[0;31m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mone_batch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mds_type\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mDatasetType\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mValid\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdenorm\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcpu\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 174\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mfunc\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mchannel_view\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mfunc\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mfuncs\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 175\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/devtools/find_human/env-find-human/lib/python3.7/site-packages/fastai/basic_data.py\u001b[0m in \u001b[0;36mone_batch\u001b[0;34m(self, ds_type, detach, denorm, cpu)\u001b[0m\n\u001b[1;32m 140\u001b[0m \u001b[0mw\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_workers\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 141\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_workers\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 142\u001b[0;31m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnext\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0miter\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdl\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 143\u001b[0m \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_workers\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mw\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 144\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mdetach\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mto_detach\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mcpu\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcpu\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mto_detach\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mcpu\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcpu\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/devtools/find_human/env-find-human/lib/python3.7/site-packages/fastai/basic_data.py\u001b[0m in \u001b[0;36m__iter__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 69\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__iter__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 70\u001b[0m \u001b[0;34m\"Process and returns items from `DataLoader`.\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 71\u001b[0;31m \u001b[0;32mfor\u001b[0m \u001b[0mb\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdl\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;32myield\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mproc_batch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mb\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 72\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 73\u001b[0m \u001b[0;34m@\u001b[0m\u001b[0mclassmethod\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\u001b[0m in \u001b[0;36m__next__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 635\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreorder_dict\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 636\u001b[0m \u001b[0;32mcontinue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 637\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_process_next_batch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 638\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 639\u001b[0m \u001b[0mnext\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m__next__\u001b[0m \u001b[0;31m# Python 2 compatibility\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\u001b[0m in \u001b[0;36m_process_next_batch\u001b[0;34m(self, batch)\u001b[0m\n\u001b[1;32m 656\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_put_indices\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 657\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mExceptionWrapper\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 658\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexc_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexc_msg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 659\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 660\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mRuntimeError\u001b[0m: Traceback (most recent call last):\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 138, in _worker_loop\n samples = collate_fn([dataset[i] for i in batch_indices])\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/fastai/torch_core.py\", line 110, in data_collate\n return torch.utils.data.dataloader.default_collate(to_data(batch))\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 232, in default_collate\n return [default_collate(samples) for samples in transposed]\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 232, in <listcomp>\n return [default_collate(samples) for samples in transposed]\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 232, in default_collate\n return [default_collate(samples) for samples in transposed]\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 232, in <listcomp>\n return [default_collate(samples) for samples in transposed]\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 209, in default_collate\n return torch.stack(batch, 0, out=out)\nRuntimeError: invalid argument 0: Sizes of tensors must match except in dimension 0. Got 4 and 1 in dimension 1 at /pytorch/aten/src/TH/generic/THTensorMoreMath.cpp:1333\n"
]
}
],
"source": [
"data = data.databunch(bs=bs, num_workers=4).normalize()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"As I understand this error it means that the image size do not match however I did specify the desired size."
]
},
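{
"cell_type": "markdown",
"metadata": {},
"source": [
"The mismatch reported in dimension 1 (\"Got 4 and 1\") is the number of bounding boxes per image, not the image size: `default_collate` cannot stack targets with different numbers of objects. A possible workaround, assuming fastai's `bb_pad_collate` from `fastai.vision.data`, is to pad the targets when building the `DataBunch`. The cell below is a sketch along those lines, not something verified in this run."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch of a possible fix (not verified here): bb_pad_collate pads the\n",
"# bbox/label targets so images with different object counts can be batched.\n",
"data = src.transform(tfms=get_transforms(), tfm_y=True, size=128)\n",
"data = data.databunch(bs=bs, num_workers=4, collate_fn=bb_pad_collate).normalize(imagenet_stats)"
]
},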
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"ename": "TypeError",
"evalue": "Traceback (most recent call last):\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 138, in _worker_loop\n samples = collate_fn([dataset[i] for i in batch_indices])\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 138, in <listcomp>\n samples = collate_fn([dataset[i] for i in batch_indices])\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/fastai/data_block.py\", line 567, in __getitem__\n x = x.apply_tfms(self.tfms, **self.tfmargs)\nTypeError: apply_tfms() got an unexpected keyword argument 'do_crop'\n",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-15-496036fe073d>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0msrc\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshow_batch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrows\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mds_type\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mDatasetType\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTrain\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfigsize\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m6\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m6\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;32m~/devtools/find_human/env-find-human/lib/python3.7/site-packages/fastai/basic_data.py\u001b[0m in \u001b[0;36mshow_batch\u001b[0;34m(self, rows, ds_type, **kwargs)\u001b[0m\n\u001b[1;32m 157\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mshow_batch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrows\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mint\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mds_type\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mDatasetType\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mDatasetType\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTrain\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m->\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 158\u001b[0m \u001b[0;34m\"Show a batch of data in `ds_type` on a few `rows`.\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 159\u001b[0;31m \u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mone_batch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mds_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 160\u001b[0m \u001b[0mn_items\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mrows\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0;36m2\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain_ds\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_square_show\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0mrows\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 161\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdl\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mds_type\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbatch_size\u001b[0m \u001b[0;34m<\u001b[0m \u001b[0mn_items\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mn_items\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdl\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mds_type\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbatch_size\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/devtools/find_human/env-find-human/lib/python3.7/site-packages/fastai/basic_data.py\u001b[0m in \u001b[0;36mone_batch\u001b[0;34m(self, ds_type, detach, denorm, cpu)\u001b[0m\n\u001b[1;32m 140\u001b[0m \u001b[0mw\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_workers\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 141\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_workers\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 142\u001b[0;31m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnext\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0miter\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdl\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 143\u001b[0m \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_workers\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mw\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 144\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mdetach\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mto_detach\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mcpu\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcpu\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mto_detach\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mcpu\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcpu\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/devtools/find_human/env-find-human/lib/python3.7/site-packages/fastai/basic_data.py\u001b[0m in \u001b[0;36m__iter__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 69\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__iter__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 70\u001b[0m \u001b[0;34m\"Process and returns items from `DataLoader`.\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 71\u001b[0;31m \u001b[0;32mfor\u001b[0m \u001b[0mb\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdl\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;32myield\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mproc_batch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mb\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 72\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 73\u001b[0m \u001b[0;34m@\u001b[0m\u001b[0mclassmethod\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\u001b[0m in \u001b[0;36m__next__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 635\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreorder_dict\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 636\u001b[0m \u001b[0;32mcontinue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 637\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_process_next_batch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 638\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 639\u001b[0m \u001b[0mnext\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m__next__\u001b[0m \u001b[0;31m# Python 2 compatibility\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\u001b[0m in \u001b[0;36m_process_next_batch\u001b[0;34m(self, batch)\u001b[0m\n\u001b[1;32m 656\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_put_indices\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 657\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mExceptionWrapper\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 658\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexc_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexc_msg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 659\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 660\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mTypeError\u001b[0m: Traceback (most recent call last):\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 138, in _worker_loop\n samples = collate_fn([dataset[i] for i in batch_indices])\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/torch/utils/data/dataloader.py\", line 138, in <listcomp>\n samples = collate_fn([dataset[i] for i in batch_indices])\n File \"/home/tako/devtools/find_human/env-find-human/lib/python3.7/site-packages/fastai/data_block.py\", line 567, in __getitem__\n x = x.apply_tfms(self.tfms, **self.tfmargs)\nTypeError: apply_tfms() got an unexpected keyword argument 'do_crop'\n"
]
}
],
"source": [
"src.show_batch(rows=2, ds_type=DatasetType.Train, figsize=(6,6))"
]
},
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAZQAAAEKCAYAAAA1qaOTAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAEbZJREFUeJzt3XuwXXV5xvHvI1GqRbkGxMQYBEYn1oqyC1priwIB/tCA0g60jkFpM1aptbRO03FGFG94pVov0xQv0VFRcRyDVmMEGVsLmhNAMQomgA6hKEgQpV4QfPvHXpHDYSdnJ+d3zs7hfD8ze7LWb71rrXcT2A/rstdOVSFJ0lQ9ZNQNSJIeHAwUSVITBookqQkDRZLUhIEiSWrCQJEkNWGgSJKaMFAkSU0YKJKkJuaNuoGZdMABB9TixYtH3YYkzSobNmz4SVXNn6xuTgXK4sWLGRsbG3UbkjSrJPnhMHWe8pIkNWGgSJKaMFAkSU0YKJKkJgwUSVITBookqQkDRZLUhIEiSWrCQJEkNWGgSJKaMFAkSU0YKJKkJgwUSVITBookqQkDRZLUhIEiSWrCQJEkNWGgSJKaMFAkSU0YKJKkJgwUSVITBookqQkDRZLUhIEiSWpipIGS5MQk1yXZnGTlgOV7Jvlkt/wbSRZPWL4oyV1J/mmmepYkDTayQEmyB/Be4CRgCXB6kiUTys4E7qiqw4DzgbdMWP5O4IvT3askaXKjPEI5CthcVTdU1d3AhcCyCTXLgNXd9EXAsUkCkORk4EZg4wz1K0nagVEGygLgpnHzW7qxgTVVdQ9wJ7B/kr2AfwZeNwN9SpKGMFsvyr8WOL+q7pqsMMmKJGNJxm677bbp70yS5qh5I9z3zcBjx80v7MYG1WxJMg/YG7gdOBo4NclbgX2A3yb5VVW9Z+JOqmoVsAqg1+tV83chSQJGGyjrgcOTHEI/OE4D/nJCzRpgOXA5cCpwaVUV8KxtBUleC9w1KEwkSTNnZIFSVfckOQtYC+wBfLCqNiY5FxirqjXAB4CPJtkMbKUfOpKk3VD6/8M/N/R6vRobGxt1G5I0qyTZUFW9yepm60V5SdJuxkCRJDVhoEiSmjBQJElNGCiSpCYMFElSEwaKJKkJA0WS1ISBIklqwkCRJDVhoEiSmjBQJElNGCiSpCYMFElSEwaKJKkJA0WS1ISBIklqwkCRJDVhoEiSmjBQJElNGCiSpCYMFElSEwaKJKkJA0WS1ISBIklqwkCRJDVhoEiSmjBQJElNGCiSpCYMFElSEwaKJKmJkQZKkhOTXJdkc5KVA5bvmeST3fJvJFncjR+fZEOSa7o/nzPTvUuS7m9kgZJkD+C9wEnAEuD0JEsmlJ0J3FFVhwHnA2/pxn8CPLeqngwsBz46M11LkrZnlEcoRwGbq+qGqrobuBBYNqFmGbC6m74IODZJquqqqvrfbnwj8PAke85I15KkgUYZKAuAm8bNb+nGBtZU1T3AncD+E2peAFxZVb+epj4lSUOYN+oGpiLJk+ifBlu6g5oVwAqARYsWzVBnkjT3jPII5WbgsePmF3ZjA2uSzAP2Bm7v5hcCnwVeVFXXb28nVbWqqnpV1Zs/f37D9iVJ440yUNYDhyc5JMnDgNOANRNq1tC/6A5wKnBpVVWSfYAvACur6usz1rEkabtGFijdNZGzgLXA94BPVdXGJOcmeV5X9gFg/ySbgbOBbbcWnwUcBrwmydXd68AZfguSpHFSVaPuYcb0er0aGxsbdRuSNKsk2VBVvcnq/Ka8JKkJA0WS1ISBIklqwkCRJDVhoEiSmjBQJElNGCiSpCYMFElSEwaKJKkJA0WS1ISBIklqwkCRJDVhoEiSmjBQJElNGCiSpCYMFElSEwaKJKkJA0WS1ISBIklqwkCRJDVhoEiSmjBQJElNGCiSpCYMFElSEwaKJKmJoQIlyaFJ9uymj0nyiiT7TG9rkqTZZNgjlM8A9yY5DFgFPBb4+LR1JUmadYYNlN9W1T3AKcC/VdWrgIOnry1J0mwzbKD8JsnpwHLg893YQ6enJUnSbDRsoLwYeAbwxqq6MckhwEenry1J0mwzVKBU1Xer6hVV9Ykk+wKPrKq3THXnSU5Mcl2SzUlWDli+Z5JPdsu/kWTxuGX/0o1fl+SEqfYiSZqaYe/yuizJo5LsB1wJ/EeSd05lx0n2AN4LnAQsAU5PsmRC2ZnAHVV1GHA+8JZu3SXAacCTgBOB93XbkySNyLCnvPauqp8Bzwc+UlVHA8dNcd9HAZur6oaquhu4EFg2oWYZsLqbvgg4Nkm68Qur6tdVdSOwudueJGlEhg2UeUkOBv6C+y7KT9UC4KZx81u6sYE13V1mdwL7D7muJGkGDRso5wJrgeuran2SxwObpq+tdpKsSDKWZOy2224bdTuS9KA17EX5T1fVH1bV33bzN1TVC6a475vpf0Fym4Xd2MCaJPOAvYHbh1x3W++rqqpXVb358+dPsWVJ0vYMe1F+YZLPJrm1e30mycIp7ns9cHiSQ5I8jP5F9jUTatbQ/+4LwKnApVVV3fhp3V1ghwCHA9+cYj+SpCkY9pTXh+h/iD+me13cje2y7prIWfRPpX0P+FRVbUxybpLndWUfAPZPshk4G1jZrbsR+BTwXeBLwMur6t6p9CNJmpr0/4d/kqLk6qo6YrKx3V2v16uxsbFRtyFJs0qSDVXVm6xu2COU25O8MMke3euF9K9lSJIEDB8oL6F/y/CPgFvoX884Y5p6kiTNQsPe5fXDqnpeVc2vqgOr6mRgqnd5SZIeRKbyi41nN+tCkjTrTSVQ0qwLSdKsN5VAmfz2MEnSnDFvRwuT/JzBwRHg4dPSkSRpVtphoFTVI2eqEUnS7DaVU16SJP2OgSJJasJAkSQ1YaBIkpowUCRJTRgokqQmDBRJUhMGiiSpCQNFktSEgSJJasJAkSQ1YaBIkpowUCRJTRgokqQmDBRJUhMGiiSpCQNFktSEgSJJasJAkSQ1YaBIkpowUCRJTRgokqQmDBRJUhMjCZQk+yVZl2RT9+e+26lb3tVsSrK8G3tEki8kuTbJxiTnzWz3kqRBRnWEshK4pKoOBy7p5u8nyX7AOcDRwFHAOeOC5+1V9UTgqcAzk5w0M21LkrZnVIGyDFjdTa8GTh5QcwKwrqq2VtUdwDrgxKr6RVV9FaCq7gauBBbOQM+SpB0YVaAcVFW3dNM/Ag4aULMAuGnc/JZu7HeS7AM8l/5RjiRphOZN14aTfAV49IBFrx4/U1WVpHZh+/OATwDvrqobdlC3AlgBsGjRop3djSRpSNMWKFV13PaWJflxkoOr6pYkBwO3Dii7GThm3PxC4LJx86uATVX1r5P0saqrpdfr7XRwSZKGM6pTXmuA5d30cuBzA2rWAkuT7NtdjF/ajZHkDcDewCtnoFdJ0hBGFSjnAccn2QQc182TpJfkAoCq2gq8Hljfvc6tqq1JFtI/bbYEuDLJ1Un+ehRvQpJ0n1TNnbNAvV6vxsbGRt2GJM0qSTZUVW+yOr8pL0lqw
kCRJDVhoEiSmjBQJElNGCiSpCYMFElSEwaKJKkJA0WS1ISBIklqwkCRJDVhoEiSmjBQJElNGCiSpCYMFElSEwaKJKkJA0WS1ISBIklqwkCRJDVhoEiSmjBQJElNGCiSpCYMFElSEwaKJKkJA0WS1ISBIklqwkCRJDVhoEiSmjBQJElNGCiSpCYMFElSEyMJlCT7JVmXZFP3577bqVve1WxKsnzA8jVJvjP9HUuSJjOqI5SVwCVVdThwSTd/P0n2A84BjgaOAs4ZHzxJng/cNTPtSpImM6pAWQas7qZXAycPqDkBWFdVW6vqDmAdcCJAkr2As4E3zECvkqQhjCpQDqqqW7rpHwEHDahZANw0bn5LNwbweuAdwC+mrUNJ0k6ZN10bTvIV4NEDFr16/ExVVZLaie0eARxaVf+QZPEQ9SuAFQCLFi0adjeSpJ00bYFSVcdtb1mSHyc5uKpuSXIwcOuAspuBY8bNLwQuA54B9JL8gH7/Bya5rKqOYYCqWgWsAuj1ekMHlyRp54zqlNcaYNtdW8uBzw2oWQssTbJvdzF+KbC2qt5fVY+pqsXAnwDf316YSJJmzqgC5Tzg+CSbgOO6eZL0klwAUFVb6V8rWd+9zu3GJEm7oVTNnbNAvV6vxsbGRt2GJM0qSTZUVW+yOr8pL0lqwkCRJDVhoEiSmjBQJElNGCiSpCYMFElSEwaKJKkJA0WS1ISBIklqwkCRJDVhoEiSmjBQJElNGCiSpCYMFElSEwaKJKkJA0WS1ISBIklqwkCRJDVhoEiSmjBQJElNGCiSpCYMFElSEwaKJKkJA0WS1ISBIklqIlU16h5mTJLbgJ8Cd+7C6gcAP2nbkXZgb3bt72l3tru+p1H1Nd37bb39VtubynZ2dd2pfn49rqrmT1Y0pwIFIMmqqlqxC+uNVVVvOnrSA+3q39PubHd9T6Pqa7r323r7rbY3le3s7p9fc/GU18WjbkBDeTD+Pe2u72lUfU33fltvv9X2prKd3fXfIWAOHqHsKo9QJM1WHqHsflaNugFJ2kUz8vnlEYokqQmPUCRJTcyZQEnywSS3JvlOo+0tT7Kpey0fN35kkmuSbE7y7iRpsT9Jc9sMfoa9MclNSe7a2W3OmUABPgycuLMrJbksyeIJY/sB5wBHA0cB5yTZt1v8fuBvgMO7107vU5IG+DAz8xl2cTe20+ZMoFTV14Ct48eSHJrkS0k2JPmvJE8ccnMnAOuqamtV3QGsA05McjDwqKq6ovoXpz4CnNzyfUiam2biM6zbzxVVdcuu9DhvV1Z6EFkFvLSqNiU5Gngf8Jwh1lsA3DRufks3tqCbnjguSdOh9WfYlMzZQEmyF/DHwKfHXebYs1v2YuDvu7HDgP9McjdwY1WdMtO9StJEu+Nn2JwNFPqn+35aVUdMXFBVHwI+BP3zj8AZVfWDcSU3A8eMm18IXNaNL5wwfnPDniVpm+n4DJtyQ3NSVf0MuDHJnwOk7ylDrr4WWJpk3+5C1lJgbXfe8WdJnt7d3fUi4HPT0b+kuW06PsOm2tOcCZQknwAuB56QZEuSM4G/As5M8i1gI7BsmG1V1Vbg9cD67nVuNwbwMuACYDNwPfDFpm9E0pw0U59hSd6aZAvwiG4/rx26R78pL0lqYc4coUiSppeBIklqwkCRJDVhoEiSmjBQJElNGCia03bliapT3N8FSZY02ta9Sa5O8p0kFyfZZ5L6fZK8rMW+pUG8bVhzWpK7qmqvhtubV1X3tNreJPv6Xe9JVgPfr6o37qB+MfD5qvqDmehPc49HKNIESeYn+UyS9d3rmd34UUkuT3JVkv9J8oRu/Iwka5JcClyS5JjukeEXJbk2yce2/S5ON97rpu/qfnviW0muSHJQN35oN39NkjcMeRR1Od3D/ZLsleSSJFd229j2ZbfzgEO7o5q3dbWv6t7jt5O8ruE/Rs1BBor0QO8Czq+qPwJeQP/JBwDXAs+qqqcCrwHeNG6dpwGnVtWfdfNPBV4JLAEeDzxzwH5+H7iiqp4CfI3+7+hs2/+7qurJ3P/p1QMl2QM4FljTDf0KOKWqngY8G3hHF2grgeur6oiqelWSpfR/s+co4AjgyCR/Otn+pO2Zyw+HlLbnOGDJuCe4Pqp7suvewOokhwMFPHTcOuvGPX4H4JtVtQUgydXAYuC/J+znbuDz3fQG4Phu+hnc9zs6Hwfevp0+H95tewHwPfq/aQEQ4E1dOPy2W37QgPWXdq+ruvm96AfM17azP2mHDBTpgR4CPL2qfjV+MMl7gK9W1Snd9YjLxi3+vwnb+PW46XsZ/N/ab+q+i5jbq9mRX1bVEUkeQf/Bfi8H3k3/+U7zgSOr6jdJfgD83oD1A7y5qv59J/crDeQpL+mBvgz83baZJNseD7439/0cwRnTuP8r6J9qAzhtsuKq+gXwCuAfk8yj3+etXZg8G3hcV/pz4JHjVl0LvKQ7+iLJgiQHNnoPmoMMFM11256ouu11Nv0P5153ofq7wEu72rcCb05yFdN7dP9K4Owk36b/40h3TrZCVV0FfBs4HfgY/f6vof8TCtd2NbcDX+9uM35bVX2Z/im1y7vai7h/4Eg7xduGpd1Mdwrrl1VVSU4DTq+qoR5LLo2S11Ck3c+RwHu6O7N+CrxkxP1IQ/EIRZLUhNdQJElNGCiSpCYMFElSEwaKJKkJA0WS1ISBIklq4v8ByxvC7M87tbUAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "env-37-find-human",
"language": "python",
"name": "env-37-find-human"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.2"
}
},
"nbformat": 4,
"nbformat_minor": 2
}