Skip to content

Instantly share code, notes, and snippets.

@oraoto
Last active April 16, 2018 04:14
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save oraoto/01354f1f234c4a02dd13ae7f9bd81656 to your computer and use it in GitHub Desktop.
Save oraoto/01354f1f234c4a02dd13ae7f9bd81656 to your computer and use it in GitHub Desktop.
NNabla to NCS, maxpooling + tanh example
Display the source blob
Display the rendered blob
Raw
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Display the source blob
Display the rendered blob
Raw
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"ExecuteTime": {
"end_time": "2018-04-16T04:08:19.335479Z",
"start_time": "2018-04-16T04:08:19.238985Z"
}
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"2018-04-16 12:08:19,252 [nnabla][INFO]: Initializing CPU extension...\n"
]
}
],
"source": [
"import nnabla as nn\n",
"import nnabla.functions as F\n",
"import nnabla.parametric_functions as PF\n",
"import nnabla.solvers as S\n",
"import numpy as np\n",
"from nnabla.parameter import get_parameter\n",
"import sys\n",
"\n",
"sys.path.append(\"/usr/local/bin/ncsdk/\")"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"ExecuteTime": {
"end_time": "2018-04-16T04:08:19.339126Z",
"start_time": "2018-04-16T04:08:19.337022Z"
}
},
"outputs": [],
"source": [
"nn.clear_parameters()"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"ExecuteTime": {
"end_time": "2018-04-16T04:08:19.409092Z",
"start_time": "2018-04-16T04:08:19.340584Z"
}
},
"outputs": [],
"source": [
"dim = 5\n",
"depth = 3\n",
"\n",
"def network(x):\n",
" y = F.max_pooling(x, (2,2), (2,2))\n",
" y = F.tanh(y)\n",
" return y\n",
"\n",
"test_x = nn.Variable([1, depth, dim, dim ])\n",
"test_y = network(test_x)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"ExecuteTime": {
"end_time": "2018-04-16T04:08:19.489746Z",
"start_time": "2018-04-16T04:08:19.410781Z"
}
},
"outputs": [],
"source": [
"# Init input\n",
"test_x.d = (np.linspace(1, np.prod(test_x.d.shape), np.prod(test_x.d.shape)).reshape(test_x.d.shape).astype(np.float16) - 10.0) / 10.0\n",
"test_y.forward()"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"ExecuteTime": {
"end_time": "2018-04-16T04:08:19.614419Z",
"start_time": "2018-04-16T04:08:19.494560Z"
}
},
"outputs": [],
"source": [
"from Models.Network import *\n",
"from Models.NetworkStage import *\n",
"from Models.Blob import *\n",
"from Models.EnumDeclarations import *\n",
"from Models.MyriadParam import *\n",
"from Controllers.DataTransforms import *"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"ExecuteTime": {
"end_time": "2018-04-16T04:08:19.881737Z",
"start_time": "2018-04-16T04:08:19.615890Z"
}
},
"outputs": [
{
"data": {
"text/plain": [
"([], <Models.Network.Network at 0x7f842a6a7c50>)"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"class StorageArgument:\n",
" def __init__(self, network):\n",
" self.network = network\n",
" \n",
"def to_ncs(input_var, output_var, nshaves=1):\n",
" \n",
" myriad_config = MyriadParam(0, nshaves - 1)\n",
" \n",
" input_data = np.ones_like(input_var.d)\n",
" net = Network(\"NNabla Network\", input_data)\n",
" net.outputTensor = output_var.shape\n",
" storage_arg = StorageArgument(net)\n",
" \n",
" layers = []\n",
" layer_count = {}\n",
" variables = {\"Input\": input_var}\n",
" params = nn.get_parameters(grad_only=False)\n",
" node = None\n",
" node_name = None\n",
" \n",
" def unique_layer_name(f):\n",
" if f.name not in layer_count:\n",
" layer_count[f.name] = 1\n",
" else:\n",
" layer_count[f.name] += 1\n",
" return f.name + '_' + str(layer_count[f.name])\n",
" \n",
" def unique_variable_name(n, layer_prefix):\n",
" if n in params.values():\n",
" return False\n",
" if n not in variables.values():\n",
" n_name = layer_prefix + str(id(n))\n",
" variables[n_name] = n\n",
" else:\n",
" n_name = list(variables.keys())[list(variables.values()).index(n)]\n",
" return n_name\n",
" \n",
" def layer_of_vars(variables, curr=None):\n",
" l = [v.split(':')[0] for v in variables if v != \"Input\"]\n",
" if curr:\n",
" l = [v for v in l if v != curr]\n",
" if len(l) == 0:\n",
" return None\n",
" else:\n",
" return l\n",
" \n",
" def visit(f):\n",
" nonlocal node\n",
" layer_name = unique_layer_name(f)\n",
" \n",
" if f.info.type_name == 'Sink':\n",
" return\n",
"\n",
" inputs = []\n",
" outputs = []\n",
"\n",
" for inp in f.inputs:\n",
" n_id = unique_variable_name(inp, layer_name + ':Input')\n",
" if n_id:\n",
" inputs.append(n_id)\n",
"\n",
" for oup in f.outputs:\n",
" unique_variable_name(oup, layer_name + ':Output')\n",
" \n",
" if f.info.type_name == 'AveragePooling' or f.info.type_name == 'MaxPooling':\n",
" inp = f.inputs[0].d\n",
" out = f.outputs[0].d\n",
"\n",
" kernel = f.info.args['kernel']\n",
" stride = f.info.args['stride']\n",
" ignore_border = f.info.args['ignore_border']\n",
" pad = f.info.args['pad']\n",
"\n",
" stagetype = StageType.average_pooling if f.info.type_name == 'AveragePooling' else StageType.max_pooling\n",
" \n",
" node = NetworkStage(\n",
" layer_name, layer_of_vars(inputs), StorageOrder.orderYXZ,\n",
" pad[0], pad[1], PadStyle.tfvalid if ignore_border else PadStyle.caffe,\n",
" DataType.fp16, DataType.fp16,\n",
" stagetype,\n",
" kernel[0], kernel[1],\n",
" stride[0], stride[1],\n",
" inp.shape[2], inp.shape[3], inp.shape[1],\n",
" kernel[0], kernel[1], out.shape[1], # taps\n",
" None,\n",
" TapsOrder.orderKCHW,\n",
" None, \n",
" None,\n",
" None,\n",
" None,\n",
" 0, 0,\n",
" myriad_config=myriad_config, \n",
" args=storage_arg)\n",
" net.attach(node)\n",
" \n",
" elif f.info.type_name == 'Sigmoid' or f.info.type_name == 'Tanh':\n",
" inp = f.inputs[0].d\n",
" out = f.outputs[0].d\n",
" opParams = np.array([1], dtype=np.dtype(\"<i4\")) \n",
" \n",
" stagetype = StageType.tanh if f.info.type_name == 'Tanh' else StageType.sigmoid\n",
"\n",
" node = NetworkStage(\n",
" layer_name, layer_of_vars(inputs), StorageOrder.orderYXZ,\n",
" 0, 0, PadStyle.none,\n",
" DataType.fp16, DataType.fp16,\n",
" stagetype,\n",
" 1, 1,\n",
" 1, 1,\n",
" inp.shape[2], inp.shape[3], inp.shape[1],\n",
" 1, 1, inp.shape[1],\n",
" None,\n",
" None,\n",
" None,\n",
" None,\n",
" None,\n",
" None,\n",
" 0, 0,\n",
" myriad_config=myriad_config,\n",
" args=storage_arg)\n",
" net.attach(node)\n",
" output_var.visit(visit)\n",
" return layers, net\n",
"g, net = to_ncs(test_x, test_y)\n",
"g, net"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"ExecuteTime": {
"end_time": "2018-04-16T04:08:21.052129Z",
"start_time": "2018-04-16T04:08:19.883205Z"
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"USB: Transferring Data...\n",
"\u001b[94mTime to Execute : 3.28 ms\u001b[39m\n",
"USB: Myriad Execution Finished\n",
"\u001b[94mTime to Execute : 2.93 ms\u001b[39m\n",
"USB: Myriad Execution Finished\n",
"USB: Myriad Connection Closing.\n",
"USB: Myriad Connection Closed.\n",
"Network Summary\n",
"\n",
"Detailed Per Layer Profile\n",
" Bandwidth time\n",
"# Name MFLOPs (MB/s) (ms)\n",
"===============================================================================\n",
"0 MaxPooling_1 0.0 5.5 0.027\n",
"1 Tanh_1 0.0 1.2 0.023\n",
"-------------------------------------------------------------------------------\n",
" Total inference time 0.05\n",
"-------------------------------------------------------------------------------\n",
"Generating Profile Report 'output_report.html'...\n"
]
}
],
"source": [
"from Views.Graphs import generate_graphviz\n",
"from Views.Summary import print_summary_of_network\n",
"from Controllers.MiscIO import run_emulation, run_myriad\n",
"\n",
"class Arguments:\n",
" def __init__(self, network):\n",
" self.network = network\n",
" self.device_no = None\n",
" self.save_input = open(\"input\", \"wb\")\n",
" self.save_output = open(\"output\", \"wb\")\n",
" self.parser = Parser.TensorFlow\n",
" \n",
" self.input_node_name = \"Input\"\n",
" self.output_node_name = \"Convolution_1\"\n",
" self.image = 'Debug'\n",
" self.raw_scale = 1\n",
" self.mean = None\n",
" self.channel_swap = None\n",
" self.explicit_concat = False\n",
" self.acm = 0\n",
" self.timer = True\n",
" self.number_of_iterations = 1\n",
" self.upper_temperature_limit = -1\n",
" self.lower_temperature_limit = -1\n",
" self.backoff_time_normal = -1\n",
" self.backoff_time_high = -1\n",
" self.backoff_time_critical = -1\n",
" self.temperature_mode = 'Simple'\n",
" self.network_level_throttling = 1\n",
" self.stress_full_run = 2\n",
" self.stress_usblink_write = 1\n",
" self.stress_usblink_read = 1\n",
" self.debug_readX = 100\n",
" self.mode = 'profile'\n",
" self.outputs_name = 'output'\n",
" \n",
"args = Arguments(net)\n",
"myriad_config = MyriadParam(0, 0)\n",
"outfile = \"graph\"\n",
"major_version = np.uint32(2)\n",
"graph_file = Blob(major_version, net.name, '', myriad_config, net, outfile)\n",
"net.finalize()\n",
"net.optimize()\n",
"graph_file.generate()\n",
"timings, myriad_output = run_myriad(graph_file, args, file_gen=False)\n",
"net.gather_metrics(timings)\n",
"print_summary_of_network(graph_file)\n",
"generate_graphviz(net, graph_file, filename=\"output\")"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {
"ExecuteTime": {
"end_time": "2018-04-16T04:08:22.088187Z",
"start_time": "2018-04-16T04:08:21.053876Z"
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
">>>> xyz_to_zyx (3, 5, 5) 0 0 0\n"
]
}
],
"source": [
"from mvnc import mvncapi as mvnc\n",
"\n",
"devices = mvnc.EnumerateDevices()\n",
"\n",
"device = mvnc.Device(devices[0])\n",
"device.OpenDevice()\n",
"\n",
"with open(\"graph\", mode='rb') as f:\n",
" graphfile = f.read()\n",
"\n",
"graph = device.AllocateGraph(graphfile)\n",
"\n",
"img = test_x.d[0]\n",
"\n",
"\n",
"img_zyx = xyz_to_zyx(img)\n",
"graph.LoadTensor(img.astype(np.float16), 'user object')\n",
"output, userobj = graph.GetResult()\n",
"\n",
"graph.LoadTensor(img_zyx.astype(np.float16), 'user object')\n",
"output_zyx, userobj = graph.GetResult()\n",
"\n",
"graph.DeallocateGraph()\n",
"device.CloseDevice()"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"ExecuteTime": {
"end_time": "2018-04-16T04:08:22.099483Z",
"start_time": "2018-04-16T04:08:22.093223Z"
}
},
"outputs": [],
"source": [
"## Reshape output_zyx\n",
"if len(test_y.shape) == 4:\n",
" output_zyx = output_zyx.reshape(*list(reversed(test_y.shape[1:]))).transpose(2, 1, 0)\n",
"else:\n",
" output_zyx = output_zyx.reshape(test_y.shape)"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {
"ExecuteTime": {
"end_time": "2018-04-16T04:08:22.202084Z",
"start_time": "2018-04-16T04:08:22.101047Z"
},
"scrolled": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Reference output:\n",
"[-0.29135731 -0.09964382 0.60449171 0.71625036 0.97570568 0.98368752\n",
" 0.99667722 0.99777669 0.9998343 0.99988878 0.99997759 0.99998492]\n",
"Wrong output:\n",
"[ 0.71630859 0.76123047 0.80029297 0.90527344 0.92138672 0.93554688\n",
" 1. 1. 1. 1. 1. 1. ]\n",
"Correct output:\n",
"[-0.29125977 -0.09960938 0.60449219 0.71630859 0.97558594 0.98388672\n",
" 0.99658203 0.99755859 1. 1. 1. 1. ]\n"
]
}
],
"source": [
"print(\"Reference output:\")\n",
"print(test_y.d.reshape(np.prod(test_y.shape)))\n",
"print(\"Wrong output:\")\n",
"print(output)\n",
"print(\"Correct output:\")\n",
"print(output_zyx.reshape(np.prod(test_y.shape)))"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment