Skip to content

Instantly share code, notes, and snippets.

@oiacrasec
Created May 5, 2019 20:19
Show Gist options
  • Save oiacrasec/9c62b8fc01b06973a00b5f19ccec7e1f to your computer and use it in GitHub Desktop.
Keras-MLP.ipynb
Display the source blob
Display the rendered blob
Raw
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "Keras-MLP.ipynb",
"version": "0.3.2",
"provenance": [],
"include_colab_link": true
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.13"
},
"kernelspec": {
"name": "python2",
"display_name": "Python 2"
},
"accelerator": "GPU"
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github",
"colab_type": "text"
},
"source": [
"<a href=\"https://colab.research.google.com/gist/oiacrasec/9c62b8fc01b06973a00b5f19ccec7e1f/keras-mlp.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "code",
"metadata": {
"id": "Bep1SgHLK-Gl",
"colab_type": "code",
"colab": {}
},
"source": [
"# Render matplotlib figures inline below each cell\n",
"%matplotlib inline"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "vRwuD0XyK-Go",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 204
},
"outputId": "91cbc722-aa74-4173-a54f-fe5dd1e765f7"
},
"source": [
"import numpy as np\n",
"import pandas as pd\n",
"import seaborn as sns\n",
"\n",
"# The current seaborn release emits many warnings; silence them for a cleaner notebook\n",
"import warnings\n",
"warnings.filterwarnings(\"ignore\")\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import keras\n",
"from keras.wrappers.scikit_learn import KerasClassifier\n",
"\n",
"sns.set(style=\"white\", color_codes=True)\n",
"\n",
"# Load the Iris dataset and preview the first rows\n",
"data = pd.read_csv(\"IRIS.csv\")\n",
"data.head()"
],
"execution_count": 219,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>sepal_length</th>\n",
" <th>sepal_width</th>\n",
" <th>petal_length</th>\n",
" <th>petal_width</th>\n",
" <th>species</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>5.1</td>\n",
" <td>3.5</td>\n",
" <td>1.4</td>\n",
" <td>0.2</td>\n",
" <td>Iris-setosa</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>4.9</td>\n",
" <td>3.0</td>\n",
" <td>1.4</td>\n",
" <td>0.2</td>\n",
" <td>Iris-setosa</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>4.7</td>\n",
" <td>3.2</td>\n",
" <td>1.3</td>\n",
" <td>0.2</td>\n",
" <td>Iris-setosa</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>4.6</td>\n",
" <td>3.1</td>\n",
" <td>1.5</td>\n",
" <td>0.2</td>\n",
" <td>Iris-setosa</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>5.0</td>\n",
" <td>3.6</td>\n",
" <td>1.4</td>\n",
" <td>0.2</td>\n",
" <td>Iris-setosa</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" sepal_length sepal_width petal_length petal_width species\n",
"0 5.1 3.5 1.4 0.2 Iris-setosa\n",
"1 4.9 3.0 1.4 0.2 Iris-setosa\n",
"2 4.7 3.2 1.3 0.2 Iris-setosa\n",
"3 4.6 3.1 1.5 0.2 Iris-setosa\n",
"4 5.0 3.6 1.4 0.2 Iris-setosa"
]
},
"metadata": {
"tags": []
},
"execution_count": 219
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "eiUtA4GXK-Gs",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 204
},
"outputId": "f7863550-ccec-4d94-9435-880d4d2b7e02"
},
"source": [
"# Map the species string labels to integer class codes (0, 1, 2).\n",
"# DataFrame.set_value was deprecated in pandas 0.21 and removed in 1.0,\n",
"# and it expects (index, col, value) rather than a boolean mask;\n",
"# boolean-mask assignment via .loc is the supported equivalent.\n",
"data.loc[data['species'] == 'Iris-setosa', 'species'] = 0\n",
"data.loc[data['species'] == 'Iris-versicolor', 'species'] = 1\n",
"data.loc[data['species'] == 'Iris-virginica', 'species'] = 2\n",
"\n",
"# Convert every column (including the now-numeric species) to numeric dtype\n",
"data = data.apply(pd.to_numeric)\n",
"data.head()"
],
"execution_count": 220,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>sepal_length</th>\n",
" <th>sepal_width</th>\n",
" <th>petal_length</th>\n",
" <th>petal_width</th>\n",
" <th>species</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>5.1</td>\n",
" <td>3.5</td>\n",
" <td>1.4</td>\n",
" <td>0.2</td>\n",
" <td>0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>4.9</td>\n",
" <td>3.0</td>\n",
" <td>1.4</td>\n",
" <td>0.2</td>\n",
" <td>0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>4.7</td>\n",
" <td>3.2</td>\n",
" <td>1.3</td>\n",
" <td>0.2</td>\n",
" <td>0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>4.6</td>\n",
" <td>3.1</td>\n",
" <td>1.5</td>\n",
" <td>0.2</td>\n",
" <td>0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>5.0</td>\n",
" <td>3.6</td>\n",
" <td>1.4</td>\n",
" <td>0.2</td>\n",
" <td>0</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" sepal_length sepal_width petal_length petal_width species\n",
"0 5.1 3.5 1.4 0.2 0\n",
"1 4.9 3.0 1.4 0.2 0\n",
"2 4.7 3.2 1.3 0.2 0\n",
"3 4.6 3.1 1.5 0.2 0\n",
"4 5.0 3.6 1.4 0.2 0"
]
},
"metadata": {
"tags": []
},
"execution_count": 220
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "lXtTPia9K-Gv",
"colab_type": "code",
"colab": {}
},
"source": [
"# Split the dataset into train and test sets (80/20, fixed seed for reproducibility)\n",
"\n",
"train = data.sample(frac=0.8, random_state=200)\n",
"test = data.drop(train.index)\n",
"\n",
"feature_cols = ['sepal_length', 'sepal_width', 'petal_length', 'petal_width']\n",
"X_train = train[feature_cols]\n",
"X_test = test[feature_cols]\n",
"y_train = train['species']\n",
"y_test = test['species']\n",
"\n",
"# DataFrame.as_matrix() was deprecated in pandas 0.23 and removed in 1.0;\n",
"# .values yields the same numpy array on both old and new pandas.\n",
"X_train = X_train.values\n",
"\n",
"#print(X_train)\n",
"#print(y_train)"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "WoEBcciHK-Gy",
"colab_type": "code",
"colab": {}
},
"source": [
"# One-hot encode the integer class labels for the softmax output layer\n",
"\n",
"from keras.utils import np_utils\n",
"y_train = np_utils.to_categorical(y_train)\n",
"#print(y_train)"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "cNY32uLlK-G1",
"colab_type": "code",
"colab": {}
},
"source": [
"# Build the MLP in Keras: 4 inputs -> 10 relu -> 10 relu -> 3 softmax\n",
"\n",
"from keras.models import Sequential\n",
"from keras.layers import Dense, Activation\n",
"\n",
"# Initialize the network as a linear stack of layers\n",
"model = Sequential()\n",
"# Input layer + first hidden layer.\n",
"# The Keras 1 keyword `output_dim` was renamed `units` in Keras 2;\n",
"# using `units` avoids deprecation warnings / errors on current Keras.\n",
"model.add(Dense(units=10, input_dim=4))\n",
"model.add(Activation(\"relu\"))\n",
"# Second hidden layer\n",
"model.add(Dense(units=10))\n",
"model.add(Activation(\"relu\"))\n",
"# Output layer: one unit per class, softmax for class probabilities\n",
"model.add(Dense(units=3))\n",
"model.add(Activation(\"softmax\"))"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "RaSEOeS9K-G4",
"colab_type": "code",
"colab": {}
},
"source": [
"# Configure the learning process: the optimization algorithm (to search for\n",
"# the best weights), the loss function, and accuracy as the reported metric\n",
"model.compile(optimizer='adam',\n",
"              loss='categorical_crossentropy',\n",
"              metrics=['accuracy'])"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "WPEZCR9FK-G8",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 17034
},
"outputId": "f22d5ad7-a925-40ca-906b-f7db9d406d8a"
},
"source": [
"# Train the model.\n",
"# The Keras 1 keyword `nb_epoch` was renamed `epochs` in Keras 2.\n",
"# NOTE(review): batch_size=120 exceeds the 80 training rows left after the\n",
"# 0.33 validation split, so each epoch is effectively full-batch training.\n",
"epochs = 500\n",
"history = model.fit(\n",
"    X_train, y_train, epochs=epochs, batch_size=120, validation_split=0.33\n",
")"
],
"execution_count": 225,
"outputs": [
{
"output_type": "stream",
"text": [
"Train on 80 samples, validate on 40 samples\n",
"Epoch 1/500\n",
"80/80 [==============================] - 1s 9ms/step - loss: 1.4571 - acc: 0.3750 - val_loss: 1.5552 - val_acc: 0.3000\n",
"Epoch 2/500\n",
"80/80 [==============================] - 0s 101us/step - loss: 1.4416 - acc: 0.3750 - val_loss: 1.5363 - val_acc: 0.3000\n",
"Epoch 3/500\n",
"80/80 [==============================] - 0s 92us/step - loss: 1.4265 - acc: 0.3750 - val_loss: 1.5180 - val_acc: 0.3000\n",
"Epoch 4/500\n",
"80/80 [==============================] - 0s 77us/step - loss: 1.4119 - acc: 0.3750 - val_loss: 1.5001 - val_acc: 0.3000\n",
"Epoch 5/500\n",
"80/80 [==============================] - 0s 64us/step - loss: 1.3977 - acc: 0.3750 - val_loss: 1.4827 - val_acc: 0.3000\n",
"Epoch 6/500\n",
"80/80 [==============================] - 0s 62us/step - loss: 1.3839 - acc: 0.3750 - val_loss: 1.4659 - val_acc: 0.3000\n",
"Epoch 7/500\n",
"80/80 [==============================] - 0s 69us/step - loss: 1.3706 - acc: 0.3750 - val_loss: 1.4495 - val_acc: 0.3000\n",
"Epoch 8/500\n",
"80/80 [==============================] - 0s 64us/step - loss: 1.3577 - acc: 0.3750 - val_loss: 1.4336 - val_acc: 0.3000\n",
"Epoch 9/500\n",
"80/80 [==============================] - 0s 66us/step - loss: 1.3453 - acc: 0.3750 - val_loss: 1.4183 - val_acc: 0.3000\n",
"Epoch 10/500\n",
"80/80 [==============================] - 0s 56us/step - loss: 1.3333 - acc: 0.3750 - val_loss: 1.4034 - val_acc: 0.3000\n",
"Epoch 11/500\n",
"80/80 [==============================] - 0s 70us/step - loss: 1.3217 - acc: 0.3750 - val_loss: 1.3889 - val_acc: 0.3000\n",
"Epoch 12/500\n",
"80/80 [==============================] - 0s 66us/step - loss: 1.3104 - acc: 0.3750 - val_loss: 1.3749 - val_acc: 0.3000\n",
"Epoch 13/500\n",
"80/80 [==============================] - 0s 90us/step - loss: 1.2996 - acc: 0.3750 - val_loss: 1.3613 - val_acc: 0.3000\n",
"Epoch 14/500\n",
"80/80 [==============================] - 0s 74us/step - loss: 1.2890 - acc: 0.3750 - val_loss: 1.3481 - val_acc: 0.3000\n",
"Epoch 15/500\n",
"80/80 [==============================] - 0s 85us/step - loss: 1.2792 - acc: 0.3750 - val_loss: 1.3360 - val_acc: 0.3000\n",
"Epoch 16/500\n",
"80/80 [==============================] - 0s 80us/step - loss: 1.2706 - acc: 0.3750 - val_loss: 1.3247 - val_acc: 0.3000\n",
"Epoch 17/500\n",
"80/80 [==============================] - 0s 74us/step - loss: 1.2621 - acc: 0.3750 - val_loss: 1.3137 - val_acc: 0.3000\n",
"Epoch 18/500\n",
"80/80 [==============================] - 0s 88us/step - loss: 1.2538 - acc: 0.3750 - val_loss: 1.3031 - val_acc: 0.3000\n",
"Epoch 19/500\n",
"80/80 [==============================] - 0s 78us/step - loss: 1.2459 - acc: 0.3750 - val_loss: 1.2926 - val_acc: 0.3000\n",
"Epoch 20/500\n",
"80/80 [==============================] - 0s 52us/step - loss: 1.2382 - acc: 0.3750 - val_loss: 1.2833 - val_acc: 0.3000\n",
"Epoch 21/500\n",
"80/80 [==============================] - 0s 61us/step - loss: 1.2310 - acc: 0.3750 - val_loss: 1.2745 - val_acc: 0.3000\n",
"Epoch 22/500\n",
"80/80 [==============================] - 0s 76us/step - loss: 1.2242 - acc: 0.3750 - val_loss: 1.2660 - val_acc: 0.3000\n",
"Epoch 23/500\n",
"80/80 [==============================] - 0s 78us/step - loss: 1.2176 - acc: 0.3750 - val_loss: 1.2579 - val_acc: 0.3000\n",
"Epoch 24/500\n",
"80/80 [==============================] - 0s 65us/step - loss: 1.2114 - acc: 0.3750 - val_loss: 1.2497 - val_acc: 0.3000\n",
"Epoch 25/500\n",
"80/80 [==============================] - 0s 79us/step - loss: 1.2053 - acc: 0.3750 - val_loss: 1.2416 - val_acc: 0.3000\n",
"Epoch 26/500\n",
"80/80 [==============================] - 0s 74us/step - loss: 1.1992 - acc: 0.3750 - val_loss: 1.2335 - val_acc: 0.3000\n",
"Epoch 27/500\n",
"80/80 [==============================] - 0s 59us/step - loss: 1.1931 - acc: 0.3750 - val_loss: 1.2253 - val_acc: 0.3000\n",
"Epoch 28/500\n",
"80/80 [==============================] - 0s 79us/step - loss: 1.1867 - acc: 0.3750 - val_loss: 1.2172 - val_acc: 0.3000\n",
"Epoch 29/500\n",
"80/80 [==============================] - 0s 84us/step - loss: 1.1802 - acc: 0.3750 - val_loss: 1.2090 - val_acc: 0.3000\n",
"Epoch 30/500\n",
"80/80 [==============================] - 0s 82us/step - loss: 1.1737 - acc: 0.3750 - val_loss: 1.2011 - val_acc: 0.3000\n",
"Epoch 31/500\n",
"80/80 [==============================] - 0s 59us/step - loss: 1.1670 - acc: 0.3750 - val_loss: 1.1936 - val_acc: 0.3000\n",
"Epoch 32/500\n",
"80/80 [==============================] - 0s 68us/step - loss: 1.1603 - acc: 0.3750 - val_loss: 1.1864 - val_acc: 0.3000\n",
"Epoch 33/500\n",
"80/80 [==============================] - 0s 122us/step - loss: 1.1536 - acc: 0.3750 - val_loss: 1.1796 - val_acc: 0.3000\n",
"Epoch 34/500\n",
"80/80 [==============================] - 0s 69us/step - loss: 1.1470 - acc: 0.3750 - val_loss: 1.1732 - val_acc: 0.3000\n",
"Epoch 35/500\n",
"80/80 [==============================] - 0s 110us/step - loss: 1.1406 - acc: 0.3750 - val_loss: 1.1672 - val_acc: 0.3000\n",
"Epoch 36/500\n",
"80/80 [==============================] - 0s 131us/step - loss: 1.1343 - acc: 0.3750 - val_loss: 1.1612 - val_acc: 0.3000\n",
"Epoch 37/500\n",
"80/80 [==============================] - 0s 74us/step - loss: 1.1283 - acc: 0.3750 - val_loss: 1.1557 - val_acc: 0.3000\n",
"Epoch 38/500\n",
"80/80 [==============================] - 0s 95us/step - loss: 1.1225 - acc: 0.3750 - val_loss: 1.1504 - val_acc: 0.3000\n",
"Epoch 39/500\n",
"80/80 [==============================] - 0s 91us/step - loss: 1.1167 - acc: 0.3750 - val_loss: 1.1450 - val_acc: 0.3000\n",
"Epoch 40/500\n",
"80/80 [==============================] - 0s 109us/step - loss: 1.1111 - acc: 0.3750 - val_loss: 1.1395 - val_acc: 0.3000\n",
"Epoch 41/500\n",
"80/80 [==============================] - 0s 76us/step - loss: 1.1055 - acc: 0.3750 - val_loss: 1.1335 - val_acc: 0.3000\n",
"Epoch 42/500\n",
"80/80 [==============================] - 0s 113us/step - loss: 1.0998 - acc: 0.3750 - val_loss: 1.1273 - val_acc: 0.3000\n",
"Epoch 43/500\n",
"80/80 [==============================] - 0s 76us/step - loss: 1.0941 - acc: 0.3750 - val_loss: 1.1214 - val_acc: 0.3000\n",
"Epoch 44/500\n",
"80/80 [==============================] - 0s 76us/step - loss: 1.0886 - acc: 0.3750 - val_loss: 1.1154 - val_acc: 0.3000\n",
"Epoch 45/500\n",
"80/80 [==============================] - 0s 71us/step - loss: 1.0832 - acc: 0.3750 - val_loss: 1.1095 - val_acc: 0.3000\n",
"Epoch 46/500\n",
"80/80 [==============================] - 0s 69us/step - loss: 1.0780 - acc: 0.3750 - val_loss: 1.1038 - val_acc: 0.3000\n",
"Epoch 47/500\n",
"80/80 [==============================] - 0s 108us/step - loss: 1.0730 - acc: 0.3750 - val_loss: 1.0982 - val_acc: 0.3000\n",
"Epoch 48/500\n",
"80/80 [==============================] - 0s 69us/step - loss: 1.0681 - acc: 0.3750 - val_loss: 1.0926 - val_acc: 0.3000\n",
"Epoch 49/500\n",
"80/80 [==============================] - 0s 71us/step - loss: 1.0634 - acc: 0.3750 - val_loss: 1.0871 - val_acc: 0.3000\n",
"Epoch 50/500\n",
"80/80 [==============================] - 0s 67us/step - loss: 1.0586 - acc: 0.3750 - val_loss: 1.0818 - val_acc: 0.3000\n",
"Epoch 51/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 1.0539 - acc: 0.3750 - val_loss: 1.0765 - val_acc: 0.3000\n",
"Epoch 52/500\n",
"80/80 [==============================] - 0s 85us/step - loss: 1.0492 - acc: 0.3750 - val_loss: 1.0711 - val_acc: 0.3000\n",
"Epoch 53/500\n",
"80/80 [==============================] - 0s 89us/step - loss: 1.0445 - acc: 0.3750 - val_loss: 1.0657 - val_acc: 0.3000\n",
"Epoch 54/500\n",
"80/80 [==============================] - 0s 75us/step - loss: 1.0396 - acc: 0.3875 - val_loss: 1.0602 - val_acc: 0.3000\n",
"Epoch 55/500\n",
"80/80 [==============================] - 0s 73us/step - loss: 1.0348 - acc: 0.4000 - val_loss: 1.0547 - val_acc: 0.3250\n",
"Epoch 56/500\n",
"80/80 [==============================] - 0s 82us/step - loss: 1.0301 - acc: 0.4125 - val_loss: 1.0491 - val_acc: 0.3250\n",
"Epoch 57/500\n",
"80/80 [==============================] - 0s 89us/step - loss: 1.0254 - acc: 0.4750 - val_loss: 1.0435 - val_acc: 0.3250\n",
"Epoch 58/500\n",
"80/80 [==============================] - 0s 107us/step - loss: 1.0207 - acc: 0.5250 - val_loss: 1.0379 - val_acc: 0.3750\n",
"Epoch 59/500\n",
"80/80 [==============================] - 0s 107us/step - loss: 1.0160 - acc: 0.5500 - val_loss: 1.0324 - val_acc: 0.4000\n",
"Epoch 60/500\n",
"80/80 [==============================] - 0s 71us/step - loss: 1.0113 - acc: 0.5625 - val_loss: 1.0269 - val_acc: 0.4750\n",
"Epoch 61/500\n",
"80/80 [==============================] - 0s 80us/step - loss: 1.0066 - acc: 0.5875 - val_loss: 1.0215 - val_acc: 0.5000\n",
"Epoch 62/500\n",
"80/80 [==============================] - 0s 57us/step - loss: 1.0019 - acc: 0.6000 - val_loss: 1.0160 - val_acc: 0.5500\n",
"Epoch 63/500\n",
"80/80 [==============================] - 0s 89us/step - loss: 0.9971 - acc: 0.6125 - val_loss: 1.0106 - val_acc: 0.5750\n",
"Epoch 64/500\n",
"80/80 [==============================] - 0s 74us/step - loss: 0.9922 - acc: 0.6250 - val_loss: 1.0051 - val_acc: 0.6000\n",
"Epoch 65/500\n",
"80/80 [==============================] - 0s 114us/step - loss: 0.9873 - acc: 0.6375 - val_loss: 0.9997 - val_acc: 0.6750\n",
"Epoch 66/500\n",
"80/80 [==============================] - 0s 84us/step - loss: 0.9824 - acc: 0.6500 - val_loss: 0.9943 - val_acc: 0.6750\n",
"Epoch 67/500\n",
"80/80 [==============================] - 0s 129us/step - loss: 0.9774 - acc: 0.6625 - val_loss: 0.9888 - val_acc: 0.6750\n",
"Epoch 68/500\n",
"80/80 [==============================] - 0s 95us/step - loss: 0.9724 - acc: 0.6875 - val_loss: 0.9834 - val_acc: 0.6750\n",
"Epoch 69/500\n",
"80/80 [==============================] - 0s 63us/step - loss: 0.9673 - acc: 0.7000 - val_loss: 0.9780 - val_acc: 0.6750\n",
"Epoch 70/500\n",
"80/80 [==============================] - 0s 109us/step - loss: 0.9623 - acc: 0.7000 - val_loss: 0.9727 - val_acc: 0.6750\n",
"Epoch 71/500\n",
"80/80 [==============================] - 0s 79us/step - loss: 0.9573 - acc: 0.7125 - val_loss: 0.9673 - val_acc: 0.6750\n",
"Epoch 72/500\n",
"80/80 [==============================] - 0s 89us/step - loss: 0.9523 - acc: 0.7125 - val_loss: 0.9620 - val_acc: 0.7000\n",
"Epoch 73/500\n",
"80/80 [==============================] - 0s 100us/step - loss: 0.9473 - acc: 0.7125 - val_loss: 0.9567 - val_acc: 0.6750\n",
"Epoch 74/500\n",
"80/80 [==============================] - 0s 76us/step - loss: 0.9423 - acc: 0.7250 - val_loss: 0.9514 - val_acc: 0.6750\n",
"Epoch 75/500\n",
"80/80 [==============================] - 0s 74us/step - loss: 0.9373 - acc: 0.7250 - val_loss: 0.9461 - val_acc: 0.6750\n",
"Epoch 76/500\n",
"80/80 [==============================] - 0s 107us/step - loss: 0.9322 - acc: 0.7625 - val_loss: 0.9409 - val_acc: 0.6750\n",
"Epoch 77/500\n",
"80/80 [==============================] - 0s 111us/step - loss: 0.9272 - acc: 0.7625 - val_loss: 0.9356 - val_acc: 0.6750\n",
"Epoch 78/500\n",
"80/80 [==============================] - 0s 104us/step - loss: 0.9222 - acc: 0.7625 - val_loss: 0.9305 - val_acc: 0.6750\n",
"Epoch 79/500\n",
"80/80 [==============================] - 0s 78us/step - loss: 0.9172 - acc: 0.7625 - val_loss: 0.9253 - val_acc: 0.6750\n",
"Epoch 80/500\n",
"80/80 [==============================] - 0s 55us/step - loss: 0.9121 - acc: 0.7625 - val_loss: 0.9202 - val_acc: 0.6750\n",
"Epoch 81/500\n",
"80/80 [==============================] - 0s 75us/step - loss: 0.9071 - acc: 0.7625 - val_loss: 0.9152 - val_acc: 0.6750\n",
"Epoch 82/500\n",
"80/80 [==============================] - 0s 77us/step - loss: 0.9020 - acc: 0.7625 - val_loss: 0.9102 - val_acc: 0.6750\n",
"Epoch 83/500\n",
"80/80 [==============================] - 0s 69us/step - loss: 0.8970 - acc: 0.7500 - val_loss: 0.9053 - val_acc: 0.6750\n",
"Epoch 84/500\n",
"80/80 [==============================] - 0s 231us/step - loss: 0.8920 - acc: 0.7500 - val_loss: 0.9003 - val_acc: 0.6750\n",
"Epoch 85/500\n",
"80/80 [==============================] - 0s 188us/step - loss: 0.8870 - acc: 0.7500 - val_loss: 0.8954 - val_acc: 0.6750\n",
"Epoch 86/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.8821 - acc: 0.7375 - val_loss: 0.8905 - val_acc: 0.6750\n",
"Epoch 87/500\n",
"80/80 [==============================] - 0s 67us/step - loss: 0.8771 - acc: 0.7375 - val_loss: 0.8856 - val_acc: 0.6750\n",
"Epoch 88/500\n",
"80/80 [==============================] - 0s 86us/step - loss: 0.8722 - acc: 0.7500 - val_loss: 0.8808 - val_acc: 0.6750\n",
"Epoch 89/500\n",
"80/80 [==============================] - 0s 94us/step - loss: 0.8673 - acc: 0.7500 - val_loss: 0.8759 - val_acc: 0.6750\n",
"Epoch 90/500\n",
"80/80 [==============================] - 0s 100us/step - loss: 0.8623 - acc: 0.7500 - val_loss: 0.8711 - val_acc: 0.6750\n",
"Epoch 91/500\n",
"80/80 [==============================] - 0s 151us/step - loss: 0.8574 - acc: 0.7500 - val_loss: 0.8663 - val_acc: 0.6750\n",
"Epoch 92/500\n",
"80/80 [==============================] - 0s 83us/step - loss: 0.8525 - acc: 0.7500 - val_loss: 0.8615 - val_acc: 0.6750\n",
"Epoch 93/500\n",
"80/80 [==============================] - 0s 82us/step - loss: 0.8476 - acc: 0.7500 - val_loss: 0.8568 - val_acc: 0.6750\n",
"Epoch 94/500\n",
"80/80 [==============================] - 0s 57us/step - loss: 0.8427 - acc: 0.7500 - val_loss: 0.8521 - val_acc: 0.6750\n",
"Epoch 95/500\n",
"80/80 [==============================] - 0s 68us/step - loss: 0.8378 - acc: 0.7500 - val_loss: 0.8474 - val_acc: 0.6750\n",
"Epoch 96/500\n",
"80/80 [==============================] - 0s 102us/step - loss: 0.8329 - acc: 0.7500 - val_loss: 0.8427 - val_acc: 0.6750\n",
"Epoch 97/500\n",
"80/80 [==============================] - 0s 131us/step - loss: 0.8280 - acc: 0.7500 - val_loss: 0.8380 - val_acc: 0.6750\n",
"Epoch 98/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.8231 - acc: 0.7500 - val_loss: 0.8334 - val_acc: 0.6750\n",
"Epoch 99/500\n",
"80/80 [==============================] - 0s 90us/step - loss: 0.8182 - acc: 0.7500 - val_loss: 0.8287 - val_acc: 0.6750\n",
"Epoch 100/500\n",
"80/80 [==============================] - 0s 92us/step - loss: 0.8133 - acc: 0.7500 - val_loss: 0.8241 - val_acc: 0.6750\n",
"Epoch 101/500\n",
"80/80 [==============================] - 0s 68us/step - loss: 0.8084 - acc: 0.7500 - val_loss: 0.8194 - val_acc: 0.6750\n",
"Epoch 102/500\n",
"80/80 [==============================] - 0s 105us/step - loss: 0.8034 - acc: 0.7500 - val_loss: 0.8148 - val_acc: 0.6750\n",
"Epoch 103/500\n",
"80/80 [==============================] - 0s 109us/step - loss: 0.7985 - acc: 0.7500 - val_loss: 0.8102 - val_acc: 0.6750\n",
"Epoch 104/500\n",
"80/80 [==============================] - 0s 65us/step - loss: 0.7936 - acc: 0.7500 - val_loss: 0.8057 - val_acc: 0.6750\n",
"Epoch 105/500\n",
"80/80 [==============================] - 0s 92us/step - loss: 0.7887 - acc: 0.7500 - val_loss: 0.8011 - val_acc: 0.6750\n",
"Epoch 106/500\n",
"80/80 [==============================] - 0s 117us/step - loss: 0.7838 - acc: 0.7500 - val_loss: 0.7966 - val_acc: 0.6750\n",
"Epoch 107/500\n",
"80/80 [==============================] - 0s 62us/step - loss: 0.7788 - acc: 0.7500 - val_loss: 0.7920 - val_acc: 0.6750\n",
"Epoch 108/500\n",
"80/80 [==============================] - 0s 87us/step - loss: 0.7739 - acc: 0.7500 - val_loss: 0.7875 - val_acc: 0.6750\n",
"Epoch 109/500\n",
"80/80 [==============================] - 0s 75us/step - loss: 0.7689 - acc: 0.7500 - val_loss: 0.7830 - val_acc: 0.6750\n",
"Epoch 110/500\n",
"80/80 [==============================] - 0s 82us/step - loss: 0.7640 - acc: 0.7500 - val_loss: 0.7785 - val_acc: 0.6750\n",
"Epoch 111/500\n",
"80/80 [==============================] - 0s 94us/step - loss: 0.7591 - acc: 0.7500 - val_loss: 0.7739 - val_acc: 0.6750\n",
"Epoch 112/500\n",
"80/80 [==============================] - 0s 79us/step - loss: 0.7542 - acc: 0.7500 - val_loss: 0.7693 - val_acc: 0.6750\n",
"Epoch 113/500\n",
"80/80 [==============================] - 0s 78us/step - loss: 0.7492 - acc: 0.7500 - val_loss: 0.7648 - val_acc: 0.6750\n",
"Epoch 114/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.7443 - acc: 0.7500 - val_loss: 0.7602 - val_acc: 0.6750\n",
"Epoch 115/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.7393 - acc: 0.7500 - val_loss: 0.7557 - val_acc: 0.6750\n",
"Epoch 116/500\n",
"80/80 [==============================] - 0s 97us/step - loss: 0.7344 - acc: 0.7500 - val_loss: 0.7511 - val_acc: 0.6750\n",
"Epoch 117/500\n",
"80/80 [==============================] - 0s 82us/step - loss: 0.7294 - acc: 0.7500 - val_loss: 0.7466 - val_acc: 0.6750\n",
"Epoch 118/500\n",
"80/80 [==============================] - 0s 74us/step - loss: 0.7245 - acc: 0.7500 - val_loss: 0.7421 - val_acc: 0.6750\n",
"Epoch 119/500\n",
"80/80 [==============================] - 0s 94us/step - loss: 0.7195 - acc: 0.7500 - val_loss: 0.7376 - val_acc: 0.6750\n",
"Epoch 120/500\n",
"80/80 [==============================] - 0s 99us/step - loss: 0.7146 - acc: 0.7500 - val_loss: 0.7330 - val_acc: 0.6750\n",
"Epoch 121/500\n",
"80/80 [==============================] - 0s 96us/step - loss: 0.7096 - acc: 0.7500 - val_loss: 0.7285 - val_acc: 0.6750\n",
"Epoch 122/500\n",
"80/80 [==============================] - 0s 107us/step - loss: 0.7047 - acc: 0.7500 - val_loss: 0.7239 - val_acc: 0.6750\n",
"Epoch 123/500\n",
"80/80 [==============================] - 0s 77us/step - loss: 0.6999 - acc: 0.7500 - val_loss: 0.7194 - val_acc: 0.6750\n",
"Epoch 124/500\n",
"80/80 [==============================] - 0s 82us/step - loss: 0.6951 - acc: 0.7500 - val_loss: 0.7149 - val_acc: 0.6750\n",
"Epoch 125/500\n",
"80/80 [==============================] - 0s 98us/step - loss: 0.6903 - acc: 0.7500 - val_loss: 0.7103 - val_acc: 0.7000\n",
"Epoch 126/500\n",
"80/80 [==============================] - 0s 81us/step - loss: 0.6856 - acc: 0.7500 - val_loss: 0.7059 - val_acc: 0.7250\n",
"Epoch 127/500\n",
"80/80 [==============================] - 0s 143us/step - loss: 0.6810 - acc: 0.7500 - val_loss: 0.7015 - val_acc: 0.7250\n",
"Epoch 128/500\n",
"80/80 [==============================] - 0s 102us/step - loss: 0.6763 - acc: 0.7500 - val_loss: 0.6971 - val_acc: 0.7250\n",
"Epoch 129/500\n",
"80/80 [==============================] - 0s 97us/step - loss: 0.6718 - acc: 0.7500 - val_loss: 0.6929 - val_acc: 0.7250\n",
"Epoch 130/500\n",
"80/80 [==============================] - 0s 102us/step - loss: 0.6673 - acc: 0.7625 - val_loss: 0.6887 - val_acc: 0.7250\n",
"Epoch 131/500\n",
"80/80 [==============================] - 0s 99us/step - loss: 0.6628 - acc: 0.7625 - val_loss: 0.6846 - val_acc: 0.7250\n",
"Epoch 132/500\n",
"80/80 [==============================] - 0s 75us/step - loss: 0.6585 - acc: 0.7625 - val_loss: 0.6806 - val_acc: 0.7500\n",
"Epoch 133/500\n",
"80/80 [==============================] - 0s 182us/step - loss: 0.6542 - acc: 0.7625 - val_loss: 0.6767 - val_acc: 0.7500\n",
"Epoch 134/500\n",
"80/80 [==============================] - 0s 92us/step - loss: 0.6500 - acc: 0.7625 - val_loss: 0.6728 - val_acc: 0.7500\n",
"Epoch 135/500\n",
"80/80 [==============================] - 0s 95us/step - loss: 0.6459 - acc: 0.7750 - val_loss: 0.6690 - val_acc: 0.7500\n",
"Epoch 136/500\n",
"80/80 [==============================] - 0s 111us/step - loss: 0.6418 - acc: 0.7750 - val_loss: 0.6652 - val_acc: 0.7500\n",
"Epoch 137/500\n",
"80/80 [==============================] - 0s 105us/step - loss: 0.6378 - acc: 0.8000 - val_loss: 0.6615 - val_acc: 0.7500\n",
"Epoch 138/500\n",
"80/80 [==============================] - 0s 117us/step - loss: 0.6340 - acc: 0.8250 - val_loss: 0.6578 - val_acc: 0.7500\n",
"Epoch 139/500\n",
"80/80 [==============================] - 0s 101us/step - loss: 0.6302 - acc: 0.8250 - val_loss: 0.6542 - val_acc: 0.7500\n",
"Epoch 140/500\n",
"80/80 [==============================] - 0s 96us/step - loss: 0.6265 - acc: 0.8250 - val_loss: 0.6505 - val_acc: 0.7500\n",
"Epoch 141/500\n",
"80/80 [==============================] - 0s 76us/step - loss: 0.6229 - acc: 0.8250 - val_loss: 0.6469 - val_acc: 0.7750\n",
"Epoch 142/500\n",
"80/80 [==============================] - 0s 85us/step - loss: 0.6193 - acc: 0.8250 - val_loss: 0.6434 - val_acc: 0.7750\n",
"Epoch 143/500\n",
"80/80 [==============================] - 0s 75us/step - loss: 0.6157 - acc: 0.8250 - val_loss: 0.6399 - val_acc: 0.7750\n",
"Epoch 144/500\n",
"80/80 [==============================] - 0s 104us/step - loss: 0.6122 - acc: 0.8250 - val_loss: 0.6364 - val_acc: 0.7750\n",
"Epoch 145/500\n",
"80/80 [==============================] - 0s 94us/step - loss: 0.6088 - acc: 0.8375 - val_loss: 0.6330 - val_acc: 0.7750\n",
"Epoch 146/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.6053 - acc: 0.8375 - val_loss: 0.6297 - val_acc: 0.7750\n",
"Epoch 147/500\n",
"80/80 [==============================] - 0s 94us/step - loss: 0.6020 - acc: 0.8500 - val_loss: 0.6264 - val_acc: 0.8000\n",
"Epoch 148/500\n",
"80/80 [==============================] - 0s 98us/step - loss: 0.5987 - acc: 0.8500 - val_loss: 0.6232 - val_acc: 0.8000\n",
"Epoch 149/500\n",
"80/80 [==============================] - 0s 104us/step - loss: 0.5954 - acc: 0.8500 - val_loss: 0.6200 - val_acc: 0.8000\n",
"Epoch 150/500\n",
"80/80 [==============================] - 0s 112us/step - loss: 0.5922 - acc: 0.8500 - val_loss: 0.6169 - val_acc: 0.8000\n",
"Epoch 151/500\n",
"80/80 [==============================] - 0s 121us/step - loss: 0.5890 - acc: 0.8625 - val_loss: 0.6138 - val_acc: 0.8000\n",
"Epoch 152/500\n",
"80/80 [==============================] - 0s 111us/step - loss: 0.5859 - acc: 0.8625 - val_loss: 0.6107 - val_acc: 0.8000\n",
"Epoch 153/500\n",
"80/80 [==============================] - 0s 115us/step - loss: 0.5828 - acc: 0.8625 - val_loss: 0.6077 - val_acc: 0.8000\n",
"Epoch 154/500\n",
"80/80 [==============================] - 0s 74us/step - loss: 0.5797 - acc: 0.8625 - val_loss: 0.6048 - val_acc: 0.8000\n",
"Epoch 155/500\n",
"80/80 [==============================] - 0s 93us/step - loss: 0.5767 - acc: 0.8625 - val_loss: 0.6019 - val_acc: 0.8000\n",
"Epoch 156/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.5737 - acc: 0.8625 - val_loss: 0.5991 - val_acc: 0.8000\n",
"Epoch 157/500\n",
"80/80 [==============================] - 0s 94us/step - loss: 0.5707 - acc: 0.8625 - val_loss: 0.5963 - val_acc: 0.8000\n",
"Epoch 158/500\n",
"80/80 [==============================] - 0s 140us/step - loss: 0.5678 - acc: 0.8625 - val_loss: 0.5936 - val_acc: 0.8000\n",
"Epoch 159/500\n",
"80/80 [==============================] - 0s 116us/step - loss: 0.5650 - acc: 0.8625 - val_loss: 0.5909 - val_acc: 0.8000\n",
"Epoch 160/500\n",
"80/80 [==============================] - 0s 124us/step - loss: 0.5621 - acc: 0.8625 - val_loss: 0.5883 - val_acc: 0.8000\n",
"Epoch 161/500\n",
"80/80 [==============================] - 0s 81us/step - loss: 0.5593 - acc: 0.8625 - val_loss: 0.5857 - val_acc: 0.8000\n",
"Epoch 162/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.5566 - acc: 0.8625 - val_loss: 0.5831 - val_acc: 0.8000\n",
"Epoch 163/500\n",
"80/80 [==============================] - 0s 145us/step - loss: 0.5538 - acc: 0.8625 - val_loss: 0.5806 - val_acc: 0.8000\n",
"Epoch 164/500\n",
"80/80 [==============================] - 0s 110us/step - loss: 0.5511 - acc: 0.8750 - val_loss: 0.5781 - val_acc: 0.8000\n",
"Epoch 165/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.5484 - acc: 0.8875 - val_loss: 0.5756 - val_acc: 0.8000\n",
"Epoch 166/500\n",
"80/80 [==============================] - 0s 97us/step - loss: 0.5458 - acc: 0.8875 - val_loss: 0.5731 - val_acc: 0.8000\n",
"Epoch 167/500\n",
"80/80 [==============================] - 0s 100us/step - loss: 0.5432 - acc: 0.8875 - val_loss: 0.5706 - val_acc: 0.8000\n",
"Epoch 168/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.5406 - acc: 0.8875 - val_loss: 0.5682 - val_acc: 0.8000\n",
"Epoch 169/500\n",
"80/80 [==============================] - 0s 93us/step - loss: 0.5380 - acc: 0.8875 - val_loss: 0.5658 - val_acc: 0.8000\n",
"Epoch 170/500\n",
"80/80 [==============================] - 0s 83us/step - loss: 0.5355 - acc: 0.8875 - val_loss: 0.5634 - val_acc: 0.8000\n",
"Epoch 171/500\n",
"80/80 [==============================] - 0s 107us/step - loss: 0.5330 - acc: 0.8875 - val_loss: 0.5610 - val_acc: 0.8000\n",
"Epoch 172/500\n",
"80/80 [==============================] - 0s 74us/step - loss: 0.5305 - acc: 0.8875 - val_loss: 0.5586 - val_acc: 0.8250\n",
"Epoch 173/500\n",
"80/80 [==============================] - 0s 69us/step - loss: 0.5280 - acc: 0.8875 - val_loss: 0.5562 - val_acc: 0.8250\n",
"Epoch 174/500\n",
"80/80 [==============================] - 0s 85us/step - loss: 0.5256 - acc: 0.8875 - val_loss: 0.5539 - val_acc: 0.8250\n",
"Epoch 175/500\n",
"80/80 [==============================] - 0s 99us/step - loss: 0.5232 - acc: 0.8875 - val_loss: 0.5515 - val_acc: 0.8500\n",
"Epoch 176/500\n",
"80/80 [==============================] - 0s 97us/step - loss: 0.5208 - acc: 0.8875 - val_loss: 0.5492 - val_acc: 0.8500\n",
"Epoch 177/500\n",
"80/80 [==============================] - 0s 97us/step - loss: 0.5184 - acc: 0.8875 - val_loss: 0.5469 - val_acc: 0.8500\n",
"Epoch 178/500\n",
"80/80 [==============================] - 0s 75us/step - loss: 0.5161 - acc: 0.8875 - val_loss: 0.5446 - val_acc: 0.8500\n",
"Epoch 179/500\n",
"80/80 [==============================] - 0s 192us/step - loss: 0.5138 - acc: 0.8875 - val_loss: 0.5423 - val_acc: 0.8500\n",
"Epoch 180/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.5115 - acc: 0.8875 - val_loss: 0.5401 - val_acc: 0.8500\n",
"Epoch 181/500\n",
"80/80 [==============================] - 0s 74us/step - loss: 0.5092 - acc: 0.8875 - val_loss: 0.5379 - val_acc: 0.8500\n",
"Epoch 182/500\n",
"80/80 [==============================] - 0s 101us/step - loss: 0.5069 - acc: 0.8875 - val_loss: 0.5357 - val_acc: 0.8500\n",
"Epoch 183/500\n",
"80/80 [==============================] - 0s 87us/step - loss: 0.5047 - acc: 0.8875 - val_loss: 0.5336 - val_acc: 0.8500\n",
"Epoch 184/500\n",
"80/80 [==============================] - 0s 99us/step - loss: 0.5025 - acc: 0.8875 - val_loss: 0.5314 - val_acc: 0.8500\n",
"Epoch 185/500\n",
"80/80 [==============================] - 0s 130us/step - loss: 0.5003 - acc: 0.8875 - val_loss: 0.5293 - val_acc: 0.8500\n",
"Epoch 186/500\n",
"80/80 [==============================] - 0s 91us/step - loss: 0.4981 - acc: 0.8875 - val_loss: 0.5272 - val_acc: 0.8500\n",
"Epoch 187/500\n",
"80/80 [==============================] - 0s 124us/step - loss: 0.4960 - acc: 0.8875 - val_loss: 0.5251 - val_acc: 0.8500\n",
"Epoch 188/500\n",
"80/80 [==============================] - 0s 122us/step - loss: 0.4939 - acc: 0.9000 - val_loss: 0.5231 - val_acc: 0.8750\n",
"Epoch 189/500\n",
"80/80 [==============================] - 0s 113us/step - loss: 0.4917 - acc: 0.9000 - val_loss: 0.5210 - val_acc: 0.8750\n",
"Epoch 190/500\n",
"80/80 [==============================] - 0s 92us/step - loss: 0.4897 - acc: 0.9125 - val_loss: 0.5190 - val_acc: 0.8750\n",
"Epoch 191/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.4876 - acc: 0.9125 - val_loss: 0.5170 - val_acc: 0.8750\n",
"Epoch 192/500\n",
"80/80 [==============================] - 0s 109us/step - loss: 0.4855 - acc: 0.9125 - val_loss: 0.5150 - val_acc: 0.8750\n",
"Epoch 193/500\n",
"80/80 [==============================] - 0s 92us/step - loss: 0.4835 - acc: 0.9125 - val_loss: 0.5130 - val_acc: 0.8750\n",
"Epoch 194/500\n",
"80/80 [==============================] - 0s 101us/step - loss: 0.4815 - acc: 0.9125 - val_loss: 0.5111 - val_acc: 0.8750\n",
"Epoch 195/500\n",
"80/80 [==============================] - 0s 128us/step - loss: 0.4795 - acc: 0.9125 - val_loss: 0.5091 - val_acc: 0.8750\n",
"Epoch 196/500\n",
"80/80 [==============================] - 0s 100us/step - loss: 0.4775 - acc: 0.9125 - val_loss: 0.5072 - val_acc: 0.8750\n",
"Epoch 197/500\n",
"80/80 [==============================] - 0s 98us/step - loss: 0.4756 - acc: 0.9125 - val_loss: 0.5053 - val_acc: 0.8750\n",
"Epoch 198/500\n",
"80/80 [==============================] - 0s 99us/step - loss: 0.4736 - acc: 0.9125 - val_loss: 0.5034 - val_acc: 0.8750\n",
"Epoch 199/500\n",
"80/80 [==============================] - 0s 99us/step - loss: 0.4717 - acc: 0.9250 - val_loss: 0.5015 - val_acc: 0.8750\n",
"Epoch 200/500\n",
"80/80 [==============================] - 0s 125us/step - loss: 0.4698 - acc: 0.9250 - val_loss: 0.4996 - val_acc: 0.9000\n",
"Epoch 201/500\n",
"80/80 [==============================] - 0s 99us/step - loss: 0.4679 - acc: 0.9250 - val_loss: 0.4978 - val_acc: 0.9000\n",
"Epoch 202/500\n",
"80/80 [==============================] - 0s 83us/step - loss: 0.4661 - acc: 0.9250 - val_loss: 0.4959 - val_acc: 0.9000\n",
"Epoch 203/500\n",
"80/80 [==============================] - 0s 134us/step - loss: 0.4642 - acc: 0.9250 - val_loss: 0.4941 - val_acc: 0.9000\n",
"Epoch 204/500\n",
"80/80 [==============================] - 0s 183us/step - loss: 0.4624 - acc: 0.9250 - val_loss: 0.4923 - val_acc: 0.9000\n",
"Epoch 205/500\n",
"80/80 [==============================] - 0s 83us/step - loss: 0.4606 - acc: 0.9250 - val_loss: 0.4905 - val_acc: 0.9000\n",
"Epoch 206/500\n",
"80/80 [==============================] - 0s 113us/step - loss: 0.4588 - acc: 0.9250 - val_loss: 0.4887 - val_acc: 0.9000\n",
"Epoch 207/500\n",
"80/80 [==============================] - 0s 115us/step - loss: 0.4570 - acc: 0.9250 - val_loss: 0.4870 - val_acc: 0.9000\n",
"Epoch 208/500\n",
"80/80 [==============================] - 0s 130us/step - loss: 0.4552 - acc: 0.9250 - val_loss: 0.4852 - val_acc: 0.9000\n",
"Epoch 209/500\n",
"80/80 [==============================] - 0s 124us/step - loss: 0.4535 - acc: 0.9250 - val_loss: 0.4835 - val_acc: 0.9000\n",
"Epoch 210/500\n",
"80/80 [==============================] - 0s 108us/step - loss: 0.4517 - acc: 0.9250 - val_loss: 0.4817 - val_acc: 0.9000\n",
"Epoch 211/500\n",
"80/80 [==============================] - 0s 91us/step - loss: 0.4500 - acc: 0.9250 - val_loss: 0.4800 - val_acc: 0.9000\n",
"Epoch 212/500\n",
"80/80 [==============================] - 0s 100us/step - loss: 0.4483 - acc: 0.9250 - val_loss: 0.4783 - val_acc: 0.9000\n",
"Epoch 213/500\n",
"80/80 [==============================] - 0s 95us/step - loss: 0.4466 - acc: 0.9250 - val_loss: 0.4766 - val_acc: 0.9000\n",
"Epoch 214/500\n",
"80/80 [==============================] - 0s 120us/step - loss: 0.4449 - acc: 0.9250 - val_loss: 0.4749 - val_acc: 0.9000\n",
"Epoch 215/500\n",
"80/80 [==============================] - 0s 111us/step - loss: 0.4433 - acc: 0.9250 - val_loss: 0.4732 - val_acc: 0.9000\n",
"Epoch 216/500\n",
"80/80 [==============================] - 0s 90us/step - loss: 0.4416 - acc: 0.9250 - val_loss: 0.4715 - val_acc: 0.9000\n",
"Epoch 217/500\n",
"80/80 [==============================] - 0s 101us/step - loss: 0.4400 - acc: 0.9250 - val_loss: 0.4699 - val_acc: 0.9000\n",
"Epoch 218/500\n",
"80/80 [==============================] - 0s 93us/step - loss: 0.4384 - acc: 0.9250 - val_loss: 0.4683 - val_acc: 0.9000\n",
"Epoch 219/500\n",
"80/80 [==============================] - 0s 100us/step - loss: 0.4368 - acc: 0.9250 - val_loss: 0.4666 - val_acc: 0.9000\n",
"Epoch 220/500\n",
"80/80 [==============================] - 0s 112us/step - loss: 0.4352 - acc: 0.9250 - val_loss: 0.4650 - val_acc: 0.9000\n",
"Epoch 221/500\n",
"80/80 [==============================] - 0s 91us/step - loss: 0.4336 - acc: 0.9250 - val_loss: 0.4634 - val_acc: 0.9000\n",
"Epoch 222/500\n",
"80/80 [==============================] - 0s 96us/step - loss: 0.4320 - acc: 0.9250 - val_loss: 0.4618 - val_acc: 0.9000\n",
"Epoch 223/500\n",
"80/80 [==============================] - 0s 144us/step - loss: 0.4305 - acc: 0.9250 - val_loss: 0.4602 - val_acc: 0.9000\n",
"Epoch 224/500\n",
"80/80 [==============================] - 0s 111us/step - loss: 0.4289 - acc: 0.9250 - val_loss: 0.4586 - val_acc: 0.9000\n",
"Epoch 225/500\n",
"80/80 [==============================] - 0s 113us/step - loss: 0.4274 - acc: 0.9250 - val_loss: 0.4571 - val_acc: 0.9250\n",
"Epoch 226/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.4258 - acc: 0.9250 - val_loss: 0.4555 - val_acc: 0.9250\n",
"Epoch 227/500\n",
"80/80 [==============================] - 0s 133us/step - loss: 0.4243 - acc: 0.9250 - val_loss: 0.4539 - val_acc: 0.9250\n",
"Epoch 228/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.4228 - acc: 0.9250 - val_loss: 0.4523 - val_acc: 0.9250\n",
"Epoch 229/500\n",
"80/80 [==============================] - 0s 111us/step - loss: 0.4213 - acc: 0.9250 - val_loss: 0.4508 - val_acc: 0.9250\n",
"Epoch 230/500\n",
"80/80 [==============================] - 0s 104us/step - loss: 0.4197 - acc: 0.9250 - val_loss: 0.4492 - val_acc: 0.9250\n",
"Epoch 231/500\n",
"80/80 [==============================] - 0s 124us/step - loss: 0.4182 - acc: 0.9250 - val_loss: 0.4477 - val_acc: 0.9250\n",
"Epoch 232/500\n",
"80/80 [==============================] - 0s 85us/step - loss: 0.4168 - acc: 0.9375 - val_loss: 0.4461 - val_acc: 0.9250\n",
"Epoch 233/500\n",
"80/80 [==============================] - 0s 87us/step - loss: 0.4153 - acc: 0.9375 - val_loss: 0.4446 - val_acc: 0.9250\n",
"Epoch 234/500\n",
"80/80 [==============================] - 0s 134us/step - loss: 0.4138 - acc: 0.9375 - val_loss: 0.4431 - val_acc: 0.9250\n",
"Epoch 235/500\n",
"80/80 [==============================] - 0s 90us/step - loss: 0.4123 - acc: 0.9375 - val_loss: 0.4416 - val_acc: 0.9250\n",
"Epoch 236/500\n",
"80/80 [==============================] - 0s 105us/step - loss: 0.4109 - acc: 0.9375 - val_loss: 0.4400 - val_acc: 0.9250\n",
"Epoch 237/500\n",
"80/80 [==============================] - 0s 107us/step - loss: 0.4094 - acc: 0.9375 - val_loss: 0.4385 - val_acc: 0.9250\n",
"Epoch 238/500\n",
"80/80 [==============================] - 0s 109us/step - loss: 0.4080 - acc: 0.9375 - val_loss: 0.4370 - val_acc: 0.9250\n",
"Epoch 239/500\n",
"80/80 [==============================] - 0s 98us/step - loss: 0.4065 - acc: 0.9375 - val_loss: 0.4355 - val_acc: 0.9250\n",
"Epoch 240/500\n",
"80/80 [==============================] - 0s 112us/step - loss: 0.4051 - acc: 0.9375 - val_loss: 0.4340 - val_acc: 0.9250\n",
"Epoch 241/500\n",
"80/80 [==============================] - 0s 96us/step - loss: 0.4037 - acc: 0.9375 - val_loss: 0.4325 - val_acc: 0.9250\n",
"Epoch 242/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.4023 - acc: 0.9375 - val_loss: 0.4310 - val_acc: 0.9250\n",
"Epoch 243/500\n",
"80/80 [==============================] - 0s 101us/step - loss: 0.4009 - acc: 0.9375 - val_loss: 0.4295 - val_acc: 0.9250\n",
"Epoch 244/500\n",
"80/80 [==============================] - 0s 110us/step - loss: 0.3994 - acc: 0.9375 - val_loss: 0.4281 - val_acc: 0.9250\n",
"Epoch 245/500\n",
"80/80 [==============================] - 0s 77us/step - loss: 0.3980 - acc: 0.9375 - val_loss: 0.4266 - val_acc: 0.9250\n",
"Epoch 246/500\n",
"80/80 [==============================] - 0s 89us/step - loss: 0.3966 - acc: 0.9500 - val_loss: 0.4251 - val_acc: 0.9250\n",
"Epoch 247/500\n",
"80/80 [==============================] - 0s 91us/step - loss: 0.3952 - acc: 0.9500 - val_loss: 0.4237 - val_acc: 0.9250\n",
"Epoch 248/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.3939 - acc: 0.9500 - val_loss: 0.4222 - val_acc: 0.9250\n",
"Epoch 249/500\n",
"80/80 [==============================] - 0s 112us/step - loss: 0.3925 - acc: 0.9500 - val_loss: 0.4208 - val_acc: 0.9250\n",
"Epoch 250/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.3911 - acc: 0.9500 - val_loss: 0.4193 - val_acc: 0.9250\n",
"Epoch 251/500\n",
"80/80 [==============================] - 0s 79us/step - loss: 0.3897 - acc: 0.9500 - val_loss: 0.4179 - val_acc: 0.9250\n",
"Epoch 252/500\n",
"80/80 [==============================] - 0s 93us/step - loss: 0.3884 - acc: 0.9500 - val_loss: 0.4165 - val_acc: 0.9250\n",
"Epoch 253/500\n",
"80/80 [==============================] - 0s 92us/step - loss: 0.3870 - acc: 0.9500 - val_loss: 0.4151 - val_acc: 0.9250\n",
"Epoch 254/500\n",
"80/80 [==============================] - 0s 95us/step - loss: 0.3856 - acc: 0.9500 - val_loss: 0.4136 - val_acc: 0.9250\n",
"Epoch 255/500\n",
"80/80 [==============================] - 0s 67us/step - loss: 0.3843 - acc: 0.9500 - val_loss: 0.4122 - val_acc: 0.9250\n",
"Epoch 256/500\n",
"80/80 [==============================] - 0s 97us/step - loss: 0.3829 - acc: 0.9500 - val_loss: 0.4108 - val_acc: 0.9250\n",
"Epoch 257/500\n",
"80/80 [==============================] - 0s 93us/step - loss: 0.3816 - acc: 0.9500 - val_loss: 0.4094 - val_acc: 0.9250\n",
"Epoch 258/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.3803 - acc: 0.9500 - val_loss: 0.4080 - val_acc: 0.9250\n",
"Epoch 259/500\n",
"80/80 [==============================] - 0s 138us/step - loss: 0.3789 - acc: 0.9500 - val_loss: 0.4066 - val_acc: 0.9250\n",
"Epoch 260/500\n",
"80/80 [==============================] - 0s 102us/step - loss: 0.3776 - acc: 0.9500 - val_loss: 0.4052 - val_acc: 0.9250\n",
"Epoch 261/500\n",
"80/80 [==============================] - 0s 115us/step - loss: 0.3763 - acc: 0.9500 - val_loss: 0.4039 - val_acc: 0.9250\n",
"Epoch 262/500\n",
"80/80 [==============================] - 0s 105us/step - loss: 0.3749 - acc: 0.9500 - val_loss: 0.4025 - val_acc: 0.9250\n",
"Epoch 263/500\n",
"80/80 [==============================] - 0s 83us/step - loss: 0.3736 - acc: 0.9500 - val_loss: 0.4011 - val_acc: 0.9250\n",
"Epoch 264/500\n",
"80/80 [==============================] - 0s 71us/step - loss: 0.3723 - acc: 0.9500 - val_loss: 0.3997 - val_acc: 0.9250\n",
"Epoch 265/500\n",
"80/80 [==============================] - 0s 83us/step - loss: 0.3710 - acc: 0.9500 - val_loss: 0.3983 - val_acc: 0.9250\n",
"Epoch 266/500\n",
"80/80 [==============================] - 0s 114us/step - loss: 0.3697 - acc: 0.9500 - val_loss: 0.3969 - val_acc: 0.9250\n",
"Epoch 267/500\n",
"80/80 [==============================] - 0s 124us/step - loss: 0.3684 - acc: 0.9500 - val_loss: 0.3956 - val_acc: 0.9250\n",
"Epoch 268/500\n",
"80/80 [==============================] - 0s 132us/step - loss: 0.3671 - acc: 0.9500 - val_loss: 0.3942 - val_acc: 0.9250\n",
"Epoch 269/500\n",
"80/80 [==============================] - 0s 92us/step - loss: 0.3658 - acc: 0.9625 - val_loss: 0.3928 - val_acc: 0.9250\n",
"Epoch 270/500\n",
"80/80 [==============================] - 0s 93us/step - loss: 0.3645 - acc: 0.9625 - val_loss: 0.3914 - val_acc: 0.9250\n",
"Epoch 271/500\n",
"80/80 [==============================] - 0s 99us/step - loss: 0.3632 - acc: 0.9625 - val_loss: 0.3901 - val_acc: 0.9250\n",
"Epoch 272/500\n",
"80/80 [==============================] - 0s 98us/step - loss: 0.3619 - acc: 0.9625 - val_loss: 0.3887 - val_acc: 0.9250\n",
"Epoch 273/500\n",
"80/80 [==============================] - 0s 78us/step - loss: 0.3606 - acc: 0.9625 - val_loss: 0.3873 - val_acc: 0.9250\n",
"Epoch 274/500\n",
"80/80 [==============================] - 0s 118us/step - loss: 0.3593 - acc: 0.9625 - val_loss: 0.3860 - val_acc: 0.9250\n",
"Epoch 275/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.3581 - acc: 0.9625 - val_loss: 0.3846 - val_acc: 0.9250\n",
"Epoch 276/500\n",
"80/80 [==============================] - 0s 85us/step - loss: 0.3568 - acc: 0.9625 - val_loss: 0.3832 - val_acc: 0.9250\n",
"Epoch 277/500\n",
"80/80 [==============================] - 0s 113us/step - loss: 0.3555 - acc: 0.9625 - val_loss: 0.3819 - val_acc: 0.9250\n",
"Epoch 278/500\n",
"80/80 [==============================] - 0s 74us/step - loss: 0.3542 - acc: 0.9625 - val_loss: 0.3805 - val_acc: 0.9250\n",
"Epoch 279/500\n",
"80/80 [==============================] - 0s 85us/step - loss: 0.3530 - acc: 0.9625 - val_loss: 0.3792 - val_acc: 0.9250\n",
"Epoch 280/500\n",
"80/80 [==============================] - 0s 119us/step - loss: 0.3517 - acc: 0.9625 - val_loss: 0.3778 - val_acc: 0.9250\n",
"Epoch 281/500\n",
"80/80 [==============================] - 0s 132us/step - loss: 0.3505 - acc: 0.9625 - val_loss: 0.3765 - val_acc: 0.9250\n",
"Epoch 282/500\n",
"80/80 [==============================] - 0s 92us/step - loss: 0.3492 - acc: 0.9625 - val_loss: 0.3751 - val_acc: 0.9250\n",
"Epoch 283/500\n",
"80/80 [==============================] - 0s 105us/step - loss: 0.3480 - acc: 0.9625 - val_loss: 0.3738 - val_acc: 0.9250\n",
"Epoch 284/500\n",
"80/80 [==============================] - 0s 76us/step - loss: 0.3467 - acc: 0.9625 - val_loss: 0.3724 - val_acc: 0.9250\n",
"Epoch 285/500\n",
"80/80 [==============================] - 0s 124us/step - loss: 0.3455 - acc: 0.9625 - val_loss: 0.3711 - val_acc: 0.9250\n",
"Epoch 286/500\n",
"80/80 [==============================] - 0s 85us/step - loss: 0.3442 - acc: 0.9625 - val_loss: 0.3697 - val_acc: 0.9250\n",
"Epoch 287/500\n",
"80/80 [==============================] - 0s 84us/step - loss: 0.3430 - acc: 0.9625 - val_loss: 0.3684 - val_acc: 0.9250\n",
"Epoch 288/500\n",
"80/80 [==============================] - 0s 82us/step - loss: 0.3417 - acc: 0.9625 - val_loss: 0.3671 - val_acc: 0.9250\n",
"Epoch 289/500\n",
"80/80 [==============================] - 0s 81us/step - loss: 0.3405 - acc: 0.9625 - val_loss: 0.3657 - val_acc: 0.9250\n",
"Epoch 290/500\n",
"80/80 [==============================] - 0s 118us/step - loss: 0.3393 - acc: 0.9625 - val_loss: 0.3644 - val_acc: 0.9250\n",
"Epoch 291/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.3381 - acc: 0.9625 - val_loss: 0.3631 - val_acc: 0.9250\n",
"Epoch 292/500\n",
"80/80 [==============================] - 0s 109us/step - loss: 0.3368 - acc: 0.9625 - val_loss: 0.3618 - val_acc: 0.9250\n",
"Epoch 293/500\n",
"80/80 [==============================] - 0s 119us/step - loss: 0.3356 - acc: 0.9625 - val_loss: 0.3604 - val_acc: 0.9250\n",
"Epoch 294/500\n",
"80/80 [==============================] - 0s 84us/step - loss: 0.3344 - acc: 0.9625 - val_loss: 0.3591 - val_acc: 0.9500\n",
"Epoch 295/500\n",
"80/80 [==============================] - 0s 93us/step - loss: 0.3332 - acc: 0.9625 - val_loss: 0.3578 - val_acc: 0.9500\n",
"Epoch 296/500\n",
"80/80 [==============================] - 0s 74us/step - loss: 0.3320 - acc: 0.9625 - val_loss: 0.3565 - val_acc: 0.9500\n",
"Epoch 297/500\n",
"80/80 [==============================] - 0s 95us/step - loss: 0.3307 - acc: 0.9625 - val_loss: 0.3552 - val_acc: 0.9500\n",
"Epoch 298/500\n",
"80/80 [==============================] - 0s 98us/step - loss: 0.3295 - acc: 0.9625 - val_loss: 0.3538 - val_acc: 0.9500\n",
"Epoch 299/500\n",
"80/80 [==============================] - 0s 96us/step - loss: 0.3283 - acc: 0.9625 - val_loss: 0.3525 - val_acc: 0.9500\n",
"Epoch 300/500\n",
"80/80 [==============================] - 0s 89us/step - loss: 0.3271 - acc: 0.9625 - val_loss: 0.3512 - val_acc: 0.9750\n",
"Epoch 301/500\n",
"80/80 [==============================] - 0s 97us/step - loss: 0.3259 - acc: 0.9625 - val_loss: 0.3499 - val_acc: 0.9750\n",
"Epoch 302/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.3247 - acc: 0.9625 - val_loss: 0.3486 - val_acc: 0.9750\n",
"Epoch 303/500\n",
"80/80 [==============================] - 0s 119us/step - loss: 0.3235 - acc: 0.9625 - val_loss: 0.3473 - val_acc: 0.9750\n",
"Epoch 304/500\n",
"80/80 [==============================] - 0s 88us/step - loss: 0.3223 - acc: 0.9625 - val_loss: 0.3460 - val_acc: 0.9750\n",
"Epoch 305/500\n",
"80/80 [==============================] - 0s 175us/step - loss: 0.3212 - acc: 0.9625 - val_loss: 0.3447 - val_acc: 0.9750\n",
"Epoch 306/500\n",
"80/80 [==============================] - 0s 99us/step - loss: 0.3200 - acc: 0.9625 - val_loss: 0.3434 - val_acc: 0.9750\n",
"Epoch 307/500\n",
"80/80 [==============================] - 0s 163us/step - loss: 0.3188 - acc: 0.9625 - val_loss: 0.3421 - val_acc: 0.9750\n",
"Epoch 308/500\n",
"80/80 [==============================] - 0s 115us/step - loss: 0.3176 - acc: 0.9625 - val_loss: 0.3408 - val_acc: 0.9750\n",
"Epoch 309/500\n",
"80/80 [==============================] - 0s 89us/step - loss: 0.3164 - acc: 0.9625 - val_loss: 0.3395 - val_acc: 0.9750\n",
"Epoch 310/500\n",
"80/80 [==============================] - 0s 107us/step - loss: 0.3153 - acc: 0.9625 - val_loss: 0.3382 - val_acc: 0.9750\n",
"Epoch 311/500\n",
"80/80 [==============================] - 0s 90us/step - loss: 0.3141 - acc: 0.9625 - val_loss: 0.3369 - val_acc: 0.9750\n",
"Epoch 312/500\n",
"80/80 [==============================] - 0s 143us/step - loss: 0.3129 - acc: 0.9625 - val_loss: 0.3357 - val_acc: 0.9750\n",
"Epoch 313/500\n",
"80/80 [==============================] - 0s 91us/step - loss: 0.3117 - acc: 0.9625 - val_loss: 0.3344 - val_acc: 0.9750\n",
"Epoch 314/500\n",
"80/80 [==============================] - 0s 110us/step - loss: 0.3106 - acc: 0.9625 - val_loss: 0.3331 - val_acc: 0.9750\n",
"Epoch 315/500\n",
"80/80 [==============================] - 0s 97us/step - loss: 0.3094 - acc: 0.9625 - val_loss: 0.3318 - val_acc: 0.9750\n",
"Epoch 316/500\n",
"80/80 [==============================] - 0s 93us/step - loss: 0.3083 - acc: 0.9625 - val_loss: 0.3305 - val_acc: 0.9750\n",
"Epoch 317/500\n",
"80/80 [==============================] - 0s 91us/step - loss: 0.3071 - acc: 0.9625 - val_loss: 0.3293 - val_acc: 0.9750\n",
"Epoch 318/500\n",
"80/80 [==============================] - 0s 121us/step - loss: 0.3060 - acc: 0.9625 - val_loss: 0.3280 - val_acc: 0.9750\n",
"Epoch 319/500\n",
"80/80 [==============================] - 0s 80us/step - loss: 0.3048 - acc: 0.9625 - val_loss: 0.3267 - val_acc: 0.9750\n",
"Epoch 320/500\n",
"80/80 [==============================] - 0s 77us/step - loss: 0.3037 - acc: 0.9625 - val_loss: 0.3255 - val_acc: 0.9750\n",
"Epoch 321/500\n",
"80/80 [==============================] - 0s 89us/step - loss: 0.3025 - acc: 0.9625 - val_loss: 0.3242 - val_acc: 0.9750\n",
"Epoch 322/500\n",
"80/80 [==============================] - 0s 161us/step - loss: 0.3014 - acc: 0.9625 - val_loss: 0.3229 - val_acc: 0.9750\n",
"Epoch 323/500\n",
"80/80 [==============================] - 0s 78us/step - loss: 0.3003 - acc: 0.9625 - val_loss: 0.3217 - val_acc: 0.9750\n",
"Epoch 324/500\n",
"80/80 [==============================] - 0s 116us/step - loss: 0.2992 - acc: 0.9625 - val_loss: 0.3204 - val_acc: 0.9750\n",
"Epoch 325/500\n",
"80/80 [==============================] - 0s 98us/step - loss: 0.2981 - acc: 0.9625 - val_loss: 0.3192 - val_acc: 0.9750\n",
"Epoch 326/500\n",
"80/80 [==============================] - 0s 96us/step - loss: 0.2969 - acc: 0.9625 - val_loss: 0.3179 - val_acc: 0.9750\n",
"Epoch 327/500\n",
"80/80 [==============================] - 0s 130us/step - loss: 0.2958 - acc: 0.9625 - val_loss: 0.3166 - val_acc: 0.9750\n",
"Epoch 328/500\n",
"80/80 [==============================] - 0s 102us/step - loss: 0.2947 - acc: 0.9625 - val_loss: 0.3154 - val_acc: 0.9750\n",
"Epoch 329/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.2936 - acc: 0.9625 - val_loss: 0.3142 - val_acc: 0.9750\n",
"Epoch 330/500\n",
"80/80 [==============================] - 0s 104us/step - loss: 0.2925 - acc: 0.9625 - val_loss: 0.3129 - val_acc: 0.9750\n",
"Epoch 331/500\n",
"80/80 [==============================] - 0s 88us/step - loss: 0.2914 - acc: 0.9625 - val_loss: 0.3117 - val_acc: 0.9750\n",
"Epoch 332/500\n",
"80/80 [==============================] - 0s 75us/step - loss: 0.2903 - acc: 0.9625 - val_loss: 0.3105 - val_acc: 0.9750\n",
"Epoch 333/500\n",
"80/80 [==============================] - 0s 90us/step - loss: 0.2892 - acc: 0.9625 - val_loss: 0.3092 - val_acc: 0.9750\n",
"Epoch 334/500\n",
"80/80 [==============================] - 0s 97us/step - loss: 0.2882 - acc: 0.9625 - val_loss: 0.3080 - val_acc: 0.9750\n",
"Epoch 335/500\n",
"80/80 [==============================] - 0s 87us/step - loss: 0.2871 - acc: 0.9625 - val_loss: 0.3068 - val_acc: 0.9750\n",
"Epoch 336/500\n",
"80/80 [==============================] - 0s 81us/step - loss: 0.2860 - acc: 0.9625 - val_loss: 0.3056 - val_acc: 0.9750\n",
"Epoch 337/500\n",
"80/80 [==============================] - 0s 67us/step - loss: 0.2849 - acc: 0.9625 - val_loss: 0.3044 - val_acc: 0.9750\n",
"Epoch 338/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.2839 - acc: 0.9625 - val_loss: 0.3032 - val_acc: 0.9750\n",
"Epoch 339/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.2828 - acc: 0.9625 - val_loss: 0.3020 - val_acc: 0.9750\n",
"Epoch 340/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.2817 - acc: 0.9625 - val_loss: 0.3008 - val_acc: 0.9750\n",
"Epoch 341/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.2807 - acc: 0.9625 - val_loss: 0.2996 - val_acc: 0.9750\n",
"Epoch 342/500\n",
"80/80 [==============================] - 0s 97us/step - loss: 0.2796 - acc: 0.9625 - val_loss: 0.2984 - val_acc: 0.9750\n",
"Epoch 343/500\n",
"80/80 [==============================] - 0s 90us/step - loss: 0.2785 - acc: 0.9625 - val_loss: 0.2972 - val_acc: 0.9750\n",
"Epoch 344/500\n",
"80/80 [==============================] - 0s 144us/step - loss: 0.2775 - acc: 0.9625 - val_loss: 0.2960 - val_acc: 0.9750\n",
"Epoch 345/500\n",
"80/80 [==============================] - 0s 86us/step - loss: 0.2765 - acc: 0.9625 - val_loss: 0.2948 - val_acc: 0.9750\n",
"Epoch 346/500\n",
"80/80 [==============================] - 0s 136us/step - loss: 0.2754 - acc: 0.9625 - val_loss: 0.2937 - val_acc: 0.9750\n",
"Epoch 347/500\n",
"80/80 [==============================] - 0s 90us/step - loss: 0.2744 - acc: 0.9625 - val_loss: 0.2925 - val_acc: 0.9750\n",
"Epoch 348/500\n",
"80/80 [==============================] - 0s 73us/step - loss: 0.2733 - acc: 0.9625 - val_loss: 0.2913 - val_acc: 0.9750\n",
"Epoch 349/500\n",
"80/80 [==============================] - 0s 110us/step - loss: 0.2723 - acc: 0.9625 - val_loss: 0.2901 - val_acc: 0.9750\n",
"Epoch 350/500\n",
"80/80 [==============================] - 0s 102us/step - loss: 0.2713 - acc: 0.9625 - val_loss: 0.2890 - val_acc: 0.9750\n",
"Epoch 351/500\n",
"80/80 [==============================] - 0s 149us/step - loss: 0.2702 - acc: 0.9625 - val_loss: 0.2878 - val_acc: 0.9750\n",
"Epoch 352/500\n",
"80/80 [==============================] - 0s 111us/step - loss: 0.2692 - acc: 0.9625 - val_loss: 0.2867 - val_acc: 0.9750\n",
"Epoch 353/500\n",
"80/80 [==============================] - 0s 151us/step - loss: 0.2682 - acc: 0.9625 - val_loss: 0.2855 - val_acc: 0.9750\n",
"Epoch 354/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.2672 - acc: 0.9625 - val_loss: 0.2843 - val_acc: 0.9750\n",
"Epoch 355/500\n",
"80/80 [==============================] - 0s 119us/step - loss: 0.2662 - acc: 0.9625 - val_loss: 0.2832 - val_acc: 0.9750\n",
"Epoch 356/500\n",
"80/80 [==============================] - 0s 115us/step - loss: 0.2652 - acc: 0.9625 - val_loss: 0.2820 - val_acc: 0.9750\n",
"Epoch 357/500\n",
"80/80 [==============================] - 0s 78us/step - loss: 0.2641 - acc: 0.9625 - val_loss: 0.2809 - val_acc: 0.9750\n",
"Epoch 358/500\n",
"80/80 [==============================] - 0s 72us/step - loss: 0.2631 - acc: 0.9625 - val_loss: 0.2798 - val_acc: 0.9750\n",
"Epoch 359/500\n",
"80/80 [==============================] - 0s 98us/step - loss: 0.2621 - acc: 0.9625 - val_loss: 0.2786 - val_acc: 0.9750\n",
"Epoch 360/500\n",
"80/80 [==============================] - 0s 97us/step - loss: 0.2611 - acc: 0.9625 - val_loss: 0.2775 - val_acc: 0.9750\n",
"Epoch 361/500\n",
"80/80 [==============================] - 0s 86us/step - loss: 0.2602 - acc: 0.9625 - val_loss: 0.2764 - val_acc: 0.9750\n",
"Epoch 362/500\n",
"80/80 [==============================] - 0s 102us/step - loss: 0.2592 - acc: 0.9625 - val_loss: 0.2752 - val_acc: 0.9750\n",
"Epoch 363/500\n",
"80/80 [==============================] - 0s 80us/step - loss: 0.2582 - acc: 0.9625 - val_loss: 0.2741 - val_acc: 0.9750\n",
"Epoch 364/500\n",
"80/80 [==============================] - 0s 73us/step - loss: 0.2572 - acc: 0.9625 - val_loss: 0.2730 - val_acc: 0.9750\n",
"Epoch 365/500\n",
"80/80 [==============================] - 0s 105us/step - loss: 0.2562 - acc: 0.9625 - val_loss: 0.2719 - val_acc: 0.9750\n",
"Epoch 366/500\n",
"80/80 [==============================] - 0s 72us/step - loss: 0.2552 - acc: 0.9625 - val_loss: 0.2708 - val_acc: 0.9750\n",
"Epoch 367/500\n",
"80/80 [==============================] - 0s 74us/step - loss: 0.2543 - acc: 0.9625 - val_loss: 0.2696 - val_acc: 0.9750\n",
"Epoch 368/500\n",
"80/80 [==============================] - 0s 91us/step - loss: 0.2533 - acc: 0.9625 - val_loss: 0.2685 - val_acc: 0.9750\n",
"Epoch 369/500\n",
"80/80 [==============================] - 0s 80us/step - loss: 0.2523 - acc: 0.9625 - val_loss: 0.2674 - val_acc: 0.9750\n",
"Epoch 370/500\n",
"80/80 [==============================] - 0s 119us/step - loss: 0.2514 - acc: 0.9625 - val_loss: 0.2663 - val_acc: 0.9750\n",
"Epoch 371/500\n",
"80/80 [==============================] - 0s 78us/step - loss: 0.2504 - acc: 0.9625 - val_loss: 0.2652 - val_acc: 0.9750\n",
"Epoch 372/500\n",
"80/80 [==============================] - 0s 84us/step - loss: 0.2495 - acc: 0.9625 - val_loss: 0.2641 - val_acc: 0.9750\n",
"Epoch 373/500\n",
"80/80 [==============================] - 0s 124us/step - loss: 0.2485 - acc: 0.9625 - val_loss: 0.2631 - val_acc: 0.9750\n",
"Epoch 374/500\n",
"80/80 [==============================] - 0s 140us/step - loss: 0.2476 - acc: 0.9625 - val_loss: 0.2620 - val_acc: 1.0000\n",
"Epoch 375/500\n",
"80/80 [==============================] - 0s 100us/step - loss: 0.2466 - acc: 0.9625 - val_loss: 0.2609 - val_acc: 1.0000\n",
"Epoch 376/500\n",
"80/80 [==============================] - 0s 110us/step - loss: 0.2457 - acc: 0.9625 - val_loss: 0.2598 - val_acc: 1.0000\n",
"Epoch 377/500\n",
"80/80 [==============================] - 0s 107us/step - loss: 0.2447 - acc: 0.9625 - val_loss: 0.2587 - val_acc: 1.0000\n",
"Epoch 378/500\n",
"80/80 [==============================] - 0s 83us/step - loss: 0.2438 - acc: 0.9625 - val_loss: 0.2576 - val_acc: 1.0000\n",
"Epoch 379/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.2428 - acc: 0.9625 - val_loss: 0.2566 - val_acc: 1.0000\n",
"Epoch 380/500\n",
"80/80 [==============================] - 0s 92us/step - loss: 0.2419 - acc: 0.9625 - val_loss: 0.2555 - val_acc: 1.0000\n",
"Epoch 381/500\n",
"80/80 [==============================] - 0s 98us/step - loss: 0.2410 - acc: 0.9625 - val_loss: 0.2544 - val_acc: 1.0000\n",
"Epoch 382/500\n",
"80/80 [==============================] - 0s 111us/step - loss: 0.2400 - acc: 0.9625 - val_loss: 0.2533 - val_acc: 1.0000\n",
"Epoch 383/500\n",
"80/80 [==============================] - 0s 116us/step - loss: 0.2391 - acc: 0.9625 - val_loss: 0.2523 - val_acc: 1.0000\n",
"Epoch 384/500\n",
"80/80 [==============================] - 0s 113us/step - loss: 0.2382 - acc: 0.9625 - val_loss: 0.2512 - val_acc: 1.0000\n",
"Epoch 385/500\n",
"80/80 [==============================] - 0s 85us/step - loss: 0.2373 - acc: 0.9625 - val_loss: 0.2501 - val_acc: 1.0000\n",
"Epoch 386/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.2364 - acc: 0.9625 - val_loss: 0.2491 - val_acc: 1.0000\n",
"Epoch 387/500\n",
"80/80 [==============================] - 0s 128us/step - loss: 0.2354 - acc: 0.9625 - val_loss: 0.2480 - val_acc: 1.0000\n",
"Epoch 388/500\n",
"80/80 [==============================] - 0s 129us/step - loss: 0.2345 - acc: 0.9625 - val_loss: 0.2470 - val_acc: 1.0000\n",
"Epoch 389/500\n",
"80/80 [==============================] - 0s 105us/step - loss: 0.2337 - acc: 0.9625 - val_loss: 0.2460 - val_acc: 1.0000\n",
"Epoch 390/500\n",
"80/80 [==============================] - 0s 90us/step - loss: 0.2328 - acc: 0.9625 - val_loss: 0.2449 - val_acc: 1.0000\n",
"Epoch 391/500\n",
"80/80 [==============================] - 0s 94us/step - loss: 0.2319 - acc: 0.9625 - val_loss: 0.2439 - val_acc: 1.0000\n",
"Epoch 392/500\n",
"80/80 [==============================] - 0s 108us/step - loss: 0.2310 - acc: 0.9625 - val_loss: 0.2429 - val_acc: 1.0000\n",
"Epoch 393/500\n",
"80/80 [==============================] - 0s 110us/step - loss: 0.2301 - acc: 0.9625 - val_loss: 0.2419 - val_acc: 1.0000\n",
"Epoch 394/500\n",
"80/80 [==============================] - 0s 112us/step - loss: 0.2293 - acc: 0.9625 - val_loss: 0.2410 - val_acc: 1.0000\n",
"Epoch 395/500\n",
"80/80 [==============================] - 0s 98us/step - loss: 0.2284 - acc: 0.9625 - val_loss: 0.2400 - val_acc: 1.0000\n",
"Epoch 396/500\n",
"80/80 [==============================] - 0s 88us/step - loss: 0.2275 - acc: 0.9625 - val_loss: 0.2390 - val_acc: 1.0000\n",
"Epoch 397/500\n",
"80/80 [==============================] - 0s 134us/step - loss: 0.2267 - acc: 0.9625 - val_loss: 0.2380 - val_acc: 1.0000\n",
"Epoch 398/500\n",
"80/80 [==============================] - 0s 123us/step - loss: 0.2258 - acc: 0.9625 - val_loss: 0.2371 - val_acc: 1.0000\n",
"Epoch 399/500\n",
"80/80 [==============================] - 0s 91us/step - loss: 0.2250 - acc: 0.9625 - val_loss: 0.2361 - val_acc: 1.0000\n",
"Epoch 400/500\n",
"80/80 [==============================] - 0s 100us/step - loss: 0.2241 - acc: 0.9625 - val_loss: 0.2352 - val_acc: 1.0000\n",
"Epoch 401/500\n",
"80/80 [==============================] - 0s 99us/step - loss: 0.2233 - acc: 0.9625 - val_loss: 0.2343 - val_acc: 1.0000\n",
"Epoch 402/500\n",
"80/80 [==============================] - 0s 74us/step - loss: 0.2225 - acc: 0.9625 - val_loss: 0.2333 - val_acc: 1.0000\n",
"Epoch 403/500\n",
"80/80 [==============================] - 0s 117us/step - loss: 0.2216 - acc: 0.9625 - val_loss: 0.2324 - val_acc: 1.0000\n",
"Epoch 404/500\n",
"80/80 [==============================] - 0s 91us/step - loss: 0.2208 - acc: 0.9625 - val_loss: 0.2315 - val_acc: 1.0000\n",
"Epoch 405/500\n",
"80/80 [==============================] - 0s 128us/step - loss: 0.2200 - acc: 0.9625 - val_loss: 0.2305 - val_acc: 1.0000\n",
"Epoch 406/500\n",
"80/80 [==============================] - 0s 91us/step - loss: 0.2191 - acc: 0.9625 - val_loss: 0.2296 - val_acc: 1.0000\n",
"Epoch 407/500\n",
"80/80 [==============================] - 0s 92us/step - loss: 0.2183 - acc: 0.9625 - val_loss: 0.2287 - val_acc: 1.0000\n",
"Epoch 408/500\n",
"80/80 [==============================] - 0s 133us/step - loss: 0.2175 - acc: 0.9750 - val_loss: 0.2278 - val_acc: 1.0000\n",
"Epoch 409/500\n",
"80/80 [==============================] - 0s 155us/step - loss: 0.2167 - acc: 0.9750 - val_loss: 0.2269 - val_acc: 1.0000\n",
"Epoch 410/500\n",
"80/80 [==============================] - 0s 134us/step - loss: 0.2159 - acc: 0.9750 - val_loss: 0.2260 - val_acc: 1.0000\n",
"Epoch 411/500\n",
"80/80 [==============================] - 0s 116us/step - loss: 0.2151 - acc: 0.9750 - val_loss: 0.2251 - val_acc: 1.0000\n",
"Epoch 412/500\n",
"80/80 [==============================] - 0s 97us/step - loss: 0.2143 - acc: 0.9750 - val_loss: 0.2242 - val_acc: 1.0000\n",
"Epoch 413/500\n",
"80/80 [==============================] - 0s 122us/step - loss: 0.2135 - acc: 0.9750 - val_loss: 0.2233 - val_acc: 1.0000\n",
"Epoch 414/500\n",
"80/80 [==============================] - 0s 97us/step - loss: 0.2127 - acc: 0.9750 - val_loss: 0.2224 - val_acc: 1.0000\n",
"Epoch 415/500\n",
"80/80 [==============================] - 0s 104us/step - loss: 0.2119 - acc: 0.9750 - val_loss: 0.2215 - val_acc: 1.0000\n",
"Epoch 416/500\n",
"80/80 [==============================] - 0s 91us/step - loss: 0.2112 - acc: 0.9750 - val_loss: 0.2206 - val_acc: 1.0000\n",
"Epoch 417/500\n",
"80/80 [==============================] - 0s 78us/step - loss: 0.2104 - acc: 0.9750 - val_loss: 0.2198 - val_acc: 1.0000\n",
"Epoch 418/500\n",
"80/80 [==============================] - 0s 89us/step - loss: 0.2096 - acc: 0.9750 - val_loss: 0.2189 - val_acc: 1.0000\n",
"Epoch 419/500\n",
"80/80 [==============================] - 0s 93us/step - loss: 0.2089 - acc: 0.9750 - val_loss: 0.2180 - val_acc: 1.0000\n",
"Epoch 420/500\n",
"80/80 [==============================] - 0s 95us/step - loss: 0.2081 - acc: 0.9750 - val_loss: 0.2172 - val_acc: 1.0000\n",
"Epoch 421/500\n",
"80/80 [==============================] - 0s 170us/step - loss: 0.2073 - acc: 0.9750 - val_loss: 0.2163 - val_acc: 1.0000\n",
"Epoch 422/500\n",
"80/80 [==============================] - 0s 108us/step - loss: 0.2066 - acc: 0.9750 - val_loss: 0.2155 - val_acc: 1.0000\n",
"Epoch 423/500\n",
"80/80 [==============================] - 0s 94us/step - loss: 0.2059 - acc: 0.9750 - val_loss: 0.2146 - val_acc: 1.0000\n",
"Epoch 424/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.2051 - acc: 0.9750 - val_loss: 0.2138 - val_acc: 1.0000\n",
"Epoch 425/500\n",
"80/80 [==============================] - 0s 115us/step - loss: 0.2044 - acc: 0.9750 - val_loss: 0.2129 - val_acc: 1.0000\n",
"Epoch 426/500\n",
"80/80 [==============================] - 0s 87us/step - loss: 0.2037 - acc: 0.9750 - val_loss: 0.2121 - val_acc: 1.0000\n",
"Epoch 427/500\n",
"80/80 [==============================] - 0s 71us/step - loss: 0.2029 - acc: 0.9750 - val_loss: 0.2113 - val_acc: 1.0000\n",
"Epoch 428/500\n",
"80/80 [==============================] - 0s 107us/step - loss: 0.2022 - acc: 0.9750 - val_loss: 0.2105 - val_acc: 1.0000\n",
"Epoch 429/500\n",
"80/80 [==============================] - 0s 76us/step - loss: 0.2015 - acc: 0.9750 - val_loss: 0.2096 - val_acc: 1.0000\n",
"Epoch 430/500\n",
"80/80 [==============================] - 0s 71us/step - loss: 0.2008 - acc: 0.9750 - val_loss: 0.2088 - val_acc: 1.0000\n",
"Epoch 431/500\n",
"80/80 [==============================] - 0s 87us/step - loss: 0.2001 - acc: 0.9750 - val_loss: 0.2080 - val_acc: 1.0000\n",
"Epoch 432/500\n",
"80/80 [==============================] - 0s 90us/step - loss: 0.1994 - acc: 0.9750 - val_loss: 0.2072 - val_acc: 1.0000\n",
"Epoch 433/500\n",
"80/80 [==============================] - 0s 142us/step - loss: 0.1987 - acc: 0.9750 - val_loss: 0.2064 - val_acc: 1.0000\n",
"Epoch 434/500\n",
"80/80 [==============================] - 0s 97us/step - loss: 0.1980 - acc: 0.9750 - val_loss: 0.2056 - val_acc: 1.0000\n",
"Epoch 435/500\n",
"80/80 [==============================] - 0s 89us/step - loss: 0.1973 - acc: 0.9750 - val_loss: 0.2048 - val_acc: 1.0000\n",
"Epoch 436/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.1966 - acc: 0.9750 - val_loss: 0.2040 - val_acc: 1.0000\n",
"Epoch 437/500\n",
"80/80 [==============================] - 0s 107us/step - loss: 0.1959 - acc: 0.9750 - val_loss: 0.2033 - val_acc: 1.0000\n",
"Epoch 438/500\n",
"80/80 [==============================] - 0s 118us/step - loss: 0.1953 - acc: 0.9750 - val_loss: 0.2025 - val_acc: 1.0000\n",
"Epoch 439/500\n",
"80/80 [==============================] - 0s 83us/step - loss: 0.1946 - acc: 0.9750 - val_loss: 0.2017 - val_acc: 1.0000\n",
"Epoch 440/500\n",
"80/80 [==============================] - 0s 92us/step - loss: 0.1939 - acc: 0.9750 - val_loss: 0.2010 - val_acc: 1.0000\n",
"Epoch 441/500\n",
"80/80 [==============================] - 0s 123us/step - loss: 0.1933 - acc: 0.9750 - val_loss: 0.2002 - val_acc: 1.0000\n",
"Epoch 442/500\n",
"80/80 [==============================] - 0s 142us/step - loss: 0.1926 - acc: 0.9750 - val_loss: 0.1994 - val_acc: 1.0000\n",
"Epoch 443/500\n",
"80/80 [==============================] - 0s 125us/step - loss: 0.1919 - acc: 0.9750 - val_loss: 0.1987 - val_acc: 1.0000\n",
"Epoch 444/500\n",
"80/80 [==============================] - 0s 106us/step - loss: 0.1913 - acc: 0.9750 - val_loss: 0.1979 - val_acc: 1.0000\n",
"Epoch 445/500\n",
"80/80 [==============================] - 0s 107us/step - loss: 0.1906 - acc: 0.9750 - val_loss: 0.1972 - val_acc: 1.0000\n",
"Epoch 446/500\n",
"80/80 [==============================] - 0s 87us/step - loss: 0.1900 - acc: 0.9750 - val_loss: 0.1964 - val_acc: 1.0000\n",
"Epoch 447/500\n",
"80/80 [==============================] - 0s 92us/step - loss: 0.1893 - acc: 0.9750 - val_loss: 0.1957 - val_acc: 1.0000\n",
"Epoch 448/500\n",
"80/80 [==============================] - 0s 98us/step - loss: 0.1887 - acc: 0.9750 - val_loss: 0.1949 - val_acc: 1.0000\n",
"Epoch 449/500\n",
"80/80 [==============================] - 0s 129us/step - loss: 0.1881 - acc: 0.9750 - val_loss: 0.1942 - val_acc: 1.0000\n",
"Epoch 450/500\n",
"80/80 [==============================] - 0s 89us/step - loss: 0.1874 - acc: 0.9750 - val_loss: 0.1935 - val_acc: 1.0000\n",
"Epoch 451/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.1868 - acc: 0.9750 - val_loss: 0.1927 - val_acc: 1.0000\n",
"Epoch 452/500\n",
"80/80 [==============================] - 0s 101us/step - loss: 0.1862 - acc: 0.9750 - val_loss: 0.1920 - val_acc: 1.0000\n",
"Epoch 453/500\n",
"80/80 [==============================] - 0s 117us/step - loss: 0.1855 - acc: 0.9750 - val_loss: 0.1913 - val_acc: 1.0000\n",
"Epoch 454/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.1849 - acc: 0.9750 - val_loss: 0.1906 - val_acc: 1.0000\n",
"Epoch 455/500\n",
"80/80 [==============================] - 0s 123us/step - loss: 0.1843 - acc: 0.9750 - val_loss: 0.1898 - val_acc: 1.0000\n",
"Epoch 456/500\n",
"80/80 [==============================] - 0s 119us/step - loss: 0.1837 - acc: 0.9750 - val_loss: 0.1891 - val_acc: 1.0000\n",
"Epoch 457/500\n",
"80/80 [==============================] - 0s 119us/step - loss: 0.1830 - acc: 0.9750 - val_loss: 0.1884 - val_acc: 1.0000\n",
"Epoch 458/500\n",
"80/80 [==============================] - 0s 109us/step - loss: 0.1824 - acc: 0.9750 - val_loss: 0.1877 - val_acc: 1.0000\n",
"Epoch 459/500\n",
"80/80 [==============================] - 0s 84us/step - loss: 0.1818 - acc: 0.9750 - val_loss: 0.1870 - val_acc: 1.0000\n",
"Epoch 460/500\n",
"80/80 [==============================] - 0s 88us/step - loss: 0.1812 - acc: 0.9750 - val_loss: 0.1862 - val_acc: 1.0000\n",
"Epoch 461/500\n",
"80/80 [==============================] - 0s 86us/step - loss: 0.1806 - acc: 0.9750 - val_loss: 0.1855 - val_acc: 1.0000\n",
"Epoch 462/500\n",
"80/80 [==============================] - 0s 86us/step - loss: 0.1800 - acc: 0.9750 - val_loss: 0.1848 - val_acc: 1.0000\n",
"Epoch 463/500\n",
"80/80 [==============================] - 0s 113us/step - loss: 0.1794 - acc: 0.9750 - val_loss: 0.1841 - val_acc: 1.0000\n",
"Epoch 464/500\n",
"80/80 [==============================] - 0s 127us/step - loss: 0.1788 - acc: 0.9750 - val_loss: 0.1834 - val_acc: 1.0000\n",
"Epoch 465/500\n",
"80/80 [==============================] - 0s 129us/step - loss: 0.1782 - acc: 0.9750 - val_loss: 0.1827 - val_acc: 1.0000\n",
"Epoch 466/500\n",
"80/80 [==============================] - 0s 110us/step - loss: 0.1776 - acc: 0.9750 - val_loss: 0.1820 - val_acc: 1.0000\n",
"Epoch 467/500\n",
"80/80 [==============================] - 0s 112us/step - loss: 0.1770 - acc: 0.9750 - val_loss: 0.1813 - val_acc: 1.0000\n",
"Epoch 468/500\n",
"80/80 [==============================] - 0s 118us/step - loss: 0.1764 - acc: 0.9750 - val_loss: 0.1806 - val_acc: 1.0000\n",
"Epoch 469/500\n",
"80/80 [==============================] - 0s 151us/step - loss: 0.1758 - acc: 0.9750 - val_loss: 0.1799 - val_acc: 1.0000\n",
"Epoch 470/500\n",
"80/80 [==============================] - 0s 144us/step - loss: 0.1752 - acc: 0.9750 - val_loss: 0.1792 - val_acc: 1.0000\n",
"Epoch 471/500\n",
"80/80 [==============================] - 0s 86us/step - loss: 0.1746 - acc: 0.9750 - val_loss: 0.1785 - val_acc: 1.0000\n",
"Epoch 472/500\n",
"80/80 [==============================] - 0s 121us/step - loss: 0.1740 - acc: 0.9750 - val_loss: 0.1778 - val_acc: 1.0000\n",
"Epoch 473/500\n",
"80/80 [==============================] - 0s 127us/step - loss: 0.1734 - acc: 0.9750 - val_loss: 0.1770 - val_acc: 1.0000\n",
"Epoch 474/500\n",
"80/80 [==============================] - 0s 75us/step - loss: 0.1728 - acc: 0.9750 - val_loss: 0.1763 - val_acc: 1.0000\n",
"Epoch 475/500\n",
"80/80 [==============================] - 0s 73us/step - loss: 0.1723 - acc: 0.9750 - val_loss: 0.1756 - val_acc: 1.0000\n",
"Epoch 476/500\n",
"80/80 [==============================] - 0s 113us/step - loss: 0.1717 - acc: 0.9750 - val_loss: 0.1749 - val_acc: 1.0000\n",
"Epoch 477/500\n",
"80/80 [==============================] - 0s 87us/step - loss: 0.1711 - acc: 0.9750 - val_loss: 0.1742 - val_acc: 1.0000\n",
"Epoch 478/500\n",
"80/80 [==============================] - 0s 115us/step - loss: 0.1705 - acc: 0.9750 - val_loss: 0.1735 - val_acc: 1.0000\n",
"Epoch 479/500\n",
"80/80 [==============================] - 0s 134us/step - loss: 0.1699 - acc: 0.9750 - val_loss: 0.1728 - val_acc: 1.0000\n",
"Epoch 480/500\n",
"80/80 [==============================] - 0s 100us/step - loss: 0.1694 - acc: 0.9750 - val_loss: 0.1721 - val_acc: 1.0000\n",
"Epoch 481/500\n",
"80/80 [==============================] - 0s 108us/step - loss: 0.1688 - acc: 0.9750 - val_loss: 0.1714 - val_acc: 1.0000\n",
"Epoch 482/500\n",
"80/80 [==============================] - 0s 113us/step - loss: 0.1682 - acc: 0.9750 - val_loss: 0.1707 - val_acc: 1.0000\n",
"Epoch 483/500\n",
"80/80 [==============================] - 0s 98us/step - loss: 0.1676 - acc: 0.9750 - val_loss: 0.1699 - val_acc: 1.0000\n",
"Epoch 484/500\n",
"80/80 [==============================] - 0s 88us/step - loss: 0.1670 - acc: 0.9750 - val_loss: 0.1692 - val_acc: 1.0000\n",
"Epoch 485/500\n",
"80/80 [==============================] - 0s 107us/step - loss: 0.1665 - acc: 0.9750 - val_loss: 0.1685 - val_acc: 1.0000\n",
"Epoch 486/500\n",
"80/80 [==============================] - 0s 68us/step - loss: 0.1659 - acc: 0.9750 - val_loss: 0.1678 - val_acc: 1.0000\n",
"Epoch 487/500\n",
"80/80 [==============================] - 0s 63us/step - loss: 0.1653 - acc: 0.9750 - val_loss: 0.1670 - val_acc: 1.0000\n",
"Epoch 488/500\n",
"80/80 [==============================] - 0s 112us/step - loss: 0.1647 - acc: 0.9750 - val_loss: 0.1663 - val_acc: 1.0000\n",
"Epoch 489/500\n",
"80/80 [==============================] - 0s 63us/step - loss: 0.1641 - acc: 0.9750 - val_loss: 0.1655 - val_acc: 1.0000\n",
"Epoch 490/500\n",
"80/80 [==============================] - 0s 100us/step - loss: 0.1636 - acc: 0.9750 - val_loss: 0.1648 - val_acc: 1.0000\n",
"Epoch 491/500\n",
"80/80 [==============================] - 0s 123us/step - loss: 0.1630 - acc: 0.9750 - val_loss: 0.1641 - val_acc: 1.0000\n",
"Epoch 492/500\n",
"80/80 [==============================] - 0s 96us/step - loss: 0.1624 - acc: 0.9750 - val_loss: 0.1633 - val_acc: 1.0000\n",
"Epoch 493/500\n",
"80/80 [==============================] - 0s 92us/step - loss: 0.1618 - acc: 0.9750 - val_loss: 0.1626 - val_acc: 1.0000\n",
"Epoch 494/500\n",
"80/80 [==============================] - 0s 87us/step - loss: 0.1613 - acc: 0.9750 - val_loss: 0.1619 - val_acc: 1.0000\n",
"Epoch 495/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.1607 - acc: 0.9750 - val_loss: 0.1611 - val_acc: 1.0000\n",
"Epoch 496/500\n",
"80/80 [==============================] - 0s 103us/step - loss: 0.1601 - acc: 0.9750 - val_loss: 0.1604 - val_acc: 1.0000\n",
"Epoch 497/500\n",
"80/80 [==============================] - 0s 107us/step - loss: 0.1596 - acc: 0.9750 - val_loss: 0.1597 - val_acc: 1.0000\n",
"Epoch 498/500\n",
"80/80 [==============================] - 0s 128us/step - loss: 0.1590 - acc: 0.9750 - val_loss: 0.1590 - val_acc: 1.0000\n",
"Epoch 499/500\n",
"80/80 [==============================] - 0s 74us/step - loss: 0.1585 - acc: 0.9750 - val_loss: 0.1583 - val_acc: 1.0000\n",
"Epoch 500/500\n",
"80/80 [==============================] - 0s 105us/step - loss: 0.1580 - acc: 0.9750 - val_loss: 0.1576 - val_acc: 1.0000\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "52UlQRKifb-G",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 565
},
"outputId": "f76e316c-2e01-4e24-c99e-4d044286932d"
},
"source": [
"#print(history.history.keys())\n",
"\n",
"plt.plot(history.history['acc'])\n",
"plt.plot(history.history['val_acc'])\n",
"plt.ylabel('acuracia')\n",
"plt.xlabel('epoca')\n",
"plt.legend(['treino', 'validacao'], loc = 'upper left')\n",
"plt.show()\n",
"\n",
"plt.plot(history.history['loss'])\n",
"plt.plot(history.history['val_loss'])\n",
"plt.ylabel('perda')\n",
"plt.xlabel('epoca')\n",
"plt.legend(['treino', 'validacao'], loc = 'upper left')\n",
"plt.show()"
],
"execution_count": 226,
"outputs": [
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAY8AAAESCAYAAAAFYll6AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi40LCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcv7US4rQAAIABJREFUeJzt3Xl8VNX9//HXzCQhGyQkZJkQNoNC\nFBAEqVZEBTSoweBuwa1WrKKitigB/RJwj1p/WtT6lbYoxdYWqVCiRYp+FaVKFanKvghhmyQkIfs2\nc+f+/oiMTcMygWTmJnk/Hw8ej8ncO/d+JhPyzjn33HNspmmaiIiItIA92AWIiEj7o/AQEZEWU3iI\niEiLKTxERKTFFB4iItJiCg8REWkxhYeIiLSYwkNERFpM4SEiIi2m8BARkRZTeIiISIuFBLuA1lJX\nV8eGDRtISEjA4XAEuxwRkXbBMAwOHjzIoEGDCA8P9/t1HSY8NmzYwOTJk4NdhohIu/Tmm28yYsQI\nv/fvMOGRkJAANH4DkpOTg1yNiEj7UFBQwOTJk32/Q/3VYcLjcFdVcnIyqampQa5GRKR9aWl3vy6Y\ni4hIiyk8RESkxTpMt9WxeL1e9u3bR3V1dbBL6fCioqJITU3FbtffJSIdWUDCIzc3l/fff5/9+/ez\nfPlyTjvttGb7GIbB448/zieffILNZuOOO+7g2muvbZXzFxcXY7PZGDBggH6ptSGv18v+/fspLi4m\nMTEx2OWISBsKyG/SsWPH8uabb9KzZ8+j7rN8+XL27NnDypUr+fOf/8y8efPYt29fq5y/rKyMpKQk\nBUcbs9vtJCUlUV5eHuxSRKSNBaTl4c/Y4ffee49rr70Wu91OXFwc48aNY8WKFdx+++0nfX7DMAgN\nDT3p48jxhYaG4vF4gl2GtGOmaQJmsMtod2y2wP5xbJlrHi6Xi5SUFN/XTqeTgoKCVju+zWZrtWPJ\n0en7LCdr/+8foqHgu2CX0a7YHKE4b5xLeOqAgJ3TMuHRmcybN4+f//znhIWFteh1hYWFTJ8+nT/8\n4Q9tVJlIcJmGm4aC74joN4TwXunBLqfdsIWEEdojsPe3WSY8nE4nBw4cYMiQIUDzlkhH8tJLL3Hb\nbbc1Cw+Px0NIyNE/kqSkJAWHdGhGdQUAUQPPpdtZlwS5GjkWy4TH+PHjWbx4MZdccgllZWWsWrWK\nN998M9hltbq5c+cCcMMNN2C32+nZsyfdu3dn165dVFdXs2zZMr7++muee+4539DiadOmceGFF7Jv\n3z6uvvpq1q5dC8CAAQN44IEH+Mc//kFZWRkPPfQQGRkZAKxevZrnn38ewzCIi4vj0UcfpU+fPsF5\n0yJ+MmoaB1s4ImOCXIkcT0DC4/HHH2flypUUFxfz05/+lNjYWN59912mTJnCtGnTGDx4MFlZWXz9\n9ddccknjXxt33303vXr1avVaPvxyD//4155WPy7AxSN7M2ZE72Puk5OTwx//+EfeeustoqKiyM7O\nZvPmzSxatIjIyEgqKirIycnhtddeIzExkaKiIq655hry8vKOeLzo6GiWLFnCunXruP/++8nIyKCk\npISHHnqIRYsW0b9/fxYvXsz06dNZvHhxW7xtkVZjVH8fHlEKD6sLSHg88sgjPPLII82enz9/vu+x\nw+Hw/VXe2YwfP57IyEgA1q9fz759+5gyZYpvu81mIz8/n+7duzd77WWXXQbA0KFDKSoqor6+nq+/\n/pqBAwfSv39/AK6++mrmzp1LVVUV0dHRAXhHIifG1/KI6hbkSuR4LNNtFShjRhy/dRBoh4MDGocp\nDhgw4Ihddke676VLly7AD5OaaZistGeHr3mo28r6Ol14WEFUVBRVVVVERUU12zZs2DDy8/P5/PPP\nOeeccwD45ptvGDx4sN/HHzp0KLNmzWLnzp2k
paXxzjvvcPrpp6vV0cl4PQ3s/+10PJWlwS7Fb6bh\nxuYIxdYl8vg7+6m4rJbpv15NTV3H/cMqLNROzu3ncGqv5r0TbUXhEQS33XYbN998M+Hh4c3uuo+J\nieGVV17h2Wef5cknn8TtdtOrVy9effVVv48fFxfHM888w/Tp0/F4PMTFxfHss8+29tsQizMqinGX\n7Cci7SzC4tvPyMWwpL6ter/Qpl0llJTXMe7s3kRFdMybhcNC7SR2b73A9YfCIwjuuece7rnnnqNu\nHzJkyBGH5KampvpGWgFs3bq1yfb//Hr06NGMHj26FaqV9urwxeeYkZcTecrQIFcTPPkFldjtNqZe\nM4TQEC1R3VoUHiIW89WWIlzFVSd9nK4Ht5EK/HNbJfUHOu8d219tKaRnQpSCo5UpPEQspK7ew9zf\nfY7Xe/JzO53XZSvXRcGCVfuoMNvPdY+2kHGO7nFqbQoPEQvZU1iJ12vywE+GMXxg0kkdq2ZtDbWf\nr+Xl/7kCm6Nz/1fvGtmyqYDk+Dr3T5SIxewpaByqOrBPHDHRXU7qWG53FfUR0cTGNB/VJ3KyFB4i\nbcDwmjzy6hoKilu2emVNvYewEDtJ8Sf2C7/047eo/PpDALy1lYTEJJzQcUSOR+Eh0gYKS6rZsLOE\nIf17kBTXsiGUp/bujsN+YkNVa3asw2azEdGvcYLRiE48ykralsJDpA3kf9/9dMvlp3Na78DduGVU\nlxPR70wSMu8O2Dmlc1J4tAM33XQTt912GxdddBEvvvgip556qm9Oq/80b948ampqmDFjRhCq7FzK\nq+pZv+0gmEceFfXFpkIAeid1DVhNpmli1JRrXigJCIVHO3PfffcFuwQB/rRyK++u2XXMffo6uxHe\nJXD/xcz6GjA8mpFWAqLThUflNx/5Lii2tq5njqHrkAuPuc8rr7xCWVkZs2bNAuDQoUOMHz+e3Nxc\nfvOb31BfX49hGNx5551cfvnlzV6fnZ3NoEGDuPHGG6msrOThhx9m27ZtJCQkkJycTI8ePQD47LPP\neOGFF454vMLCQh5//HF2794NQGZmJj//+c9Zvnw5CxcuxO12AzBjxgzOPfdcoHF+rSeeeIKamhoi\nIyN5+OGHfQt3dUa7DpRzaq9Ypk8eftR9uncLD2BF4KnWWhgSOJ0uPIJt4sSJXHfddTz00EOEhISQ\nl5fHmDFjGDZsGH/84x9xOBwUFxdz1VVXMWrUKGJijv6L4OWXXyYqKooVK1ZQWlrKVVddxaWXXgrA\n6aefftTjTZ8+nQsuuIB58+YBUFraeAPZqFGjyMzMxGaz8d1333HrrbeyevVqGhoamDZtGk899RTn\nnnsu//znP5k2bRorV65s8VK6HYFpmuS7Khg9LJWUBOtMNun1TWceG+RKpDPodOHRdciFx20dtKWU\nlBT69+/Pxx9/zNixY3nnnXeYOXMmpaWlzJo1i/z8fBwOB+Xl5ezatYuhQ48+Wmbt2rW+dVLi4uK4\n+OKLfduOdrxTTz2V9evXs2DBAt++cXFxAOzdu5df/vKXFBYWEhISQnFxMQcPHqS0tJTQ0FBfK+TH\nP/4xoaGh7Nq1iwEDBrTFt6nV5X36HX//bPcJv/58z2ecYn7/ehPuDfPSfW8X9r5mnfA062sBcETq\nmoe0vU4XHlZw5ZVXsnTpUlJTU6msrGTEiBHceuutjBkzhpdeegmbzUZGRgb19fUnfI45c+a0+Hi/\n+MUvyM7OZty4cXi9Xs4888yTqsFKVn2xh+paNwP6nNjIpzNc2zFtdg6FNd71bbfZiHZ2IyzUWvMl\nRaQNIyyh9VfgFPlvCo8guOSSS3jqqadYsGABV155JTabjcrKSnr27InNZmPNmjXk5+cf9zjnnHMO\nf/3rXxk+fDiHDh1i1apVjB8/HuCox4uKimLYsGG8/vrr3H777UBjKyUuLo7KykpSU1MBWLJkCQ0N\nDQD069cP
t9vtW2Pks88+w+Px0K9fv7b49rQ6w2uyt6CSy87rx8+uGNTi15umye7cOrqNvJwhY25q\ngwpF2h+FRxBEREQwduxY/vrXv/LBBx8A8Mtf/pK5c+cyb948Bg8e7Fd30NSpU5k1axbjx48nISGB\nESNG+LYd63jPPfccc+fOJTMzE7vdTmZmJnfccQczZ85k6tSpxMTEcP755xMb29h3HhYWxq9//esm\nF8xffPFFy17v2PhdCdV1bt/X5ZX1NHi89Ek+sWGzZkMtpuHWhWiR/2AzzaMMVG9lu3btIjs7m7Ky\nMmJjY8nNzaVv375N9jl48CCzZ89m3759eDwe7rzzTrKysvw6/r59+xg7diwffPCB76/nwzZv3kx6\nenprvRU5jmB+v3cdKGfarz464raXpl9EH2fLrwe4S13s/c09JFxxL10HX3hyBYpYzLF+dx5LwFoe\nOTk5TJo0iaysLJYtW8bs2bNZuHBhk32efvppBg0axG9+8xvf6KGRI0fidDoDVaa0c9/tbxxxNOvW\nkfSI/WGobGR4KD1PcGSUUaMhsCL/zR6Ik5SUlLBp0yYyMzOBxvsKNm3a5BsietiWLVs4//zzgcYR\nQAMHDuTvf/97IEqUDiK/oJLQEDsjz0jm1F7dff9ONDjghxX5dPOdyA8C0vJwuVwkJSXhcDSOTHE4\nHCQmJuJyuXzDRAHOOOMM3nvvPQYPHsy+fftYv359i5pRx2KaZquuiyxHFqBe0CPadaCcdz7awSk9\nY5pNLGh6DYqWvoCnsuWLInnV8hBpxlIXzLOzs3nyySfJysoiJSWFc8891xc4J8PhcOB2uy17gbcj\ncbvdhIQE58fqs29dAFz2477NtnkqSqje/E9CE3oR0sKb6OzdetAlNR1H18BNcChidQH5X+50Oiks\nLMQwDBwOB4ZhUFRU1OxaRlxcHM8995zv6ylTptC/f/+TPn9sbCyFhYX07NkTuz0gPXWdktfrpbCw\n8Jh3xbel/IIKUnpEkXFO32bbjOoyAOIvuonIU48+pYiI+Ccg4REfH096ejp5eXlkZWWRl5dHenp6\nky4raJznqWvXroSEhPDZZ5+xbds2fv3rX5/0+Xv06MG+ffvYunXrSR9Lji0qKso3v9aJ8hheduwt\nw2jhOt4795VzSs8jB5euW4i0roD1L8yZM4fs7GxeeeUVunXrRm5uLtDYupg2bRqDBw/2Tb5nt9vp\n3r07r776KhERESd9brvdTu/evU/6OBIY/1ibzytLvjmh1178oyN/zr4RUwoPkVYRsPBIS0tj8eLF\nzZ6fP3++7/EFF1zABRdcEKiSxKK27y2ja2QYM24acfyd/4Pdbjvq9CNGdePiTHbN+yTSKix1wVwE\nYE9BJf1SunHmaa23/rZRU44tLAJ7aJdWO6ZIZ6bwEEvxek3yCyoYN7Ll3YwNJQco/XAhpmE031aU\nry4rkVak8BBLKTpUQ12DQd8TmEakZsc6arZ9QVjyKc3u6QmJjiXy1LNbq0yRTk/hIZayp6ASgD7J\nLQ8Pb0052B30vO0Z3RAq0sYUHhJwZZX1HKqsO+K2b3YUA9D7BGbANarLcUTGKDhEAkDhIQHl9Zrc\n/eyHVFQ3HHUfZ3wUkeGhLT62UV2u6xoiAaLwkIAqKK2morqBCeefwqBT4o+4z4m0OuBweGgorkgg\nKDwkoPJdjdc0LjwrldN6t+5cUUZNOaE9erbqMUXkyBQeEhAFJdX8aeVW8gsab9brldTy1oWnqozS\n/3sT0zhyl5en8pBmvhUJEIWHBMRHX+3jwy/3ktIjigvPSiWiS8t/9Gq/+zdV33xISEwitiPMthza\nPYmIfkNao1wROQ6FhwREvquC5PhI/nfmuBM+xuH5qVKn/Ap7l8jWKk1EToDmJ5c25/WabNhZckL3\nbvwno7ocmyMUW9jJT5YpIidH4SFt7nd/20BZVT19U04yPGrKsUfpPg4RK1
B4SJvbvLuUEIediaPT\nTuo4h28CFJHgU3hIm/J6TfYUVnLZj/sSHXlyywAb1RW6j0PEInTBXFrdqn/tYdueQwDUuw3qGwz6\nnMBEh4eZXoNDq/+Mu/QAYYm9WqtMETkJCg9pdb9fvoF6t5fI74fjJsZFMjjtxJembSjaQ9maJdjD\no4noM7i1yhSRk6DwkFblMbxU1riZlDGQn1wyoFWOaVSXAZB83UzCew1slWOKyMnRNQ9pVeVV9QDE\ndm29Ffu0/riI9Sg8pFWVVzVOHRIbfXIXx//T4fXHFR4i1hGwbqtdu3aRnZ1NWVkZsbGx5Obm0rdv\n3yb7lJSUMHPmTFwuFx6Phx/96Ec88sgjhISod629KPu+5RET3botD90cKGItAWt55OTkMGnSJN5/\n/30mTZrE7Nmzm+3z6quvkpaWxvLly/nb3/7Gxo0bWblyZaBKlFbg67ZqzfCo1s2BIlYTkD/pS0pK\n2LRpEwsWLAAgMzOTxx57jNLSUuLi4nz72Ww2qqur8Xq9NDQ04Ha7SUpKCkSJ0krKj9Ly8FSVUbHu\n7+A1WnzM+n1bdHOgiMUEJDxcLhdJSUk4vp8J1eFwkJiYiMvlahIeU6dO5d5772XUqFHU1tYyefJk\nhg8fHogSpZWUVdYT4rATGd70R6t68xrKPn0b7CFwAg2IbsPHt1KFItIaLHUxYcWKFQwYMIA33niD\n6upqpkyZwooVKxg/Xr842ovyqgZio8OadTEZVWVgs9Mv+0/YbBqnIdLeBeR/sdPppLCwEMNo7LIw\nDIOioiKcTmeT/RYtWsQVV1yB3W6na9eujBkzhrVr1waiRGklZVX1xBxhmK5RU4EjspuCQ6SDCMj/\n5Pj4eNLT08nLywMgLy+P9PT0Jl1WAKmpqaxevRqAhoYGPvvsM0499dRAlCitpLyq/ogjrYzqMg21\nFelAAvZn4Jw5c1i0aBEZGRksWrSIuXPnAjBlyhS+/fZbAGbNmsW6deuYMGECEydOpG/fvlx33XWB\nKlFaQVlV/RFHWhnV5QoPkQ4kYNc80tLSWLx4cbPn58+f73vcu3dv34gsaX9M06S88ijhUVNOaJzz\nCK8SkfbIUhfMpX3bsLOEBo/X121VuWE1RkUx0HjB3BGp6dRFOgqFh7SaBXkbAeib0g2jupyDy15s\nsj0suV8wyhKRNqDwkFZhmib7iirJOKcPZw1IpKEoH4CErPuIGngONmzYQkKDXKWItBaFh7SKg4dq\nqa03SEuNBRovkAOEdI3HHtJ6kySKiDUoPNqxRX/fTN6n3/m+ttvtPPCTYZx9enLAavjNkq/5+Kt9\nGF4TgD7JXQFNoy7S0Sk82rEN35UQ0SWEHw9JAeD9tfms21IU0PD457cu30qB0RGhDOjdHfih5aE5\nqUQ6JoVHO1ZT5yYtNZYpExuXZt225xD5BRUBO395VT1llfVcfVF/Jl7Qv8k2o7ocbHbsEVEBq0dE\nAkfh0Y5V13maTEDYx9mNT/+9nw++2MOPzkhmzTcuDK+3zc5fWFLTeN4Yk4p17wOmb1vd3s2ajkSk\nA1N4tGM1tW4iw38YwXR6v3je/zyfF95az9BTE/j39oNtXkOXMAdJ+z+m+N9/b7Ytou/gNj+/iASH\nwqOdMk2TmvqmLY8xI3px5qk9+OljK/n39oN0iwrjpQcvatM6wsNCqHj3RUK6J5Ny8xNNtjkiotv0\n3CISPAqPdqq+wcDrNYkKb3rvRHxMBMlxUbhKqunr7Eb3ruFtXsuhmnJCorsTEh3b5ucSEWtQh3Q7\nVV3nBmi26BJA7++Hy/ZO6hqQWjTpoUjno5ZHO1VT5wFocs3jsCsv7E+3qDDGn9s3ILU0rtWh8BDp\nTBQe7VTN9y2PqIjm4XHGKfGccUp8QOowvQbemkrsanmIdCotCo/i4mK++eYbDh06hGn+MCzzmmuu\nafXC5Ni+2dE4W21El8Dnv2ma1O74Cm
9DDd76WsBUy0Okk/H7N8+qVat48MEH6dOnDzt27KB///5s\n376ds846S+ERYF6vycL3NgOQEBsR8PM3uHZS8JcnmzyntTpEOhe/w+OFF17gySef5NJLL+Xss89m\n6dKlLFmyhB07drRlfXIERYcab867cfxAEuMiA35+T2UJAEnXPERoj1RsIaGExiQGvA4RCR6/R1sd\nOHCASy+9tMlzV155JUuXLm31ouTYdrsapyAZelpCUM5/eN6qLs7+hMX3VHCIdEJ+tzzi4+MpLi6m\nR48e9OzZk/Xr19O9e3e8bTj9hTT3l1XbeHfNLgB6BWgo7n/zTXoYpZUBRTorv8Pj2muvZd26dWRk\nZHDrrbdy8803Y7fb+elPf+rX63ft2kV2djZlZWXExsaSm5tL3759m+zz0EMPsXXrVt/XW7du5eWX\nX2bs2LH+ltnhvb82n5AQOzdcPOCIw3QDwagpxx4ehc2hxZ1EOiu/w+OOO+7wPZ44cSIjR46ktraW\ntLQ0v16fk5PDpEmTyMrKYtmyZcyePZuFCxc22eeZZ57xPd6yZQu33HIL559/vr8ldng1dW6KSmu4\n6dJ0rht3WtDq0E2BInLCd5inpKT4HRwlJSVs2rSJzMxMADIzM9m0aROlpaVHfc3bb7/NhAkTCAvT\nKnSHffr1AQBOiazCNIPXXWjUlGtorkgnd8yWx6WXXsrf/944W+oFF1yAzWY74n4fffTRMU/icrlI\nSkrC4XAA4HA4SExMxOVyERcX12z/hoYGli9fzuuvv+7HW+gc3B4v8/7yb3o6Son7v4WUM5nYH18V\nlFqM6nLCeqQG5dwiYg3HDI/HHnvM9/jZZ59t82IOW7VqFSkpKaSnpwfsnFZ34GAVABNHxMJ2qNu7\nJWi1aDoSETlmeIwYMcL3eOTIkSd8EqfTSWFhIYZh4HA4MAyDoqIinM4j31i2ZMkSrr766hM+X0d0\neHjugJ5dMbcHrw5NRyIi0IJrHvfccw9ffvllk+e+/PJLpk2bdtzXxsfHk56eTl5eHgB5eXmkp6cf\nscuqoKCAdevWMWHCBH9L6xTyCypw2G3EhDYEtQ6jphJNRyIifofHF198wbBhw5o8N3ToUNauXevX\n6+fMmcOiRYvIyMhg0aJFzJ07F4ApU6bw7bff+vZ75513uOiii4iJ0S+n/3TwUC09YiOgtrEFYgbp\n/hpvzeF7PPT5iHRmfg/VDQsLo7a2lujoH1aHq6mpISTEv0OkpaWxePHiZs/Pnz+/ydd33XWXvyV1\nKmVV9cR27YLx/S9vb11VUOrQDYIiAi0Ij1GjRjF79mweffRRoqOjqaqq4tFHH9V9GG3M9BrUu74j\nsmI3Pbt2wV2yHwBPRXFQLprX7Ws8p7qtRDo3v8MjOzubBx98kJEjRxITE0N5eTmjR49ucmOftL6q\nDas5uPwlrgcogbrGOQkxqg5xYOHDwSnK7iCka/PrVSLSefgdHjExMbz22msUFRVRUFCA0+kkISE4\nE/N1Jo0XqOF3VRfxozN7M25kH0LjnHgOFTRZUyWQQqJjsXcJ/Gy+ImIdLV5JKDExkYSEBEzT9E2K\naLdrKfS2YnoaR1dtbEjhRylnEHlK4139obGayVZEgsfv8CgsLOTRRx/lyy+/pKKiosm2zZs3t3ph\n0sg03IANAzsx0V2CXY6ICNCCobo5OTmEhoby+uuvExkZyTvvvMOYMWN8Q26lbZiGB9MeAthI6B74\nVQNFRI7E75bH+vXr+b//+z8iIyOx2WwMHDiQJ554ghtuuIHrrruuLWvs1ExPA4atcU6w3skaHisi\n1uB3y8Nut/vu6ejWrRulpaVERkZSWFjYZsUJmB4PHhz0iI0gOkLrZ4iINfjd8jjzzDP5+OOPufji\nixk1ahT3338/4eHhDBo0qC3r6/RMw02DYadPcnBWDRQRORK/w+OZZ57xja6aNWsWv//976muruaW\nW2
5ps+IEvO4G6gwbfdRlJSIW4ld4GIbBE0884ZuiPTw8nKlTp7ZpYdKotqYOj2mnj1MtDxGxDr+u\neTgcDtasWXPUxaCk7dTW1OLBoZaHiFiK3xfMb7nlFubNm4fb7W7LeuS/1NfV4TEdpCap5SEi1uH3\nNY9FixZRXFzMggULiIuLa9IKOd4ytHLiGurrsYeG0iXUEexSRER8/A6PQC5DKz8wGhoI66JWh4hY\ni9/hcTLL0MqJqXcbeA03XSJ0Z7mIWIvf4fHiiy8eddt9993XKsVIU/sKKwnBIDIyPNiliIg04Xd4\nFBQUNPn64MGDfPHFF4wbN67Vi5JG+QUVxGAQGa3pz0XEWvwOj6eeeqrZc6tXr+bdd99t1YLkB/mu\nSs6yeYmKUstDRKzlpBbiGDVqFKtWrfJr3127dnH99deTkZHB9ddfz+7du4+433vvvceECRPIzMxk\nwoQJFBcXn0yJ7Vp+QQVhdi/2kLBglyIi0oTfLY+9e/c2+bq2tpa8vDycTqdfr8/JyWHSpElkZWWx\nbNkyZs+ezcKFC5vs8+233/LSSy/xxhtvkJCQQGVlJWFhnfcXZ35BJSEOA1uIJkQUEWvxOzwuvvhi\nbDabb+nTiIgI0tPTefrpp4/72pKSEjZt2sSCBQsAyMzM5LHHHqO0tJS4uB/Wwn799de57bbbfMvb\ndu3aeYeoVtW6KS6rwRFnYHMoPETEWvwOjy1btpzwSVwuF0lJSTgcjTe6ORwOEhMTcblcTcJj586d\npKamMnnyZGpqarj44ou56667OuW0KHsKKgihcSJKtTxExGr8vuaxefNmXC5Xk+dcLtdJhcp/MwyD\nrVu3smDBAv7whz+wevVqli1b1mrHb0/yCyqJtNUD4IjovC0wEbEmv8PjwQcfxOPxNHnO7Xbz4IMP\nHve1TqeTwsJCDMMAGkOiqKio2fWSlJQUxo8fT1hYGNHR0YwdO5ZvvvnG3xI7lD2uCnqEN84j5oiM\nCXI1IiJN+R0eBw4coFevXk2e6927N/v37z/ua+Pj40lPTycvLw+AvLw80tPTm3RZQeO1kE8//RTT\nNHG73Xz++ecMHDjQ3xI7lN0FFaTFNXbXOaIUHiJiLX6HR3JyMhs3bmzy3MaNG0lMTPTr9XPmzGHR\nokVkZGSwaNEi5s6dC8CUKVP49ttvAbj88suJj4/nsssuY+LEifTv359rrrnG3xI7FFdxNc7oxmse\njihNxy4i1uL3BfNbb72VqVOncvvtt9O7d2/27NnD73//e+68806/Xp+WlsbixYubPT9//nzfY7vd\nzsyZM5k5c6a/ZXVIpmlSXlVPbMj31zyiYoNckYhIU36Hx3XXXUfXrl15++23KSgowOl0MmPGDMaP\nH9+W9XVK1XUePIZJV3s9NkdlIh0LAAAS1klEQVQotjBNjCgi1uJ3eACcffbZhIWFcejQIQCqqqp4\n++23O23XUlspr2pscURSgz0qplMOVRYRa/M7PFatWsWDDz5Inz592LFjB/3792f79u2cddZZCo9W\nVlbZGB5djGqNtBIRS/I7PF544QWefPJJLr30Us4++2yWLl3KkiVL2LFjR1vW1ykdbnmEuqtwxMYd\nZ28RkcBr0VDdSy+9tMlzV155JUuXLm31ojq7w+Fhq6/SMF0RsSS/wyM+Pt43w23Pnj1Zv349e/bs\nwev1tllxndXBslocdjBrK9RtJSKW5Hd4XHvttaxbtw5oHLZ78803k5WVxU9+8pM2K66z2lNQSb+E\nLmC41fIQEUvy+5rHHXfc4Xs8ceJERo4cSW1tLWlpaW1SWGe221XBMKcdXJqaRESsqUVDdf9TSkpK\na9Yh36ur91BYWkOf9Ijvw0N3l4uI9ZzUSoLS+kor6gCIj2hcN8UeHhXMckREjkjhYTFl34+0ig5r\nDA9baJdgliMickQKD4s5PEw3KqRxFJs9LDyY5YiIHJHCw2LKqhoA
CHc0rp2iloeIWJHCw2IOtzy6\n2BoXzlLLQ0SsSOFhMeWV9URHhGIzGlsganmIiBUpPCymrKqemOgwTHcdtpAwbDZ9RCJiPfrNZDE1\ndR6iIkIxG+qxqctKRCxK4WExdQ0ewsNC8LrrsavLSkQsSuFhMXX1BuFhIY3dVgoPEbGoE56epKV2\n7dpFdnY2ZWVlxMbGkpubS9++fZvsM2/ePP74xz+SmJgIwFlnnUVOTk6gSrSExpaHA29DHfZQdVuJ\niDUFLDxycnKYNGkSWVlZLFu2jNmzZ7Nw4cJm+02cOJEZM2YEqizLqWsw6BLmwKyvxxamloeIWFNA\nuq1KSkrYtGkTmZmZAGRmZrJp0yZKS0sDcfp2pb7BQ3iXELU8RMTSAhIeLpeLpKQkHA4HAA6Hg8TE\nRFwuV7N93333XSZMmMBtt93G+vXrA1GepdQ1GISHOTDdGm0lItYVsG4rf9xwww3ceeedhIaGsmbN\nGqZOncp7771H9+7dg11aQLg9XgyvSZcwB0ZdlWbUFRHLCkjLw+l0UlhYiGE0TrlhGAZFRUU4nc4m\n+yUkJBAaGgrAeeedh9PpZPv27YEo0RLqGxrnswoPteOtqdRCUCJiWQEJj/j4eNLT08nLywMgLy+P\n9PR04uLimuxXWFjoe7x582b2799Pv379AlGiJdQ1NIZrlK0OMLUErYhYVsC6rebMmUN2djavvPIK\n3bp1Izc3F4ApU6Ywbdo0Bg8ezPPPP8/GjRux2+2EhobyzDPPkJCQEKgSg67u+5ZHhFkLoPAQEcsK\nWHikpaWxePHiZs/Pnz/f9/hwoHRWdfWNLY9wbw2g9ctFxLp0h7mFHG55dDGqAbU8RMS6FB4Wcvia\nR5jn+/BQy0NELErhYSH17sbwCPE2LghlD48MZjkiIkel8LAQ9/fh4cAAmx2b3RHkikREjkzhYSH1\nbi8Adq+BLSQ0yNWIiBydwsNCGnwtD4/CQ0QsTeFhIYfDw24a2BwKDxGxLoWHhRwOD5vXrfAQEUtT\neFhIg8dLiMMGhrqtRMTaFB4W0uA2CAt1YBpqeYiItSk8LKT+cHh43Gp5iIilKTwspEnLQ+EhIham\n8LCQBreXLqH2xpaHuq1ExMIUHhbS4FG3lYi0DwoPC2lwG4SF6IK5iFifwsNCGtxewg53W6nlISIW\npvCwkHoN1RWRdkLhYSE/jLbSTYIiYm0KDwtp8HjpEurA9DRgcwRshWARkRZTeFhIXb3n+/DwYAsJ\nC3Y5IiJHFbDw2LVrF9dffz0ZGRlcf/317N69+6j7fvfdd5x55pnk5uYGqrygq6huoKK6AWd8BHg9\nuuYhIpYWsPDIyclh0qRJvP/++0yaNInZs2cfcT/DMMjJyWHcuHGBKs0S9hRUANAnsXHpWV3zEBEr\nC0h4lJSUsGnTJjIzMwHIzMxk06ZNlJaWNtv3tdde48ILL6Rv376BKM0y8gsqAeidEA4oPETE2gIS\nHi6Xi6SkJByOxjW5HQ4HiYmJuFyuJvtt2bKFTz/9lFtvvTUQZVlKvquCqIhQYrqYANhCuwS5IhGR\no7PMkB63283//M//8NRTT/lCpjPJL6igT3JXvDWN3VeOqNggVyQicnQBCQ+n00lhYSGGYeBwODAM\ng6KiIpxOp2+fgwcPsmfPHu644w4AKioqME2TqqoqHnvssUCUGTSmaZJfUMnooT0xqssBcETFBLkq\nEZGjC0h4xMfHk56eTl5eHllZWeTl5ZGenk5cXJxvn5SUFNauXev7et68edTU1DBjxoxAlBhUpRV1\nVNe66ePshlGzGwBHZLfgFiUicgwBG201Z84cFi1aREZGBosWLWLu3LkATJkyhW+//TZQZVjSbtf3\nI62Su6rlISLtQsCueaSlpbF48eJmz8+fP/+I+997771tXZJl5Lu+H2mV3A337nJsIWHYQsODXJWI\nyNFZ5oJ5MJVV1rPi8914PN6g
nH/dlkLiunWhW1QYRdUVOKJisNlsQalFRMQfCg8aRzotXrUNjxGc\n8AAYe3ZvALy1ldgjugatDhERfyg8gDNPTWBJ7oRglwGAt6EOu+7xEBGL08SIFmO667CF6XqHiFib\nwsNivO567LpYLiIWp/CwGLOhDluYuq1ExNoUHhajloeItAcKD4sx3fWaFFFELE/hYSGm18D0NKjl\nISKWp/CwENPdAKBrHiJieQoPC/G66wB0n4eIWJ7Cw0JMdz2A5rUSEctTeFiIt6Gx5aFuKxGxOoWH\nhZi+biu1PETE2hQeFmFUl1O9tXExLA3VFRGrU3hYRPm/8ij//G9gsxPSrUewyxEROSbNqmsRnqpD\nOKLjSP35CzjCo4JdjojIManlYRFGdTmO6O4KDhFpFxQeFuGtKccR1S3YZYiI+CVg3Va7du0iOzub\nsrIyYmNjyc3NpW/fvk32WbJkCa+//jp2ux2v18u1117LzTffHKgSg8pTXU5EQu9glyEi4peAhUdO\nTg6TJk0iKyuLZcuWMXv2bBYuXNhkn4yMDK666ipsNhtVVVVMmDCBkSNHMnDgwECVGRSmaeKtLscR\nFRPsUkRE/BKQ8CgpKWHTpk0sWLAAgMzMTB577DFKS0uJi4vz7RcdHe17XFdXh9vtxmaztXl9puGm\nZsd6TK+nzc91xPN73JiGG0ekwkNE2oeAhIfL5SIpKQmHwwGAw+EgMTERl8vVJDwAPvjgA55//nn2\n7NnDL3/5SwYMGNDm9dVsX0fhkmfb/DzHE9o9KdgliIj4xXJDdceOHcvYsWM5cOAAd999N6NHj+aU\nU05p03NGDTyHXnfNwzSC0/IAsDlCCemeHLTzi4i0REDCw+l0UlhYiGEYOBwODMOgqKgIp9N51Nek\npKQwePBgPvroozYPD4DQuJQ2P4eISEcRkKG68fHxpKenk5eXB0BeXh7p6enNuqx27tzpe1xaWsra\ntWs57bTTAlGiiIi0QMC6rebMmUN2djavvPIK3bp1Izc3F4ApU6Ywbdo0Bg8ezJ///GfWrFlDSEgI\npmly4403MmrUqECVKCIifgpYeKSlpbF48eJmz8+fP9/3eNasWYEqR0REToLuMBcRkRZTeIiISIsp\nPEREpMUsd5/HiTIMA4CCgoIgVyIi0n4c/p15+HeovzpMeBw8eBCAyZMnB7kSEZH25+DBg/Tp08fv\n/W2maZptWE/A1NXVsWHDBhISEnzToIiIyLEZhsHBgwcZNGgQ4eHhfr+uw4SHiIgEji6Yi4hIiyk8\nRESkxRQeIiLSYgoPERFpMYWHiIi0mMJDRERaTOEhIiItpvAAdu3axfXXX09GRgbXX389u3fvDnZJ\nJy03N5cxY8YwYMAAtm3b5nv+WO+1vX8fDh06xJQpU8jIyGDChAncc889lJaWAvDvf/+bK664goyM\nDG677TZKSkp8rzvWtvZg6tSpXHHFFUycOJFJkyaxefNmoGN/1gAvvfRSk5/vjvwZA4wZM4bx48eT\nlZVFVlYWn3zyCRDE922KedNNN5lLly41TdM0ly5dat50001BrujkffHFF+aBAwfMiy66yNy6davv\n+WO91/b+fTh06JD5+eef+75++umnzZkzZ5qGYZjjxo0zv/jiC9M0TfPll182s7OzTdM0j7mtvaio\nqPA9/sc//mFOnDjRNM2O/Vlv2LDB/NnPfub7+e7on7Fpms3+L5vmsd9bW7/vTh8excXF5vDhw02P\nx2Oapml6PB5z+PDhZklJSZArax3/+QN3rPfaEb8PK1asMG+55Rbz66+/Ni+//HLf8yUlJebQoUNN\n0zSPua09euedd8wrr7yyQ3/W9fX15nXXXWfu3bvX9/PdGT7jI4VHMN93h5kY8US5XC6SkpJ882E5\nHA4SExNxuVzN1lhv7471Xk3T7FDfB6/Xy5/+9CfGjBmDy+UiJSXFty0uLg6v10tZWdkxt8XGxg
aj\n9BPy8MMPs2bNGkzT5Le//W2H/qxffPFFrrjiClJTU33PdYbPGGD69OmYpsnw4cP5xS9+EdT3rWse\n0iE99thjREZGcuONNwa7lIB44okn+Oijj3jggQd45plngl1Om1m/fj0bNmxg0qRJwS4l4N58803+\n9re/sWTJEkzT5NFHHw1qPZ0+PJxOJ4WFhb657A3DoKioCKfTGeTKWt+x3mtH+j7k5uaSn5/PCy+8\ngN1ux+l0cuDAAd/20tJS7HY7sbGxx9zWHk2cOJG1a9eSnJzcIT/rL774gp07dzJ27FjGjBlDQUEB\nP/vZz8jPz+/wn/HhzycsLIxJkybx1VdfBfVnu9OHR3x8POnp6eTl5QGQl5dHenp6u2i+t9Sx3mtH\n+T48//zzbNiwgZdffpmwsDAABg0aRF1dHV9++SUAb731FuPHjz/utvaguroal8vl+/rDDz8kJiam\nw37Wd9xxB59++ikffvghH374IcnJyfzud7/j9ttv77CfMUBNTQ2VlZUAmKbJe++9R3p6elB/tjUl\nO7Bz506ys7OpqKigW7du5ObmcsoppwS7rJPy+OOPs3LlSoqLi+nevTuxsbG8++67x3yv7f37sH37\ndjIzM+nbt69vXYLU1FRefvllvvrqK3Jycqivr6dnz548++yz9OjRA+CY26yuuLiYqVOnUltbi91u\nJyYmhhkzZnDGGWd06M/6sDFjxvDqq69y2mmnddjPGGDv3r3ce++9GIaB1+slLS2NRx55hMTExKC9\nb4WHiIi0WKfvthIRkZZTeIiISIspPEREpMUUHiIi0mIKDxERaTGFh4iItJjCQ0REWkzhISIiLabw\nEPFDYWEh9957L+eccw5jxoxh4cKFAMybN49p06Zx//33M2zYMK688kq2bNnie93OnTu56aabGDFi\nBJdffjkffPCBb1tdXR1PP/00F110EcOHD+cnP/kJdXV1AEybNo3zzjuP4cOHM3nyZLZv3x7YNyxy\nHAoPkePwer3cddddDBgwgNWrV/PGG2/wxhtv+FZy++CDDxg/fjz/+te/yMzMZOrUqbjdbtxuN3fe\neSfnnXce//znP3nkkUeYPn063333HdA4gePGjRt56623+Ne//sWDDz6I3d74X3L06NG8//77fPbZ\nZ5x++ulMnz49aO9f5Eg0PYnIcXz99dfcd999fPTRR77n/vd//5fdu3eTkpLCJ598wl/+8hegMWhG\njx7NCy+8AMB9993HJ5984guFX/ziF/Tr14+7776boUOH8pe//IWBAwce8/wVFRWcffbZfPnll3Tt\n2rVt3qRIC3X6xaBEjmf//v0UFRUxYsQI33OGYTBixAhSUlJITk72PW+320lKSqKoqAiA5ORkX3AA\npKSkUFhYyKFDh6ivr6dXr17NzmcYBv/v//0/VqxY4ZtGGxrXaFd4iFUoPESOw+l0kpqaysqVK5tt\nmzdvHgUFBb6vvV4vhYWFJCYmAlBQUIDX6/UFgMvlom/fvnTv3p0uXbqwd+/eZi2P5cuX88EHH7Bg\nwQJSU1OprKzk7LPPRp0EYiW65iFyHEOGDCEqKorXXnuNuro6DMNg27ZtfPPNNwBs3LiRlStX4vF4\neOONNwgLC+PMM89kyJAhhIeH89vf/ha3283atWv58MMPueyyy7Db7Vx99dU89dRTvoWZ1q9fT0ND\nA9XV1YSFhdG9e3dqa2t5/vnng/wdEGlO1zxE/FBYWEhubi5r166loaGBfv36cf/997Nu3Tq2b9+O\n3W7n448/pk+fPjzxxBOcccYZQOMaI3PnzmXz5s0kJSXxwAMPcPHFFwONo61+9atfsWLFCmpqahg4\ncCC/+93vMAyD6dOn8/nnnxMbG8t9993HjBkzWLlyJX369Anmt0HER+EhchLmzZtHfn4+zz33XLBL\nEQkodVuJiEiLKTxERKTF1G0lIiItppaHiIi0mMJDRERaTO
EhIiItpvAQEZEWU3iIiEiLKTxERKTF\n/j9k6oeLFqoR1AAAAABJRU5ErkJggg==\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"tags": []
}
},
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAY8AAAESCAYAAAAFYll6AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi40LCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcv7US4rQAAIABJREFUeJzs3Xd4VGXexvHvlEx6Ib2RRkghEFpo\nCggJCCvBgLq4C4urKK4VxGVXVFZAscS2KopdLGBDBSQiBIPShEAwEkIJEEIJ6b1MymTmvH9Es/Kq\nkGAyk/L7XFeuC2bOzNxnJszNOc8551EpiqIghBBCtIHa0gGEEEJ0PVIeQggh2kzKQwghRJtJeQgh\nhGgzKQ8hhBBtJuUhhBCizaQ8hBBCtJmUhxBCiDaT8hBCCNFmUh5CCCHaTMpDCCFEm2ktHaC91NfX\nk5mZiYeHBxqNxtJxhBCiSzAajRQXF9O/f39sbGxa/TizlEdiYiJbtmzh/PnzbNy4kbCwsN9cbtOm\nTbz66qsoioJKpWLVqlW4u7u36jUyMzOZNWtWe8YWQogeY82aNcTExLR6ebOUR1xcHDfddNNFv9wP\nHTrEyy+/zHvvvYeHhwfV1dXodLpWv4aHhwfQ/AZ4e3v/4cxCCNETFBQUMGvWrJbv0NYyS3m0ps3e\nffdd5syZ07ICjo6ObXqNn3dVeXt74+/v3/aQQgjRg7V1d3+nGfPIzs7G39+fWbNmodfrmThxInfe\neScqlcrS0YQQQvw/naY8jEYjWVlZrFq1isbGRm677TZ8fX2ZNm2apaMJIYT4fzpNefj6+jJ58mR0\nOh06nY64uDgyMjLapTxMJhO5ubnU1ta2Q1JxMfb29vj7+6NWy1HgQnRnnaY84uPj2b59OwkJCTQ1\nNbF3714mTZrULs9dUlKCSqUiPDxcvtQ6kMlk4vz585SUlODp6WnpOEKIDmSWb9Lly5czduxYCgoK\nuOWWW5gyZQoAc+fO5dChQwBMmTIFNzc3rrnmGqZNm0ZoaCg33HBDu7x+RUUFXl5eUhwdTK1W4+Xl\nRWVlpaWjCCE6mEpRFMXSIdpDbm4ucXFxpKSk/Opoq6NHjxIRESGD72agKArHjh0jMjLS0lGEEK1w\nse/Oi+kx/xW/WHEY6/U0Fp9DMZnMmKh7koIWomfoMeVxcQpKUwOKocEsr7ZixQoaGxvb/LjCwkJm\nz57dAYmEEKJtpDwAtVXz9VxMjXVmeb2XX34Zg8Hwq9ubmpou+jgvLy8++OCDjoolhBCt1mmOtrIk\nlUaDSqvD1Fjf4a+1bNkyAP7yl7+gVqvx8/OjV69e5OTkUFtby4YNGzh48CDPPvtsy6HF8+bNY9y4\nceTm5nL99deTmpoKQHh4OAsWLGDr1q1UVFTw73//u+UItR07dvD8889jNBpxdXXl0UcfJTAwsMPX\nTwjRM/S48tiWdpat+87+6nalyYBiMqK2OgWXudt+4vAAYmMCLrrMkiVL+PDDD/n444+xt7dn0aJF\nHD16lNWrV2NnZ0dVVRVLlizhjTfewNPTk6KiIm644QaSkpJ+8/kcHBz4/PPPOXDgAPfddx+TJk2i\ntLSUf//736xevZrQ0FDWrl3LwoULWbt27eWtmBBC/D89rjx+l0oNShMopuY/m9HkyZOxs7MDID09\nndzcXObOnfu/aCoVZ86coVevXr967DXXXAPAoEGDKCoqoqGhgYMHDxIREUFoaCgA119/PcuWLaOm\npgYHBwczrJEQorvrceURG/PbWweK0UBj0Rk0Tu5o7V3Mmunn4oDmQ13Dw8NZs2bNr5bLzc391W3W\n1tbA/y5qdqlxEyGEaA8yYP4TlcYKNFYoZhj3sLe3p6am5jfvGzx4MGfOnGHv3r0tt2VkZNCW03EG\nDRrEsWPHyM7OBmDdunX069dPtjqEEO2mx215
XIxaZ4Opoa5lMqqOMmfOHG666SZsbGzw8/O74D5n\nZ2dWrlzJM888wxNPPIHBYKB379689tprrX5+V1dXnn76aRYuXEhTUxOurq4888wz7b0aQogerMec\nYd6aM56NtZU0VRVj5RGAWtv6iajEhVr7fgshLE/OMG8HKp0tgFl2XQkhRFcm5fELKq0VqLWYGsxz\nsqAQQnRVUh6/oFKpUOtsMTXWtWmAWgghehopj5+YTM1loba2BVMTSlPbrz0lhBA9hZQHUKNv5HR+\nFUaTCbV18zkXsutKCCF+n5QHoNGoMZoU6uqbUGm0qDQ6TA16S8cSQohOS8oDsNFp0KhV1NY3n52t\nsrZFMdSjKDK/hxBC/BYpD5oHyu1stOjrDSiK0rzrSjGhNJpnfo9LmT17Nt9++y0AL774Ips2bfrN\n5VasWEFiYqI5owkheig5w/wndjZWVOsNNBiMWOtsABWmRn3zAHonMn/+fEtHEEII85VHYmIiW7Zs\n4fz582zcuJGwsLDfXfbUqVNMnz6dmTNn8sADD7RrjuqM76g+uO1XtyuKgrrRSKFWjZVGjemnWQXV\nVtatfm7HgbE4Ro+76DIrV66koqKChx56CIDy8nImT55MYmIir776Kg0NDRiNRu644w6mTJnyq8cv\nWrSI/v3787e//Y3q6moefvhhjh8/joeHB97e3ri7uwOwZ88eXnjhhd98vsLCQpYvX87p06cBiI+P\n5x//+AcbN27k/fffb5mo6oEHHmDUqFFA8/W1Hn/8cfR6PXZ2djz88MNER0e3+r0RQnQvZiuPuLg4\nbrrpJmbNmnXR5YxGI0uWLGHChAlmStZMpVKhVoHJaAKNGpVajWJsAkWBdrzO1bRp05gxYwb//ve/\n0Wq1JCUlERsby+DBg/nwww/RaDSUlJRw3XXXMXr0aJydnX/3uV555RXs7e3ZvHkzZWVlXHfddfzp\nT38CoF+/fr/7fAsXLuSqq65ixYoVAJSVlQEwevRo4uPjUalUnDp1iptvvpkdO3bQ2NjIvHnzePLJ\nJxk1ahTff/898+bNIzk5GZ1OLuMiRE9ktvKIiYlp1XJvvPEG48aNQ6/Xo9e3/xFPjtHjfnfroLSy\nnrKqenr7OqE2GTCUnEPr7InGzqndXt/X15fQ0FC2b99OXFwc69at48EHH6SsrIyHHnqIM2fOoNFo\nqKysJCcnh0GDBv3uc6WmprJ48WKg+WKIEydObLnv956vb9++pKens2rVqpZlXV1dATh37hz//Oc/\nKSwsRKvVUlJSQnFxMWVlZVhZWbVshVxxxRVYWVmRk5NDeHh4u703Qoiuo1MNmB87doxdu3Zx8803\nW+T17W2bu1Rf34RKq2u+VEl9+xfY9OnTWb9+PVlZWVRXVxMTE8PSpUsZPnw4GzduZMOGDXh7e9PQ\ncPkD9pfzfPfffz8zZ87kq6++Yt26dWg0mj+UQQjRfXWa8jAYDPznP/9h2bJlLRMbmZu1lQatRkVt\nnaF5N5aNPaZGfbsfsnv11Vezf/9+Vq1axfTp01GpVFRXV+Pn54dKpWL37t2cOXPmks8zcuRIvvji\nC6B57OSbb75pue/3ns/e3p7Bgwfz7rvvtiz7826r6urqlqtqfv755zQ2Np9lHxwcjMFgaJljZM+e\nPTQ1NREcHPzH3wwhRJfUaY62Ki4u5uzZs9x+++0AVFVVoSgKNTU1PPbYY2bJoFKpsLe1orq2EZOp\n+ZBdk74SU0MdGhv7dnsdW1tb4uLi+OKLL0hJSQHgn//8J8uWLWPFihUMGDCgVbuD7rrrLh566CEm\nT56Mh4fHBbsGL/Z8zz77LMuWLSM+Ph61Wk18fDy33347Dz74IHfddRfOzs6MGTMGF5fmGRV1Oh0v\nvfTSBQPmL774oox3CNGDmX0+j9jYWF577bWLHm0Fzecs6PX6Vh9t1R7zeQDo6w2cL67Fx80Oe1st\njYWnUds6
YuXs0arHC5nPQ4iupNPP57F8+XLGjh1LQUEBt9xyS8tho3PnzuXQoUPminFJttZaNGoV\nNXUGVCo1ap0dpvpaucquEEL8gtl2Wy1evLjlyKBfevPNN39z+XvvvbejI/2mn3dd1egbMSkKahs7\nTA01KIZGVLrWn/MhhBDdWacZMO9obdlycLC1wqRAXX3TT1fZVWFqqOm4cN2IbKEJ0TP0iPLQaDQt\nZ023hq1N866ran1j81V2dbYY62rki7EVDAYDWm2nOQ5DCNFBekR5uLi4UFhYiMnUukNu1SoVDnZW\n1NQZMJpMaGwdwGhAMcg5DxdjMpkoLCy86FnxQojuoUf8F9Hd3Z3c3FyysrJa/RhDk4ny6nqqS3RY\n69QYq0pRFZSjsXHowKRdn729fcv1tYQQ3VePKA+1Wk1AQECbHqMoCrc/+Q3ervY8dscVFHz2NPXn\njhI47w1UGqsOSiqEEF1Dj9htdTlUKhXjhvTm4MliSivrcBo8EZO+itrj+y0dTQghLE7K4yJiY3qj\nKPDNvrPYBkejdfagOn2rpWMJIYTFSXlchI+7PQP7upOcegYFNY4D46jLycBQXmDpaEIIYVFSHpcw\naWQQReV1/Hi8GMeBsaBSU/1jiqVjCSGERUl5XMLI/t442evYknoarZMbdqFDqT64DaWp9eeNCCFE\ndyPlcQlWWg1xwwJIzSygqFyP07A/YaytoDrjW0tHE0IIi5HyaIWpo0MA2LA9G9ugaKx9+1Lx/brm\naWqFEKIHkvJoBY9etlw1xJ8tqWeoqm2k1+g/01RZRE3mDktHE0IIi5DyaKUbYvtiMBhZm3IC29Ah\n6LxDKN/9uWx9CCF6JCmPVurt5UjcsAC+2p1DcXkdrmP/QlN5AdU/fnPpBwshRDcj5dEGf706ApUK\nPkw+hm3oEGx6R1K+cy2mxnpLRxNCCLOS8mgDj162TLkymG/TznG2sBrX2NkYayuo3Jdk6WhCCGFW\nUh5t9Oe4MGyttbzz5WGs/cKwCxtGxd4NGPXVlo4mhBBmI+XRRk72OmZNjuSHrCJ2/nge13GzUBrr\nqfj+c0tHE0IIszFbeSQmJhIbG0t4eDjHjx//zWVeeeUVpkyZwtSpU7nuuuvYuXOnueK1yTVXBtO3\ntwtvrs+k0d4LhwHjqEz7mqbKYktHE0IIszBbecTFxbFmzRr8/Px+d5no6Gg+++wzNm7cyBNPPMGC\nBQuor+98g9EatYp7/jyIKn0jb27IxPWqG1GhomzHJ5aOJoQQZmG28oiJicHHx+eiy4wZMwZbW1sA\nwsPDURSFiooKc8RrsxA/Z26I7cu2tHMcOGvAadifqMn4jsais5aOJoQQHa7TjnmsX7+egIAAvL29\nLR3ld/1lYjghvs68vPYg6oHxqK1tKftujaVjCSFEh+uU5bFv3z5efPFFnnvuOUtHuSgrrZoFM4dQ\nU9fIa19l4zxqGvoTadSdOWzpaEII0aE6XXmkp6fzr3/9i1deeYWQkBBLx7mkIB8nZk6K4PuMfDJ0\ng9E6e1Ly9etyyXYhRLfWqcojIyODBQsW8NJLLxEVFWXpOK123fi+RAT24tUNx9CN/TuG0vOUf/+F\npWMJIUSHMVt5LF++nLFjx1JQUMAtt9zClClTAJg7dy6HDh0CYNmyZdTX1/PII4+QkJBAQkICWVlZ\n5op42TRqFQv+OgRDk4lXUxXso0ZTsfsLGgpOWTqaEEJ0CJWiKIqlQ7SH3Nxc4uLiSElJwd/f3yIZ\nknad4vV1h5iXEErYjy+g0urwv/UZ1NZ2FskjhBCXcrnfnZ1qt1VXd80VwUSHuvPWltNYT7ibpooi\nir9aSTfpZyGEaCHl0Y7UahX3zhiE0aTw2u46eo37K7VH91B1YLOlowkhRLuS8mhn3m723HRNJAeO\nFfGDdgh2oUMp3fou9edPWDqaEEK0GymPDhB/ZQj9gl1588sjaMbNRevoSt
EXz2LUV1k6mhBCtAsp\njw6gVquYf+NgDE0mXvjiOO7T76eptoLipFdk/EMI0S1IeXQQXw8H7pg+gIyTJXx5xIRb7Gz0J9Ko\nSvva0tGEEOIPk/LoQBOGBzBuiD8fbTnGWdcR2IUOpSzlfRoKT1s6mhBC/CFSHh1IpVJx5/XR+Ljb\n8+yaA1iNuw21jT1F6/+LydBg6XhCCHHZpDw6mJ2NFYv+Ppyauiae+ew4blPvxVBynpLNb8r4hxCi\ny5LyMIMgHyfu/fNADp8q5ZPDWlxGX09NxrdU/5Bs6WhCCHFZpDzMZNzQ3sSPDmb99mwOO47Gts9g\nSpLfof78b0/JK4QQnZmUhxnNmdqfyCBXXlp7kIaRc9A6uVL4+TM01XTO2RKFEOL3SHmYkZVWzQM3\nxWBrrWXZB5nYTJqPqa6GonXPo5iMlo4nhBCtJuVhZm7Otiy5bSS1dQYeW5+Pw4TbqD97mLJtH1g6\nmhBCtJqUhwX08XfhoZuHkVtUzX9TbXAYMonK1I3UHNlt6WhCCNEqUh4WMijMk/l/GcKh7BLeLYrG\n2i+c4qSVNBaftXQ0IYS4JCkPCxo3xJ85U6PYmVHIVtspqHU2FH72NKb6WktHE0KIi5LysLDp40KZ\ndlUfPt9XwuGAGRgqiij6cgWKYrJ0NCGE+F1SHp3ALfFRjBviz6u76ykOnYr+xH4qdn1u6VhCCPG7\npDw6AbVaxbwbBzMk3JMnUh2p9x9G+Y5P0J84YOloQgjxm8xSHomJicTGxhIeHs7x4799RrXRaGTZ\nsmVMmDCBiRMnsnbtWnNE6zSstGoW/X0Yffx78eiRcEy9elO04QUMZXmWjiaEEL9ilvKIi4tjzZo1\n+Pn5/e4yGzdu5OzZsyQnJ/PJJ5+wYsUKcnNzzRGv07C11rLktpE4uzjyXP5ITKgpWJuIqbHO0tGE\nEOICZimPmJgYfHx8LrrMpk2b+POf/4xarcbV1ZUJEyawefNmc8TrVJwdrFl2+yhqNc6s1o/FUHqe\n4o0yA6EQonPpNGMe+fn5+Pr6tvzdx8eHgoICCyayHG83e5bdPorDDd58pxpF7bE9VO7dYOlYQgjR\notOUh7hQsK8zi28ZwcbyvpzUhlH27Wr0pw5aOpYQQgCdqDx8fHzIy/vf4HB+fj7e3t4WTGR5A0Ld\nWTgrhjeKh1Kucado3fMYKgotHUsIITpPeUyePJm1a9diMpkoKyvjm2++YdKkSZaOZXFXDvTl5mlD\nWFEymobGJgrXPi1T2AohLM4s5bF8+XLGjh1LQUEBt9xyC1OmTAFg7ty5HDp0CICEhAT8/f25+uqr\nmTFjBnfffTe9e/c2R7xOL350CGPGDOKtyitpKDpDyabXZABdCGFRKqWbfAvl5uYSFxdHSkoK/v7+\nlo7T7kwmhcQP9mN/Ipl423Rc4/6Oy8hrLR1LCNHFXe5352VteSiKgslkavkRHU+tVnH/zKGc9RjD\nQUMQpSnvU3sizdKxhBA9VKvLo7CwkHvuuYcRI0bQr18/oqKiWn6EeVhbafjPrSPZajWBPJMbhev+\nS2ORXMJdCGF+rS6PJUuWoNVqeffdd7Gzs2PdunXExsaybNmyjswn/h9nB2sevn0MqxsnUGNQk/fJ\nExhrKy0dSwjRw7S6PNLT03niiSeIjIxEpVIRERHB448/zjvvvNOR+cRv8PNwYN4tsbxdE0tjVRn5\nnz2N0mSwdCwhRA/S6vJQq9VotVoAnJycKCsrw87OjsJCOe/AEqJC3LhhxtWsrrmCxtxjFH/9uhyB\nJYQwG21rFxw4cCDbt29n4sSJjB49mvvuuw8bGxv69+/fkfnERYwZ7EdR+RQ2b6tkcsa36Dx64zIy\nwdKxhBA9QKu3PJ5++mmGDRsGwEMPPcSIESPo27cvzz33XIeFE5d23fhQGJjAj40BlKZ8QO3x/ZaO\nJIToAVq95eHk5NTyZxsbG+6+++4OCS
TaRqVS8Y/ronmqYjqueavhi+fwm/0oNn5hlo4mhOjGLloe\nL774YqueZP78+e0SRlwejUbN/TddwaMv13Cd/lP46HEC5jyJlavvpR8shBCX4aLl8ctLojc0NJCc\nnEz//v3x8/MjLy+PQ4cOcfXVV3d4SHFpttZaFt42nidequXm+vXkrnmM3rc8idbBxdLRhBDd0EXL\n48knn2z584IFC3juuecuuFhhcnJyj5ywqbNyc7Zl3m2TWLGyhrlVm8n7+HH8b3oUtc7W0tGEEN1M\nqwfMd+zYwYQJEy64LTY2lu3bt7d7KHH5An2cmD07nvdqxtJYmEPB58+hGJssHUsI0c20ujwCAwNZ\ns2bNBbd99NFHBAQEtHso8ccMDPMgdvpU1taOoP5UupwDIoRod60+2mr58uXcc889vPXWW3h5eVFY\nWIhWq2XFihUdmU9cpgnDAygsm8qWXbVMOrgNrZM7rmNvtHQsIUQ30eryiIiIYMuWLRw8eJCioiI8\nPDwYNGgQVlZWHZlP/AEzJ4XzQtlUUo/pGbHzUzR2zjjHTLZ0LCFEN9Cq8jAajQwePJi0tDRiYmI6\nOpNoJyqVintmDGbZG7U4FH1Ovy1vobGxx6H/GEtHE0J0ca0a89BoNAQFBVFeXt7ReUQ7s9KqWXTL\nKLbaTiHH6EXRly/JPCBCiD+s1QPmU6dO5Y477mDdunXs2bPngh/RuTnYWrF47mg+MU1qngfk82ep\nO3PY0rGEEF1Yq8c8PvroI4BfDZCrVCpSUlLaN5Vod56udjxw61geXdnIPMfNqD59Et9ZS7H2DbV0\nNCFEF9Tq8ti2bdsfeqGcnBwWLVpERUUFLi4uJCYmEhQUdMEypaWlPPjgg+Tn59PU1MSIESNYvHhx\ny6XgxR8T2tuFe2aP5qVVjSx03Ur+x8vxvWk5OvfuN+e7EKJjtWkOc4PBQFpaGps2bQJAr9ej1+tb\n9dglS5Ywc+ZMtmzZwsyZM3nkkUd+tcxrr71Gnz592LhxI19++SWHDx8mOTm5LRHFJQzv582N00bx\n37JY6hpN5H+4DENFkaVjCSG6mFaXR1ZWFpMmTWLx4sU8/PDDAOzfv5+HHnroko8tLS3lyJEjxMfH\nAxAfH8+RI0coKyu7YDmVSkVtbS0mk4nGxkYMBgNeXl5tWR/RClOuDGbM2CG8UBZLg76O/A+X0VQj\nB0MIIVqv1eWxdOlS5s2bx+bNm1t2Iw0bNowDBw5c8rH5+fl4eXmh0WiA5qO3PD09yc/Pv2C5u+66\ni5ycHEaPHt3yM3To0Lasj2ilm6f0IzgqipfLx9FYVUbBR49hrKuxdCwhRBfR6vI4efIkCQnNs9Sp\nVCoA7OzsaGhoaLcwmzdvJjw8nF27drFjxw7S0tLkwosdRK1WsWDmEKz9wnirahwNJecp+OQJTI31\nlo4mhOgCWl0efn5+ZGZmXnBbRkZGq65t5ePjQ2FhIUajEWg+6bCoqAgfH58Lllu9ejXXXnstarUa\nR0dHYmNjSU1NbW1E0UbWVhoWzxlBmWMfPm64ioa84xSufQpTU6OlowkhOrlWl8f8+fP5xz/+wUsv\nvURjYyOvv/468+bN47777rvkY93c3IiMjCQpKQmApKQkIiMjcXV1vWA5f39/duzYAUBjYyN79uyh\nb9++bVkf0UbODtYsvW0kh43BJDGeutOZFH3+LIrRYOloQohOrNXlMX78eN5++23KysoYMWIEeXl5\nvPzyy4wePbpVj1+6dCmrV69m0qRJrF69mmXLlgEwd+5cDh06BDTPjX7gwAGmTp3KtGnTCAoKYsaM\nGZexWqItfD0c+M+cEXxXGcB2m/HoTx6gaP0LKCajpaMJITqpVp9A0djYSHJyMrt376aoqAgvLy96\n9epF3759sba2vuTj+/Tpw9q1a391+5tvvtny54CAAFatWtXaSKIdRQa7cv/MISS+b8IpJI7Bx1Io\n3v
gyHlPvQaXWWDqeEKKTaXV5LF26lJycHBYvXtwyDe1rr71GYWHhBTMOiq5r9EA/iuLrWJUELv3i\nCM5MQaXV4X7NHS0HSQghBLShPFJSUti6dStOTk4AhIaGEh0dLXOYdzPTx/WhoKyWF76HRwbHwY/f\noLLS4TZxjhSIEKJFq8c83N3dqauru+C2hoYGPDw82j2UsByVSsU/pg0gJtKL5T/6Utcnlqr9myj/\nbo3MRiiEaNHqLY+EhARuu+02Zs+ejZeXFwUFBaxZs4aEhIQLrqw7atSoDgkqzEejUfPv2TE8uHIX\nSw9qeGrIVVR8vw6VlQ29Rt9g6XhCiE5ApbTyv5OxsbGXfjILXmE3NzeXuLg4UlJS8PeXC/21h7Kq\neha+tANjk5HHoo7SdHw3rhP+jsuIay0dTQjRTi73u9NsV9UVXY+rkw1LbhvJAyt28nRONA+GGSn7\n5j3UWh1OQ2U6WyF6sjZdVVf0PIHeTjx483DOl+h5rXgENqFDKdn8JtUZ31o6mhDCgqQ8xCUN7OvB\nvTMG8WN2OWubJmATHE1x0kpqjuy2dDQhhIXILEuiVWJjAigqr2PN5mO4jL2Wif4Gija8iEqrwz5s\nmKXjCSHMTLY8RKvdOCGMKVcG89mOs+zzuRFr7xAKv3gWfXa6paMJIcxMykO0mkql4vZpAxgzyI+3\nN58iK+xmdO69KVj7FDVHv7d0PCGEGUl5iDZRq1Us+OsQBod58OL64+QNuRMb374UffE8lWky94oQ\nPYWUh2gzK62aB28eTqi/M4mfHKFo2B3Y9R1K6ZY3KfvuIzkTXYgeQMpDXBZbay1LbhuFr4cDj733\nI4UDb8FxYBwVuz+jaMMLMqGUEN2clIe4bE72OpbfcQU+bnY8uiqNgvA/4zp+FrWHd5G/ZinG2kpL\nRxRCdBApD/GHODtYs/yOK/FytWPZO6mcchuN53ULaSzI4fyqRTQWn7N0RCFEB5DyEH+Yi6M1j995\nBb7u9jz29l7S6wPw+dujKE2NnH/vIWqz9lk6ohCinUl5iHbRy9GGJ+4aTXigK8+sSSPltBa/W57C\nqpcPhZ8lUvbtGpnWVohuRMpDtBsHWyuW3T6KYZHevPZFBu9tL8R79mM4DppAxfdfUPDx4xj1VZaO\nKYRoB2Yrj5ycHG688UYmTZrEjTfeyOnTp39zuU2bNjF16lTi4+OZOnUqJSUl5ooo2oG1lYaHbh7G\nlCuDWb89m8ffT8c+7jbcr7mT+rNHyH3zn+hzDlo6phDiDzJbeSxZsoSZM2eyZcsWZs6cySOPPPKr\nZQ4dOsTLL7/MO++8Q1JSEh9++CGOjo7miijaiUaj5o7rornz+mh+yCpi4Us70QeMwvfmJ1Bb21Lw\n4aOUbl0lh/MK0YWZpTxKS0s6jNVfAAAfSklEQVQ5cuQI8fHxAMTHx3PkyBHKysouWO7dd99lzpw5\nLVPbOjo6Ym1tbY6IogNcc0Uwj94+ivKqehb8dzsZZXb43foMTkMnU7kvibxVD9BYdMbSMYUQl8Es\n5ZGfn4+XlxcajQYAjUaDp6cn+fn5FyyXnZ3NuXPnmDVrFtOnT2flypVytnIXN7CvB8/fdxWernY8\n+nYqq7dm02virXjf+BDG2kpy3/43Zds/kq0QIbqYTjVgbjQaycrKYtWqVXzwwQfs2LGDDRs2WDqW\n+IN83O155t4xTBoZyNqUEyx+/XvqPaPwn/tfHPpdQcWuzzj/5v3UnT5k6ahCiFYyS3n4+PhQWFiI\n0dh8qKbRaKSoqAgfH58LlvP19WXy5MnodDocHByIi4sjIyPDHBFFB9NZabjnz4NY8NchnDhXwT3P\nfMu+U7V4JszHe+YjKIpC/pqlFK1/gaYqOUhCiM7OLOXh5uZGZGQkSUlJACQlJREZGYmrq+sFy8XH\nx7Nr1y4URcFgMLB3714iIiLMEVGYSWxMb/5731V4udryxLv7eemT
dPDph//c53EZfQO1Wamce/Ve\nynd8isnQYOm4QojfYbbdVkuXLmX16tVMmjSJ1atXs2zZMgDmzp3LoUPNuyumTJmCm5sb11xzDdOm\nTSM0NJQbbrjBXBGFmfT2cuTpe8cyY0IYKfvPMv/578jKrcX1qr/if8eL2PWNoXznJ5x79V5qMnfK\nuJcQnZBK6Sb/MnNzc4mLiyMlJQV/f39LxxGtdPhUKc9/9APF5XqmXBnMTdf0w9ZaS93ZI5RuXUVj\nwSms/cJxm3gzNn5hlo4rRLdzud+dnWrAXPQ8USFurPjnOKZcEcxXu3O4+5ltHDhWiG1AP/zmJOI+\n5S6aKgrJe/dBCj57msaSXEtHFkIg5SE6ATsbK/5xXTSJd4/BRqdh6Zt7eW7NASqqG3EaFEfvO1+m\n15gbqcs5SO4bCyj+6lWaqkotHVuIHk1r6QBC/Cwy2JUX7x/Hp9+c4LNtx0k9XMDMSeHEjw6h19gZ\nOA2dRPnuz6g6kExN5g6chl2Dy6jpaGwdLB1diB5HtjxEp2Kl1TBrcgQv/yuWfsGuvP3lYeY99y0H\njxejsXfG/epb6X3nS9hHjKRyzwbOvnInZd99KBdcFMLMpDxEp+Tn4cCS20bynzkjMDSZWPz69zzx\n7j7yimuwcvHCM2E+frc9i13wQCp2f8HZl++g9Jv3aKout3R0IXoE2W0lOi2VSsXwKG8GhXmwbvtJ\nPks5wb7DBUweFcRfJobj4hWE1/ULaSw+R8X3X1C5L4mqtK9xHBSHy6hpaJ09LL0KQnRbUh6i09NZ\nabhxQjhXDw/ko61ZfL3nNNvSzjJ9XF+mXdUHW4/eeCbMp9eYGVR8v46q9K1UpW/FPnIULsOnYu0b\naulVEKLbkfIQXUYvJxvuun4gCWP78P6mI3y45Ribvs9hRlwYk0YGonP1wSP+LlzG3EDVvq+o+jGF\n2sO7sOkdifPwqdiFxaBSayy9GkJ0C3KSoOiyjp0p472vjpCZXYqrkw3Xx4YyaWQQ1lbNBWFq0FP1\nYwpV+7+iqbIYrYsXzsOn4Bgdi9ra1sLphegcLve7U8pDdHmHTpbwYfKxn0rEmuvH92XSqP+ViGIy\nUpu1j8p9G2nIzUKls8Gh/1ichkzC2ivIsuGFsLDL/e6U3VaiyxsQ6s6ToaM5dLKEj5KzeHNDJmu3\nnWDq6BCuuSIIBzsdDpGjcIgcRf3541T9sIWajO+o/iEZa78wnIZcjX3kFaitZOIxIVpLykN0GwNC\n3RkQ6s6h7BI+23aCD74+ytqU41w9IpCEsX3wdLXDxi8MG78wjBNupubQdqp+2ELxxpcp3fouDtHj\ncBo8EZ27bLkKcSlSHqLbGdDHnQF93MnJq2Tddyf5ancOSbtzGDPQj2vHhhAW0AuNrSPOw+NxGjaF\n+rOHqfohmaq0zVTtS8LaLwzH6PHY97sSjY29pVdHiE5JykN0W8G+ztw/cyiz/9SPL3dms2Xvaban\n5xIW4MKUK4MZPdAPnZUG28D+2Ab2p6mmgprMHVRnbKPk69cpTX4Hu/DhOEaPxzY4Wo7UEuIXZMBc\n9Bj6egPb0s7x1e4ccotqcLLXcfWIQP40KghPV7uW5RRFobHgFNUZ31JzeCemuho0Dq44DBiLY/R4\n2a0luhUZMBfiEuxsrIgfHcKUK4PJOFHCV9/n8MW3J/ji2xMMifBiwvAAhvfzxkqrxtqnD9Y+fXCL\n+zu1J9OoOfgtlXu/pHLPenRewTj0H4tDvyvROrlZerWEsAgpD9HjqFQqBoZ5MDDMg6JyPVv2niFl\n/1meem8/TvY6xg31Z+LwQIJ8nFBprXCIGIVDxCiaasqpPbKbmsydlKW8R1nK+9gE9sMhaiz2ESPl\n6r6iR5HdVkIARpPCj8eL2LrvLKmZBTQZTYT6OzNheCCjB/ri7HDhYbyGsjxqDu+iJnMnhrI80Gix\n6zMEh/5jsAsdKof9ii5DdlsJ
8Qdo1CqGRngxNMKLqtpGtv+Qy9Z9Z3jtiwzeXH+IIRGeXDXYnxFR\n3thYa7Fy9aXXmBm4jP4zjQWnqMncQc2R3eiP70Ols8U+bBj2kVdgGzIQtVZn6dUTot2ZrTxycnJY\ntGgRFRUVuLi4kJiYSFBQ0G8ue+rUKaZPn87MmTN54IEHzBVRCACc7HVMHRPC1DEh5ORVsv2HXLan\nn2f/kQNY6zSMjPLhqiF+DA73RKv53/iIa9xN1J89Qk3mDmqz9lGTueN/RRIxCts+g6RIRLdhtvJY\nsmQJM2fOJCEhgQ0bNvDII4/w/vvv/2o5o9HIkiVLmDBhgrmiCfG7gn2dCfZ15qZr+nH0dBnf/ZDL\n7oPn2Z6ei6OdjtGDfBk7yI/IYDc0ag22QQOwDRqA+5/+Qd3pQ9Qe3UPt8dSfisQGu74xOERc0Vwk\nsmtLdGFmKY/S0lKOHDnCqlWrAIiPj+exxx6jrKwMV1fXC5Z94403GDduHHq9Hr1eb454QlySWq0i\nKsSNqBA3bp82gPTjRWz/IZdtaef4+vvTuDhaM2qAD1cO8KV/Hzc0Gi12fQZj12cw7sbbqTuT2Vwk\nWanUHt6FysoGu75DsY8chV3IYNQ6G0uvohBtYpbyyM/Px8vLC42m+SQrjUaDp6cn+fn5F5THsWPH\n2LVrF++//z4rV640RzQh2sxKq2Z4P2+G9/OmrqGJtKOF7M7IaykSRzsdowb4cEW0D9GhHlhptdiF\nDMIuZBDuk+dSd+Ywtcd+KpIju1FpddgGD8Q+fDh2fYehsXO09CoKcUmdZsDcYDDwn//8hyeffLKl\nZITo7GyttYwZ5MeYQX7UNzaRnlXE7oP57PzxPMmpZ7C3tWJElDdXRvsyKMwDnZUWu5CB2IUMxH3y\nXOrPHqE2ax+1x/ehP7EfVGpsAiKxDx+BfdhwmQ1RdFpmKQ8fHx8KCwsxGo1oNBqMRiNFRUX4+Pi0\nLFNcXMzZs2e5/fbbAaiqqkJRFGpqanjsscfMEVOIP8RGp2XUAF9GDfCl0WDkxxPF7D6YR+rhAral\nncPWWsuQCE9GRHkTE+mFo52uZYzE7eo5NOZnU3t8H7VZqZQmv0Np8jvovPtgHz4c+/DhWLn3RqVS\nWXo1hQDMVB5ubm5ERkaSlJREQkICSUlJREZGXrDLytfXl9TU1Ja/r1ixAr1eL0dbiS5JZ6Vp2bVl\naDJx6GQJ3x/KY9/hAnYfzGseQwl2Y0R/b0ZEeePtZo+1byjWvqG4jptJY2ke+qxUao/vo3z7R5Rv\n/wgrVx/swkdgHz4Ca99QVCq1pVdT9GBm2221dOlSFi1axMqVK3FyciIxMRGAuXPnMm/ePAYMGGCu\nKEKYlZVWzZAIT4ZEeGK6XuFkbgV7M/NJPVzAWxsyeWtDJoHejgyP8mZkfx9C/V3Qufmiu2I6LldM\np6m6DP3xfc0TWqVupHLPejQOvbALG4Z9+AhsA6NQaawsvZqih5EzzIWwoPySWlIPF7DvcAGHc0ox\nmRRcnawZ1q95iyS6r0fLjIgAxroa9Nk/oM9KRZ+djmJoQG1th13fmOafkEGo5TLyog3kDHMhuiAf\nd3umXdWHaVf1oVrfSNrRQlIzC9iRnsuWvWfQadVE9/UgJtKLmEgvvFwdcOw/Fsf+YzEZGqjLyaA2\nq3mwvSZzB6g12Ab0aykTq17ell5F0U1JeQjRSTja6Rg/tDfjh/bG0GTk0MlS0o4VknakkLSjhQD0\n9nJgaERzkfQLdms+ez1sGIrJSMP549SeSEN/Io3Sraso3boKK4/e2P9UJNa+fWVOEtFuZLeVEF3A\n+eIa0o42l0hmdilNRhO21loGhTVvlQyN8MTN2bZleUN5AfoTadQe30/9uaNgMqK2c8IudAj2fYc1\nX3NLZ3uRVxQ9hey2EqIb8/NwwM/DgYSxfahraCLjRDFpx4pIO1LAnkP5AIT4OjM00pNhkd6EBX
ji\nPDwe5+HxGOtrqctOR38iDf3xNGoyvgONFtvA/tj1jcG+b4ycTyLaTMpDiC7G1lrLiP4+jOjvg6JE\nc7agmv0/bZV8/u1J1qacwN5GS3RfDwaHezIk3BOvqNE4RI1GMRmpP3esuUhOpFG65S1Kt7yFzjOw\nZZzE2qeP7N4SlyTlIUQXplKpCPRxItDHiRti+1JTZ+DH40WkZxXzQ1ZRy1aJr7s9g8M9GRzmwYDQ\nMNwCo3Cb8HcaS8+3FEnF9+uo2P158+6t4IHYhg7BLmQQGjsnC6+l6IykPIToRhxsrRg90I/RA/1Q\nFIXzxTUtRfLN/rN8tTsHjVpFRJArg8M9GBzmSZ/h1+IyMgFjXTV1p35En52OPjudmsM7ARXWvqHY\n/nSRR9kqET+T8hCim1KpVPh7OuLv6cjUMSEYmowcO11O+vEi0rOKWP31MVZ/fQxHOysG/rSLa1DY\nUDyjxqAoJhrzTzWfU5KdTsXOtVTs/BS1rSN2IYOayyRkEBp7Z0uvprAQKQ8heggrrYYBoe4MCHXn\npmv6UVnTwMETzVsl6VnF7DqYB4CPm33LctEDr8VvzAyM+mrqcn5jq8SnT3ORhA6RrZIeRspDiB7K\n2cGasYP9GTvYH0VROFtYzcHjxWScLGH3wearAgP4ezo0F0loCANih+Mx1eqnrZLmIqnY/TkVu9ai\ntrbDJmgAtkHR2IVEo+3lIxdy7MakPIQQzQPv3k4Eejtx7dg+GE0KOecryThZQsbJYr470DxXCUCQ\nj1Pzlkmf0QyYkYAtDdSdzqAuJ4O6nIPos1IpBbTOHtgGRWMbMhDbwP6yi6ubkfIQQvyKRq0itLcL\nob1duG58KE1GEydzK8g4UcKhkyVs2XuGjTtPoVJBsI8z/UJciQq5ln6jb8bRWNFcIjkZ1Gbtpfpg\nCgA6r+DmIgmKxqZ3hEzD28VJeQghLkmrURMR6EpEoCszJoRhaDKSdaacQydLyDxVSnLqWZJ25QDN\n1+uKCvYiKuRGIsfegZuh4KetkgwqU5Oo3LMelcYKm4BIbAIHYBsY1TxeopGvo65EPi0hRJtZaTX0\n7+NO/z7uADQZTWTnVnD4VBlHckpJPZzPN/vPAtDL0Zp+wYH0CxlM1GgHvJpyaThziLqcg5R/t4Zy\nQGVlg03vCGwDo7AJ7I+1d4iUSScnn44Q4g/TatSEB7oSHujKdeNDMZkUcouqOZxTxpFTpRzOKWV3\nRvPRXHY2WiKCwokIHEXkCCt6K3koBceoO5NJ2bdrAFDpbLDxj8Q2qD82AVFY+4TIkVydjJSHEKLd\nqdUqArydCPB24k+jggAoKtdz5KcyOZJTykdZx1AUUKmgt1cI4QFDiYq0oo+2EPuKbOrPHqZs2wcA\nqHS2P22Z9P9pyyRYysTCpDyEEGbh2csOz152jBvSfOXW2joDJ86Vc+xMOVlnytmbmc/WfQYA7Gz8\nCAvoz4BoHRE2RbjXn6Hp/FHKsn8qEysbrP36YuMfjo1/BDZ+YTIJlplJeQghLMLe1opBYZ4MCvME\nQFEU8kpqOXa67KdCKWPN9mJMCoAvfh5hDAzU0t+uFG9THtry01Ts/gIUE6BC59kba/8IbPzDsfYN\nw8rVW+Z570BSHkKITkGlUrVcej5uWAAA+noDJ3MrOHa6eetk94kyNtVogN5o1AGEetkwxL2WUKsi\n3BrPY8jcSfUPyQCobeyx9umDtU9o849vKBpHVzlxsZ1IeQghOi07GyuiQz2IDm2eb0RRFIor6sjO\nreDEuQpOnqsgKbuJar034I2VZghDvJro71xNgKYEp8o86s5sAJMRAI29C9a+fbH2DW0uFt9QNLaO\nFlzDrsts5ZGTk8OiRYuoqKjAxcWFxMREgoKCLljmlVdeYdOmTajVaqysrFiwYAFjxowxV0QhRCen\nUqlaxk5GDfAFmgulqLyOk+cqOHGunJO5FXx82o7aOlcgDC
uVkYGueqKcqwnQlOKSfxbtif0tz6l1\n8WopE51XENZewXIZ+lYwW3ksWbKEmTNnkpCQwIYNG3jkkUd4//33L1gmOjqaOXPmYGtry7Fjx/jb\n3/7Grl27sLGxMVdMIUQXo1Kp8HK1w8vVjisHXlgoOXmV5ORVkZNXSXJeFfmlvsAAbFSN9LWrJNql\nmkBTKa7Zh7E6srvlOTWOrlh7BaPzCkLnFYy1VxDaXl4yhvILZimP0tJSjhw5wqpVqwCIj4/nscce\no6ysDFdX15blfrmVER4ejqIoVFRU4O3tbY6YQohu4peFMrK/T8vt+noDZ/KryclvLpW0vEo+z6ui\nvtGIvaoeP005YY7VhDRW4XXuHPbZ6agUU/Nz6mzQeQY2l4pnIDqPAKw8eqPpoUd5maU88vPz8fLy\nQqNpPi5bo9Hg6elJfn7+BeXxS+vXrycgIECKQwjRbuxsrIgMdiUy+H/fOyaTQkFpLWcKqjlX2PyT\nVFhNblENJkMj3poK/DRl9LGtIrCgAve8bWhNjS2P1zi6ovPo3Vwm7r2b/+zeG7W1rSVW0Ww65YD5\nvn37ePHFF3nnnXcsHUUI0c2p1Sp8PRzw9XBg1ID/baWYTApF5fqWQjlXWMOBwmrOFVZhY6jEW1OB\nt6YC/8ZK/GvycDt1GC1N/3teR3esvQLQeQSgc/+5XPy6zQUhzVIePj4+FBYWYjQa0Wg0GI1GioqK\n8PHx+dWy6enp/Otf/2LlypWEhISYI54QQvyKWq3C280ebzd7hvX73x4QRVEoq6rnfHENecW1nC+u\nIaWklryiKhorCvGkHG9NBT4NFfhW5uB58kc0NO/6UlCBgzs6j97YeQei8/BH5941S8Us5eHm5kZk\nZCRJSUkkJCSQlJREZGTkr3ZZZWRksGDBAl566SWioqLMEU0IIdpEpVLh5myLm7NtyyHEPzOaFIrL\n9eQV15JXUsPhklq2FlXSUJyPVW0+XupyvBsq8a7IxvNUOhqVAjSXisHGFVx8sPUKxNk/GFuvAKzc\n/FDrOucBQypFURRzvFB2djaLFi2iqqoKJycnEhMTCQkJYe7cucybN48BAwZw/fXXc/78eby8vFoe\n9/TTTxMeHn7J58/NzSUuLo6UlBT8/f07clWEEKLNDE0mCstqyS+ppaBUT2FJFbVF51HKz2NTW4i7\nqhwfTSUemiq0qp+3VECvdcHg4IXG1R877wDcAvvg7BeEpp3GVC73u9Ns5dHRpDyEEF3Vz7vCfi6V\nivPnaCg5h6oiD9v6ItxMZXj+olQAKlVO1Fp7YHT0xsojkJgp16LTWbX5tS/3u7NTDpgLIURP8std\nYVEhbkDwBffXNzSRX1JNydkzVOedxlCai7YqH/v6YjzrctAW7+a4mzv9x15ltsxSHkII0cnZWGsJ\n9utFsF8vYNAF9xmbmqipKCfE3eO3H9xB5HRJIYTowjRaLc5mLg6Q8hBCCHEZpDyEEEK0mZSHEEKI\nNpPyEEII0WZSHkIIIdpMykMIIUSbdZvzPIzG5mkmCwoKLJxECCG6jp+/M3/+Dm2tblMexcXFAMya\nNcvCSYQQouspLi4mMDCw1ct3m2tb1dfXk5mZiYeHR8ukU0IIIS7OaDRSXFxM//792zTld7cpDyGE\nEOYjA+ZCCCHaTMpDCCFEm0l5CCGEaDMpDyGEEG0m5SGEEKLNpDyEEEK0mZSHEEKINpPyAHJycrjx\nxhuZNGkSN954I6dPn7Z0pD8sMTGR2NhYwsPDOX78eMvtF1vXrv4+lJeXM3fuXCZNmsTUqVO55557\nKCsrA+DHH3/k2muvZdKkScyZM4fS0tKWx13svq7grrvu4tprr2XatGnMnDmTo0ePAt37swZ4+eWX\nL/j97s6fMUBsbCyTJ08mISGBhIQEdu7cCVhwvRWhzJ49W1m/fr2iKIqyfv16Zfbs2RZO9Mft379f\nycvLU8aPH69kZWW13H
6xde3q70N5ebmyd+/elr8/9dRTyoMPPqgYjUZlwoQJyv79+xVFUZRXXnlF\nWbRokaIoykXv6yqqqqpa/rx161Zl2rRpiqJ07886MzNTufXWW1t+v7v7Z6woyq/+LSvKxdeto9e7\nx5dHSUmJMnToUKWpqUlRFEVpampShg4dqpSWllo4Wfv45S/cxda1O74PmzdvVv7+978rBw8eVKZM\nmdJye2lpqTJo0CBFUZSL3tcVrVu3Tpk+fXq3/qwbGhqUGTNmKOfOnWv5/e4Jn/FvlYcl17vbXBjx\ncuXn5+Pl5dVyPSyNRoOnpyf5+fm4urpaOF37uti6KorSrd4Hk8nERx99RGxsLPn5+fj6+rbc5+rq\nislkoqKi4qL3ubi4WCL6ZXn44YfZvXs3iqLw1ltvdevP+sUXX+Taa6/F39+/5bae8BkDLFy4EEVR\nGDp0KPfff79F11vGPES39Nhjj2FnZ8ff/vY3S0cxi8cff5zvvvuOBQsW8PTTT1s6TodJT08nMzOT\nmTNnWjqK2a1Zs4Yvv/ySzz//HEVRePTRRy2ap8eXh4+PD4WFhS3XsjcajRQVFeHj42PhZO3vYuva\nnd6HxMREzpw5wwsvvIBarcbHx4e8vLyW+8vKylCr1bi4uFz0vq5o2rRppKam4u3t3S0/6/3795Od\nnU1cXByxsbEUFBRw6623cubMmW7/Gf/8+eh0OmbOnMkPP/xg0d/tHl8ebm5uREZGkpSUBEBSUhKR\nkZFdYvO9rS62rt3lfXj++efJzMzklVdeQafTAdC/f3/q6+tJS0sD4OOPP2by5MmXvK8rqK2tJT8/\nv+Xv27Ztw9nZudt+1rfffju7du1i27ZtbNu2DW9vb95++21uu+22bvsZA+j1eqqrqwFQFIVNmzYR\nGRlp0d9tuSQ7kJ2dzaJFi6iqqsLJyYnExERCQkIsHesPWb58OcnJyZSUlNCrVy9cXFz46quvLrqu\nXf19OHHiBPHx8QQFBbXMS+Dv788rr7zCDz/8wJIlS2hoaMDPz49nnnkGd3d3gIve19mVlJRw1113\nUVdXh1qtxtnZmQceeICoqKhu/Vn/LDY2ltdee42wsLBu+xkDnDt3jnvvvRej0YjJZKJPnz4sXrwY\nT09Pi623lIcQQog26/G7rYQQQrSdlIcQQog2k/IQQgjRZlIeQggh2kzKQwghRJtJeQghhGgzKQ8h\nhBBtJuUhhBCizaQ8hGiFwsJC7r33XkaOHElsbCzvv/8+ACtWrGDevHncd999DB48mOnTp3Ps2LGW\nx2VnZzN79mxiYmKYMmUKKSkpLffV19fz1FNPMX78eIYOHcpf//pX6uvrAZg3bx5XXnklQ4cOZdas\nWZw4ccK8KyzEJUh5CHEJJpOJO++8k/DwcHbs2MF7773He++91zKTW0pKCpMnT2bfvn3Ex8dz1113\nYTAYMBgM3HHHHVx55ZV8//33LF68mIULF3Lq1Cmg+QKOhw8f5uOPP2bfvn3861//Qq1u/ic5duxY\ntmzZwp49e+jXrx8LFy602PoL8Vvk8iRCXMLBgweZP38+3333Xcttr7/+OqdPn8bX15edO3fy6aef\nAs1FM3bsWF544QUA5s+fz86dO1tK4f777yc4OJi7776bQYMG8emnnxIREXHR16+qqmLYsGGkpaXh\n6OjYMSspRBv1+MmghLiU8+fPU1RURExMTMttRqORmJgYfH198fb2brldrVbj5eVFUVERAN7e3i3F\nAeDr60thYSHl5eU0NDTQu3fvX72e0Wjkv//9L5s3b265jDY0z9Eu5SE6CykPIS7Bx8cHf39/kpOT\nf3XfihUrKCgoaPm7yWSisLAQT09PAAoKCjCZTC0FkJ+fT1BQEL169cLa2ppz5879astj48aNpKSk\nsGrVKvz9/amurmbYsGHITgLRmciYhxCXEB0djb29PW+88Qb19fUYjUaOHz9ORkYGAIcP
HyY5OZmm\npibee+89dDodAwcOJDo6GhsbG9566y0MBgOpqals27aNa665BrVazfXXX8+TTz7ZMjFTeno6jY2N\n1NbWotPp6NWrF3V1dTz//PMWfgeE+DUZ8xCiFQoLC0lMTCQ1NZXGxkaCg4O57777OHDgACdOnECt\nVrN9+3YCAwN5/PHHiYqKAprnGFm2bBlHjx7Fy8uLBQsWMHHiRKD5aKvnnnuOzZs3o9friYiI4O23\n38ZoNLJw4UL27t2Li4sL8+fP54EHHiA5OZnAwEBLvg1CtJDyEOIPWLFiBWfOnOHZZ5+1dBQhzEp2\nWwkhhGgzKQ8hhPi/duyYBgAAgEGYf9do4G9NkI3NbQXAZnkAsIkHAJt4ALCJBwCbeACwiQcAWzTt\nkBbd0bUkAAAAAElFTkSuQmCC\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"tags": []
}
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "MF2zwoKxK-HA",
"colab_type": "code",
"colab": {}
},
"source": [
"# Convert the test DataFrame to a NumPy array.\n",
"# DataFrame.as_matrix() was deprecated in pandas 0.23 and removed in 1.0;\n",
"# .values is the backward- and forward-compatible equivalent.\n",
"X_test = X_test.values"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "r5GInZroK-HD",
"colab_type": "code",
"colab": {}
},
"source": [
"# Predicted class indices for the test set.\n",
"# Sequential.predict_classes() was removed in TensorFlow 2.6; taking the\n",
"# argmax over the softmax probabilities is the equivalent, version-\n",
"# independent form for this multiclass model.\n",
"classes = np.argmax(model.predict(X_test, batch_size=120), axis=-1)"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "ZPmTapM4K-HG",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 102
},
"outputId": "b9da63a7-6825-419c-9464-c7a938bd2512"
},
"source": [
"# Report test-set accuracy of the model.\n",
"# np.mean over the element-wise comparison avoids both the hard-coded\n",
"# test-set size (was /30.0) and Python 2 integer division.\n",
"accuration = np.mean(classes == y_test) * 100\n",
"\n",
"# Single-argument print(...) behaves identically on Python 2 and 3.\n",
"print(\"Teste de acuracia : \" + str(accuration) + '%')\n",
"print(\"Predicao :\")\n",
"print(classes)\n",
"print(\"Alvo :\")\n",
"print(np.asarray(y_test,dtype=\"int32\"))"
],
"execution_count": 229,
"outputs": [
{
"output_type": "stream",
"text": [
"Teste de acuracia : 96.66666666666667%\n",
"Predicao :\n",
"[0 0 0 0 0 0 0 0 1 1 1 1 2 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2]\n",
"Alvo :\n",
"[0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2]\n"
],
"name": "stdout"
}
]
}
]
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment