Skip to content

Instantly share code, notes, and snippets.

@JnBrymn-EB
Created March 14, 2018 15:46
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save JnBrymn-EB/0de5f3cab16f68abbc4e8a94b75b5e3e to your computer and use it in GitHub Desktop.
Save JnBrymn-EB/0de5f3cab16f68abbc4e8a94b75b5e3e to your computer and use it in GitHub Desktop.
Display the source blob
Display the rendered blob
Raw
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Notes\n",
"* http://scikit-learn.org/stable/tutorial/text_analytics/working_with_text_data.html\n",
"* http://karpathy.github.io/2015/05/21/rnn-effectiveness/\n",
"* https://machinelearningmastery.com/sequence-classification-lstm-recurrent-neural-networks-python-keras/\n",
"* https://blog.keras.io/a-ten-minute-introduction-to-sequence-to-sequence-learning-in-keras.html\n",
"* `return_state` <- the RNNs need to do this\n",
"* TODO add dropout\n",
"* https://github.com/keras-team/keras/issues/4563 - nan loss issue very similar to mine\n",
"* here next https://machinelearningmastery.com/text-generation-lstm-recurrent-neural-networks-python-keras/\n",
"\n",
"# TODO\n",
"* implement batch size"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/johnb/anaconda3/lib/python3.6/importlib/_bootstrap.py:219: RuntimeWarning: compiletime version 3.5 of module 'tensorflow.python.framework.fast_tensor_util' does not match runtime version 3.6\n",
" return f(*args, **kwds)\n",
"Using TensorFlow backend.\n"
]
}
],
"source": [
"import numpy as np\n",
"import pandas as pd\n",
"import tensorflow\n",
"import keras\n",
"import sklearn\n",
"import random\n",
"\n",
"%matplotlib inline\n",
"import matplotlib\n",
"import matplotlib.pyplot as plt\n",
"plt.rcParams['axes.labelsize'] = 14\n",
"plt.rcParams['xtick.labelsize'] = 12\n",
"plt.rcParams['ytick.labelsize'] = 12"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"sample_submission = pd.read_csv('/Users/johnb/Personal/data_science/kaggle/toxic-comment/data/sample_submission.csv')\n",
"test = pd.read_csv('/Users/johnb/Personal/data_science/kaggle/toxic-comment/data/test.csv')\n",
"train = pd.read_csv('/Users/johnb/Personal/data_science/kaggle/toxic-comment/data/train.csv')"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"from sklearn.base import BaseEstimator, TransformerMixin\n",
"from collections import Counter\n",
"\n",
"class DataFrameSelector(BaseEstimator, TransformerMixin):\n",
" \"\"\"Allows you to specify a DataFrame as the input to a Pipeline (see below)\"\"\"\n",
" def __init__(self, attribute_names):\n",
" self.attribute_names = attribute_names\n",
"\n",
" def fit(self, X, y=None):\n",
" return self\n",
" \n",
" def transform(self, X):\n",
" return X[self.attribute_names].values\n",
" \n",
"class MapToInts(BaseEstimator, TransformerMixin):\n",
" def __init__(self, first_n_chars):\n",
" self.first_n_chars = first_n_chars or 100\n",
" \n",
" def translate(self, comment):\n",
" return [self.translation.get(char, self.first_n_chars) for char in comment]\n",
" \n",
" def untranslate(self, translated_comment):\n",
" return ''.join([self.untranslation[num] for num in translated_comment])\n",
"\n",
" def fit(self, X, y=None):\n",
" char_counter = Counter()\n",
" for text in X:\n",
" char_counter.update(text)\n",
" \n",
" for c in '1234567890':\n",
" del char_counter[c]\n",
" most_popular_chars = dict(char_counter.most_common()[:self.first_n_chars-1]).keys()\n",
" self.translation = {char: num for num, char in enumerate(most_popular_chars)}\n",
" self.untranslation = {num: char for char,num in self.translation.items()}\n",
" for c in '1234567890':\n",
" self.translation[c] = self.first_n_chars-1\n",
" self.untranslation[self.first_n_chars-1] = '⌗'\n",
" self.untranslation[self.first_n_chars] = '≠' \n",
" self.num_symbols = len(self.untranslation)\n",
" return self\n",
" \n",
" def transform(self, X):\n",
" y = []\n",
" for text in X:\n",
" y.append(self.translate(text))\n",
" return y"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"from sklearn.pipeline import Pipeline\n",
"\n",
"map_to_ints = MapToInts(first_n_chars=100)\n",
"pipeline = Pipeline([\n",
" ('comment_selector', DataFrameSelector('comment_text')),\n",
" ('map_to_ints', map_to_ints),\n",
"])"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"X = pipeline.fit_transform(train[:2000])"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"class TrainingSetMaker(BaseEstimator, TransformerMixin):\n",
" def __init__(self, num_samples, num_batches, num_training_steps, num_target_steps, map_to_ints):\n",
" \"\"\"Make training set\n",
" \n",
" num_samples is the number of randomly selected comments used to make the training set (this is the height \n",
" of each batch)\n",
" num_batches is how many batches we privide to keras during one epoch of training\n",
" num_training_steps is the width of each batch\n",
" num_target_steps is the width of the target\n",
" map_to_ints is the object used to encode the documents into ints - we use it to find the int for a space char\n",
" \n",
" To make a batch we first get text samples (really integers at this point) of an appropriate length. The length\n",
" is num_batches * num_training_steps + num_target_steps. A sample is selected from any starting point in the \n",
" input text and then we affix the same comment over and over (space delimited) until it is the proper length. \n",
" The first batch is then the first `num_training_steps` characters from each sample for training and the \n",
" subsequent `num_target_steps` for the target.\n",
" \n",
" Each element of the training and target pieces in the batches then one-hot encoded.\n",
" \"\"\"\n",
" self.num_samples = num_samples\n",
" self.num_batches = num_batches\n",
" self.num_training_steps = num_training_steps\n",
" self.num_target_steps = num_target_steps\n",
" self.map_to_ints = map_to_ints\n",
" \n",
" def make_a_sample(self, x):\n",
" sample_len = self.num_batches * self.num_training_steps + self.num_target_steps\n",
" start_index = random.randrange(len(x))\n",
" sample = x[start_index:start_index + sample_len]\n",
" current_len = len(sample)\n",
" while current_len < sample_len:\n",
" sample.append(self.space)\n",
" current_len += 1\n",
" remaining_len = sample_len-current_len\n",
" append_str = x[:remaining_len]\n",
" sample.extend(append_str)\n",
" current_len += len(append_str)\n",
" return sample\n",
" \n",
" def make_same_len_samples(self, X):\n",
" samples = []\n",
" indices = random.sample(range(len(X)), self.num_samples)\n",
" for i in indices:\n",
" x = X[i]\n",
" samples.append(self.make_a_sample(x))\n",
" return np.array(samples)\n",
" \n",
" def batch_iterator(self, X):\n",
" samples = self.make_same_len_samples(X)\n",
" for batch_num in range(self.num_batches):\n",
" train = samples[:, batch_num*self.num_training_steps:(batch_num+1)*self.num_training_steps]\n",
" target = samples[:, (batch_num+1)*self.num_training_steps:(batch_num+1)*self.num_training_steps+self.num_target_steps]\n",
" yield (train, target)\n",
" \n",
" def fit(self, X, y=None):\n",
" self.space = map_to_ints.translate([' '])[0]\n",
" return self\n",
" \n",
" def transform(self, X):\n",
" train, target = list(zip(*self.batch_iterator(X)))\n",
" train = np.concatenate(train)\n",
" target = np.concatenate(target)\n",
" return train, target"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"|a bad idea> in |\n",
"|omise not >to e|\n",
"|ck, its bi>gger|\n",
"|re I happe>n to|\n",
"|ed the mat>eria|\n",
"--------\n",
"| in the fi>rst |\n",
"|to engage >in a|\n",
"|gger than >your|\n",
"|n to resid>e. I|\n",
"|erial and >made|\n",
"--------\n",
"|rst place—>runn|\n",
"|in anymore> per|\n",
"|yours! hah>aha |\n",
"|e. I use n>o so|\n",
"|made corre>ctio|\n",
"--------\n"
]
}
],
"source": [
"# prove that the training batches look correct\n",
"maker = TrainingSetMaker(\n",
" num_samples=5, num_batches=3, num_training_steps=10, num_target_steps=4, map_to_ints=map_to_ints\n",
")\n",
"maker.fit(None)\n",
"X = pipeline.fit_transform(train[:2000])\n",
"for X_,y in maker.batch_iterator(X):\n",
" for i in range(X_.shape[0]):\n",
" print('|{}>{}|'.format(map_to_ints.untranslate(X_[i]),map_to_ints.untranslate(y[i])))\n",
" print('--------')"
]
},
{
"cell_type": "code",
"execution_count": 38,
"metadata": {},
"outputs": [],
"source": [
"class OneHotEncoder(BaseEstimator, TransformerMixin):\n",
" def __init__(self, map_to_ints):\n",
" self.map_to_ints = map_to_ints\n",
"\n",
" def one_hot_encode(self, X):\n",
" shape = list(X.shape)\n",
" shape.append(self.num_symbols)\n",
" one_hot_X = np.zeros(shape)\n",
" i1 = np.tile(np.arange(X.shape[0]).reshape([X.shape[0],1]), [1,X.shape[1]]).flatten()\n",
" i2 = np.tile(np.arange(X.shape[1]).reshape([1,X.shape[1]]), [X.shape[0],1]).flatten()\n",
" i3 = X.flatten()\n",
" one_hot_X[i1,i2,i3] = 1.0\n",
" return one_hot_X\n",
" \n",
" def one_hot_decode(self, one_hot_X):\n",
" return np.where(one_hot_X)[2].reshape(one_hot_X.shape[:2])\n",
" \n",
" def fit(self, X, y=None):\n",
" self.num_symbols = self.map_to_ints.num_symbols \n",
" return self\n",
" \n",
" def transform(self, X):\n",
" transformed = []\n",
" for x in X:\n",
" transformed.append(self.one_hot_encode(x))\n",
" # flatten target if target only one char\n",
" target_shape = transformed[1].shape\n",
" if target_shape[1] == 1:\n",
" transformed[1] = transformed[1].reshape((target_shape[0],target_shape[2]))\n",
" return transformed"
]
},
{
"cell_type": "code",
"execution_count": 39,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[[ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 1., ..., 0., 0., 0.],\n",
" ..., \n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 1., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.]],\n",
"\n",
" [[ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 1., 0., 0., ..., 0., 0., 0.],\n",
" ..., \n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 1., 0., ..., 0., 0., 0.]],\n",
"\n",
" [[ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" ..., \n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.]],\n",
"\n",
" [[ 0., 1., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 1., 0., 0., ..., 0., 0., 0.],\n",
" ..., \n",
" [ 0., 1., 0., ..., 0., 0., 0.],\n",
" [ 1., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.]],\n",
"\n",
" [[ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" ..., \n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 1., 0., ..., 0., 0., 0.]]])"
]
},
"execution_count": 39,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# prove that the OneHotEncoder can successfully round-trip encode data\n",
"one_hot_encoder = OneHotEncoder(map_to_ints)\n",
"one_hot_encoder.fit(None)\n",
"one_hot_X = one_hot_encoder.one_hot_encode(X_)\n",
"X_copy = one_hot_encoder.one_hot_decode(one_hot_X)\n",
"assert np.all(X_ == X_copy)\n",
"one_hot_X"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# End-to-end pipeline"
]
},
{
"cell_type": "code",
"execution_count": 339,
"metadata": {
"scrolled": true
},
"outputs": [
{
"data": {
"text/plain": [
"[array([[[ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" ..., \n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.]],\n",
" \n",
" [[ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 1., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" ..., \n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 1., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.]],\n",
" \n",
" [[ 1., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" ..., \n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 1., 0., 0., ..., 0., 0., 0.]],\n",
" \n",
" ..., \n",
" [[ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 1., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" ..., \n",
" [ 0., 1., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 1., 0., ..., 0., 0., 0.]],\n",
" \n",
" [[ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" ..., \n",
" [ 1., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 1., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.]],\n",
" \n",
" [[ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" ..., \n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.]]]),\n",
" array([[ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.],\n",
" ..., \n",
" [ 0., 0., 1., ..., 0., 0., 0.],\n",
" [ 0., 1., 0., ..., 0., 0., 0.],\n",
" [ 0., 0., 0., ..., 0., 0., 0.]])]"
]
},
"execution_count": 339,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from sklearn.pipeline import Pipeline\n",
"\n",
"first_n_chars = 100\n",
"\n",
"num_samples = 101\n",
"num_batches = 23\n",
"num_training_steps = 11\n",
"num_target_steps = 1\n",
"\n",
"map_to_ints = MapToInts(first_n_chars=first_n_chars)\n",
"full_pipeline = Pipeline([\n",
" ('comment_selector', DataFrameSelector('comment_text')),\n",
" ('map_to_ints', map_to_ints),\n",
" ('train_maker', TrainingSetMaker(\n",
" num_samples=num_samples,\n",
" num_batches=num_batches,\n",
" num_training_steps=num_training_steps,\n",
" num_target_steps=num_target_steps,\n",
" map_to_ints=map_to_ints,\n",
" )),\n",
" ('one_hot_encoder', OneHotEncoder(map_to_ints=map_to_ints)),\n",
"])\n",
"\n",
"X = full_pipeline.fit_transform(train[:2000])\n",
"X"
]
},
{
"cell_type": "code",
"execution_count": 338,
"metadata": {},
"outputs": [
{
"ename": "AssertionError",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mAssertionError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-338-c4f1042da376>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# prove it works\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;32massert\u001b[0m \u001b[0mX\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mnum_samples\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mnum_batches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnum_training_steps\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfirst_n_chars\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0;32massert\u001b[0m \u001b[0mX\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mnum_samples\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mnum_batches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnum_target_steps\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfirst_n_chars\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;31mAssertionError\u001b[0m: "
]
}
],
"source": [
"# prove it works\n",
"assert X[0].shape == (num_samples*num_batches, num_training_steps, first_n_chars+1)\n",
"assert X[1].shape == (num_samples*num_batches, num_target_steps, first_n_chars+1)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"------------------------\n",
"\n",
"# Finally! Keras stuff."
]
},
{
"cell_type": "code",
"execution_count": 420,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"from sklearn.pipeline import Pipeline\n",
"\n",
"first_n_chars = 100\n",
"num_symbols = first_n_chars + 1 # (one symbol for 'other' besides the first n chars)\n",
"\n",
"num_samples = 230 # e.g. number of comments\n",
"num_batches = 27 # number of training steps\n",
"num_training_steps = 2\n",
"num_target_steps = 1"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"First of all, you don't actually want the above pipeline b/c it had to recalculate the mapping each time. That's expensive. So we break it into two parts:"
]
},
{
"cell_type": "code",
"execution_count": 341,
"metadata": {},
"outputs": [],
"source": [
"map_to_ints = MapToInts(first_n_chars=first_n_chars)\n",
"pipeline_0 = Pipeline([\n",
" ('comment_selector', DataFrameSelector('comment_text')),\n",
" ('map_to_ints', map_to_ints),\n",
"])\n",
"\n",
"# you run this part once - it takes a while\n",
"X0 = pipeline_0.fit_transform(train)"
]
},
{
"cell_type": "code",
"execution_count": 424,
"metadata": {},
"outputs": [],
"source": [
"one_hot_encoder = OneHotEncoder(map_to_ints=map_to_ints)\n",
"\n",
"def make_pipeline_1(num_training_steps=num_training_steps):\n",
" pipeline_1 = Pipeline([\n",
" ('train_maker', TrainingSetMaker(\n",
" num_samples=num_samples,\n",
" num_batches=num_batches,\n",
" num_training_steps=num_training_steps,\n",
" num_target_steps=num_target_steps,\n",
" map_to_ints=map_to_ints,\n",
" )),\n",
" ('one_hot_encoder', one_hot_encoder),\n",
" ])\n",
" return pipeline_1\n",
"\n",
"pipeline_1 = make_pipeline_1()\n",
" \n",
"# you run this part several times, once for each training epoch\n",
"X1 = pipeline_1.fit_transform(X0)"
]
},
{
"cell_type": "code",
"execution_count": 440,
"metadata": {},
"outputs": [],
"source": [
"from keras.layers import Input, LSTM, Dense\n",
"from keras.models import Model\n",
"\n",
"\n",
"def make_model_v1(num_samples, num_training_steps=num_training_steps):\n",
" lstm_state_size = 20 # TODO see blog post for number\n",
"\n",
" inputs = Input(\n",
" batch_shape=(num_samples,num_training_steps, num_symbols),\n",
" dtype='float',\n",
" name='inputs',\n",
" )\n",
" intermediate_0 = LSTM(\n",
" units=lstm_state_size, \n",
" batch_size=num_samples, # gotcha\n",
" name='intermediate_0', \n",
" # return_sequences=True, you only need this on intermediate layers of RNNs\n",
" stateful=True, # gotcha - have to specify this\n",
" activation='relu',\n",
" dropout=0.5,\n",
" recurrent_dropout=0.5,\n",
" )(inputs)\n",
" outputs = Dense(num_symbols, activation='softmax')(intermediate_0)\n",
" model = Model(inputs=inputs, output=outputs)\n",
" model.compile(optimizer='sgd', loss='categorical_crossentropy', metrics=['accuracy'])\n",
" # optimizer rmsprop adam sgd\n",
" return model\n",
"\n",
"def make_model_v2(num_samples, num_training_steps=num_training_steps):\n",
" lstm_state_size = 101 # TODO see blog post for number\n",
"\n",
" inputs = Input(\n",
" batch_shape=(num_samples,num_training_steps, num_symbols),\n",
" dtype='float',\n",
" name='inputs',\n",
" )\n",
" intermediate_0 = LSTM(\n",
" units=lstm_state_size, \n",
" batch_size=num_samples, # gotcha\n",
" name='intermediate_0', \n",
" return_sequences=True, # you only need this on intermediate layers of RNNs\n",
" stateful=True, # gotcha - have to specify this\n",
" activation='relu',\n",
" dropout=0.5,\n",
" recurrent_dropout=0.5,\n",
" )(inputs)\n",
" intermediate_1 = LSTM(\n",
" units=lstm_state_size, \n",
" batch_size=num_samples, # gotcha\n",
" name='intermediate_1', \n",
" # return_sequences=True, you only need this on intermediate layers of RNNs\n",
" stateful=True, # gotcha - have to specify this\n",
" activation='relu',\n",
" dropout=0.5,\n",
" recurrent_dropout=0.5,\n",
" )(intermediate_0)\n",
" \n",
" outputs = Dense(num_symbols, activation='softmax')(intermediate_1)\n",
" model = Model(inputs=inputs, output=outputs)\n",
" model.compile(optimizer='sgd', loss='categorical_crossentropy', metrics=['accuracy'])\n",
" # optimizer rmsprop adam sgd\n",
" return model\n",
"\n",
"\n",
"make_model = make_model_v1\n",
"\n",
"def clone_model_with_different_size(trained_model, num_samples=1, num_training_steps=1):\n",
" \"\"\"takes a trained model and makes a new model with num_samples reduced to 1\"\"\"\n",
" new_model = make_model(num_samples, num_training_steps)\n",
" new_model.set_weights(trained_model.get_weights())\n",
" return new_model"
]
},
{
"cell_type": "code",
"execution_count": 441,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"_________________________________________________________________\n",
"Layer (type) Output Shape Param # \n",
"=================================================================\n",
"inputs (InputLayer) (230, 10, 101) 0 \n",
"_________________________________________________________________\n",
"intermediate_0 (LSTM) (230, 20) 9760 \n",
"_________________________________________________________________\n",
"dense_64 (Dense) (230, 101) 2121 \n",
"=================================================================\n",
"Total params: 11,881\n",
"Trainable params: 11,881\n",
"Non-trainable params: 0\n",
"_________________________________________________________________\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/johnb/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:24: UserWarning: Update your `Model` call to the Keras 2 API: `Model(inputs=Tensor(\"in..., outputs=Tensor(\"de...)`\n"
]
}
],
"source": [
"model = make_model(num_samples)\n",
"model.summary()"
]
},
{
"cell_type": "code",
"execution_count": 457,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 222us/step - loss: 2.5091 - acc: 0.2921\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4953 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.5185 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4871 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4899 - acc: 0.3013\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5336 - acc: 0.2947\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5713 - acc: 0.2878\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 218us/step - loss: 2.5140 - acc: 0.3013\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4625 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5335 - acc: 0.2903\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5271 - acc: 0.2984\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4856 - acc: 0.3118\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5035 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5258 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4998 - acc: 0.3035\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5144 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 230us/step - loss: 2.4933 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 215us/step - loss: 2.4856 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5172 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4843 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4561 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5364 - acc: 0.2957\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4791 - acc: 0.3029\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5038 - acc: 0.3040\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5125 - acc: 0.3002\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 216us/step - loss: 2.4934 - acc: 0.3092\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5405 - acc: 0.3032\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5159 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4997 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4939 - acc: 0.2990\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4900 - acc: 0.3092\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4940 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5401 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 216us/step - loss: 2.4799 - acc: 0.2998\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4792 - acc: 0.3118\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5368 - acc: 0.3005\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4989 - acc: 0.3092\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.4825 - acc: 0.3185\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5096 - acc: 0.3026\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4829 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5495 - acc: 0.2908\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4831 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5198 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4931 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4691 - acc: 0.3153\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5074 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4664 - acc: 0.3177\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5306 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4712 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4870 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4625 - acc: 0.3158\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5320 - acc: 0.2953\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5480 - acc: 0.2984\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 215us/step - loss: 2.4576 - acc: 0.3195\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4980 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5070 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5146 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 218us/step - loss: 2.4778 - acc: 0.3135\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5495 - acc: 0.2902\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4646 - acc: 0.3145\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4967 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.4912 - acc: 0.3008\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4922 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5096 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4787 - acc: 0.3201\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4833 - acc: 0.3143\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4893 - acc: 0.3095\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4776 - acc: 0.3174\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4708 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5278 - acc: 0.2955\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5070 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5067 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5183 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5010 - acc: 0.2989\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4514 - acc: 0.3155\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5433 - acc: 0.2866\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4434 - acc: 0.3225\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4789 - acc: 0.3101\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5523 - acc: 0.2990\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5062 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 215us/step - loss: 2.4611 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4990 - acc: 0.2974\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5183 - acc: 0.3045\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5232 - acc: 0.3026\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4936 - acc: 0.3137\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4901 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4558 - acc: 0.3179\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4756 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5347 - acc: 0.3027\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4513 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4939 - acc: 0.3163\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4950 - acc: 0.3196\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5631 - acc: 0.2977\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4720 - acc: 0.3155\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4899 - acc: 0.3187\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 216us/step - loss: 2.4806 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4953 - acc: 0.3043\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5804 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4774 - acc: 0.3216\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4829 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4873 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4813 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5385 - acc: 0.3014\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5194 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5176 - acc: 0.3019\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5250 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4820 - acc: 0.3108\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 217us/step - loss: 2.4849 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5197 - acc: 0.2934\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5292 - acc: 0.2987\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5118 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5043 - acc: 0.3068\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4682 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5024 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5069 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4749 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5255 - acc: 0.2895\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4827 - acc: 0.2982\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4782 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 221us/step - loss: 2.4763 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4506 - acc: 0.3184\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4600 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5848 - acc: 0.3069\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5689 - acc: 0.2989\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4987 - acc: 0.3066\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4874 - acc: 0.3172\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5633 - acc: 0.2915\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4712 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5126 - acc: 0.3005\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4960 - acc: 0.3040\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5387 - acc: 0.3002\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5298 - acc: 0.3026\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5401 - acc: 0.2990\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5188 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 196us/step - loss: 2.5149 - acc: 0.3055\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5431 - acc: 0.3043\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5168 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4856 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5428 - acc: 0.2965\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.5496 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4753 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5371 - acc: 0.3011\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4953 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4422 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5101 - acc: 0.3034\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.5113 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5122 - acc: 0.3043\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5303 - acc: 0.2963\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4849 - acc: 0.2960\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4928 - acc: 0.3167\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5345 - acc: 0.2940\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4912 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4804 - acc: 0.3055\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4538 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5293 - acc: 0.2953\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4642 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4576 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4745 - acc: 0.3167\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5363 - acc: 0.2998\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4919 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4654 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4427 - acc: 0.3177\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4930 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5390 - acc: 0.3053\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5136 - acc: 0.3037\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5384 - acc: 0.3052\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5169 - acc: 0.3053\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4586 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4772 - acc: 0.2989\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4767 - acc: 0.3087\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4907 - acc: 0.3101\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4863 - acc: 0.3140\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4348 - acc: 0.3114\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4383 - acc: 0.3277\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4834 - acc: 0.3155\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5145 - acc: 0.2966\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5064 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4549 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5413 - acc: 0.2986\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4705 - acc: 0.3076\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4786 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 217us/step - loss: 2.4727 - acc: 0.3206\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4724 - acc: 0.3171\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4835 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5436 - acc: 0.2998\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5182 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4952 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4736 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5228 - acc: 0.2994\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5289 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4907 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4394 - acc: 0.3229\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5444 - acc: 0.2955\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5056 - acc: 0.3188\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4690 - acc: 0.3164\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4437 - acc: 0.3172\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5389 - acc: 0.2969\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5609 - acc: 0.2900 0s - loss: 2.5778 - acc: 0.\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4809 - acc: 0.3171\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5185 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5016 - acc: 0.2973\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4773 - acc: 0.3166\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5160 - acc: 0.3040\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5168 - acc: 0.3035\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5071 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4988 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4901 - acc: 0.3095\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5698 - acc: 0.2932\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5124 - acc: 0.3076\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4659 - acc: 0.3145\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5166 - acc: 0.3029\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4822 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5255 - acc: 0.3019\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4843 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5077 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4997 - acc: 0.2981\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4910 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4635 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4826 - acc: 0.3164\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4886 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5126 - acc: 0.3006\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5015 - acc: 0.2997\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5239 - acc: 0.2990\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4872 - acc: 0.3032\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5030 - acc: 0.3084\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5054 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5039 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4862 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4880 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5276 - acc: 0.3068\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4996 - acc: 0.3150\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4650 - acc: 0.3213\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4770 - acc: 0.3053\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5300 - acc: 0.2923\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5198 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 195us/step - loss: 2.5272 - acc: 0.3003\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4482 - acc: 0.3177\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4840 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5018 - acc: 0.3035\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4626 - acc: 0.3192\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5672 - acc: 0.2924\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4882 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5147 - acc: 0.3034\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4957 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5138 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4616 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4907 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4963 - acc: 0.3200\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4770 - acc: 0.2997\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5181 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4733 - acc: 0.3145\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5043 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5111 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 221us/step - loss: 2.5167 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4998 - acc: 0.3171\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5469 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4899 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4820 - acc: 0.3068\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5563 - acc: 0.2971\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4916 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5088 - acc: 0.2948\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5096 - acc: 0.3027\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5390 - acc: 0.3026\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5113 - acc: 0.3069\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.5361 - acc: 0.2994\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5229 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5013 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5222 - acc: 0.2997\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.5151 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4766 - acc: 0.3084\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5314 - acc: 0.2940\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4392 - acc: 0.3229\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5366 - acc: 0.2894\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4618 - acc: 0.3185\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4941 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5037 - acc: 0.3084\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5324 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5157 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4914 - acc: 0.3161\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4939 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4998 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4787 - acc: 0.3068\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4922 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5208 - acc: 0.3000\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4514 - acc: 0.3161\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4852 - acc: 0.3258\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4849 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5719 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5663 - acc: 0.2981\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5453 - acc: 0.3066\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5265 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4348 - acc: 0.3214\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5072 - acc: 0.2979\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4963 - acc: 0.3095\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4787 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4798 - acc: 0.3068\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4751 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4916 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4672 - acc: 0.3142\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4977 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5085 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5080 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5063 - acc: 0.3163\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4807 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5128 - acc: 0.2977\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5306 - acc: 0.2984\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5190 - acc: 0.3076\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5000 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5105 - acc: 0.3053\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4853 - acc: 0.3029\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5386 - acc: 0.2944\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4837 - acc: 0.3172\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5210 - acc: 0.3002\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4805 - acc: 0.3156\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5314 - acc: 0.3187\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5359 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5597 - acc: 0.2955\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5462 - acc: 0.3040\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5331 - acc: 0.2989\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5038 - acc: 0.3040\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5102 - acc: 0.3137\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5146 - acc: 0.3035\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4955 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4624 - acc: 0.3176\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4804 - acc: 0.3116\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4913 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4767 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4886 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 197us/step - loss: 2.5051 - acc: 0.3014\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4623 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4833 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5051 - acc: 0.3129\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5271 - acc: 0.3076\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4528 - acc: 0.3213\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4654 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4599 - acc: 0.3140\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5230 - acc: 0.3008\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4944 - acc: 0.3145\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4709 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5374 - acc: 0.3006\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4952 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5063 - acc: 0.3021\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5222 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4820 - acc: 0.3114\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 219us/step - loss: 2.5331 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5175 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4957 - acc: 0.3060\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5119 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 215us/step - loss: 2.4951 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4876 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4482 - acc: 0.3172\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4781 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4968 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4799 - acc: 0.3216\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5111 - acc: 0.2986\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5159 - acc: 0.2997\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4843 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5238 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4871 - acc: 0.3232\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5152 - acc: 0.2987\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4953 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4954 - acc: 0.3148\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4889 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5036 - acc: 0.2998\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4726 - acc: 0.3145\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.5157 - acc: 0.3018\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4697 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4995 - acc: 0.2995\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4746 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4932 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5355 - acc: 0.2982\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4935 - acc: 0.3013\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.4891 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5228 - acc: 0.3010\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4962 - acc: 0.3155\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4875 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5063 - acc: 0.2958\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5351 - acc: 0.2997\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4883 - acc: 0.3148\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4983 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5085 - acc: 0.3072\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5355 - acc: 0.3005\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5261 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5349 - acc: 0.3040\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4995 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4633 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4770 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5114 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5230 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5103 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4800 - acc: 0.3116\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5243 - acc: 0.3043\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4632 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4829 - acc: 0.3026\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4666 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5251 - acc: 0.2916\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4991 - acc: 0.3145\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5026 - acc: 0.3027\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5136 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5735 - acc: 0.3005\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5166 - acc: 0.3060\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5103 - acc: 0.3126\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5195 - acc: 0.2969\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5303 - acc: 0.2963\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4735 - acc: 0.3158\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5447 - acc: 0.2982\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 216us/step - loss: 2.5221 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4853 - acc: 0.3003\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4976 - acc: 0.3158\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4820 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5134 - acc: 0.3018\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4843 - acc: 0.3163\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4822 - acc: 0.3150\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4720 - acc: 0.3159\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4942 - acc: 0.2990\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5364 - acc: 0.3056\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4973 - acc: 0.3043\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4762 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4922 - acc: 0.2994\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5089 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4918 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5243 - acc: 0.2965\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4896 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5012 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4565 - acc: 0.3101\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4882 - acc: 0.3142\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4852 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5241 - acc: 0.2982\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 216us/step - loss: 2.5259 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5423 - acc: 0.2936\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4852 - acc: 0.3026\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5551 - acc: 0.3010\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4950 - acc: 0.3021\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5304 - acc: 0.3006\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5521 - acc: 0.2960\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5582 - acc: 0.2995\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5114 - acc: 0.2977\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4919 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4671 - acc: 0.3140\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5297 - acc: 0.3055\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4672 - acc: 0.3156\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4639 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4555 - acc: 0.3208\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5104 - acc: 0.3003\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5112 - acc: 0.3023\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5399 - acc: 0.2948\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4692 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 2s 257us/step - loss: 2.5222 - acc: 0.2998\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 2s 250us/step - loss: 2.5534 - acc: 0.3019\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 2s 254us/step - loss: 2.4794 - acc: 0.3142\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 240us/step - loss: 2.5210 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5111 - acc: 0.3010\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4993 - acc: 0.3052 0s - loss: 2.5038 - acc: 0.30\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4954 - acc: 0.3084\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4604 - acc: 0.3177\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5084 - acc: 0.3014\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4787 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4547 - acc: 0.3126\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 216us/step - loss: 2.4645 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4676 - acc: 0.3225\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5260 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4988 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4858 - acc: 0.3092\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4835 - acc: 0.3217\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5071 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4995 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5103 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5009 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4883 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4687 - acc: 0.3195\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4998 - acc: 0.3066\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4923 - acc: 0.3118\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4872 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4681 - acc: 0.3161\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4831 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5239 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5258 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5087 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5183 - acc: 0.3034\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4796 - acc: 0.3053\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4481 - acc: 0.3230\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4985 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5128 - acc: 0.3003\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5313 - acc: 0.2958\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4743 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4798 - acc: 0.3190\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4687 - acc: 0.3161\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5277 - acc: 0.3034\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5050 - acc: 0.2971\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.4795 - acc: 0.3169\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4706 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4939 - acc: 0.3072\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4932 - acc: 0.3129\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4951 - acc: 0.3155\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 228us/step - loss: 2.4910 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5300 - acc: 0.2958\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5087 - acc: 0.3064 0s - loss: 2.5213 - acc: 0.30\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4732 - acc: 0.3047\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4928 - acc: 0.3143\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5195 - acc: 0.3026\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4597 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5159 - acc: 0.2984\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5009 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4909 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5017 - acc: 0.2974\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5223 - acc: 0.3037\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4901 - acc: 0.3179\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5375 - acc: 0.2887\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5303 - acc: 0.3021\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5645 - acc: 0.3024\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4878 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4846 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4712 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5157 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4955 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4856 - acc: 0.3034\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5000 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4980 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5318 - acc: 0.3002\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5252 - acc: 0.3019\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5133 - acc: 0.3010\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5288 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4915 - acc: 0.3174\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5584 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5068 - acc: 0.3095\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5196 - acc: 0.2882\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5612 - acc: 0.2971\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4629 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5304 - acc: 0.2929\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4671 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5195 - acc: 0.3000\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5454 - acc: 0.2929\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4950 - acc: 0.3095\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5064 - acc: 0.3002 0s - loss: 2.5164 - ac\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5241 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5003 - acc: 0.3118\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4971 - acc: 0.2989\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4964 - acc: 0.3066\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5472 - acc: 0.2997\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4685 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4963 - acc: 0.3153\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4998 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5504 - acc: 0.2928\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4745 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4496 - acc: 0.3132 1s - loss: 2.4476 \n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5341 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4909 - acc: 0.3060\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5201 - acc: 0.2997\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5199 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5448 - acc: 0.2992\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4831 - acc: 0.3056\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4814 - acc: 0.3166\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4707 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5037 - acc: 0.3172\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5338 - acc: 0.3040\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4800 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5431 - acc: 0.3011\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5062 - acc: 0.3053\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4954 - acc: 0.3137\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5155 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4824 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5358 - acc: 0.3084\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4845 - acc: 0.3032\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4778 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5423 - acc: 0.2932\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4802 - acc: 0.3153\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5799 - acc: 0.2910\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5344 - acc: 0.2990\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5095 - acc: 0.3147\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4589 - acc: 0.3195\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4544 - acc: 0.3254\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4994 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5410 - acc: 0.2994\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4880 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5020 - acc: 0.2997\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5145 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5227 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5258 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5157 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4968 - acc: 0.3019\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5040 - acc: 0.3103\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4586 - acc: 0.3137\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4856 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4809 - acc: 0.3155\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5142 - acc: 0.3027\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4996 - acc: 0.3180\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4798 - acc: 0.3177\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4816 - acc: 0.3163\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4773 - acc: 0.3110 1s - loss: 2.4181 -\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5063 - acc: 0.3055\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4561 - acc: 0.3161\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4932 - acc: 0.3043\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5166 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4805 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4891 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4887 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5499 - acc: 0.2977\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4828 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5458 - acc: 0.2878\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4892 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4929 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 215us/step - loss: 2.4871 - acc: 0.3185\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 220us/step - loss: 2.4959 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4823 - acc: 0.3195\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.4486 - acc: 0.3195\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4382 - acc: 0.3221\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5218 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4717 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5108 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4692 - acc: 0.3163\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4831 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4630 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4909 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4987 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4681 - acc: 0.3108\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4949 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4950 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4671 - acc: 0.3198\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4967 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5088 - acc: 0.3055\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4791 - acc: 0.3084\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5151 - acc: 0.3003\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4942 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5011 - acc: 0.3035\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4966 - acc: 0.3153\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4723 - acc: 0.3182\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5165 - acc: 0.3035\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4789 - acc: 0.3158\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4843 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5540 - acc: 0.2968\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5178 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.5151 - acc: 0.2976\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4974 - acc: 0.3043\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4728 - acc: 0.3147\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5288 - acc: 0.2969\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4594 - acc: 0.3171\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4970 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4902 - acc: 0.3177\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5228 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5011 - acc: 0.3014\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4828 - acc: 0.3180\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4320 - acc: 0.3237\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5156 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5063 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4981 - acc: 0.2998\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5136 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4812 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4938 - acc: 0.3060\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5296 - acc: 0.2910\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5213 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4505 - acc: 0.3188\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4831 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4733 - acc: 0.3177\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4925 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4418 - acc: 0.3198\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5390 - acc: 0.3066\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.5388 - acc: 0.3032\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4756 - acc: 0.3005\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5259 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5240 - acc: 0.2977\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4892 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5050 - acc: 0.3143\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4737 - acc: 0.3243\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5017 - acc: 0.3129\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4945 - acc: 0.3205\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4735 - acc: 0.3308\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4940 - acc: 0.3066\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4902 - acc: 0.3066\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4957 - acc: 0.2992\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4747 - acc: 0.3114\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5164 - acc: 0.3068\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4861 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4875 - acc: 0.3116\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4851 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5283 - acc: 0.3008\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5066 - acc: 0.3034\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4553 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5263 - acc: 0.3037\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5145 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4725 - acc: 0.3021\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4992 - acc: 0.3101\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4987 - acc: 0.3076\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5215 - acc: 0.3040\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4785 - acc: 0.3137\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4994 - acc: 0.3040\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4787 - acc: 0.3164\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5060 - acc: 0.2984\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5051 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5185 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5064 - acc: 0.3018\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4863 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4777 - acc: 0.3101\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4861 - acc: 0.3227\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5044 - acc: 0.3084\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5040 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4981 - acc: 0.3021\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5082 - acc: 0.2986\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4923 - acc: 0.3027\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4977 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5215 - acc: 0.3032\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4618 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4210 - acc: 0.3224\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4987 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5414 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4613 - acc: 0.3166\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4650 - acc: 0.3006\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5119 - acc: 0.3055\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4672 - acc: 0.3158\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5257 - acc: 0.3032\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4590 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5425 - acc: 0.2918\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4735 - acc: 0.3108\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4753 - acc: 0.3129\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4586 - acc: 0.3258\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4973 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5400 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4723 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5184 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4945 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4792 - acc: 0.3087\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4792 - acc: 0.3034\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4862 - acc: 0.3069\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4732 - acc: 0.3166\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4683 - acc: 0.3142\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4637 - acc: 0.3184\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 218us/step - loss: 2.5216 - acc: 0.3024\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4936 - acc: 0.2950\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4884 - acc: 0.3148\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5048 - acc: 0.3024\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5132 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4360 - acc: 0.3184\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5121 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4417 - acc: 0.3161\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5132 - acc: 0.3055\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5044 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5042 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4775 - acc: 0.3224 1s - loss: 2.4956\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4906 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4386 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4720 - acc: 0.3026\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.5285 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4845 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5398 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4927 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4555 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4831 - acc: 0.3179\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5184 - acc: 0.3005\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4853 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5274 - acc: 0.3060\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4778 - acc: 0.3150\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4727 - acc: 0.3116\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4839 - acc: 0.3205\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 218us/step - loss: 2.4507 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5112 - acc: 0.3142\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4766 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4972 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 218us/step - loss: 2.4608 - acc: 0.3114\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5514 - acc: 0.2973 1s - loss: 2.5460 -\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5009 - acc: 0.3222\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5017 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.5175 - acc: 0.3116\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4737 - acc: 0.3037\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5218 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5160 - acc: 0.3040\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5118 - acc: 0.3116\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4792 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4822 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5566 - acc: 0.3014\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.5136 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4725 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4982 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5059 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5314 - acc: 0.3014\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4784 - acc: 0.3129\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4804 - acc: 0.3114\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5147 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5244 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4948 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5108 - acc: 0.3026\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4465 - acc: 0.3185\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5115 - acc: 0.2944\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5164 - acc: 0.3076\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5038 - acc: 0.3076\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5338 - acc: 0.2995\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4939 - acc: 0.3053\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4973 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4797 - acc: 0.3002\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4636 - acc: 0.3185\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5064 - acc: 0.2971\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4657 - acc: 0.3156\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4819 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4969 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5013 - acc: 0.3084\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4762 - acc: 0.3233\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4855 - acc: 0.3060\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5250 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4681 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4799 - acc: 0.2992\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5199 - acc: 0.2953\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4774 - acc: 0.3184\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4704 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5002 - acc: 0.3060\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4791 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5462 - acc: 0.2998\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5101 - acc: 0.2950\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4529 - acc: 0.3203\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5059 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4842 - acc: 0.3161\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4678 - acc: 0.3068\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 215us/step - loss: 2.5213 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5434 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4635 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4656 - acc: 0.3087\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.5043 - acc: 0.3072\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5072 - acc: 0.3029\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5218 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4979 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 220us/step - loss: 2.4986 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4358 - acc: 0.3269\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4802 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5180 - acc: 0.3101\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4881 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4901 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5152 - acc: 0.2984\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5410 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4760 - acc: 0.3184\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4809 - acc: 0.3167\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4766 - acc: 0.3172\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.5173 - acc: 0.3019\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4895 - acc: 0.3060\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5495 - acc: 0.2979\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4990 - acc: 0.3140\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5184 - acc: 0.3013\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5141 - acc: 0.3000\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5254 - acc: 0.2987\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5126 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5231 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4943 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5296 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4770 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 215us/step - loss: 2.4718 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4900 - acc: 0.3027\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5040 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5234 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5137 - acc: 0.3095\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5102 - acc: 0.3024\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5584 - acc: 0.2963\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 235us/step - loss: 2.5115 - acc: 0.3029\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5193 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4733 - acc: 0.3208\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4951 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5213 - acc: 0.2971\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5217 - acc: 0.3035\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4588 - acc: 0.3156\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4488 - acc: 0.3184\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5527 - acc: 0.3018\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5099 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5476 - acc: 0.3024\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 217us/step - loss: 2.4777 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.5426 - acc: 0.3002\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5298 - acc: 0.3035\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5208 - acc: 0.3076\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5113 - acc: 0.2990\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.5050 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4979 - acc: 0.3023\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5182 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5276 - acc: 0.3003\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4878 - acc: 0.3118\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4608 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5502 - acc: 0.2926\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4947 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4987 - acc: 0.3143\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4843 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4762 - acc: 0.3184\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5299 - acc: 0.3000\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5208 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5156 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5114 - acc: 0.3060\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4832 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5137 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5187 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4960 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4763 - acc: 0.3126\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5147 - acc: 0.3027\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4998 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5110 - acc: 0.3258\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4743 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.5284 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4511 - acc: 0.3114\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5126 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4787 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4540 - acc: 0.3211\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5223 - acc: 0.2961\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4582 - acc: 0.3126\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4906 - acc: 0.3163\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4817 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4823 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5369 - acc: 0.2977\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5060 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4636 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5049 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5158 - acc: 0.3087\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4919 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4936 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5011 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4602 - acc: 0.3158\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5409 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5136 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4868 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5107 - acc: 0.3126\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4987 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5177 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5092 - acc: 0.3026\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5259 - acc: 0.3043\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4927 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5091 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4750 - acc: 0.3137\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5051 - acc: 0.3182\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4875 - acc: 0.3037\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5600 - acc: 0.2998\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 219us/step - loss: 2.5095 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5137 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4834 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5087 - acc: 0.3155\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4957 - acc: 0.3143\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5065 - acc: 0.3043\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5061 - acc: 0.3010\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5722 - acc: 0.2884\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 215us/step - loss: 2.4605 - acc: 0.3230\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4946 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 216us/step - loss: 2.5231 - acc: 0.2948\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4658 - acc: 0.3116\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5406 - acc: 0.3034\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5362 - acc: 0.2984\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5008 - acc: 0.3126\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4720 - acc: 0.3187\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4821 - acc: 0.3076\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4713 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4725 - acc: 0.3066\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4815 - acc: 0.3155\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5503 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5086 - acc: 0.3069\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.4702 - acc: 0.3174\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4880 - acc: 0.3087\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4588 - acc: 0.3158\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4672 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4946 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5300 - acc: 0.2990\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5777 - acc: 0.2958\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5307 - acc: 0.2913\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5027 - acc: 0.3180\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5295 - acc: 0.2989 0s - loss: 2.5272 - acc: 0.29\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5030 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4659 - acc: 0.3216\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4971 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5081 - acc: 0.3060\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4743 - acc: 0.2990\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5632 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4752 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4925 - acc: 0.3184\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5013 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4483 - acc: 0.3203\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4577 - acc: 0.3217\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5085 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4800 - acc: 0.3196\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4944 - acc: 0.2998\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4458 - acc: 0.3167\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4616 - acc: 0.3174\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4843 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4867 - acc: 0.3135\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4795 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4809 - acc: 0.3217\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5249 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5181 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4491 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5351 - acc: 0.2992\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5050 - acc: 0.2997\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4755 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5014 - acc: 0.3163\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5157 - acc: 0.3092\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4809 - acc: 0.3182\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4970 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4581 - acc: 0.3169\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4819 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4538 - acc: 0.3147\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4383 - acc: 0.3251\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4900 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5165 - acc: 0.3060\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4951 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4878 - acc: 0.3114\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5033 - acc: 0.3153\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5285 - acc: 0.3002\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5088 - acc: 0.2961\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5517 - acc: 0.2897\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5399 - acc: 0.3003\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5130 - acc: 0.3137\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4530 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4935 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5130 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5269 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.5379 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4816 - acc: 0.3147\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 216us/step - loss: 2.5049 - acc: 0.3003\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5116 - acc: 0.3056\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4599 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4688 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5007 - acc: 0.3023\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4630 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4478 - acc: 0.3198\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5611 - acc: 0.2900\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4636 - acc: 0.3185\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4768 - acc: 0.3095\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5359 - acc: 0.3002\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5242 - acc: 0.2957\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4964 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4741 - acc: 0.3101\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4637 - acc: 0.3200\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4608 - acc: 0.3221\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5011 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4756 - acc: 0.3180\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4938 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5295 - acc: 0.3076\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5046 - acc: 0.3145\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5134 - acc: 0.3011\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4709 - acc: 0.3114\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4588 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4938 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5572 - acc: 0.2952\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4996 - acc: 0.3066\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4779 - acc: 0.3053\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4644 - acc: 0.3185\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5009 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5063 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4736 - acc: 0.3155\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5091 - acc: 0.2937\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4840 - acc: 0.3055\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5135 - acc: 0.3035\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4889 - acc: 0.3053\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5193 - acc: 0.3092\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5554 - acc: 0.2923\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4907 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4830 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5467 - acc: 0.3026\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5336 - acc: 0.2961\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4667 - acc: 0.3147\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4614 - acc: 0.3169\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4976 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5355 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4646 - acc: 0.3293\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4867 - acc: 0.3164\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4971 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4700 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4877 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4907 - acc: 0.3034\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5309 - acc: 0.3018\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4960 - acc: 0.3055\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5071 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.5203 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4985 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5090 - acc: 0.3023\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4826 - acc: 0.3072\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4650 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4433 - acc: 0.3195\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5391 - acc: 0.3029\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5134 - acc: 0.2976\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5495 - acc: 0.2976\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4454 - acc: 0.3227\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4896 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4024 - acc: 0.3309\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4383 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4872 - acc: 0.3072\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5301 - acc: 0.3018\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4641 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4973 - acc: 0.3118\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 215us/step - loss: 2.5311 - acc: 0.3040\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5129 - acc: 0.3053\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5403 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4536 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4835 - acc: 0.3064\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5387 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4924 - acc: 0.3084\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4801 - acc: 0.3164\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4870 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4687 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5119 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5297 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4791 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5440 - acc: 0.2989\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4633 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4726 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5082 - acc: 0.3156\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4382 - acc: 0.3213\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5008 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5302 - acc: 0.2992\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5073 - acc: 0.3011\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4867 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4879 - acc: 0.3193\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5127 - acc: 0.3027\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5232 - acc: 0.2997\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4963 - acc: 0.2981\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5154 - acc: 0.2984\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4952 - acc: 0.3003\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4902 - acc: 0.3163\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4990 - acc: 0.3101\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5133 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5409 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4788 - acc: 0.3021\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5147 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5073 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5591 - acc: 0.2936 0s - loss: 2.5404 - acc\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4868 - acc: 0.3118\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4743 - acc: 0.3101\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4711 - acc: 0.3198\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.4750 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4697 - acc: 0.3137\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4701 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4926 - acc: 0.3024\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5263 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5368 - acc: 0.3008\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4895 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5318 - acc: 0.2992\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4953 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4924 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4974 - acc: 0.3055\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4992 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4710 - acc: 0.3150\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4953 - acc: 0.3118\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5009 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5006 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4734 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4727 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5226 - acc: 0.3023\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4608 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 197us/step - loss: 2.4902 - acc: 0.3035\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4981 - acc: 0.3137\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4420 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4641 - acc: 0.3153\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4799 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5009 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4879 - acc: 0.3214\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5003 - acc: 0.3056\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5054 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5239 - acc: 0.2895\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4754 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4803 - acc: 0.3150\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4918 - acc: 0.3010\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4746 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4833 - acc: 0.3076\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4924 - acc: 0.3072\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5679 - acc: 0.3024\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4804 - acc: 0.3150\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5127 - acc: 0.3024\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4815 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4945 - acc: 0.3095\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4643 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5403 - acc: 0.3000\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5434 - acc: 0.2924\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4937 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4305 - acc: 0.3295\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4530 - acc: 0.3056\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4698 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4712 - acc: 0.3095\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5367 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4925 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4690 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4916 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4703 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4703 - acc: 0.3072\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4695 - acc: 0.3111 1s - loss: 2.426\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4396 - acc: 0.3213\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5101 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5371 - acc: 0.2953\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5173 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5075 - acc: 0.2986\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5427 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5172 - acc: 0.3053\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.4639 - acc: 0.3179\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5085 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4727 - acc: 0.3172\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5503 - acc: 0.2989\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4919 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5371 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5139 - acc: 0.3000\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4857 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4826 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4623 - acc: 0.3129\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5066 - acc: 0.2998\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5088 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4602 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4863 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4994 - acc: 0.3147\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4615 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4948 - acc: 0.3114\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5353 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4910 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4898 - acc: 0.3023\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5259 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4923 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4978 - acc: 0.3135\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5330 - acc: 0.3021\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4814 - acc: 0.3084\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5219 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5226 - acc: 0.3056\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4933 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5470 - acc: 0.2977\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4955 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5066 - acc: 0.2997\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5107 - acc: 0.3066\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4755 - acc: 0.3163\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4848 - acc: 0.3087\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4941 - acc: 0.3000\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4808 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5356 - acc: 0.3129\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4614 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5152 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4728 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4670 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5108 - acc: 0.3068\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5189 - acc: 0.3137\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4800 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5238 - acc: 0.2940\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4819 - acc: 0.3176\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4264 - acc: 0.3198\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4875 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4392 - acc: 0.3230\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5203 - acc: 0.3011\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4858 - acc: 0.3055\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4845 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4591 - acc: 0.3135\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4904 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4714 - acc: 0.3159\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4812 - acc: 0.2998\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4907 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5142 - acc: 0.2968\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5014 - acc: 0.3176\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5239 - acc: 0.3026\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4501 - acc: 0.3190\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4974 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4896 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5127 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.5020 - acc: 0.2953\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4893 - acc: 0.3052 1s - loss: 2.4760 - \n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.5303 - acc: 0.2965\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5002 - acc: 0.2958\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5136 - acc: 0.3019\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4964 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4713 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4591 - acc: 0.3179\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.4615 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 215us/step - loss: 2.4542 - acc: 0.3187\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5007 - acc: 0.3010\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4748 - acc: 0.3182\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5354 - acc: 0.3035\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4939 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5018 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4913 - acc: 0.3000\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 217us/step - loss: 2.4965 - acc: 0.3018\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4657 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4526 - acc: 0.3164\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5175 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.5022 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5036 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4800 - acc: 0.3163\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5143 - acc: 0.3029\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4572 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4689 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5212 - acc: 0.3014\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4983 - acc: 0.3140\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5236 - acc: 0.3024\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4804 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5116 - acc: 0.3040\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5255 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4789 - acc: 0.3092\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5142 - acc: 0.3023\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5135 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4853 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4730 - acc: 0.3137\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4431 - acc: 0.3213\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4649 - acc: 0.3211\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5185 - acc: 0.3018\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4728 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5150 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5268 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4678 - acc: 0.3116\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4903 - acc: 0.3155\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4591 - acc: 0.3206\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5302 - acc: 0.3026\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.5280 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4623 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4448 - acc: 0.3187\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4525 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.5073 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4973 - acc: 0.3013\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5175 - acc: 0.3023\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5231 - acc: 0.3060\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4990 - acc: 0.3027\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4835 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4558 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4689 - acc: 0.3101\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4846 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5193 - acc: 0.3095\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5307 - acc: 0.3002\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5023 - acc: 0.2997\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4905 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4859 - acc: 0.3148\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4719 - acc: 0.3172\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4775 - acc: 0.3184\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5111 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4820 - acc: 0.3180\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4786 - acc: 0.3155\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4970 - acc: 0.3003\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 217us/step - loss: 2.4659 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4726 - acc: 0.3180\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5083 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4857 - acc: 0.3163\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4993 - acc: 0.3043\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4667 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.5011 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5166 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4678 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4834 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5447 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4867 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4299 - acc: 0.3261\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4789 - acc: 0.3201\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5097 - acc: 0.3056\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4822 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4993 - acc: 0.3116\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5319 - acc: 0.3034\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4486 - acc: 0.3116\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5064 - acc: 0.3029\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5214 - acc: 0.2995\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4691 - acc: 0.3092\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5143 - acc: 0.3177\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4945 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4990 - acc: 0.3024\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4704 - acc: 0.3153\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4715 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5140 - acc: 0.3053\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4694 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5288 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4609 - acc: 0.3159\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 217us/step - loss: 2.4536 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - ETA: 0s - loss: 2.4986 - acc: 0.319 - 1s 208us/step - loss: 2.5027 - acc: 0.3200\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4662 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5639 - acc: 0.2973\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5005 - acc: 0.3037\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4493 - acc: 0.3232\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5245 - acc: 0.2987\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4737 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5127 - acc: 0.3034\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5193 - acc: 0.3126\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 216us/step - loss: 2.5053 - acc: 0.3037\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4822 - acc: 0.3087\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5263 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4809 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5020 - acc: 0.3008\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5042 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5227 - acc: 0.3018\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4682 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4890 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5356 - acc: 0.3018\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4782 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4816 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4921 - acc: 0.2971\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.5058 - acc: 0.3118\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4674 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4778 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4749 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5255 - acc: 0.3010\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4725 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5287 - acc: 0.3003\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4863 - acc: 0.3153\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4901 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4796 - acc: 0.3087\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5169 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4933 - acc: 0.3087\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5315 - acc: 0.3013\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5131 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4603 - acc: 0.3108\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4723 - acc: 0.3055\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4778 - acc: 0.3135\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4943 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4623 - acc: 0.3129 0s - loss: 2.4908 - ac\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4802 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4799 - acc: 0.3040\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5510 - acc: 0.3019\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4855 - acc: 0.3167\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4931 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5110 - acc: 0.3029\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4802 - acc: 0.3095\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5082 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5040 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5249 - acc: 0.3055\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5382 - acc: 0.2950\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4455 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5326 - acc: 0.2952\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4824 - acc: 0.3193\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4952 - acc: 0.3142\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5644 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4701 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4939 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5021 - acc: 0.3114\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4849 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4714 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5414 - acc: 0.2981\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4984 - acc: 0.3076\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4742 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4529 - acc: 0.3227\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 236us/step - loss: 2.4615 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 220us/step - loss: 2.4866 - acc: 0.3147\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 225us/step - loss: 2.4881 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 224us/step - loss: 2.4913 - acc: 0.3011\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 224us/step - loss: 2.5078 - acc: 0.3026\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 217us/step - loss: 2.5039 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5073 - acc: 0.3018\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5294 - acc: 0.3143\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5044 - acc: 0.3055\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4611 - acc: 0.3271\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4898 - acc: 0.3140\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4900 - acc: 0.3108\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4651 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4843 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5150 - acc: 0.3013\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5050 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4964 - acc: 0.3068\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4480 - acc: 0.3248\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5421 - acc: 0.2989\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4868 - acc: 0.3069\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4943 - acc: 0.3034\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5016 - acc: 0.3066\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4559 - acc: 0.3163\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4979 - acc: 0.3019\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4946 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5154 - acc: 0.3014\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4921 - acc: 0.3143\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5147 - acc: 0.3092\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5372 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4869 - acc: 0.3143\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5428 - acc: 0.2963\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5063 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5073 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5301 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5117 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4868 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5290 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5059 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4976 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4982 - acc: 0.3171\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5068 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4615 - acc: 0.3198\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5039 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5437 - acc: 0.2979\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5159 - acc: 0.3200\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 215us/step - loss: 2.5322 - acc: 0.3163\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4720 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4615 - acc: 0.3190\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5010 - acc: 0.2984\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5345 - acc: 0.3032\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4630 - acc: 0.3172\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4809 - acc: 0.3142\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5432 - acc: 0.3010\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4877 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4965 - acc: 0.3155\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4701 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4666 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4901 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4739 - acc: 0.3021\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5012 - acc: 0.3116\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5274 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4884 - acc: 0.3056\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5021 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4963 - acc: 0.3087\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4605 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4554 - acc: 0.3205\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 217us/step - loss: 2.4720 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4924 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4980 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4943 - acc: 0.3010\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4847 - acc: 0.3126\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5106 - acc: 0.3069\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4987 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4629 - acc: 0.3135\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5012 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4546 - acc: 0.3203\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5099 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5176 - acc: 0.3029\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4813 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4687 - acc: 0.3129\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5342 - acc: 0.3002\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4975 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4650 - acc: 0.3203\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5083 - acc: 0.3053\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5128 - acc: 0.3052\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4683 - acc: 0.3185\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4693 - acc: 0.3206\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4870 - acc: 0.3192\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5340 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5293 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5123 - acc: 0.3148\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5251 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4789 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5174 - acc: 0.3014\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4949 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4924 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4666 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4982 - acc: 0.3068\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4620 - acc: 0.3174\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5016 - acc: 0.3076\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4773 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4999 - acc: 0.3029\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5123 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4592 - acc: 0.3056\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4954 - acc: 0.3148\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.4950 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5377 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5166 - acc: 0.3026\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4758 - acc: 0.3147\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4856 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5151 - acc: 0.2974\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4494 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5222 - acc: 0.2911\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4976 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4809 - acc: 0.3029\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5226 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4935 - acc: 0.3027\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5536 - acc: 0.3000\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4879 - acc: 0.3143\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4998 - acc: 0.3060\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4924 - acc: 0.3092\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4865 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4877 - acc: 0.3087\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.5070 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4662 - acc: 0.3126\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5091 - acc: 0.3072\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5247 - acc: 0.3005\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4776 - acc: 0.3143\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5164 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4870 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5045 - acc: 0.3003\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4595 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5134 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4917 - acc: 0.3043\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4961 - acc: 0.3092\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4862 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5458 - acc: 0.2992\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5167 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 197us/step - loss: 2.5143 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5201 - acc: 0.3013\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5279 - acc: 0.2989\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5193 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5013 - acc: 0.3072\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4802 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5129 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4900 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5521 - acc: 0.2990\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4663 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5021 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.4478 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4652 - acc: 0.3182\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4571 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5081 - acc: 0.3118\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5003 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5184 - acc: 0.2982\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4715 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5261 - acc: 0.2911\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4797 - acc: 0.3164\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5131 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5001 - acc: 0.3032\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.5364 - acc: 0.2940\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4839 - acc: 0.3150\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5268 - acc: 0.3076 0s - loss: 2.5408 - acc: 0\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4680 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4505 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5258 - acc: 0.3003\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4843 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4739 - acc: 0.3147\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4946 - acc: 0.3140\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4956 - acc: 0.3011\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4734 - acc: 0.3095\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4589 - acc: 0.3176\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5074 - acc: 0.3060\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4920 - acc: 0.3108\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5583 - acc: 0.2886\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5159 - acc: 0.3072\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4718 - acc: 0.3060\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4980 - acc: 0.3140\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5336 - acc: 0.3037\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5078 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4984 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4881 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5467 - acc: 0.2998\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4680 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5261 - acc: 0.2976\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5016 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5268 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4504 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5053 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4873 - acc: 0.3153\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5364 - acc: 0.2998\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4981 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4548 - acc: 0.3180\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5229 - acc: 0.2963\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5055 - acc: 0.3142\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5284 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4696 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5010 - acc: 0.3095\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5250 - acc: 0.3055\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4826 - acc: 0.3005\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5005 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4794 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4878 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4990 - acc: 0.3180\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4743 - acc: 0.3032\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4951 - acc: 0.3068\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4795 - acc: 0.3108\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5049 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4970 - acc: 0.3145\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5035 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4629 - acc: 0.3101\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4670 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5210 - acc: 0.3103\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4735 - acc: 0.3214\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5042 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4410 - acc: 0.3237\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4332 - acc: 0.3205\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4936 - acc: 0.3159\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4447 - acc: 0.3232\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5060 - acc: 0.2990\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5229 - acc: 0.3013\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4754 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4509 - acc: 0.3232\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4986 - acc: 0.3006\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4671 - acc: 0.3190\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4598 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5266 - acc: 0.2947\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5068 - acc: 0.3024\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4770 - acc: 0.3153\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4801 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5098 - acc: 0.2989\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4945 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4997 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4949 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5386 - acc: 0.3019 0s - loss: 2.5528 - acc: 0\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4771 - acc: 0.3182\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5225 - acc: 0.3118\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4844 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4998 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4519 - acc: 0.3192\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4843 - acc: 0.3087\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4983 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4841 - acc: 0.3155\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5055 - acc: 0.3018\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5131 - acc: 0.3101\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4979 - acc: 0.3092\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4943 - acc: 0.3000\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4977 - acc: 0.2953\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4868 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4819 - acc: 0.3069\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4950 - acc: 0.3082\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4914 - acc: 0.3172\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4874 - acc: 0.3101\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5123 - acc: 0.3084\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4803 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4336 - acc: 0.3196\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4850 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4808 - acc: 0.3145\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5240 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4851 - acc: 0.3143\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4435 - acc: 0.3167\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5024 - acc: 0.3018\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5108 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4373 - acc: 0.3192\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4830 - acc: 0.3166\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4813 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4691 - acc: 0.3126\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5016 - acc: 0.3018\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4668 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5358 - acc: 0.3027\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4902 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4605 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4946 - acc: 0.3027\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5191 - acc: 0.2989\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4363 - acc: 0.3203\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4682 - acc: 0.3201\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4934 - acc: 0.3116\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5005 - acc: 0.3169\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4755 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4516 - acc: 0.3159\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4783 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5046 - acc: 0.3029\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4790 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4574 - acc: 0.3158\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4921 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4852 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 219us/step - loss: 2.4553 - acc: 0.3198\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4481 - acc: 0.3190\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5307 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4789 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4603 - acc: 0.3164\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5048 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5062 - acc: 0.3019\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4832 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4427 - acc: 0.3153\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5201 - acc: 0.2984\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4868 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4809 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4408 - acc: 0.3198\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5045 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4618 - acc: 0.3150\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4892 - acc: 0.3108\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4706 - acc: 0.3143\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5501 - acc: 0.2982\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5495 - acc: 0.2937\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5161 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4538 - acc: 0.3224\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4805 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4498 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.5214 - acc: 0.3006\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4979 - acc: 0.3153\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4900 - acc: 0.3145\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4885 - acc: 0.3023\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5155 - acc: 0.3037\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4785 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4804 - acc: 0.3203\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4769 - acc: 0.3147\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4791 - acc: 0.3056\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5060 - acc: 0.3034\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5447 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4847 - acc: 0.3180\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5232 - acc: 0.2995\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4525 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5059 - acc: 0.3024\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5330 - acc: 0.3002\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5111 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5301 - acc: 0.2968\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4329 - acc: 0.3274\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4758 - acc: 0.3137\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5011 - acc: 0.3037\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4850 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4592 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4392 - acc: 0.3219\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.5166 - acc: 0.3011\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4971 - acc: 0.3137\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4655 - acc: 0.3084\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4659 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4681 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4513 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4499 - acc: 0.3184\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5258 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4827 - acc: 0.3176\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4600 - acc: 0.3221\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4918 - acc: 0.3153\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5043 - acc: 0.3066\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5336 - acc: 0.3113\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5367 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4677 - acc: 0.3251\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4975 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5283 - acc: 0.2953\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5665 - acc: 0.2982\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4852 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4870 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4591 - acc: 0.3161\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4678 - acc: 0.3172\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 219us/step - loss: 2.5273 - acc: 0.3066\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4533 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4837 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5373 - acc: 0.2995\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4981 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4700 - acc: 0.3198\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5111 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4611 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5047 - acc: 0.2934\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4738 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4559 - acc: 0.3163\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4591 - acc: 0.3106\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4973 - acc: 0.3056\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4927 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4802 - acc: 0.3076\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5214 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4985 - acc: 0.3116\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5020 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4962 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4530 - acc: 0.3277\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4801 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 217us/step - loss: 2.4704 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4621 - acc: 0.3142\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5099 - acc: 0.3035\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5072 - acc: 0.3126\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5164 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5076 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4114 - acc: 0.3349\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4792 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4490 - acc: 0.3229\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4496 - acc: 0.3271\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5152 - acc: 0.3087\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4737 - acc: 0.3235\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4918 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4967 - acc: 0.3056\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4965 - acc: 0.3040\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5276 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5030 - acc: 0.2969\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.4992 - acc: 0.3013\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4956 - acc: 0.3034\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4951 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4946 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5197 - acc: 0.3011\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4901 - acc: 0.3021\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5291 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5145 - acc: 0.3008\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4698 - acc: 0.3161\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4460 - acc: 0.3167\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4912 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5221 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4422 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5519 - acc: 0.2974\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5150 - acc: 0.3093\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4637 - acc: 0.3156\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5022 - acc: 0.3029\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5229 - acc: 0.3064\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4975 - acc: 0.3031\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4763 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5269 - acc: 0.2976\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4906 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5056 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5108 - acc: 0.3161\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 216us/step - loss: 2.4780 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4821 - acc: 0.3100\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4400 - acc: 0.3195\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4818 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4696 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4989 - acc: 0.3092\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5153 - acc: 0.2973\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4872 - acc: 0.3076\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5157 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4613 - acc: 0.3150\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5003 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5084 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4918 - acc: 0.3116\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4961 - acc: 0.3159 1s - loss: 2.4645 -\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4935 - acc: 0.3140\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4993 - acc: 0.3069\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.5030 - acc: 0.3013\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4830 - acc: 0.3066\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4552 - acc: 0.3164\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4997 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4928 - acc: 0.3134\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5219 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4520 - acc: 0.3211\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4852 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4758 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4572 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4724 - acc: 0.3169\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4752 - acc: 0.3148\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5044 - acc: 0.3023\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5516 - acc: 0.2984\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4730 - acc: 0.3177\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4486 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4863 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4844 - acc: 0.3122\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4896 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4884 - acc: 0.3071\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4424 - acc: 0.3188\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4958 - acc: 0.3053\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4792 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 216us/step - loss: 2.4784 - acc: 0.3143\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4877 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4826 - acc: 0.3166\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4922 - acc: 0.3035\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5098 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4681 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4543 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4903 - acc: 0.3145\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5184 - acc: 0.3006\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4665 - acc: 0.3143\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4675 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 197us/step - loss: 2.5091 - acc: 0.3084\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4617 - acc: 0.3171\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4997 - acc: 0.3072\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4854 - acc: 0.3148\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4825 - acc: 0.3147\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4573 - acc: 0.3184\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4871 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4486 - acc: 0.3177\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4737 - acc: 0.3140\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5323 - acc: 0.2984\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5102 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4791 - acc: 0.3219\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4615 - acc: 0.3198\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4580 - acc: 0.3217\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4828 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 197us/step - loss: 2.4687 - acc: 0.3155\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5486 - acc: 0.3023\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5200 - acc: 0.3095\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5196 - acc: 0.2966\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5015 - acc: 0.3116\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 219us/step - loss: 2.4760 - acc: 0.3155\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5159 - acc: 0.3124\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4792 - acc: 0.3233\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4796 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.5366 - acc: 0.3011\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4552 - acc: 0.3129\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4735 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4134 - acc: 0.3279\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 213us/step - loss: 2.4794 - acc: 0.3095\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4858 - acc: 0.2986\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5361 - acc: 0.2952\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5203 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5132 - acc: 0.3090\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4952 - acc: 0.3153\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4708 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4987 - acc: 0.3148\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.4744 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5013 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5145 - acc: 0.2963\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5075 - acc: 0.3010\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5362 - acc: 0.2982\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5339 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5194 - acc: 0.3101\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4900 - acc: 0.3169\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4965 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5162 - acc: 0.3052\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4851 - acc: 0.3203\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5047 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5013 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4881 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4944 - acc: 0.3129\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5112 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4434 - acc: 0.3238\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4856 - acc: 0.3137\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5332 - acc: 0.3077\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5035 - acc: 0.3074\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4697 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5380 - acc: 0.3016\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4881 - acc: 0.3085\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4928 - acc: 0.3129\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4978 - acc: 0.3142\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4889 - acc: 0.3089\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4812 - acc: 0.3163\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4520 - acc: 0.3200\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4581 - acc: 0.3116 0s - loss: 2.4655 - acc:\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5006 - acc: 0.3108\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4424 - acc: 0.3166\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4745 - acc: 0.3142\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4897 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4664 - acc: 0.3121\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4937 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 212us/step - loss: 2.4708 - acc: 0.3148\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5069 - acc: 0.3050\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4808 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4669 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4817 - acc: 0.3164\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4929 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4786 - acc: 0.3118\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4969 - acc: 0.3037\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4800 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4857 - acc: 0.3138\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 196us/step - loss: 2.4985 - acc: 0.3058\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5074 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4512 - acc: 0.3130\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4580 - acc: 0.3081\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.5377 - acc: 0.3063\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5270 - acc: 0.2950\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.5065 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.5169 - acc: 0.3040\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4804 - acc: 0.3143 0s - loss: 2.4795 - acc: 0.31\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4586 - acc: 0.3251\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4677 - acc: 0.3145\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4527 - acc: 0.3195\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5031 - acc: 0.3026\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.5395 - acc: 0.3048\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 214us/step - loss: 2.5106 - acc: 0.3045\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5179 - acc: 0.3137\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4947 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5070 - acc: 0.3092\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4680 - acc: 0.3140\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5041 - acc: 0.3035\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.5641 - acc: 0.2852\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 199us/step - loss: 2.4927 - acc: 0.3156\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5316 - acc: 0.3006\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4239 - acc: 0.3198\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.5137 - acc: 0.3079\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4718 - acc: 0.3190\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4580 - acc: 0.3114\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4942 - acc: 0.3135\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 198us/step - loss: 2.4852 - acc: 0.3127\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.5306 - acc: 0.3021\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4864 - acc: 0.3066\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4912 - acc: 0.3159\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4812 - acc: 0.3032\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4943 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4522 - acc: 0.3238\n",
"Epoch 1/1\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4666 - acc: 0.3135\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4846 - acc: 0.3060\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4906 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4941 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4589 - acc: 0.3145\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5352 - acc: 0.2974\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4879 - acc: 0.3042\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.5118 - acc: 0.3118\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4974 - acc: 0.2982\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.4791 - acc: 0.3082\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 206us/step - loss: 2.4824 - acc: 0.3153\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4308 - acc: 0.3180\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4929 - acc: 0.3142\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4771 - acc: 0.3140\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.4928 - acc: 0.3196\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.5279 - acc: 0.3013\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4995 - acc: 0.3061\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5086 - acc: 0.3035\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 209us/step - loss: 2.5122 - acc: 0.2989\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4405 - acc: 0.3227\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 201us/step - loss: 2.4973 - acc: 0.3105\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.4922 - acc: 0.3047\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 211us/step - loss: 2.4907 - acc: 0.3090\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4880 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4764 - acc: 0.3116\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4730 - acc: 0.3151\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 208us/step - loss: 2.4994 - acc: 0.3039\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 200us/step - loss: 2.4980 - acc: 0.3110\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 202us/step - loss: 2.5062 - acc: 0.3111\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 207us/step - loss: 2.4291 - acc: 0.3351\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 203us/step - loss: 2.4638 - acc: 0.3235\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 210us/step - loss: 2.4912 - acc: 0.3148\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 205us/step - loss: 2.5013 - acc: 0.3098\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 204us/step - loss: 2.4593 - acc: 0.3132\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 237us/step - loss: 2.4772 - acc: 0.3095\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 2s 254us/step - loss: 2.5373 - acc: 0.3043\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 2s 244us/step - loss: 2.5139 - acc: 0.3097\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 229us/step - loss: 2.5444 - acc: 0.2942\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 235us/step - loss: 2.5177 - acc: 0.2997\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 224us/step - loss: 2.4587 - acc: 0.3164\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 232us/step - loss: 2.4845 - acc: 0.3119\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 229us/step - loss: 2.4963 - acc: 0.3069\n",
"Epoch 1/1\n",
"6210/6210 [==============================] - 1s 219us/step - loss: 2.5328 - acc: 0.3008\n",
"Epoch 1/1\n",
"5060/6210 [=======================>......] - ETA: 0s - loss: 2.4995 - acc: 0.3053"
]
},
{
"ename": "KeyboardInterrupt",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-457-1104e3584755>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0mbatch_size\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mnum_samples\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;31m# gotcha - confused what this is\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0mepochs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;31m# do you want to train more than once on the same data?\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 8\u001b[0;31m \u001b[0mshuffle\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;31m# gotcha - shuffling doesn't make sense here b/c of prescribed format\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 9\u001b[0m \u001b[0;31m# validation_data=(Xs_valid, ys_valid),\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 10\u001b[0m )\n",
"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, **kwargs)\u001b[0m\n\u001b[1;32m 1655\u001b[0m \u001b[0minitial_epoch\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0minitial_epoch\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1656\u001b[0m \u001b[0msteps_per_epoch\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msteps_per_epoch\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1657\u001b[0;31m validation_steps=validation_steps)\n\u001b[0m\u001b[1;32m 1658\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1659\u001b[0m def evaluate(self, x=None, y=None,\n",
"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36m_fit_loop\u001b[0;34m(self, f, ins, out_labels, batch_size, epochs, verbose, callbacks, val_f, val_ins, shuffle, callback_metrics, initial_epoch, steps_per_epoch, validation_steps)\u001b[0m\n\u001b[1;32m 1211\u001b[0m \u001b[0mbatch_logs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'size'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch_ids\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1212\u001b[0m \u001b[0mcallbacks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mon_batch_begin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch_index\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_logs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1213\u001b[0;31m \u001b[0mouts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mins_batch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1214\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mouts\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlist\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1215\u001b[0m \u001b[0mouts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mouts\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, inputs)\u001b[0m\n\u001b[1;32m 2355\u001b[0m \u001b[0msession\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mget_session\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2356\u001b[0m updated = session.run(fetches=fetches, feed_dict=feed_dict,\n\u001b[0;32m-> 2357\u001b[0;31m **self.session_kwargs)\n\u001b[0m\u001b[1;32m 2358\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mupdated\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moutputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2359\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 887\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 888\u001b[0m result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[0;32m--> 889\u001b[0;31m run_metadata_ptr)\n\u001b[0m\u001b[1;32m 890\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 891\u001b[0m \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_run\u001b[0;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 1118\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mfinal_fetches\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mfinal_targets\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mhandle\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mfeed_dict_tensor\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1119\u001b[0m results = self._do_run(handle, final_targets, final_fetches,\n\u001b[0;32m-> 1120\u001b[0;31m feed_dict_tensor, options, run_metadata)\n\u001b[0m\u001b[1;32m 1121\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1122\u001b[0m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_do_run\u001b[0;34m(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 1315\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mhandle\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1316\u001b[0m return self._do_call(_run_fn, self._session, feeds, fetches, targets,\n\u001b[0;32m-> 1317\u001b[0;31m options, run_metadata)\n\u001b[0m\u001b[1;32m 1318\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1319\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_do_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0m_prun_fn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_session\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeeds\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_do_call\u001b[0;34m(self, fn, *args)\u001b[0m\n\u001b[1;32m 1321\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_do_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1322\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1323\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1324\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0merrors\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mOpError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1325\u001b[0m \u001b[0mmessage\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcompat\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mas_text\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmessage\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_run_fn\u001b[0;34m(session, feed_dict, fetch_list, target_list, options, run_metadata)\u001b[0m\n\u001b[1;32m 1300\u001b[0m return tf_session.TF_Run(session, options,\n\u001b[1;32m 1301\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtarget_list\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1302\u001b[0;31m status, run_metadata)\n\u001b[0m\u001b[1;32m 1303\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1304\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_prun_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msession\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mKeyboardInterrupt\u001b[0m: "
]
}
],
"source": [
"for i in range(999999):\n",
" X1_train, X1_target = pipeline_1.fit_transform(X0)\n",
" model.fit(\n",
" X1_train, \n",
" X1_target, \n",
" batch_size=num_samples, # gotcha - confused what this is\n",
" epochs=1, # do you want to train more than once on the same data?\n",
" shuffle=False, # gotcha - shuffling doesn't make sense here b/c of prescribed format\n",
" # validation_data=(Xs_valid, ys_valid),\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": 450,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/johnb/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:24: UserWarning: Update your `Model` call to the Keras 2 API: `Model(inputs=Tensor(\"in..., outputs=Tensor(\"de...)`\n"
]
}
],
"source": [
"num_training_steps = 50\n",
"model = clone_model_with_different_size(model, num_samples=num_samples, num_training_steps=num_training_steps)\n",
"pipeline_1 = make_pipeline_1(num_training_steps=num_training_steps)"
]
},
{
"cell_type": "code",
"execution_count": 456,
"metadata": {},
"outputs": [],
"source": [
"model.optimizer = 'cfgh'"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Evaluation attempts"
]
},
{
"cell_type": "code",
"execution_count": 458,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/johnb/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:24: UserWarning: Update your `Model` call to the Keras 2 API: `Model(inputs=Tensor(\"in..., outputs=Tensor(\"de...)`\n"
]
}
],
"source": [
"new_model = clone_model_with_different_size(model)"
]
},
{
"cell_type": "code",
"execution_count": 459,
"metadata": {},
"outputs": [],
"source": [
"def one_hot_encode(chars):\n",
" return one_hot_encoder.one_hot_encode(np.array(\n",
" [map_to_ints.translate(chars)]\n",
" ))\n",
"\n",
"def get_next_char(char):\n",
" assert isinstance(char, str)\n",
" assert len(char) == 1\n",
" one_hot_vect = one_hot_encode(char)\n",
" one_hot_encoder.one_hot_encode(np.array(\n",
" [map_to_ints.translate(char)]\n",
" ))\n",
" return map_to_ints.untranslate([np.argmax(new_model.predict(one_hot_vect))])"
]
},
{
"cell_type": "code",
"execution_count": 460,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"an an the the the the the the the the the the the the the the the the the the the the the the the th\n",
"be the the the the the the the the the the the the the the the the the the the the the the the the t\n",
"coure the the the the the the the the the the the the the the the the the the the the the the the th\n",
"d the the the the the the the the the the the the the the the the the the the the the the the the th\n",
"e the the the the the the the the the the the the the the the the the the the the the the the the th\n",
"f the the the the the the the the the the the the the the the the the the the the the the the the th\n",
"g the the the the the the the the the the the the the the the the the the the the the the the the th\n",
"h the the the the the the the the the the the the the the the the the the the the the the the the th\n",
"it an the the the the the the the the the the the the the the the the the the the the the the the th\n",
"jut the the the the the the the the the the the the the the the the the the the the the the the the \n",
"king the the the the the the the the the the the the the the the the the the the the the the the the\n",
"l the the the the the the the the the the the the the the the the the the the the the the the the th\n",
"me the the the the the the the the the the the the the the the the the the the the the the the the t\n",
"not the the the the the the the the the the the the the the the the the the the the the the the the \n",
"on the the the the the the the the the the the the the the the the the the the the the the the the t\n",
"ped the the the the the the the the the the the the the the the the the the the the the the the the \n",
"qued the the the the the the the the the the the the the the the the the the the the the the the the\n",
"r the the the the the the the the the the the the the the the the the the the the the the the the th\n",
"s the the the the the the the the the the the the the the the the the the the the the the the the th\n",
"t an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"us the the the the the the the the the the the the the the the the the the the the the the the the t\n",
"ve the the the the the the the the the the the the the the the the the the the the the the the the t\n",
"we the the the the the the the the the the the the the the the the the the the the the the the the t\n",
"xe the the the the the the the the the the the the the the the the the the the the the the the the t\n",
"y the the the the the the the the the the the the the the the the the the the the the the the the th\n",
"ze the the the the the the the the the the the the the the the the the the the the the the the the t\n",
" the the the the the the the the the the the the the the the the the the the the the the the the the\n",
"A the the the the the the the the the the the the the the the the the the the the the the the the th\n",
"Bre the the the the the the the the the the the the the the the the the the the the the the the the \n",
"Cous the the the the the the the the the the the the the the the the the the the the the the the the\n",
"D an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"E an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"F an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"H an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"G an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"I the the the the the the the the the the the the the the the the the the the the the the the the th\n",
"Je an the the the the the the the the the the the the the the the the the the the the the the the th\n",
"K an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"L an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"M an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"N an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"O an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"P an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"Q an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"R an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"S the the the the the the the the the the the the the the the the the the the the the the the the th\n",
"T an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"U an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"V an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"W an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"X an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"Y an the the the the the the the the the the the the the the the the the the the the the the the the\n",
"Z an the the the the the the the the the the the the the the the the the the the the the the the the\n"
]
}
],
"source": [
"new_model.reset_states()\n",
"for char in 'abcdefghijklmnopqrstuvwxyz ABCDEFHGIJKLMNOPQRSTUVWXYZ':\n",
" next_char = char\n",
" chars = []\n",
" for i in range(100):\n",
" chars.append(next_char)\n",
" next_char = get_next_char(next_char)\n",
" print(''.join(chars))"
]
},
{
"cell_type": "code",
"execution_count": 493,
"metadata": {},
"outputs": [
{
"ename": "AssertionError",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mAssertionError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-493-7bb9019affae>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m100\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0mchars\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnext_char\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 8\u001b[0;31m \u001b[0mnext_char\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mget_next_char\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnext_char\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 9\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m''\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjoin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mchars\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m<ipython-input-459-84e75eeb37d1>\u001b[0m in \u001b[0;36mget_next_char\u001b[0;34m(char)\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mget_next_char\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mchar\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0;32massert\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mchar\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 8\u001b[0;31m \u001b[0;32massert\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mchar\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 9\u001b[0m \u001b[0mone_hot_vect\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mone_hot_encode\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mchar\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 10\u001b[0m one_hot_encoder.one_hot_encode(np.array(\n",
"\u001b[0;31mAssertionError\u001b[0m: "
]
}
],
"source": [
"new_model.reset_states()\n",
"for char in one_hot_encode('hello')[0]:\n",
" new_model.predict(np.array([[char]]))\n",
"next_char = ''\n",
"chars = []\n",
"for i in range(100):\n",
" chars.append(next_char)\n",
" next_char = get_next_char(next_char)\n",
"print(''.join(chars))"
]
},
{
"cell_type": "code",
"execution_count": 491,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([ \"Explanation\\nWhy the edits made under my username Hardcore Metallica Fan were reverted? They weren't vandalisms, just closure on some GAs after I voted at New York Dolls FAC. And please don't remove the template from the talk page since I'm retired now.89.205.38.27\"], dtype=object)"
]
},
"execution_count": 491,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"train[:1]['comment_text'].values"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Scratch"
]
},
{
"cell_type": "code",
"execution_count": 117,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/johnb/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:31: UserWarning: Update your `Model` call to the Keras 2 API: `Model(inputs=Tensor(\"in..., outputs=Tensor(\"in...)`\n"
]
},
{
"ename": "ValueError",
"evalue": "Cannot feed value of shape (6, 7, 11) for Tensor 'inputs_63:0', which has shape '(5, 7, 11)'",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-117-f12b86af4240>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 34\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 35\u001b[0m \u001b[0mstart\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mround\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrandom\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrandom\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mnum_samples\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnum_training_steps\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnum_symbols\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0;36m10\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 36\u001b[0;31m \u001b[0mend\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpredict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstart\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 37\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msummary\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 38\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstart\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36mpredict\u001b[0;34m(self, x, batch_size, verbose, steps)\u001b[0m\n\u001b[1;32m 1788\u001b[0m \u001b[0mf\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpredict_function\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1789\u001b[0m return self._predict_loop(f, ins, batch_size=batch_size,\n\u001b[0;32m-> 1790\u001b[0;31m verbose=verbose, steps=steps)\n\u001b[0m\u001b[1;32m 1791\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1792\u001b[0m def train_on_batch(self, x, y,\n",
"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36m_predict_loop\u001b[0;34m(self, f, ins, batch_size, verbose, steps)\u001b[0m\n\u001b[1;32m 1297\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1298\u001b[0m \u001b[0mins_batch\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_slice_arrays\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mins\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_ids\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1299\u001b[0;31m \u001b[0mbatch_outs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mins_batch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1300\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch_outs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlist\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1301\u001b[0m \u001b[0mbatch_outs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mbatch_outs\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, inputs)\u001b[0m\n\u001b[1;32m 2355\u001b[0m \u001b[0msession\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mget_session\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2356\u001b[0m updated = session.run(fetches=fetches, feed_dict=feed_dict,\n\u001b[0;32m-> 2357\u001b[0;31m **self.session_kwargs)\n\u001b[0m\u001b[1;32m 2358\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mupdated\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moutputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2359\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 887\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 888\u001b[0m result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[0;32m--> 889\u001b[0;31m run_metadata_ptr)\n\u001b[0m\u001b[1;32m 890\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 891\u001b[0m \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_run\u001b[0;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 1094\u001b[0m \u001b[0;34m'Cannot feed value of shape %r for Tensor %r, '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1095\u001b[0m \u001b[0;34m'which has shape %r'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1096\u001b[0;31m % (np_val.shape, subfeed_t.name, str(subfeed_t.get_shape())))\n\u001b[0m\u001b[1;32m 1097\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgraph\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mis_feedable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msubfeed_t\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1098\u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Tensor %s may not be fed.'\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0msubfeed_t\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mValueError\u001b[0m: Cannot feed value of shape (6, 7, 11) for Tensor 'inputs_63:0', which has shape '(5, 7, 11)'"
]
}
],
"source": [
"num_samples = 5\n",
"num_training_steps = 7\n",
"num_symbols = 11\n",
"\n",
"lstm_state_size = 13\n",
"\n",
"X1_train\n",
"\n",
"# start = np.round(np.random.random([num_samples, num_training_steps, num_symbols])*10)\n",
"# end = model.predict(start)\n",
"# print(model.summary())\n",
"# print(start)\n",
"# print(end)"
]
},
{
"cell_type": "code",
"execution_count": 119,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(2323, 11, 101)"
]
},
"execution_count": 119,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"X1_train.shape"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment