@astrung
Created June 26, 2018 09:10
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "GAN_tutorial.ipynb",
"version": "0.3.2",
"views": {},
"default_view": {},
"provenance": [],
"collapsed_sections": []
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"accelerator": "GPU"
},
"cells": [
{
"metadata": {
"id": "uSHxgiHBdgtO",
"colab_type": "code",
"colab": {
"autoexec": {
"startup": false,
"wait_interval": 0
},
"base_uri": "https://localhost:8080/",
"height": 119
},
"outputId": "4a0a1365-3d11-4d32-a4c4-1396b1861794",
"executionInfo": {
"status": "ok",
"timestamp": 1527556517332,
"user_tz": -420,
"elapsed": 3492,
"user": {
"displayName": "Nguyen tuan anh",
"photoUrl": "//lh4.googleusercontent.com/-c4nAWdwavCY/AAAAAAAAAAI/AAAAAAAAAAo/jNbEVeV39zE/s50-c-k-no/photo.jpg",
"userId": "108611573643209987894"
}
}
},
"cell_type": "code",
"source": [
"!pip install keras"
],
"execution_count": 1,
"outputs": [
{
"output_type": "stream",
"text": [
"Requirement already satisfied: keras in /usr/local/lib/python3.6/dist-packages (2.1.6)\n",
"Requirement already satisfied: pyyaml in /usr/local/lib/python3.6/dist-packages (from keras) (3.12)\n",
"Requirement already satisfied: numpy>=1.9.1 in /usr/local/lib/python3.6/dist-packages (from keras) (1.14.3)\n",
"Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.6/dist-packages (from keras) (1.11.0)\n",
"Requirement already satisfied: scipy>=0.14 in /usr/local/lib/python3.6/dist-packages (from keras) (0.19.1)\n",
"Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from keras) (2.7.1)\n"
],
"name": "stdout"
}
]
},
{
"metadata": {
"id": "oNnwgEHDdoAR",
"colab_type": "code",
"colab": {
"autoexec": {
"startup": false,
"wait_interval": 0
},
"base_uri": "https://localhost:8080/",
"height": 34
},
"outputId": "7e73ae1c-b4b4-4a9b-fac4-3b415ec8009f",
"executionInfo": {
"status": "ok",
"timestamp": 1527556653110,
"user_tz": -420,
"elapsed": 14419,
"user": {
"displayName": "Nguyen tuan anh",
"photoUrl": "//lh4.googleusercontent.com/-c4nAWdwavCY/AAAAAAAAAAI/AAAAAAAAAAo/jNbEVeV39zE/s50-c-k-no/photo.jpg",
"userId": "108611573643209987894"
}
}
},
"cell_type": "code",
"source": [
"import numpy as np\n",
"import time\n",
"from tensorflow.examples.tutorials.mnist import input_data\n",
"\n",
"from keras.models import Sequential\n",
"from keras.layers import Dense, Activation, Flatten, Reshape\n",
"from keras.layers import Conv2D, Conv2DTranspose, UpSampling2D\n",
"from keras.layers import LeakyReLU, Dropout\n",
"from keras.layers import BatchNormalization\n",
"from keras.optimizers import Adam, RMSprop\n",
"\n",
"import matplotlib.pyplot as plt"
],
"execution_count": 1,
"outputs": [
{
"output_type": "stream",
"text": [
"Using TensorFlow backend.\n"
],
"name": "stderr"
}
]
},
{
"metadata": {
"id": "hA4-jwjyd_Yu",
"colab_type": "code",
"colab": {
"autoexec": {
"startup": false,
"wait_interval": 0
}
}
},
"cell_type": "code",
"source": [
"class ElapsedTimer(object):\n",
" def __init__(self):\n",
" self.start_time = time.time()\n",
" def elapsed(self,sec):\n",
" if sec < 60:\n",
" return str(sec) + \" sec\"\n",
" elif sec < (60 * 60):\n",
" return str(sec / 60) + \" min\"\n",
" else:\n",
" return str(sec / (60 * 60)) + \" hr\"\n",
" def elapsed_time(self):\n",
" print(\"Elapsed: %s \" % self.elapsed(time.time() - self.start_time) )"
],
"execution_count": 0,
"outputs": []
},
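{
"metadata": {},
"cell_type": "markdown",
"source": [
"A minimal usage sketch for `ElapsedTimer` (it assumes only the class defined above and the `time` import from the earlier cell): construct the timer before the work starts, then call `elapsed_time()` once the work finishes."
]
},
{
"metadata": {},
"cell_type": "code",
"source": [
"timer = ElapsedTimer()   # clock starts in __init__\n",
"time.sleep(1.5)          # stand-in for real work\n",
"timer.elapsed_time()     # prints roughly 'Elapsed: 1.5 sec'"
],
"execution_count": 0,
"outputs": []
},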
{
"metadata": {
"id": "ICPMTnkleMcH",
"colab_type": "code",
"colab": {
"autoexec": {
"startup": false,
"wait_interval": 0
}
}
},
"cell_type": "code",
"source": [
"class DCGAN(object):\n",
" def __init__(self, img_rows=28, img_cols=28, channel=1):\n",
"\n",
" self.img_rows = img_rows\n",
" self.img_cols = img_cols\n",
" self.channel = channel\n",
" self.D = None # discriminator\n",
" self.G = None # generator\n",
" self.AM = None # adversarial model\n",
" self.DM = None # discriminator model\n",
"\n",
" # (W−F+2P)/S+1\n",
" def discriminator(self):\n",
" if self.D:\n",
" return self.D\n",
" self.D = Sequential()\n",
" depth = 64\n",
" dropout = 0.4\n",
" # In: 28 x 28 x 1, depth = 1\n",
" # Out: 14 x 14 x 1, depth=64\n",
" input_shape = (self.img_rows, self.img_cols, self.channel)\n",
" self.D.add(Conv2D(depth*1, 5, strides=2, input_shape=input_shape,\\\n",
" padding='same'))\n",
" self.D.add(LeakyReLU(alpha=0.2))\n",
" self.D.add(Dropout(dropout))\n",
"\n",
" self.D.add(Conv2D(depth*2, 5, strides=2, padding='same'))\n",
" self.D.add(LeakyReLU(alpha=0.2))\n",
" self.D.add(Dropout(dropout))\n",
"\n",
" self.D.add(Conv2D(depth*4, 5, strides=2, padding='same'))\n",
" self.D.add(LeakyReLU(alpha=0.2))\n",
" self.D.add(Dropout(dropout))\n",
"\n",
" self.D.add(Conv2D(depth*8, 5, strides=1, padding='same'))\n",
" self.D.add(LeakyReLU(alpha=0.2))\n",
" self.D.add(Dropout(dropout))\n",
"\n",
" # Out: 1-dim probability\n",
" self.D.add(Flatten())\n",
" self.D.add(Dense(1))\n",
" self.D.add(Activation('sigmoid'))\n",
" self.D.summary()\n",
" return self.D\n",
"\n",
" def generator(self):\n",
" if self.G:\n",
" return self.G\n",
" self.G = Sequential()\n",
" dropout = 0.4\n",
" depth = 64+64+64+64\n",
" dim = 7\n",
" # In: 100\n",
" # Out: dim x dim x depth\n",
" self.G.add(Dense(dim*dim*depth, input_dim=100))\n",
" self.G.add(BatchNormalization(momentum=0.9))\n",
" self.G.add(Activation('relu'))\n",
" self.G.add(Reshape((dim, dim, depth)))\n",
" self.G.add(Dropout(dropout))\n",
"\n",
" # In: dim x dim x depth\n",
" # Out: 2*dim x 2*dim x depth/2\n",
" self.G.add(UpSampling2D())\n",
" self.G.add(Conv2DTranspose(int(depth/2), 5, padding='same'))\n",
" self.G.add(BatchNormalization(momentum=0.9))\n",
" self.G.add(Activation('relu'))\n",
"\n",
" self.G.add(UpSampling2D())\n",
" self.G.add(Conv2DTranspose(int(depth/4), 5, padding='same'))\n",
" self.G.add(BatchNormalization(momentum=0.9))\n",
" self.G.add(Activation('relu'))\n",
"\n",
" self.G.add(Conv2DTranspose(int(depth/8), 5, padding='same'))\n",
" self.G.add(BatchNormalization(momentum=0.9))\n",
" self.G.add(Activation('relu'))\n",
"\n",
" # Out: 28 x 28 x 1 grayscale image [0.0,1.0] per pix\n",
" self.G.add(Conv2DTranspose(1, 5, padding='same'))\n",
" self.G.add(Activation('sigmoid'))\n",
" self.G.summary()\n",
" return self.G\n",
"\n",
" def discriminator_model(self):\n",
" if self.DM:\n",
" return self.DM\n",
" optimizer = RMSprop(lr=0.0002, decay=6e-8)\n",
" self.DM = Sequential()\n",
" self.DM.add(self.discriminator())\n",
" self.DM.compile(loss='binary_crossentropy', optimizer=optimizer,\\\n",
" metrics=['accuracy'])\n",
" return self.DM\n",
"\n",
" def adversarial_model(self):\n",
" if self.AM:\n",
" return self.AM\n",
" optimizer = RMSprop(lr=0.0001, decay=3e-8)\n",
" self.AM = Sequential()\n",
" self.AM.add(self.generator())\n",
" self.AM.add(self.discriminator())\n",
" self.AM.compile(loss='binary_crossentropy', optimizer=optimizer,\\\n",
" metrics=['accuracy'])\n",
" return self.AM\n"
],
"execution_count": 0,
"outputs": []
},
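{
"metadata": {},
"cell_type": "markdown",
"source": [
"A quick sanity check on the shape comments in the discriminator (a minimal sketch, assuming Keras/TensorFlow semantics for padding='same', where the output spatial size is ceil(input / stride); the (W - F + 2P)/S + 1 formula quoted in the comment is the general convolution formula). The loop below reproduces 28 -> 14 -> 7 -> 4 -> 4 for the strides 2, 2, 2, 1 used above."
]
},
{
"metadata": {},
"cell_type": "code",
"source": [
"import math\n",
"\n",
"def same_conv_out(size, stride):\n",
"    # Keras padding='same': output spatial size = ceil(input / stride)\n",
"    return math.ceil(size / stride)\n",
"\n",
"size = 28\n",
"for stride in (2, 2, 2, 1):  # strides of the four discriminator Conv2D layers\n",
"    size = same_conv_out(size, stride)\n",
"    print(size)              # 14, 7, 4, 4"
],
"execution_count": 0,
"outputs": []
},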
{
"metadata": {
"id": "ENGWae3neXM2",
"colab_type": "code",
"colab": {
"autoexec": {
"startup": false,
"wait_interval": 0
}
}
},
"cell_type": "code",
"source": [
"class MNIST_DCGAN(object):\n",
" def __init__(self):\n",
" self.img_rows = 28\n",
" self.img_cols = 28\n",
" self.channel = 1\n",
"\n",
" self.x_train = input_data.read_data_sets(\"mnist\",\\\n",
" \tone_hot=True).train.images\n",
" self.x_train = self.x_train.reshape(-1, self.img_rows,\\\n",
" \tself.img_cols, 1).astype(np.float32)\n",
"\n",
" self.DCGAN = DCGAN()\n",
" self.discriminator = self.DCGAN.discriminator_model()\n",
" self.adversarial = self.DCGAN.adversarial_model()\n",
" self.generator = self.DCGAN.generator()\n",
"\n",
" def train(self, train_steps=2000, batch_size=256, save_interval=0):\n",
" noise_input = None\n",
" if save_interval>0:\n",
" noise_input = np.random.uniform(-1.0, 1.0, size=[16, 100])\n",
" for i in range(train_steps):\n",
" images_train = self.x_train[np.random.randint(0,\n",
" self.x_train.shape[0], size=batch_size), :, :, :]\n",
" noise = np.random.uniform(-1.0, 1.0, size=[batch_size, 100])\n",
" images_fake = self.generator.predict(noise)\n",
" x = np.concatenate((images_train, images_fake))\n",
" y = np.ones([2*batch_size, 1])\n",
" y[batch_size:, :] = 0\n",
" d_loss = self.discriminator.train_on_batch(x, y)\n",
"\n",
" y = np.ones([batch_size, 1])\n",
" noise = np.random.uniform(-1.0, 1.0, size=[batch_size, 100])\n",
" a_loss = self.adversarial.train_on_batch(noise, y)\n",
" log_mesg = \"%d: [D loss: %f, acc: %f]\" % (i, d_loss[0], d_loss[1])\n",
" log_mesg = \"%s [A loss: %f, acc: %f]\" % (log_mesg, a_loss[0], a_loss[1])\n",
" print(log_mesg)\n",
" if save_interval>0:\n",
" if (i+1)%save_interval==0:\n",
" self.plot_images(save2file=True, samples=noise_input.shape[0],\\\n",
" noise=noise_input, step=(i+1))\n",
"\n",
" def plot_images(self, save2file=False, fake=True, samples=16, noise=None, step=0):\n",
" filename = 'mnist.png'\n",
" if fake:\n",
" if noise is None:\n",
" noise = np.random.uniform(-1.0, 1.0, size=[samples, 100])\n",
" else:\n",
" filename = \"mnist_%d.png\" % step\n",
" images = self.generator.predict(noise)\n",
" else:\n",
" i = np.random.randint(0, self.x_train.shape[0], samples)\n",
" images = self.x_train[i, :, :, :]\n",
"\n",
" plt.figure(figsize=(10,10))\n",
" for i in range(images.shape[0]):\n",
" plt.subplot(4, 4, i+1)\n",
" image = images[i, :, :, :]\n",
" image = np.reshape(image, [self.img_rows, self.img_cols])\n",
" plt.imshow(image, cmap='gray')\n",
" plt.axis('off')\n",
" plt.tight_layout()\n",
" if save2file:\n",
" plt.savefig(filename)\n",
" plt.close('all')\n",
" else:\n",
" plt.show()"
],
"execution_count": 0,
"outputs": []
},
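{
"metadata": {},
"cell_type": "markdown",
"source": [
"Before the long 10,000-step training run below, a minimal smoke test (a sketch using only the classes defined above) can confirm that the MNIST download, the discriminator, and the adversarial model are wired correctly: build `MNIST_DCGAN`, run a single training step on a small batch, and plot one grid of (still untrained) generator samples."
]
},
{
"metadata": {},
"cell_type": "code",
"source": [
"smoke = MNIST_DCGAN()                     # downloads MNIST and builds D, G, and the adversarial model\n",
"smoke.train(train_steps=1, batch_size=32, save_interval=0)\n",
"smoke.plot_images(fake=True, samples=16)  # noise-only output from the untrained generator"
],
"execution_count": 0,
"outputs": []
},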
{
"metadata": {
"id": "3_RlW2Wkerzl",
"colab_type": "code",
"colab": {
"autoexec": {
"startup": false,
"wait_interval": 0
},
"base_uri": "https://localhost:8080/",
"height": 172632
},
"outputId": "e789ea8b-add9-4de5-d318-f388c3e1cd6d",
"executionInfo": {
"status": "ok",
"timestamp": 1527562338113,
"user_tz": -420,
"elapsed": 5497375,
"user": {
"displayName": "Nguyen tuan anh",
"photoUrl": "//lh4.googleusercontent.com/-c4nAWdwavCY/AAAAAAAAAAI/AAAAAAAAAAo/jNbEVeV39zE/s50-c-k-no/photo.jpg",
"userId": "108611573643209987894"
}
}
},
"cell_type": "code",
"source": [
"mnist_dcgan = MNIST_DCGAN()\n",
"timer = ElapsedTimer()\n",
"mnist_dcgan.train(train_steps=10000, batch_size=256, save_interval=500)\n",
"timer.elapsed_time()\n",
"mnist_dcgan.plot_images(fake=True)\n",
"mnist_dcgan.plot_images(fake=False, save2file=True)"
],
"execution_count": 5,
"outputs": [
{
"output_type": "stream",
"text": [
"WARNING:tensorflow:From <ipython-input-4-d9428bd41f87>:7: read_data_sets (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n",
"WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:260: maybe_download (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Please write your own downloading logic.\n",
"WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/contrib/learn/python/learn/datasets/base.py:252: _internal_retry.<locals>.wrap.<locals>.wrapped_fn (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Please use urllib or similar directly.\n",
"Successfully downloaded train-images-idx3-ubyte.gz 9912422 bytes.\n",
"WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:262: extract_images (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Please use tf.data to implement this functionality.\n",
"Extracting mnist/train-images-idx3-ubyte.gz\n",
"Successfully downloaded train-labels-idx1-ubyte.gz 28881 bytes.\n",
"WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:267: extract_labels (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Please use tf.data to implement this functionality.\n",
"Extracting mnist/train-labels-idx1-ubyte.gz\n",
"WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:110: dense_to_one_hot (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Please use tf.one_hot on tensors.\n",
"Successfully downloaded t10k-images-idx3-ubyte.gz 1648877 bytes.\n",
"Extracting mnist/t10k-images-idx3-ubyte.gz\n",
"Successfully downloaded t10k-labels-idx1-ubyte.gz 4542 bytes.\n",
"Extracting mnist/t10k-labels-idx1-ubyte.gz\n",
"WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:290: DataSet.__init__ (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n",
"_________________________________________________________________\n",
"Layer (type) Output Shape Param # \n",
"=================================================================\n",
"conv2d_1 (Conv2D) (None, 14, 14, 64) 1664 \n",
"_________________________________________________________________\n",
"leaky_re_lu_1 (LeakyReLU) (None, 14, 14, 64) 0 \n",
"_________________________________________________________________\n",
"dropout_1 (Dropout) (None, 14, 14, 64) 0 \n",
"_________________________________________________________________\n",
"conv2d_2 (Conv2D) (None, 7, 7, 128) 204928 \n",
"_________________________________________________________________\n",
"leaky_re_lu_2 (LeakyReLU) (None, 7, 7, 128) 0 \n",
"_________________________________________________________________\n",
"dropout_2 (Dropout) (None, 7, 7, 128) 0 \n",
"_________________________________________________________________\n",
"conv2d_3 (Conv2D) (None, 4, 4, 256) 819456 \n",
"_________________________________________________________________\n",
"leaky_re_lu_3 (LeakyReLU) (None, 4, 4, 256) 0 \n",
"_________________________________________________________________\n",
"dropout_3 (Dropout) (None, 4, 4, 256) 0 \n",
"_________________________________________________________________\n",
"conv2d_4 (Conv2D) (None, 4, 4, 512) 3277312 \n",
"_________________________________________________________________\n",
"leaky_re_lu_4 (LeakyReLU) (None, 4, 4, 512) 0 \n",
"_________________________________________________________________\n",
"dropout_4 (Dropout) (None, 4, 4, 512) 0 \n",
"_________________________________________________________________\n",
"flatten_1 (Flatten) (None, 8192) 0 \n",
"_________________________________________________________________\n",
"dense_1 (Dense) (None, 1) 8193 \n",
"_________________________________________________________________\n",
"activation_1 (Activation) (None, 1) 0 \n",
"=================================================================\n",
"Total params: 4,311,553\n",
"Trainable params: 4,311,553\n",
"Non-trainable params: 0\n",
"_________________________________________________________________\n",
"_________________________________________________________________\n",
"Layer (type) Output Shape Param # \n",
"=================================================================\n",
"dense_2 (Dense) (None, 12544) 1266944 \n",
"_________________________________________________________________\n",
"batch_normalization_1 (Batch (None, 12544) 50176 \n",
"_________________________________________________________________\n",
"activation_2 (Activation) (None, 12544) 0 \n",
"_________________________________________________________________\n",
"reshape_1 (Reshape) (None, 7, 7, 256) 0 \n",
"_________________________________________________________________\n",
"dropout_5 (Dropout) (None, 7, 7, 256) 0 \n",
"_________________________________________________________________\n",
"up_sampling2d_1 (UpSampling2 (None, 14, 14, 256) 0 \n",
"_________________________________________________________________\n",
"conv2d_transpose_1 (Conv2DTr (None, 14, 14, 128) 819328 \n",
"_________________________________________________________________\n",
"batch_normalization_2 (Batch (None, 14, 14, 128) 512 \n",
"_________________________________________________________________\n",
"activation_3 (Activation) (None, 14, 14, 128) 0 \n",
"_________________________________________________________________\n",
"up_sampling2d_2 (UpSampling2 (None, 28, 28, 128) 0 \n",
"_________________________________________________________________\n",
"conv2d_transpose_2 (Conv2DTr (None, 28, 28, 64) 204864 \n",
"_________________________________________________________________\n",
"batch_normalization_3 (Batch (None, 28, 28, 64) 256 \n",
"_________________________________________________________________\n",
"activation_4 (Activation) (None, 28, 28, 64) 0 \n",
"_________________________________________________________________\n",
"conv2d_transpose_3 (Conv2DTr (None, 28, 28, 32) 51232 \n",
"_________________________________________________________________\n",
"batch_normalization_4 (Batch (None, 28, 28, 32) 128 \n",
"_________________________________________________________________\n",
"activation_5 (Activation) (None, 28, 28, 32) 0 \n",
"_________________________________________________________________\n",
"conv2d_transpose_4 (Conv2DTr (None, 28, 28, 1) 801 \n",
"_________________________________________________________________\n",
"activation_6 (Activation) (None, 28, 28, 1) 0 \n",
"=================================================================\n",
"Total params: 2,394,241\n",
"Trainable params: 2,368,705\n",
"Non-trainable params: 25,536\n",
"_________________________________________________________________\n",
"WARNING:tensorflow:Variable *= will be deprecated. Use variable.assign_mul if you want assignment to the variable value or 'x = x * y' if you want a new python Tensor object.\n",
"0: [D loss: 0.691644, acc: 0.529297] [A loss: 1.347496, acc: 0.000000]\n",
"1: [D loss: 0.652509, acc: 0.724609] [A loss: 1.477316, acc: 0.000000]\n",
"2: [D loss: 0.589234, acc: 0.962891] [A loss: 1.234557, acc: 0.000000]\n",
"3: [D loss: 0.830015, acc: 0.500000] [A loss: 2.025546, acc: 0.000000]\n",
"4: [D loss: 0.490596, acc: 0.882812] [A loss: 1.493428, acc: 0.000000]\n",
"5: [D loss: 0.443281, acc: 0.982422] [A loss: 1.505041, acc: 0.000000]\n",
"6: [D loss: 0.404189, acc: 0.986328] [A loss: 1.928179, acc: 0.000000]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7: [D loss: 0.421127, acc: 0.929688] [A loss: 2.347838, acc: 0.000000]\n",
"8: [D loss: 0.383633, acc: 0.996094] [A loss: 2.089724, acc: 0.000000]\n",
"9: [D loss: 0.391618, acc: 0.878906] [A loss: 2.977959, acc: 0.000000]\n",
"10: [D loss: 0.372992, acc: 0.990234] [A loss: 1.565651, acc: 0.000000]\n",
"11: [D loss: 0.507475, acc: 0.527344] [A loss: 3.608387, acc: 0.000000]\n",
"12: [D loss: 0.381290, acc: 0.894531] [A loss: 1.754790, acc: 0.000000]\n",
"13: [D loss: 0.299376, acc: 0.996094] [A loss: 2.071305, acc: 0.000000]\n",
"14: [D loss: 0.255700, acc: 0.998047] [A loss: 2.479736, acc: 0.000000]\n",
"15: [D loss: 0.241908, acc: 1.000000] [A loss: 2.793214, acc: 0.000000]\n",
"16: [D loss: 0.215357, acc: 1.000000] [A loss: 2.704420, acc: 0.000000]\n",
"17: [D loss: 0.229456, acc: 0.990234] [A loss: 3.145730, acc: 0.000000]\n",
"18: [D loss: 0.184648, acc: 1.000000] [A loss: 2.634926, acc: 0.000000]\n",
"19: [D loss: 0.205355, acc: 0.994141] [A loss: 3.650963, acc: 0.000000]\n",
"20: [D loss: 0.173983, acc: 0.998047] [A loss: 2.366300, acc: 0.000000]\n",
"21: [D loss: 0.261959, acc: 0.939453] [A loss: 4.333182, acc: 0.000000]\n",
"22: [D loss: 0.237384, acc: 0.955078] [A loss: 1.867406, acc: 0.000000]\n",
"23: [D loss: 0.309788, acc: 0.869141] [A loss: 3.620619, acc: 0.000000]\n",
"24: [D loss: 0.119381, acc: 0.998047] [A loss: 2.425673, acc: 0.000000]\n",
"25: [D loss: 0.128951, acc: 1.000000] [A loss: 2.639486, acc: 0.000000]\n",
"26: [D loss: 0.112522, acc: 1.000000] [A loss: 2.766650, acc: 0.000000]\n",
"27: [D loss: 0.127826, acc: 1.000000] [A loss: 3.078283, acc: 0.000000]\n",
"28: [D loss: 0.089408, acc: 1.000000] [A loss: 2.842899, acc: 0.000000]\n",
"29: [D loss: 0.111269, acc: 0.998047] [A loss: 3.199675, acc: 0.000000]\n",
"30: [D loss: 0.085385, acc: 1.000000] [A loss: 2.995476, acc: 0.000000]\n",
"31: [D loss: 0.086537, acc: 1.000000] [A loss: 3.208034, acc: 0.000000]\n",
"32: [D loss: 0.079885, acc: 1.000000] [A loss: 3.176480, acc: 0.000000]\n",
"33: [D loss: 0.060579, acc: 0.998047] [A loss: 2.657187, acc: 0.000000]\n",
"34: [D loss: 0.114165, acc: 0.996094] [A loss: 4.292346, acc: 0.000000]\n",
"35: [D loss: 0.134462, acc: 0.958984] [A loss: 1.329808, acc: 0.140625]\n",
"36: [D loss: 0.419844, acc: 0.714844] [A loss: 5.407030, acc: 0.000000]\n",
"37: [D loss: 0.445795, acc: 0.794922] [A loss: 1.569721, acc: 0.042969]\n",
"38: [D loss: 0.177015, acc: 0.951172] [A loss: 1.996840, acc: 0.003906]\n",
"39: [D loss: 0.096916, acc: 0.998047] [A loss: 1.862458, acc: 0.003906]\n",
"40: [D loss: 0.143311, acc: 0.978516] [A loss: 2.315307, acc: 0.000000]\n",
"41: [D loss: 0.110536, acc: 0.988281] [A loss: 2.353392, acc: 0.000000]\n",
"42: [D loss: 0.091769, acc: 0.998047] [A loss: 2.217509, acc: 0.000000]\n",
"43: [D loss: 0.136180, acc: 0.972656] [A loss: 2.757832, acc: 0.000000]\n",
"44: [D loss: 0.093546, acc: 0.996094] [A loss: 2.422946, acc: 0.000000]\n",
"45: [D loss: 0.122925, acc: 0.974609] [A loss: 2.629411, acc: 0.000000]\n",
"46: [D loss: 0.133398, acc: 0.978516] [A loss: 2.985326, acc: 0.000000]\n",
"47: [D loss: 0.092278, acc: 0.988281] [A loss: 2.192855, acc: 0.003906]\n",
"48: [D loss: 0.107845, acc: 0.988281] [A loss: 2.820819, acc: 0.000000]\n",
"49: [D loss: 0.121715, acc: 0.984375] [A loss: 2.697326, acc: 0.000000]\n",
"50: [D loss: 0.083218, acc: 0.994141] [A loss: 2.341876, acc: 0.000000]\n",
"51: [D loss: 0.169610, acc: 0.960938] [A loss: 3.745440, acc: 0.000000]\n",
"52: [D loss: 0.162608, acc: 0.951172] [A loss: 1.120662, acc: 0.210938]\n",
"53: [D loss: 0.345508, acc: 0.794922] [A loss: 4.742723, acc: 0.000000]\n",
"54: [D loss: 0.429167, acc: 0.843750] [A loss: 0.788267, acc: 0.445312]\n",
"55: [D loss: 0.551399, acc: 0.613281] [A loss: 3.081485, acc: 0.000000]\n",
"56: [D loss: 0.125863, acc: 0.960938] [A loss: 1.622792, acc: 0.011719]\n",
"57: [D loss: 0.173260, acc: 0.968750] [A loss: 1.958526, acc: 0.000000]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"58: [D loss: 0.154009, acc: 0.978516] [A loss: 2.171678, acc: 0.000000]\n",
"59: [D loss: 0.168877, acc: 0.970703] [A loss: 2.270156, acc: 0.000000]\n",
"60: [D loss: 0.189225, acc: 0.964844] [A loss: 2.397440, acc: 0.000000]\n",
"61: [D loss: 0.174293, acc: 0.972656] [A loss: 2.407508, acc: 0.000000]\n",
"62: [D loss: 0.214016, acc: 0.945312] [A loss: 2.636619, acc: 0.000000]\n",
"63: [D loss: 0.198503, acc: 0.962891] [A loss: 2.582351, acc: 0.000000]\n",
"64: [D loss: 0.208262, acc: 0.968750] [A loss: 1.943428, acc: 0.000000]\n",
"65: [D loss: 0.299420, acc: 0.875000] [A loss: 3.113141, acc: 0.000000]\n",
"66: [D loss: 0.239719, acc: 0.941406] [A loss: 1.102054, acc: 0.125000]\n",
"67: [D loss: 0.516733, acc: 0.628906] [A loss: 3.784094, acc: 0.000000]\n",
"68: [D loss: 0.488228, acc: 0.800781] [A loss: 0.562603, acc: 0.738281]\n",
"69: [D loss: 0.832086, acc: 0.507812] [A loss: 2.329763, acc: 0.000000]\n",
"70: [D loss: 0.244620, acc: 0.955078] [A loss: 1.306105, acc: 0.015625]\n",
"71: [D loss: 0.354411, acc: 0.777344] [A loss: 2.065034, acc: 0.000000]\n",
"72: [D loss: 0.238488, acc: 0.962891] [A loss: 1.822536, acc: 0.000000]\n",
"73: [D loss: 0.296516, acc: 0.904297] [A loss: 2.041075, acc: 0.000000]\n",
"74: [D loss: 0.284266, acc: 0.917969] [A loss: 1.942916, acc: 0.000000]\n",
"75: [D loss: 0.305634, acc: 0.876953] [A loss: 2.331151, acc: 0.000000]\n",
"76: [D loss: 0.309457, acc: 0.927734] [A loss: 1.656565, acc: 0.000000]\n",
"77: [D loss: 0.391722, acc: 0.757812] [A loss: 2.986350, acc: 0.000000]\n",
"78: [D loss: 0.388004, acc: 0.878906] [A loss: 0.690172, acc: 0.566406]\n",
"79: [D loss: 0.855579, acc: 0.505859] [A loss: 3.038876, acc: 0.000000]\n",
"80: [D loss: 0.385582, acc: 0.884766] [A loss: 0.854299, acc: 0.265625]\n",
"81: [D loss: 0.632359, acc: 0.521484] [A loss: 2.153398, acc: 0.000000]\n",
"82: [D loss: 0.291553, acc: 0.958984] [A loss: 1.332101, acc: 0.003906]\n",
"83: [D loss: 0.442876, acc: 0.685547] [A loss: 2.262007, acc: 0.000000]\n",
"84: [D loss: 0.315630, acc: 0.955078] [A loss: 1.249917, acc: 0.031250]\n",
"85: [D loss: 0.487746, acc: 0.623047] [A loss: 2.455144, acc: 0.000000]\n",
"86: [D loss: 0.341183, acc: 0.921875] [A loss: 1.003130, acc: 0.156250]\n",
"87: [D loss: 0.588096, acc: 0.535156] [A loss: 2.609431, acc: 0.000000]\n",
"88: [D loss: 0.368904, acc: 0.890625] [A loss: 0.814004, acc: 0.328125]\n",
"89: [D loss: 0.635157, acc: 0.507812] [A loss: 2.325660, acc: 0.000000]\n",
"90: [D loss: 0.316529, acc: 0.957031] [A loss: 1.186976, acc: 0.035156]\n",
"91: [D loss: 0.476299, acc: 0.607422] [A loss: 2.291164, acc: 0.000000]\n",
"92: [D loss: 0.306405, acc: 0.966797] [A loss: 1.295896, acc: 0.003906]\n",
"93: [D loss: 0.470683, acc: 0.630859] [A loss: 2.427984, acc: 0.000000]\n",
"94: [D loss: 0.349368, acc: 0.941406] [A loss: 1.018472, acc: 0.105469]\n",
"95: [D loss: 0.573933, acc: 0.531250] [A loss: 2.517912, acc: 0.000000]\n",
"96: [D loss: 0.355646, acc: 0.935547] [A loss: 0.818122, acc: 0.312500]\n",
"97: [D loss: 0.657387, acc: 0.515625] [A loss: 2.510430, acc: 0.000000]\n",
"98: [D loss: 0.377215, acc: 0.896484] [A loss: 0.785327, acc: 0.347656]\n",
"99: [D loss: 0.676647, acc: 0.503906] [A loss: 2.238503, acc: 0.000000]\n",
"100: [D loss: 0.367635, acc: 0.931641] [A loss: 0.904972, acc: 0.210938]\n",
"101: [D loss: 0.586969, acc: 0.513672] [A loss: 2.126004, acc: 0.000000]\n",
"102: [D loss: 0.358991, acc: 0.947266] [A loss: 1.016569, acc: 0.117188]\n",
"103: [D loss: 0.572452, acc: 0.548828] [A loss: 2.247306, acc: 0.000000]\n",
"104: [D loss: 0.378328, acc: 0.927734] [A loss: 0.828332, acc: 0.281250]\n",
"105: [D loss: 0.640850, acc: 0.505859] [A loss: 2.514361, acc: 0.000000]\n",
"106: [D loss: 0.405771, acc: 0.894531] [A loss: 0.662171, acc: 0.585938]\n",
"107: [D loss: 0.723950, acc: 0.505859] [A loss: 2.385708, acc: 0.000000]\n",
"108: [D loss: 0.377319, acc: 0.929688] [A loss: 0.838835, acc: 0.273438]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"109: [D loss: 0.660966, acc: 0.513672] [A loss: 2.328837, acc: 0.000000]\n",
"110: [D loss: 0.371522, acc: 0.943359] [A loss: 0.993103, acc: 0.136719]\n",
"111: [D loss: 0.597181, acc: 0.535156] [A loss: 2.436018, acc: 0.000000]\n",
"112: [D loss: 0.383425, acc: 0.937500] [A loss: 0.907463, acc: 0.195312]\n",
"113: [D loss: 0.639105, acc: 0.509766] [A loss: 2.507535, acc: 0.000000]\n",
"114: [D loss: 0.384161, acc: 0.933594] [A loss: 0.817464, acc: 0.308594]\n",
"115: [D loss: 0.689984, acc: 0.501953] [A loss: 2.482255, acc: 0.000000]\n",
"116: [D loss: 0.433637, acc: 0.894531] [A loss: 0.839979, acc: 0.289062]\n",
"117: [D loss: 0.675669, acc: 0.513672] [A loss: 2.358858, acc: 0.000000]\n",
"118: [D loss: 0.424535, acc: 0.921875] [A loss: 0.890027, acc: 0.207031]\n",
"119: [D loss: 0.684240, acc: 0.515625] [A loss: 2.368053, acc: 0.000000]\n",
"120: [D loss: 0.420430, acc: 0.910156] [A loss: 0.870024, acc: 0.230469]\n",
"121: [D loss: 0.665709, acc: 0.507812] [A loss: 2.358837, acc: 0.000000]\n",
"122: [D loss: 0.421665, acc: 0.900391] [A loss: 0.921631, acc: 0.183594]\n",
"123: [D loss: 0.658460, acc: 0.511719] [A loss: 2.438681, acc: 0.000000]\n",
"124: [D loss: 0.425752, acc: 0.902344] [A loss: 0.885938, acc: 0.230469]\n",
"125: [D loss: 0.685751, acc: 0.509766] [A loss: 2.439930, acc: 0.000000]\n",
"126: [D loss: 0.430510, acc: 0.875000] [A loss: 0.838312, acc: 0.304688]\n",
"127: [D loss: 0.712067, acc: 0.507812] [A loss: 2.438775, acc: 0.000000]\n",
"128: [D loss: 0.440521, acc: 0.869141] [A loss: 0.816889, acc: 0.289062]\n",
"129: [D loss: 0.736954, acc: 0.507812] [A loss: 2.322883, acc: 0.000000]\n",
"130: [D loss: 0.442705, acc: 0.878906] [A loss: 0.848908, acc: 0.300781]\n",
"131: [D loss: 0.697313, acc: 0.509766] [A loss: 2.342509, acc: 0.000000]\n",
"132: [D loss: 0.459730, acc: 0.880859] [A loss: 0.852374, acc: 0.265625]\n",
"133: [D loss: 0.704543, acc: 0.507812] [A loss: 2.278614, acc: 0.000000]\n",
"134: [D loss: 0.455073, acc: 0.876953] [A loss: 0.925942, acc: 0.148438]\n",
"135: [D loss: 0.678944, acc: 0.509766] [A loss: 2.365038, acc: 0.000000]\n",
"136: [D loss: 0.469464, acc: 0.861328] [A loss: 0.768309, acc: 0.355469]\n",
"137: [D loss: 0.746683, acc: 0.509766] [A loss: 2.311462, acc: 0.000000]\n",
"138: [D loss: 0.470054, acc: 0.871094] [A loss: 0.828758, acc: 0.320312]\n",
"139: [D loss: 0.719688, acc: 0.503906] [A loss: 2.261018, acc: 0.000000]\n",
"140: [D loss: 0.457809, acc: 0.884766] [A loss: 0.925390, acc: 0.191406]\n",
"141: [D loss: 0.668190, acc: 0.517578] [A loss: 2.361824, acc: 0.000000]\n",
"142: [D loss: 0.456010, acc: 0.882812] [A loss: 0.946806, acc: 0.171875]\n",
"143: [D loss: 0.689138, acc: 0.511719] [A loss: 2.437965, acc: 0.000000]\n",
"144: [D loss: 0.490431, acc: 0.841797] [A loss: 0.711633, acc: 0.507812]\n",
"145: [D loss: 0.796987, acc: 0.505859] [A loss: 2.441046, acc: 0.000000]\n",
"146: [D loss: 0.504592, acc: 0.791016] [A loss: 0.665018, acc: 0.585938]\n",
"147: [D loss: 0.757757, acc: 0.507812] [A loss: 2.133158, acc: 0.000000]\n",
"148: [D loss: 0.474902, acc: 0.875000] [A loss: 0.906082, acc: 0.218750]\n",
"149: [D loss: 0.672962, acc: 0.525391] [A loss: 2.190451, acc: 0.000000]\n",
"150: [D loss: 0.474065, acc: 0.882812] [A loss: 0.870097, acc: 0.257812]\n",
"151: [D loss: 0.712114, acc: 0.529297] [A loss: 2.413450, acc: 0.000000]\n",
"152: [D loss: 0.510107, acc: 0.814453] [A loss: 0.685707, acc: 0.554688]\n",
"153: [D loss: 0.788067, acc: 0.509766] [A loss: 2.334450, acc: 0.000000]\n",
"154: [D loss: 0.520406, acc: 0.808594] [A loss: 0.660466, acc: 0.562500]\n",
"155: [D loss: 0.756976, acc: 0.507812] [A loss: 2.048501, acc: 0.000000]\n",
"156: [D loss: 0.500841, acc: 0.859375] [A loss: 0.837441, acc: 0.304688]\n",
"157: [D loss: 0.714258, acc: 0.517578] [A loss: 2.184768, acc: 0.000000]\n",
"158: [D loss: 0.490623, acc: 0.867188] [A loss: 0.861122, acc: 0.273438]\n",
"159: [D loss: 0.685675, acc: 0.517578] [A loss: 2.337075, acc: 0.000000]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"160: [D loss: 0.493411, acc: 0.841797] [A loss: 0.737147, acc: 0.472656]\n",
"161: [D loss: 0.731282, acc: 0.505859] [A loss: 2.307868, acc: 0.000000]\n",
"162: [D loss: 0.528845, acc: 0.800781] [A loss: 0.661917, acc: 0.609375]\n",
"163: [D loss: 0.786306, acc: 0.505859] [A loss: 2.202077, acc: 0.000000]\n",
"164: [D loss: 0.512799, acc: 0.832031] [A loss: 0.804234, acc: 0.351562]\n",
"165: [D loss: 0.704014, acc: 0.513672] [A loss: 2.250959, acc: 0.000000]\n",
"166: [D loss: 0.499917, acc: 0.853516] [A loss: 0.803906, acc: 0.371094]\n",
"167: [D loss: 0.725282, acc: 0.515625] [A loss: 2.243581, acc: 0.000000]\n",
"168: [D loss: 0.526216, acc: 0.816406] [A loss: 0.760659, acc: 0.382812]\n",
"169: [D loss: 0.758937, acc: 0.503906] [A loss: 2.368959, acc: 0.000000]\n",
"170: [D loss: 0.531218, acc: 0.785156] [A loss: 0.674493, acc: 0.558594]\n",
"171: [D loss: 0.794833, acc: 0.501953] [A loss: 2.146349, acc: 0.000000]\n",
"172: [D loss: 0.539275, acc: 0.791016] [A loss: 0.760493, acc: 0.406250]\n",
"173: [D loss: 0.716302, acc: 0.509766] [A loss: 2.026356, acc: 0.000000]\n",
"174: [D loss: 0.514974, acc: 0.833984] [A loss: 0.831387, acc: 0.261719]\n",
"175: [D loss: 0.705322, acc: 0.509766] [A loss: 2.188520, acc: 0.000000]\n",
"176: [D loss: 0.533348, acc: 0.783203] [A loss: 0.717783, acc: 0.453125]\n",
"177: [D loss: 0.721573, acc: 0.503906] [A loss: 1.963706, acc: 0.000000]\n",
"178: [D loss: 0.522717, acc: 0.816406] [A loss: 0.829108, acc: 0.285156]\n",
"179: [D loss: 0.707362, acc: 0.507812] [A loss: 2.025793, acc: 0.000000]\n",
"180: [D loss: 0.517124, acc: 0.810547] [A loss: 0.783908, acc: 0.398438]\n",
"181: [D loss: 0.704182, acc: 0.517578] [A loss: 1.993935, acc: 0.000000]\n",
"182: [D loss: 0.514468, acc: 0.857422] [A loss: 0.770129, acc: 0.371094]\n",
"183: [D loss: 0.704439, acc: 0.505859] [A loss: 2.042246, acc: 0.000000]\n",
"184: [D loss: 0.519207, acc: 0.824219] [A loss: 0.756653, acc: 0.406250]\n",
"185: [D loss: 0.688607, acc: 0.521484] [A loss: 1.996774, acc: 0.000000]\n",
"186: [D loss: 0.528409, acc: 0.816406] [A loss: 0.787499, acc: 0.371094]\n",
"187: [D loss: 0.698768, acc: 0.515625] [A loss: 2.027165, acc: 0.000000]\n",
"188: [D loss: 0.514041, acc: 0.839844] [A loss: 0.757721, acc: 0.363281]\n",
"189: [D loss: 0.720238, acc: 0.507812] [A loss: 2.063649, acc: 0.000000]\n",
"190: [D loss: 0.522549, acc: 0.781250] [A loss: 0.711133, acc: 0.500000]\n",
"191: [D loss: 0.724538, acc: 0.507812] [A loss: 2.052145, acc: 0.000000]\n",
"192: [D loss: 0.544411, acc: 0.771484] [A loss: 0.697068, acc: 0.519531]\n",
"193: [D loss: 0.710586, acc: 0.513672] [A loss: 1.905840, acc: 0.000000]\n",
"194: [D loss: 0.535536, acc: 0.814453] [A loss: 0.804167, acc: 0.375000]\n",
"195: [D loss: 0.678575, acc: 0.507812] [A loss: 1.948555, acc: 0.000000]\n",
"196: [D loss: 0.518723, acc: 0.814453] [A loss: 0.795347, acc: 0.339844]\n",
"197: [D loss: 0.680673, acc: 0.513672] [A loss: 1.941670, acc: 0.000000]\n",
"198: [D loss: 0.525013, acc: 0.830078] [A loss: 0.722755, acc: 0.484375]\n",
"199: [D loss: 0.696472, acc: 0.525391] [A loss: 1.895743, acc: 0.000000]\n",
"200: [D loss: 0.524487, acc: 0.832031] [A loss: 0.824669, acc: 0.332031]\n",
"201: [D loss: 0.677548, acc: 0.517578] [A loss: 2.026530, acc: 0.000000]\n",
"202: [D loss: 0.533127, acc: 0.759766] [A loss: 0.687635, acc: 0.511719]\n",
"203: [D loss: 0.727868, acc: 0.521484] [A loss: 2.006216, acc: 0.000000]\n",
"204: [D loss: 0.534872, acc: 0.791016] [A loss: 0.682795, acc: 0.585938]\n",
"205: [D loss: 0.691771, acc: 0.521484] [A loss: 1.921250, acc: 0.000000]\n",
"206: [D loss: 0.525545, acc: 0.822266] [A loss: 0.815034, acc: 0.285156]\n",
"207: [D loss: 0.673031, acc: 0.527344] [A loss: 1.857036, acc: 0.000000]\n",
"208: [D loss: 0.531778, acc: 0.843750] [A loss: 0.807085, acc: 0.320312]\n",
"209: [D loss: 0.665652, acc: 0.533203] [A loss: 1.985853, acc: 0.000000]\n",
"210: [D loss: 0.542221, acc: 0.779297] [A loss: 0.797684, acc: 0.386719]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"211: [D loss: 0.693485, acc: 0.527344] [A loss: 1.942352, acc: 0.000000]\n",
"212: [D loss: 0.575507, acc: 0.746094] [A loss: 0.653001, acc: 0.628906]\n",
"213: [D loss: 0.744177, acc: 0.515625] [A loss: 1.997246, acc: 0.000000]\n",
"214: [D loss: 0.550155, acc: 0.757812] [A loss: 0.675634, acc: 0.578125]\n",
"215: [D loss: 0.718254, acc: 0.507812] [A loss: 1.877222, acc: 0.000000]\n",
"216: [D loss: 0.551130, acc: 0.783203] [A loss: 0.725191, acc: 0.500000]\n",
"217: [D loss: 0.694822, acc: 0.527344] [A loss: 1.877297, acc: 0.000000]\n",
"218: [D loss: 0.549707, acc: 0.796875] [A loss: 0.792266, acc: 0.394531]\n",
"219: [D loss: 0.695090, acc: 0.521484] [A loss: 1.895336, acc: 0.000000]\n",
"220: [D loss: 0.555350, acc: 0.785156] [A loss: 0.734685, acc: 0.457031]\n",
"221: [D loss: 0.691918, acc: 0.531250] [A loss: 1.904979, acc: 0.000000]\n",
"222: [D loss: 0.562659, acc: 0.750000] [A loss: 0.736911, acc: 0.468750]\n",
"223: [D loss: 0.716666, acc: 0.523438] [A loss: 1.949764, acc: 0.000000]\n",
"224: [D loss: 0.583582, acc: 0.712891] [A loss: 0.672490, acc: 0.558594]\n",
"225: [D loss: 0.747131, acc: 0.509766] [A loss: 1.997950, acc: 0.000000]\n",
"226: [D loss: 0.583612, acc: 0.681641] [A loss: 0.659306, acc: 0.597656]\n",
"227: [D loss: 0.741185, acc: 0.505859] [A loss: 1.735012, acc: 0.000000]\n",
"228: [D loss: 0.554122, acc: 0.771484] [A loss: 0.853201, acc: 0.250000]\n",
"229: [D loss: 0.679573, acc: 0.533203] [A loss: 1.660880, acc: 0.000000]\n",
"230: [D loss: 0.562971, acc: 0.781250] [A loss: 0.916529, acc: 0.164062]\n",
"231: [D loss: 0.656482, acc: 0.556641] [A loss: 1.717215, acc: 0.000000]\n",
"232: [D loss: 0.548581, acc: 0.796875] [A loss: 0.827328, acc: 0.355469]\n",
"233: [D loss: 0.681273, acc: 0.539062] [A loss: 2.132412, acc: 0.000000]\n",
"234: [D loss: 0.586398, acc: 0.677734] [A loss: 0.562519, acc: 0.750000]\n",
"235: [D loss: 0.761172, acc: 0.509766] [A loss: 1.824272, acc: 0.000000]\n",
"236: [D loss: 0.593560, acc: 0.710938] [A loss: 0.707873, acc: 0.515625]\n",
"237: [D loss: 0.709830, acc: 0.533203] [A loss: 1.782889, acc: 0.000000]\n",
"238: [D loss: 0.554362, acc: 0.769531] [A loss: 0.922175, acc: 0.207031]\n",
"239: [D loss: 0.651635, acc: 0.587891] [A loss: 1.651583, acc: 0.000000]\n",
"240: [D loss: 0.575903, acc: 0.755859] [A loss: 0.927166, acc: 0.207031]\n",
"241: [D loss: 0.670428, acc: 0.558594] [A loss: 1.984983, acc: 0.000000]\n",
"242: [D loss: 0.581561, acc: 0.722656] [A loss: 0.625153, acc: 0.656250]\n",
"243: [D loss: 0.741367, acc: 0.513672] [A loss: 1.911183, acc: 0.000000]\n",
"244: [D loss: 0.592611, acc: 0.691406] [A loss: 0.636027, acc: 0.640625]\n",
"245: [D loss: 0.729901, acc: 0.519531] [A loss: 1.745614, acc: 0.000000]\n",
"246: [D loss: 0.584067, acc: 0.726562] [A loss: 0.749545, acc: 0.460938]\n",
"247: [D loss: 0.678953, acc: 0.531250] [A loss: 1.657466, acc: 0.000000]\n",
"248: [D loss: 0.588262, acc: 0.726562] [A loss: 0.740975, acc: 0.429688]\n",
"249: [D loss: 0.676060, acc: 0.529297] [A loss: 1.792158, acc: 0.000000]\n",
"250: [D loss: 0.584873, acc: 0.732422] [A loss: 0.852107, acc: 0.285156]\n",
"251: [D loss: 0.695963, acc: 0.542969] [A loss: 1.796835, acc: 0.000000]\n",
"252: [D loss: 0.600362, acc: 0.695312] [A loss: 0.692016, acc: 0.542969]\n",
"253: [D loss: 0.722452, acc: 0.515625] [A loss: 1.936894, acc: 0.000000]\n",
"254: [D loss: 0.599589, acc: 0.689453] [A loss: 0.629481, acc: 0.628906]\n",
"255: [D loss: 0.735977, acc: 0.509766] [A loss: 1.798537, acc: 0.000000]\n",
"256: [D loss: 0.612199, acc: 0.683594] [A loss: 0.699184, acc: 0.488281]\n",
"257: [D loss: 0.719882, acc: 0.519531] [A loss: 1.700712, acc: 0.000000]\n",
"258: [D loss: 0.593052, acc: 0.728516] [A loss: 0.793789, acc: 0.359375]\n",
"259: [D loss: 0.702926, acc: 0.535156] [A loss: 1.632458, acc: 0.003906]\n",
"260: [D loss: 0.597340, acc: 0.707031] [A loss: 0.814623, acc: 0.332031]\n",
"261: [D loss: 0.677415, acc: 0.541016] [A loss: 1.645298, acc: 0.000000]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"262: [D loss: 0.595900, acc: 0.708984] [A loss: 0.788467, acc: 0.355469]\n",
"263: [D loss: 0.716174, acc: 0.537109] [A loss: 1.813042, acc: 0.000000]\n",
"264: [D loss: 0.617460, acc: 0.667969] [A loss: 0.630203, acc: 0.648438]\n",
"265: [D loss: 0.749922, acc: 0.505859] [A loss: 1.692494, acc: 0.000000]\n",
"266: [D loss: 0.618753, acc: 0.673828] [A loss: 0.737433, acc: 0.417969]\n",
"267: [D loss: 0.725406, acc: 0.511719] [A loss: 1.631487, acc: 0.000000]\n",
"268: [D loss: 0.605646, acc: 0.703125] [A loss: 0.755833, acc: 0.425781]\n",
"269: [D loss: 0.685798, acc: 0.527344] [A loss: 1.598639, acc: 0.000000]\n",
"270: [D loss: 0.603549, acc: 0.703125] [A loss: 0.836908, acc: 0.304688]\n",
"271: [D loss: 0.698511, acc: 0.541016] [A loss: 1.576993, acc: 0.000000]\n",
"272: [D loss: 0.612269, acc: 0.671875] [A loss: 0.794550, acc: 0.367188]\n",
"273: [D loss: 0.691344, acc: 0.525391] [A loss: 1.735093, acc: 0.000000]\n",
"274: [D loss: 0.607354, acc: 0.697266] [A loss: 0.696481, acc: 0.554688]\n",
"275: [D loss: 0.727640, acc: 0.519531] [A loss: 1.781667, acc: 0.000000]\n",
"276: [D loss: 0.625172, acc: 0.634766] [A loss: 0.660515, acc: 0.605469]\n",
"277: [D loss: 0.747098, acc: 0.509766] [A loss: 1.614957, acc: 0.000000]\n",
"278: [D loss: 0.635351, acc: 0.638672] [A loss: 0.721787, acc: 0.472656]\n",
"279: [D loss: 0.714008, acc: 0.523438] [A loss: 1.494725, acc: 0.000000]\n",
"280: [D loss: 0.615922, acc: 0.669922] [A loss: 0.742828, acc: 0.480469]\n",
"281: [D loss: 0.698323, acc: 0.527344] [A loss: 1.534140, acc: 0.000000]\n",
"282: [D loss: 0.618676, acc: 0.697266] [A loss: 0.786732, acc: 0.339844]\n",
"283: [D loss: 0.695089, acc: 0.531250] [A loss: 1.424163, acc: 0.000000]\n",
"284: [D loss: 0.608770, acc: 0.726562] [A loss: 0.820837, acc: 0.320312]\n",
"285: [D loss: 0.693711, acc: 0.523438] [A loss: 1.593145, acc: 0.000000]\n",
"286: [D loss: 0.621200, acc: 0.669922] [A loss: 0.739955, acc: 0.453125]\n",
"287: [D loss: 0.741281, acc: 0.503906] [A loss: 1.694829, acc: 0.000000]\n",
"288: [D loss: 0.638377, acc: 0.630859] [A loss: 0.639220, acc: 0.632812]\n",
"289: [D loss: 0.735143, acc: 0.505859] [A loss: 1.561998, acc: 0.000000]\n",
"290: [D loss: 0.644395, acc: 0.632812] [A loss: 0.689311, acc: 0.535156]\n",
"291: [D loss: 0.714694, acc: 0.519531] [A loss: 1.515950, acc: 0.000000]\n",
"292: [D loss: 0.638423, acc: 0.648438] [A loss: 0.811018, acc: 0.304688]\n",
"293: [D loss: 0.689634, acc: 0.521484] [A loss: 1.384672, acc: 0.000000]\n",
"294: [D loss: 0.635384, acc: 0.667969] [A loss: 0.830639, acc: 0.289062]\n",
"295: [D loss: 0.701532, acc: 0.533203] [A loss: 1.514750, acc: 0.000000]\n",
"296: [D loss: 0.639710, acc: 0.654297] [A loss: 0.697598, acc: 0.523438]\n",
"297: [D loss: 0.707301, acc: 0.527344] [A loss: 1.618384, acc: 0.000000]\n",
"298: [D loss: 0.629685, acc: 0.654297] [A loss: 0.659238, acc: 0.601562]\n",
"299: [D loss: 0.744223, acc: 0.509766] [A loss: 1.430559, acc: 0.003906]\n",
"300: [D loss: 0.644650, acc: 0.660156] [A loss: 0.735112, acc: 0.445312]\n",
"301: [D loss: 0.696154, acc: 0.525391] [A loss: 1.513414, acc: 0.003906]\n",
"302: [D loss: 0.651159, acc: 0.638672] [A loss: 0.803346, acc: 0.335938]\n",
"303: [D loss: 0.695644, acc: 0.537109] [A loss: 1.418068, acc: 0.003906]\n",
"304: [D loss: 0.644229, acc: 0.644531] [A loss: 0.750369, acc: 0.433594]\n",
"305: [D loss: 0.722026, acc: 0.513672] [A loss: 1.376234, acc: 0.007812]\n",
"306: [D loss: 0.674852, acc: 0.578125] [A loss: 0.690605, acc: 0.531250]\n",
"307: [D loss: 0.726950, acc: 0.509766] [A loss: 1.387858, acc: 0.000000]\n",
"308: [D loss: 0.634252, acc: 0.673828] [A loss: 0.770101, acc: 0.375000]\n",
"309: [D loss: 0.705578, acc: 0.525391] [A loss: 1.577125, acc: 0.000000]\n",
"310: [D loss: 0.665193, acc: 0.576172] [A loss: 0.739409, acc: 0.433594]\n",
"311: [D loss: 0.728513, acc: 0.507812] [A loss: 1.783286, acc: 0.000000]\n",
"312: [D loss: 0.645861, acc: 0.630859] [A loss: 0.591506, acc: 0.769531]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"313: [D loss: 0.753859, acc: 0.501953] [A loss: 1.568245, acc: 0.000000]\n",
"314: [D loss: 0.657353, acc: 0.615234] [A loss: 0.630870, acc: 0.636719]\n",
"315: [D loss: 0.730124, acc: 0.498047] [A loss: 1.446845, acc: 0.000000]\n",
"316: [D loss: 0.640607, acc: 0.623047] [A loss: 0.790034, acc: 0.300781]\n",
"317: [D loss: 0.694217, acc: 0.531250] [A loss: 1.441614, acc: 0.000000]\n",
"318: [D loss: 0.642606, acc: 0.664062] [A loss: 0.829882, acc: 0.265625]\n",
"319: [D loss: 0.683545, acc: 0.542969] [A loss: 1.418447, acc: 0.000000]\n",
"320: [D loss: 0.658667, acc: 0.595703] [A loss: 0.668462, acc: 0.640625]\n",
"321: [D loss: 0.715836, acc: 0.507812] [A loss: 1.605637, acc: 0.000000]\n",
"322: [D loss: 0.658633, acc: 0.599609] [A loss: 0.591709, acc: 0.757812]\n",
"323: [D loss: 0.745751, acc: 0.509766] [A loss: 1.425464, acc: 0.000000]\n",
"324: [D loss: 0.659224, acc: 0.597656] [A loss: 0.650129, acc: 0.656250]\n",
"325: [D loss: 0.710525, acc: 0.513672] [A loss: 1.224382, acc: 0.003906]\n",
"326: [D loss: 0.651151, acc: 0.625000] [A loss: 0.768010, acc: 0.347656]\n",
"327: [D loss: 0.675111, acc: 0.542969] [A loss: 1.155520, acc: 0.011719]\n",
"328: [D loss: 0.649223, acc: 0.638672] [A loss: 0.801196, acc: 0.312500]\n",
"329: [D loss: 0.683335, acc: 0.546875] [A loss: 1.287655, acc: 0.003906]\n",
"330: [D loss: 0.653456, acc: 0.621094] [A loss: 0.778668, acc: 0.332031]\n",
"331: [D loss: 0.683668, acc: 0.531250] [A loss: 1.299851, acc: 0.000000]\n",
"332: [D loss: 0.640233, acc: 0.658203] [A loss: 0.770014, acc: 0.367188]\n",
"333: [D loss: 0.693184, acc: 0.521484] [A loss: 1.437208, acc: 0.003906]\n",
"334: [D loss: 0.657248, acc: 0.605469] [A loss: 0.666929, acc: 0.566406]\n",
"335: [D loss: 0.728732, acc: 0.505859] [A loss: 1.443691, acc: 0.000000]\n",
"336: [D loss: 0.661979, acc: 0.587891] [A loss: 0.632664, acc: 0.679688]\n",
"337: [D loss: 0.739566, acc: 0.507812] [A loss: 1.369896, acc: 0.000000]\n",
"338: [D loss: 0.657711, acc: 0.580078] [A loss: 0.610785, acc: 0.761719]\n",
"339: [D loss: 0.719641, acc: 0.505859] [A loss: 1.319019, acc: 0.003906]\n",
"340: [D loss: 0.663177, acc: 0.613281] [A loss: 0.742654, acc: 0.421875]\n",
"341: [D loss: 0.690060, acc: 0.521484] [A loss: 1.103202, acc: 0.007812]\n",
"342: [D loss: 0.658832, acc: 0.607422] [A loss: 0.840609, acc: 0.226562]\n",
"343: [D loss: 0.690196, acc: 0.533203] [A loss: 1.162201, acc: 0.003906]\n",
"344: [D loss: 0.648281, acc: 0.656250] [A loss: 0.779343, acc: 0.371094]\n",
"345: [D loss: 0.692912, acc: 0.539062] [A loss: 1.202961, acc: 0.003906]\n",
"346: [D loss: 0.641328, acc: 0.656250] [A loss: 0.788150, acc: 0.320312]\n",
"347: [D loss: 0.687510, acc: 0.531250] [A loss: 1.323241, acc: 0.000000]\n",
"348: [D loss: 0.650846, acc: 0.619141] [A loss: 0.739649, acc: 0.437500]\n",
"349: [D loss: 0.701275, acc: 0.527344] [A loss: 1.309290, acc: 0.000000]\n",
"350: [D loss: 0.646505, acc: 0.642578] [A loss: 0.698516, acc: 0.519531]\n",
"351: [D loss: 0.710647, acc: 0.517578] [A loss: 1.421097, acc: 0.000000]\n",
"352: [D loss: 0.662172, acc: 0.609375] [A loss: 0.623133, acc: 0.746094]\n",
"353: [D loss: 0.722247, acc: 0.500000] [A loss: 1.293604, acc: 0.003906]\n",
"354: [D loss: 0.656854, acc: 0.599609] [A loss: 0.661529, acc: 0.617188]\n",
"355: [D loss: 0.727045, acc: 0.509766] [A loss: 1.242842, acc: 0.000000]\n",
"356: [D loss: 0.661689, acc: 0.611328] [A loss: 0.742129, acc: 0.351562]\n",
"357: [D loss: 0.682734, acc: 0.529297] [A loss: 1.137207, acc: 0.011719]\n",
"358: [D loss: 0.648920, acc: 0.623047] [A loss: 0.756990, acc: 0.363281]\n",
"359: [D loss: 0.675301, acc: 0.560547] [A loss: 1.256399, acc: 0.000000]\n",
"360: [D loss: 0.654892, acc: 0.621094] [A loss: 0.775012, acc: 0.292969]\n",
"361: [D loss: 0.693062, acc: 0.533203] [A loss: 1.237435, acc: 0.003906]\n",
"362: [D loss: 0.656561, acc: 0.605469] [A loss: 0.742412, acc: 0.421875]\n",
"363: [D loss: 0.696926, acc: 0.521484] [A loss: 1.305019, acc: 0.000000]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"364: [D loss: 0.654453, acc: 0.634766] [A loss: 0.645594, acc: 0.664062]\n",
"365: [D loss: 0.712407, acc: 0.511719] [A loss: 1.323168, acc: 0.003906]\n",
"366: [D loss: 0.649911, acc: 0.630859] [A loss: 0.631848, acc: 0.687500]\n",
"367: [D loss: 0.725543, acc: 0.503906] [A loss: 1.260529, acc: 0.000000]\n",
"368: [D loss: 0.662568, acc: 0.601562] [A loss: 0.690974, acc: 0.515625]\n",
"369: [D loss: 0.717629, acc: 0.511719] [A loss: 1.150192, acc: 0.000000]\n",
"370: [D loss: 0.660226, acc: 0.634766] [A loss: 0.704646, acc: 0.472656]\n",
"371: [D loss: 0.703273, acc: 0.523438] [A loss: 1.139598, acc: 0.015625]\n",
"372: [D loss: 0.659295, acc: 0.619141] [A loss: 0.737355, acc: 0.402344]\n",
"373: [D loss: 0.694646, acc: 0.535156] [A loss: 1.105320, acc: 0.011719]\n",
"374: [D loss: 0.667102, acc: 0.595703] [A loss: 0.787542, acc: 0.296875]\n",
"375: [D loss: 0.696258, acc: 0.535156] [A loss: 1.132833, acc: 0.015625]\n",
"376: [D loss: 0.672189, acc: 0.583984] [A loss: 0.769923, acc: 0.332031]\n",
"377: [D loss: 0.676992, acc: 0.542969] [A loss: 1.064119, acc: 0.035156]\n",
"378: [D loss: 0.681136, acc: 0.562500] [A loss: 0.866012, acc: 0.175781]\n",
"379: [D loss: 0.686812, acc: 0.546875] [A loss: 1.083369, acc: 0.019531]\n",
"380: [D loss: 0.661482, acc: 0.640625] [A loss: 0.824353, acc: 0.207031]\n",
"381: [D loss: 0.694125, acc: 0.521484] [A loss: 1.218997, acc: 0.000000]\n",
"382: [D loss: 0.665715, acc: 0.617188] [A loss: 0.726615, acc: 0.449219]\n",
"383: [D loss: 0.701414, acc: 0.521484] [A loss: 1.276702, acc: 0.003906]\n",
"384: [D loss: 0.668981, acc: 0.548828] [A loss: 0.621951, acc: 0.714844]\n",
"385: [D loss: 0.751697, acc: 0.505859] [A loss: 1.232350, acc: 0.003906]\n",
"386: [D loss: 0.653229, acc: 0.644531] [A loss: 0.671459, acc: 0.593750]\n",
"387: [D loss: 0.706917, acc: 0.505859] [A loss: 1.142077, acc: 0.007812]\n",
"388: [D loss: 0.661263, acc: 0.642578] [A loss: 0.693115, acc: 0.503906]\n",
"389: [D loss: 0.695848, acc: 0.525391] [A loss: 1.052602, acc: 0.035156]\n",
"390: [D loss: 0.676246, acc: 0.591797] [A loss: 0.804588, acc: 0.281250]\n",
"391: [D loss: 0.677454, acc: 0.566406] [A loss: 1.004593, acc: 0.031250]\n",
"392: [D loss: 0.666653, acc: 0.603516] [A loss: 0.836240, acc: 0.199219]\n",
"393: [D loss: 0.680460, acc: 0.576172] [A loss: 0.971324, acc: 0.082031]\n",
"394: [D loss: 0.655896, acc: 0.626953] [A loss: 0.874971, acc: 0.132812]\n",
"395: [D loss: 0.682822, acc: 0.550781] [A loss: 1.131356, acc: 0.011719]\n",
"396: [D loss: 0.665604, acc: 0.578125] [A loss: 0.747590, acc: 0.378906]\n",
"397: [D loss: 0.683787, acc: 0.542969] [A loss: 1.221820, acc: 0.000000]\n",
"398: [D loss: 0.661505, acc: 0.566406] [A loss: 0.645231, acc: 0.687500]\n",
"399: [D loss: 0.716936, acc: 0.517578] [A loss: 1.318802, acc: 0.000000]\n",
"400: [D loss: 0.670235, acc: 0.572266] [A loss: 0.603788, acc: 0.750000]\n",
"401: [D loss: 0.730286, acc: 0.515625] [A loss: 1.221415, acc: 0.000000]\n",
"402: [D loss: 0.677121, acc: 0.552734] [A loss: 0.672689, acc: 0.589844]\n",
"403: [D loss: 0.721934, acc: 0.513672] [A loss: 1.036712, acc: 0.027344]\n",
"404: [D loss: 0.649922, acc: 0.658203] [A loss: 0.752812, acc: 0.382812]\n",
"405: [D loss: 0.680097, acc: 0.548828] [A loss: 0.964031, acc: 0.046875]\n",
"406: [D loss: 0.669602, acc: 0.593750] [A loss: 0.793837, acc: 0.265625]\n",
"407: [D loss: 0.687377, acc: 0.554688] [A loss: 0.989019, acc: 0.031250]\n",
"408: [D loss: 0.677169, acc: 0.550781] [A loss: 0.843428, acc: 0.183594]\n",
"409: [D loss: 0.683110, acc: 0.548828] [A loss: 0.963174, acc: 0.062500]\n",
"410: [D loss: 0.666413, acc: 0.599609] [A loss: 0.837990, acc: 0.175781]\n",
"411: [D loss: 0.686005, acc: 0.574219] [A loss: 0.973834, acc: 0.039062]\n",
"412: [D loss: 0.679802, acc: 0.597656] [A loss: 0.885783, acc: 0.109375]\n",
"413: [D loss: 0.663980, acc: 0.609375] [A loss: 0.936143, acc: 0.085938]\n",
"414: [D loss: 0.682129, acc: 0.550781] [A loss: 0.965991, acc: 0.066406]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"415: [D loss: 0.693230, acc: 0.548828] [A loss: 1.025963, acc: 0.035156]\n",
"416: [D loss: 0.666097, acc: 0.589844] [A loss: 0.822490, acc: 0.242188]\n",
"417: [D loss: 0.679610, acc: 0.564453] [A loss: 1.108463, acc: 0.015625]\n",
"418: [D loss: 0.672450, acc: 0.591797] [A loss: 0.740772, acc: 0.406250]\n",
"419: [D loss: 0.722228, acc: 0.505859] [A loss: 1.238113, acc: 0.015625]\n",
"420: [D loss: 0.678595, acc: 0.544922] [A loss: 0.637259, acc: 0.660156]\n",
"421: [D loss: 0.718855, acc: 0.511719] [A loss: 1.214938, acc: 0.003906]\n",
"422: [D loss: 0.675772, acc: 0.593750] [A loss: 0.693055, acc: 0.539062]\n",
"423: [D loss: 0.703915, acc: 0.503906] [A loss: 1.037344, acc: 0.035156]\n",
"424: [D loss: 0.685941, acc: 0.554688] [A loss: 0.754907, acc: 0.332031]\n",
"425: [D loss: 0.697256, acc: 0.529297] [A loss: 1.033446, acc: 0.015625]\n",
"426: [D loss: 0.667613, acc: 0.587891] [A loss: 0.769538, acc: 0.316406]\n",
"427: [D loss: 0.681960, acc: 0.541016] [A loss: 0.967305, acc: 0.054688]\n",
"428: [D loss: 0.679243, acc: 0.568359] [A loss: 0.828943, acc: 0.167969]\n",
"429: [D loss: 0.671313, acc: 0.556641] [A loss: 0.966443, acc: 0.054688]\n",
"430: [D loss: 0.675106, acc: 0.587891] [A loss: 0.829573, acc: 0.179688]\n",
"431: [D loss: 0.682563, acc: 0.564453] [A loss: 1.017800, acc: 0.031250]\n",
"432: [D loss: 0.674239, acc: 0.578125] [A loss: 0.814516, acc: 0.238281]\n",
"433: [D loss: 0.697134, acc: 0.523438] [A loss: 1.073568, acc: 0.000000]\n",
"434: [D loss: 0.663085, acc: 0.619141] [A loss: 0.741831, acc: 0.386719]\n",
"435: [D loss: 0.719723, acc: 0.507812] [A loss: 1.201495, acc: 0.003906]\n",
"436: [D loss: 0.679434, acc: 0.558594] [A loss: 0.653833, acc: 0.652344]\n",
"437: [D loss: 0.735277, acc: 0.505859] [A loss: 1.161752, acc: 0.000000]\n",
"438: [D loss: 0.687435, acc: 0.554688] [A loss: 0.668562, acc: 0.582031]\n",
"439: [D loss: 0.698942, acc: 0.519531] [A loss: 1.008528, acc: 0.031250]\n",
"440: [D loss: 0.676077, acc: 0.570312] [A loss: 0.751693, acc: 0.363281]\n",
"441: [D loss: 0.694688, acc: 0.541016] [A loss: 0.925567, acc: 0.046875]\n",
"442: [D loss: 0.673828, acc: 0.583984] [A loss: 0.823036, acc: 0.187500]\n",
"443: [D loss: 0.671236, acc: 0.564453] [A loss: 0.939200, acc: 0.062500]\n",
"444: [D loss: 0.669280, acc: 0.576172] [A loss: 0.819116, acc: 0.214844]\n",
"445: [D loss: 0.673311, acc: 0.578125] [A loss: 1.007489, acc: 0.042969]\n",
"446: [D loss: 0.676466, acc: 0.593750] [A loss: 0.800470, acc: 0.234375]\n",
"447: [D loss: 0.686184, acc: 0.544922] [A loss: 1.094368, acc: 0.011719]\n",
"448: [D loss: 0.662762, acc: 0.611328] [A loss: 0.728804, acc: 0.421875]\n",
"449: [D loss: 0.692919, acc: 0.531250] [A loss: 1.141956, acc: 0.000000]\n",
"450: [D loss: 0.677366, acc: 0.585938] [A loss: 0.664425, acc: 0.589844]\n",
"451: [D loss: 0.737342, acc: 0.505859] [A loss: 1.100623, acc: 0.011719]\n",
"452: [D loss: 0.678412, acc: 0.578125] [A loss: 0.739074, acc: 0.414062]\n",
"453: [D loss: 0.696644, acc: 0.535156] [A loss: 1.082763, acc: 0.003906]\n",
"454: [D loss: 0.677410, acc: 0.570312] [A loss: 0.696486, acc: 0.492188]\n",
"455: [D loss: 0.700923, acc: 0.519531] [A loss: 0.984192, acc: 0.035156]\n",
"456: [D loss: 0.667401, acc: 0.621094] [A loss: 0.766263, acc: 0.300781]\n",
"457: [D loss: 0.696987, acc: 0.517578] [A loss: 0.993678, acc: 0.007812]\n",
"458: [D loss: 0.679796, acc: 0.550781] [A loss: 0.778321, acc: 0.273438]\n",
"459: [D loss: 0.691194, acc: 0.541016] [A loss: 0.955015, acc: 0.031250]\n",
"460: [D loss: 0.673555, acc: 0.582031] [A loss: 0.787020, acc: 0.226562]\n",
"461: [D loss: 0.675051, acc: 0.570312] [A loss: 1.004095, acc: 0.039062]\n",
"462: [D loss: 0.666784, acc: 0.607422] [A loss: 0.762599, acc: 0.355469]\n",
"463: [D loss: 0.701202, acc: 0.531250] [A loss: 1.019765, acc: 0.019531]\n",
"464: [D loss: 0.673991, acc: 0.589844] [A loss: 0.727036, acc: 0.421875]\n",
"465: [D loss: 0.712710, acc: 0.505859] [A loss: 1.051077, acc: 0.015625]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"466: [D loss: 0.664539, acc: 0.574219] [A loss: 0.720423, acc: 0.445312]\n",
"467: [D loss: 0.701366, acc: 0.519531] [A loss: 1.009072, acc: 0.023438]\n",
"468: [D loss: 0.672490, acc: 0.580078] [A loss: 0.754601, acc: 0.363281]\n",
"469: [D loss: 0.698093, acc: 0.525391] [A loss: 1.008788, acc: 0.023438]\n",
"470: [D loss: 0.670600, acc: 0.613281] [A loss: 0.748064, acc: 0.355469]\n",
"471: [D loss: 0.699251, acc: 0.519531] [A loss: 1.005649, acc: 0.031250]\n",
"472: [D loss: 0.679206, acc: 0.566406] [A loss: 0.747337, acc: 0.382812]\n",
"473: [D loss: 0.685312, acc: 0.544922] [A loss: 1.005020, acc: 0.019531]\n",
"474: [D loss: 0.674072, acc: 0.537109] [A loss: 0.728030, acc: 0.414062]\n",
"475: [D loss: 0.704368, acc: 0.515625] [A loss: 1.045238, acc: 0.039062]\n",
"476: [D loss: 0.684641, acc: 0.564453] [A loss: 0.740421, acc: 0.410156]\n",
"477: [D loss: 0.685093, acc: 0.546875] [A loss: 0.985392, acc: 0.015625]\n",
"478: [D loss: 0.666010, acc: 0.613281] [A loss: 0.818941, acc: 0.203125]\n",
"479: [D loss: 0.675061, acc: 0.519531] [A loss: 0.966235, acc: 0.042969]\n",
"480: [D loss: 0.670773, acc: 0.611328] [A loss: 0.813665, acc: 0.199219]\n",
"481: [D loss: 0.679194, acc: 0.552734] [A loss: 1.035589, acc: 0.011719]\n",
"482: [D loss: 0.671358, acc: 0.601562] [A loss: 0.741558, acc: 0.394531]\n",
"483: [D loss: 0.694809, acc: 0.529297] [A loss: 1.052583, acc: 0.015625]\n",
"484: [D loss: 0.673352, acc: 0.593750] [A loss: 0.729498, acc: 0.402344]\n",
"485: [D loss: 0.693154, acc: 0.531250] [A loss: 1.046876, acc: 0.011719]\n",
"486: [D loss: 0.678974, acc: 0.560547] [A loss: 0.699841, acc: 0.492188]\n",
"487: [D loss: 0.694497, acc: 0.535156] [A loss: 1.079556, acc: 0.007812]\n",
"488: [D loss: 0.674109, acc: 0.574219] [A loss: 0.694012, acc: 0.507812]\n",
"489: [D loss: 0.705807, acc: 0.511719] [A loss: 1.046230, acc: 0.007812]\n",
"490: [D loss: 0.673861, acc: 0.605469] [A loss: 0.718149, acc: 0.449219]\n",
"491: [D loss: 0.703160, acc: 0.517578] [A loss: 0.926452, acc: 0.074219]\n",
"492: [D loss: 0.663871, acc: 0.607422] [A loss: 0.790672, acc: 0.285156]\n",
"493: [D loss: 0.686658, acc: 0.531250] [A loss: 0.949035, acc: 0.050781]\n",
"494: [D loss: 0.674389, acc: 0.576172] [A loss: 0.752606, acc: 0.273438]\n",
"495: [D loss: 0.678529, acc: 0.564453] [A loss: 0.978337, acc: 0.042969]\n",
"496: [D loss: 0.668913, acc: 0.587891] [A loss: 0.805769, acc: 0.226562]\n",
"497: [D loss: 0.687244, acc: 0.548828] [A loss: 1.015811, acc: 0.015625]\n",
"498: [D loss: 0.675754, acc: 0.587891] [A loss: 0.731674, acc: 0.402344]\n",
"499: [D loss: 0.704726, acc: 0.525391] [A loss: 1.025215, acc: 0.019531]\n",
"500: [D loss: 0.672886, acc: 0.591797] [A loss: 0.730512, acc: 0.410156]\n",
"501: [D loss: 0.694252, acc: 0.519531] [A loss: 0.992863, acc: 0.046875]\n",
"502: [D loss: 0.672186, acc: 0.582031] [A loss: 0.752267, acc: 0.324219]\n",
"503: [D loss: 0.690796, acc: 0.537109] [A loss: 0.969688, acc: 0.027344]\n",
"504: [D loss: 0.665194, acc: 0.619141] [A loss: 0.750486, acc: 0.328125]\n",
"505: [D loss: 0.702663, acc: 0.521484] [A loss: 0.998654, acc: 0.031250]\n",
"506: [D loss: 0.669202, acc: 0.603516] [A loss: 0.758732, acc: 0.308594]\n",
"507: [D loss: 0.699112, acc: 0.519531] [A loss: 0.970975, acc: 0.039062]\n",
"508: [D loss: 0.671352, acc: 0.595703] [A loss: 0.764114, acc: 0.316406]\n",
"509: [D loss: 0.683449, acc: 0.562500] [A loss: 0.955910, acc: 0.039062]\n",
"510: [D loss: 0.680438, acc: 0.556641] [A loss: 0.747509, acc: 0.378906]\n",
"511: [D loss: 0.687132, acc: 0.550781] [A loss: 0.954714, acc: 0.050781]\n",
"512: [D loss: 0.674684, acc: 0.589844] [A loss: 0.794733, acc: 0.250000]\n",
"513: [D loss: 0.675265, acc: 0.582031] [A loss: 0.926118, acc: 0.046875]\n",
"514: [D loss: 0.671406, acc: 0.587891] [A loss: 0.875561, acc: 0.074219]\n",
"515: [D loss: 0.681435, acc: 0.556641] [A loss: 0.942538, acc: 0.031250]\n",
"516: [D loss: 0.671377, acc: 0.580078] [A loss: 0.800423, acc: 0.253906]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"517: [D loss: 0.703344, acc: 0.529297] [A loss: 1.090059, acc: 0.000000]\n",
"518: [D loss: 0.669917, acc: 0.585938] [A loss: 0.698746, acc: 0.535156]\n",
"519: [D loss: 0.707161, acc: 0.505859] [A loss: 1.004443, acc: 0.019531]\n",
"520: [D loss: 0.676908, acc: 0.597656] [A loss: 0.727363, acc: 0.425781]\n",
"521: [D loss: 0.689329, acc: 0.550781] [A loss: 0.952855, acc: 0.035156]\n",
"522: [D loss: 0.668667, acc: 0.597656] [A loss: 0.782289, acc: 0.250000]\n",
"523: [D loss: 0.696713, acc: 0.521484] [A loss: 0.932484, acc: 0.109375]\n",
"524: [D loss: 0.660379, acc: 0.630859] [A loss: 0.869241, acc: 0.136719]\n",
"525: [D loss: 0.664922, acc: 0.597656] [A loss: 0.829917, acc: 0.171875]\n",
"526: [D loss: 0.672253, acc: 0.572266] [A loss: 0.915975, acc: 0.054688]\n",
"527: [D loss: 0.665450, acc: 0.623047] [A loss: 0.814652, acc: 0.195312]\n",
"528: [D loss: 0.681250, acc: 0.572266] [A loss: 1.010877, acc: 0.019531]\n",
"529: [D loss: 0.672518, acc: 0.583984] [A loss: 0.767611, acc: 0.289062]\n",
"530: [D loss: 0.691863, acc: 0.511719] [A loss: 1.071688, acc: 0.015625]\n",
"531: [D loss: 0.671938, acc: 0.587891] [A loss: 0.737985, acc: 0.406250]\n",
"532: [D loss: 0.692754, acc: 0.531250] [A loss: 1.079101, acc: 0.000000]\n",
"533: [D loss: 0.669500, acc: 0.605469] [A loss: 0.679281, acc: 0.566406]\n",
"534: [D loss: 0.686736, acc: 0.519531] [A loss: 1.021555, acc: 0.023438]\n",
"535: [D loss: 0.658111, acc: 0.623047] [A loss: 0.716194, acc: 0.417969]\n",
"536: [D loss: 0.683454, acc: 0.541016] [A loss: 0.927837, acc: 0.093750]\n",
"537: [D loss: 0.658586, acc: 0.628906] [A loss: 0.802784, acc: 0.230469]\n",
"538: [D loss: 0.671505, acc: 0.554688] [A loss: 0.972219, acc: 0.042969]\n",
"539: [D loss: 0.672267, acc: 0.595703] [A loss: 0.799319, acc: 0.257812]\n",
"540: [D loss: 0.693986, acc: 0.529297] [A loss: 0.913746, acc: 0.058594]\n",
"541: [D loss: 0.663268, acc: 0.605469] [A loss: 0.833290, acc: 0.160156]\n",
"542: [D loss: 0.679492, acc: 0.568359] [A loss: 0.954231, acc: 0.070312]\n",
"543: [D loss: 0.662457, acc: 0.628906] [A loss: 0.807946, acc: 0.218750]\n",
"544: [D loss: 0.673557, acc: 0.582031] [A loss: 0.965406, acc: 0.058594]\n",
"545: [D loss: 0.671198, acc: 0.583984] [A loss: 0.797016, acc: 0.253906]\n",
"546: [D loss: 0.687841, acc: 0.539062] [A loss: 0.984482, acc: 0.039062]\n",
"547: [D loss: 0.666275, acc: 0.607422] [A loss: 0.848239, acc: 0.175781]\n",
"548: [D loss: 0.671552, acc: 0.564453] [A loss: 0.981109, acc: 0.027344]\n",
"549: [D loss: 0.675664, acc: 0.587891] [A loss: 0.813134, acc: 0.203125]\n",
"550: [D loss: 0.680780, acc: 0.556641] [A loss: 1.029626, acc: 0.015625]\n",
"551: [D loss: 0.669985, acc: 0.611328] [A loss: 0.736978, acc: 0.433594]\n",
"552: [D loss: 0.683527, acc: 0.546875] [A loss: 1.056745, acc: 0.023438]\n",
"553: [D loss: 0.674613, acc: 0.589844] [A loss: 0.721660, acc: 0.414062]\n",
"554: [D loss: 0.691510, acc: 0.519531] [A loss: 1.039556, acc: 0.058594]\n",
"555: [D loss: 0.669830, acc: 0.585938] [A loss: 0.733096, acc: 0.363281]\n",
"556: [D loss: 0.695500, acc: 0.533203] [A loss: 1.016475, acc: 0.019531]\n",
"557: [D loss: 0.680039, acc: 0.552734] [A loss: 0.751366, acc: 0.343750]\n",
"558: [D loss: 0.688644, acc: 0.544922] [A loss: 0.998666, acc: 0.031250]\n",
"559: [D loss: 0.665359, acc: 0.619141] [A loss: 0.740534, acc: 0.402344]\n",
"560: [D loss: 0.685170, acc: 0.539062] [A loss: 1.003748, acc: 0.031250]\n",
"561: [D loss: 0.672712, acc: 0.556641] [A loss: 0.740367, acc: 0.367188]\n",
"562: [D loss: 0.679641, acc: 0.546875] [A loss: 0.984572, acc: 0.039062]\n",
"563: [D loss: 0.661609, acc: 0.603516] [A loss: 0.764113, acc: 0.316406]\n",
"564: [D loss: 0.672950, acc: 0.566406] [A loss: 0.992339, acc: 0.046875]\n",
"565: [D loss: 0.660487, acc: 0.597656] [A loss: 0.753716, acc: 0.351562]\n",
"566: [D loss: 0.678628, acc: 0.541016] [A loss: 1.011868, acc: 0.039062]\n",
"567: [D loss: 0.667232, acc: 0.619141] [A loss: 0.779539, acc: 0.285156]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"568: [D loss: 0.685560, acc: 0.541016] [A loss: 0.947090, acc: 0.066406]\n",
"569: [D loss: 0.670498, acc: 0.580078] [A loss: 0.784030, acc: 0.296875]\n",
"570: [D loss: 0.684720, acc: 0.566406] [A loss: 1.007908, acc: 0.031250]\n",
"571: [D loss: 0.674671, acc: 0.576172] [A loss: 0.794422, acc: 0.273438]\n",
"572: [D loss: 0.693848, acc: 0.548828] [A loss: 0.991297, acc: 0.031250]\n",
"573: [D loss: 0.666419, acc: 0.601562] [A loss: 0.744639, acc: 0.367188]\n",
"574: [D loss: 0.671872, acc: 0.554688] [A loss: 0.948196, acc: 0.058594]\n",
"575: [D loss: 0.678252, acc: 0.582031] [A loss: 0.775316, acc: 0.292969]\n",
"576: [D loss: 0.685207, acc: 0.541016] [A loss: 0.974418, acc: 0.066406]\n",
"577: [D loss: 0.675542, acc: 0.544922] [A loss: 0.794424, acc: 0.199219]\n",
"578: [D loss: 0.677978, acc: 0.554688] [A loss: 0.976481, acc: 0.027344]\n",
"579: [D loss: 0.670948, acc: 0.585938] [A loss: 0.784526, acc: 0.265625]\n",
"580: [D loss: 0.678861, acc: 0.558594] [A loss: 0.979989, acc: 0.039062]\n",
"581: [D loss: 0.677929, acc: 0.568359] [A loss: 0.768059, acc: 0.328125]\n",
"582: [D loss: 0.681317, acc: 0.552734] [A loss: 0.969734, acc: 0.066406]\n",
"583: [D loss: 0.665063, acc: 0.632812] [A loss: 0.780768, acc: 0.285156]\n",
"584: [D loss: 0.682251, acc: 0.542969] [A loss: 0.960243, acc: 0.062500]\n",
"585: [D loss: 0.667900, acc: 0.591797] [A loss: 0.784784, acc: 0.289062]\n",
"586: [D loss: 0.684970, acc: 0.548828] [A loss: 0.956910, acc: 0.070312]\n",
"587: [D loss: 0.672936, acc: 0.582031] [A loss: 0.836106, acc: 0.222656]\n",
"588: [D loss: 0.687474, acc: 0.535156] [A loss: 0.978602, acc: 0.042969]\n",
"589: [D loss: 0.673452, acc: 0.583984] [A loss: 0.780064, acc: 0.289062]\n",
"590: [D loss: 0.679637, acc: 0.568359] [A loss: 0.969252, acc: 0.027344]\n",
"591: [D loss: 0.669592, acc: 0.587891] [A loss: 0.787000, acc: 0.289062]\n",
"592: [D loss: 0.675945, acc: 0.568359] [A loss: 0.980775, acc: 0.050781]\n",
"593: [D loss: 0.669751, acc: 0.611328] [A loss: 0.722134, acc: 0.429688]\n",
"594: [D loss: 0.696153, acc: 0.529297] [A loss: 1.027643, acc: 0.019531]\n",
"595: [D loss: 0.669630, acc: 0.595703] [A loss: 0.783561, acc: 0.265625]\n",
"596: [D loss: 0.690629, acc: 0.550781] [A loss: 0.980602, acc: 0.035156]\n",
"597: [D loss: 0.666081, acc: 0.589844] [A loss: 0.764496, acc: 0.308594]\n",
"598: [D loss: 0.680572, acc: 0.566406] [A loss: 0.995978, acc: 0.027344]\n",
"599: [D loss: 0.671271, acc: 0.589844] [A loss: 0.790917, acc: 0.289062]\n",
"600: [D loss: 0.682439, acc: 0.554688] [A loss: 0.947693, acc: 0.066406]\n",
"601: [D loss: 0.655039, acc: 0.632812] [A loss: 0.826972, acc: 0.222656]\n",
"602: [D loss: 0.668274, acc: 0.583984] [A loss: 0.915278, acc: 0.089844]\n",
"603: [D loss: 0.664570, acc: 0.589844] [A loss: 0.825287, acc: 0.214844]\n",
"604: [D loss: 0.676493, acc: 0.570312] [A loss: 0.925659, acc: 0.066406]\n",
"605: [D loss: 0.670068, acc: 0.582031] [A loss: 0.820056, acc: 0.222656]\n",
"606: [D loss: 0.683073, acc: 0.568359] [A loss: 0.953951, acc: 0.082031]\n",
"607: [D loss: 0.666606, acc: 0.597656] [A loss: 0.776935, acc: 0.324219]\n",
"608: [D loss: 0.677045, acc: 0.552734] [A loss: 1.044327, acc: 0.019531]\n",
"609: [D loss: 0.659765, acc: 0.623047] [A loss: 0.717704, acc: 0.484375]\n",
"610: [D loss: 0.702078, acc: 0.503906] [A loss: 1.038211, acc: 0.023438]\n",
"611: [D loss: 0.661104, acc: 0.632812] [A loss: 0.677661, acc: 0.542969]\n",
"612: [D loss: 0.698442, acc: 0.531250] [A loss: 1.044531, acc: 0.019531]\n",
"613: [D loss: 0.676020, acc: 0.585938] [A loss: 0.730565, acc: 0.429688]\n",
"614: [D loss: 0.697435, acc: 0.527344] [A loss: 0.918856, acc: 0.070312]\n",
"615: [D loss: 0.666015, acc: 0.617188] [A loss: 0.797091, acc: 0.234375]\n",
"616: [D loss: 0.665549, acc: 0.574219] [A loss: 0.906029, acc: 0.085938]\n",
"617: [D loss: 0.669890, acc: 0.593750] [A loss: 0.834163, acc: 0.203125]\n",
"618: [D loss: 0.667416, acc: 0.578125] [A loss: 0.882341, acc: 0.128906]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"619: [D loss: 0.672548, acc: 0.580078] [A loss: 0.870258, acc: 0.148438]\n",
"620: [D loss: 0.671660, acc: 0.585938] [A loss: 0.933446, acc: 0.078125]\n",
"621: [D loss: 0.671686, acc: 0.595703] [A loss: 0.830146, acc: 0.187500]\n",
"622: [D loss: 0.690413, acc: 0.560547] [A loss: 0.936999, acc: 0.078125]\n",
"623: [D loss: 0.670866, acc: 0.583984] [A loss: 0.810712, acc: 0.222656]\n",
"624: [D loss: 0.675944, acc: 0.554688] [A loss: 0.952383, acc: 0.058594]\n",
"625: [D loss: 0.658336, acc: 0.611328] [A loss: 0.783775, acc: 0.308594]\n",
"626: [D loss: 0.677815, acc: 0.556641] [A loss: 0.941987, acc: 0.082031]\n",
"627: [D loss: 0.676053, acc: 0.580078] [A loss: 0.804657, acc: 0.261719]\n",
"628: [D loss: 0.672636, acc: 0.537109] [A loss: 0.972578, acc: 0.101562]\n",
"629: [D loss: 0.674733, acc: 0.572266] [A loss: 0.834173, acc: 0.195312]\n",
"630: [D loss: 0.668530, acc: 0.591797] [A loss: 0.940874, acc: 0.070312]\n",
"631: [D loss: 0.670910, acc: 0.576172] [A loss: 0.799618, acc: 0.261719]\n",
"632: [D loss: 0.670725, acc: 0.587891] [A loss: 0.982233, acc: 0.066406]\n",
"633: [D loss: 0.659274, acc: 0.640625] [A loss: 0.795618, acc: 0.320312]\n",
"634: [D loss: 0.681357, acc: 0.556641] [A loss: 1.061345, acc: 0.039062]\n",
"635: [D loss: 0.677644, acc: 0.564453] [A loss: 0.726683, acc: 0.449219]\n",
"636: [D loss: 0.689040, acc: 0.550781] [A loss: 1.046762, acc: 0.027344]\n",
"637: [D loss: 0.668023, acc: 0.609375] [A loss: 0.707786, acc: 0.460938]\n",
"638: [D loss: 0.692349, acc: 0.541016] [A loss: 0.982756, acc: 0.074219]\n",
"639: [D loss: 0.650656, acc: 0.656250] [A loss: 0.764660, acc: 0.371094]\n",
"640: [D loss: 0.680424, acc: 0.542969] [A loss: 0.972917, acc: 0.070312]\n",
"641: [D loss: 0.670408, acc: 0.587891] [A loss: 0.777682, acc: 0.359375]\n",
"642: [D loss: 0.682902, acc: 0.589844] [A loss: 0.955196, acc: 0.070312]\n",
"643: [D loss: 0.660761, acc: 0.632812] [A loss: 0.812630, acc: 0.234375]\n",
"644: [D loss: 0.667345, acc: 0.599609] [A loss: 0.905053, acc: 0.117188]\n",
"645: [D loss: 0.671671, acc: 0.582031] [A loss: 0.823142, acc: 0.191406]\n",
"646: [D loss: 0.676857, acc: 0.582031] [A loss: 0.926346, acc: 0.097656]\n",
"647: [D loss: 0.652859, acc: 0.650391] [A loss: 0.824006, acc: 0.265625]\n",
"648: [D loss: 0.667399, acc: 0.603516] [A loss: 0.943550, acc: 0.097656]\n",
"649: [D loss: 0.672742, acc: 0.574219] [A loss: 0.867235, acc: 0.210938]\n",
"650: [D loss: 0.677409, acc: 0.580078] [A loss: 0.985888, acc: 0.054688]\n",
"651: [D loss: 0.672878, acc: 0.566406] [A loss: 0.805853, acc: 0.269531]\n",
"652: [D loss: 0.670325, acc: 0.568359] [A loss: 1.005436, acc: 0.046875]\n",
"653: [D loss: 0.653923, acc: 0.632812] [A loss: 0.811408, acc: 0.261719]\n",
"654: [D loss: 0.687075, acc: 0.550781] [A loss: 1.008204, acc: 0.054688]\n",
"655: [D loss: 0.661900, acc: 0.623047] [A loss: 0.763009, acc: 0.363281]\n",
"656: [D loss: 0.673453, acc: 0.554688] [A loss: 1.015568, acc: 0.035156]\n",
"657: [D loss: 0.654042, acc: 0.626953] [A loss: 0.768275, acc: 0.398438]\n",
"658: [D loss: 0.695414, acc: 0.537109] [A loss: 1.080695, acc: 0.023438]\n",
"659: [D loss: 0.664758, acc: 0.605469] [A loss: 0.768277, acc: 0.351562]\n",
"660: [D loss: 0.687498, acc: 0.566406] [A loss: 1.014300, acc: 0.039062]\n",
"661: [D loss: 0.663607, acc: 0.605469] [A loss: 0.760558, acc: 0.394531]\n",
"662: [D loss: 0.687580, acc: 0.550781] [A loss: 0.998435, acc: 0.039062]\n",
"663: [D loss: 0.675043, acc: 0.564453] [A loss: 0.751551, acc: 0.375000]\n",
"664: [D loss: 0.691215, acc: 0.533203] [A loss: 0.971745, acc: 0.066406]\n",
"665: [D loss: 0.664078, acc: 0.599609] [A loss: 0.749904, acc: 0.382812]\n",
"666: [D loss: 0.685307, acc: 0.552734] [A loss: 0.945464, acc: 0.062500]\n",
"667: [D loss: 0.662926, acc: 0.619141] [A loss: 0.786150, acc: 0.300781]\n",
"668: [D loss: 0.668850, acc: 0.562500] [A loss: 0.937958, acc: 0.082031]\n",
"669: [D loss: 0.676467, acc: 0.572266] [A loss: 0.864170, acc: 0.164062]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"670: [D loss: 0.672009, acc: 0.568359] [A loss: 0.866767, acc: 0.195312]\n",
"671: [D loss: 0.673657, acc: 0.570312] [A loss: 0.911103, acc: 0.113281]\n",
"672: [D loss: 0.673875, acc: 0.582031] [A loss: 0.856628, acc: 0.160156]\n",
"673: [D loss: 0.667912, acc: 0.621094] [A loss: 0.914359, acc: 0.121094]\n",
"674: [D loss: 0.663573, acc: 0.621094] [A loss: 0.888465, acc: 0.167969]\n",
"675: [D loss: 0.676363, acc: 0.574219] [A loss: 0.884869, acc: 0.175781]\n",
"676: [D loss: 0.672230, acc: 0.576172] [A loss: 0.942010, acc: 0.093750]\n",
"677: [D loss: 0.666551, acc: 0.601562] [A loss: 0.883773, acc: 0.136719]\n",
"678: [D loss: 0.669827, acc: 0.570312] [A loss: 0.887912, acc: 0.132812]\n",
"679: [D loss: 0.664822, acc: 0.585938] [A loss: 0.897213, acc: 0.148438]\n",
"680: [D loss: 0.668808, acc: 0.582031] [A loss: 0.929591, acc: 0.125000]\n",
"681: [D loss: 0.671908, acc: 0.603516] [A loss: 0.866814, acc: 0.210938]\n",
"682: [D loss: 0.680615, acc: 0.566406] [A loss: 0.966176, acc: 0.128906]\n",
"683: [D loss: 0.677213, acc: 0.574219] [A loss: 0.938721, acc: 0.113281]\n",
"684: [D loss: 0.666491, acc: 0.585938] [A loss: 0.760101, acc: 0.375000]\n",
"685: [D loss: 0.691896, acc: 0.554688] [A loss: 1.003510, acc: 0.093750]\n",
"686: [D loss: 0.675940, acc: 0.570312] [A loss: 0.758595, acc: 0.375000]\n",
"687: [D loss: 0.681566, acc: 0.564453] [A loss: 1.038425, acc: 0.039062]\n",
"688: [D loss: 0.661159, acc: 0.619141] [A loss: 0.736959, acc: 0.417969]\n",
"689: [D loss: 0.702438, acc: 0.556641] [A loss: 1.031237, acc: 0.023438]\n",
"690: [D loss: 0.679446, acc: 0.566406] [A loss: 0.725056, acc: 0.425781]\n",
"691: [D loss: 0.673737, acc: 0.570312] [A loss: 0.964219, acc: 0.062500]\n",
"692: [D loss: 0.658528, acc: 0.630859] [A loss: 0.767400, acc: 0.335938]\n",
"693: [D loss: 0.674996, acc: 0.601562] [A loss: 0.966986, acc: 0.050781]\n",
"694: [D loss: 0.655546, acc: 0.597656] [A loss: 0.797432, acc: 0.304688]\n",
"695: [D loss: 0.681594, acc: 0.550781] [A loss: 0.988551, acc: 0.089844]\n",
"696: [D loss: 0.664432, acc: 0.599609] [A loss: 0.777312, acc: 0.359375]\n",
"697: [D loss: 0.680513, acc: 0.548828] [A loss: 0.977863, acc: 0.082031]\n",
"698: [D loss: 0.665607, acc: 0.603516] [A loss: 0.822061, acc: 0.238281]\n",
"699: [D loss: 0.673551, acc: 0.599609] [A loss: 0.926693, acc: 0.105469]\n",
"700: [D loss: 0.669843, acc: 0.597656] [A loss: 0.806691, acc: 0.257812]\n",
"701: [D loss: 0.679460, acc: 0.562500] [A loss: 0.959853, acc: 0.074219]\n",
"702: [D loss: 0.676105, acc: 0.574219] [A loss: 0.807935, acc: 0.300781]\n",
"703: [D loss: 0.678179, acc: 0.546875] [A loss: 0.997644, acc: 0.050781]\n",
"704: [D loss: 0.660527, acc: 0.623047] [A loss: 0.900630, acc: 0.117188]\n",
"705: [D loss: 0.672161, acc: 0.583984] [A loss: 0.865112, acc: 0.152344]\n",
"706: [D loss: 0.663651, acc: 0.591797] [A loss: 0.911562, acc: 0.105469]\n",
"707: [D loss: 0.675037, acc: 0.546875] [A loss: 0.938930, acc: 0.121094]\n",
"708: [D loss: 0.679608, acc: 0.562500] [A loss: 0.926019, acc: 0.093750]\n",
"709: [D loss: 0.676283, acc: 0.570312] [A loss: 0.834379, acc: 0.230469]\n",
"710: [D loss: 0.679928, acc: 0.539062] [A loss: 0.923178, acc: 0.089844]\n",
"711: [D loss: 0.664372, acc: 0.595703] [A loss: 0.850243, acc: 0.191406]\n",
"712: [D loss: 0.664315, acc: 0.582031] [A loss: 0.975712, acc: 0.089844]\n",
"713: [D loss: 0.662084, acc: 0.605469] [A loss: 0.860437, acc: 0.183594]\n",
"714: [D loss: 0.675414, acc: 0.546875] [A loss: 0.992146, acc: 0.078125]\n",
"715: [D loss: 0.663088, acc: 0.607422] [A loss: 0.779523, acc: 0.328125]\n",
"716: [D loss: 0.680063, acc: 0.558594] [A loss: 1.022552, acc: 0.058594]\n",
"717: [D loss: 0.663229, acc: 0.605469] [A loss: 0.741126, acc: 0.421875]\n",
"718: [D loss: 0.695776, acc: 0.556641] [A loss: 0.986722, acc: 0.054688]\n",
"719: [D loss: 0.670166, acc: 0.574219] [A loss: 0.802270, acc: 0.273438]\n",
"720: [D loss: 0.683170, acc: 0.568359] [A loss: 0.938508, acc: 0.097656]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"721: [D loss: 0.676962, acc: 0.570312] [A loss: 0.801670, acc: 0.277344]\n",
"722: [D loss: 0.668752, acc: 0.580078] [A loss: 0.932740, acc: 0.097656]\n",
"723: [D loss: 0.651530, acc: 0.636719] [A loss: 0.795825, acc: 0.335938]\n",
"724: [D loss: 0.660131, acc: 0.605469] [A loss: 0.970432, acc: 0.070312]\n",
"725: [D loss: 0.657544, acc: 0.613281] [A loss: 0.865922, acc: 0.191406]\n",
"726: [D loss: 0.660045, acc: 0.595703] [A loss: 0.940572, acc: 0.128906]\n",
"727: [D loss: 0.667915, acc: 0.601562] [A loss: 0.884862, acc: 0.140625]\n",
"728: [D loss: 0.669119, acc: 0.617188] [A loss: 0.892144, acc: 0.144531]\n",
"729: [D loss: 0.669324, acc: 0.580078] [A loss: 0.928961, acc: 0.082031]\n",
"730: [D loss: 0.667061, acc: 0.621094] [A loss: 0.869633, acc: 0.167969]\n",
"731: [D loss: 0.669433, acc: 0.585938] [A loss: 0.957927, acc: 0.101562]\n",
"732: [D loss: 0.668229, acc: 0.609375] [A loss: 0.866756, acc: 0.195312]\n",
"733: [D loss: 0.674762, acc: 0.582031] [A loss: 0.958650, acc: 0.105469]\n",
"734: [D loss: 0.651979, acc: 0.607422] [A loss: 0.834833, acc: 0.187500]\n",
"735: [D loss: 0.694100, acc: 0.542969] [A loss: 1.084364, acc: 0.011719]\n",
"736: [D loss: 0.667873, acc: 0.574219] [A loss: 0.719802, acc: 0.457031]\n",
"737: [D loss: 0.701045, acc: 0.525391] [A loss: 1.078669, acc: 0.031250]\n",
"738: [D loss: 0.665385, acc: 0.574219] [A loss: 0.661637, acc: 0.589844]\n",
"739: [D loss: 0.702474, acc: 0.550781] [A loss: 1.041592, acc: 0.023438]\n",
"740: [D loss: 0.671716, acc: 0.574219] [A loss: 0.721467, acc: 0.468750]\n",
"741: [D loss: 0.685424, acc: 0.527344] [A loss: 0.935463, acc: 0.097656]\n",
"742: [D loss: 0.649544, acc: 0.636719] [A loss: 0.799847, acc: 0.281250]\n",
"743: [D loss: 0.699318, acc: 0.529297] [A loss: 0.932092, acc: 0.093750]\n",
"744: [D loss: 0.672207, acc: 0.613281] [A loss: 0.821470, acc: 0.250000]\n",
"745: [D loss: 0.669187, acc: 0.583984] [A loss: 0.923453, acc: 0.089844]\n",
"746: [D loss: 0.676028, acc: 0.548828] [A loss: 0.809800, acc: 0.277344]\n",
"747: [D loss: 0.662338, acc: 0.591797] [A loss: 0.904517, acc: 0.121094]\n",
"748: [D loss: 0.659604, acc: 0.576172] [A loss: 0.860482, acc: 0.207031]\n",
"749: [D loss: 0.663248, acc: 0.580078] [A loss: 0.886966, acc: 0.164062]\n",
"750: [D loss: 0.640748, acc: 0.662109] [A loss: 0.883357, acc: 0.187500]\n",
"751: [D loss: 0.665082, acc: 0.603516] [A loss: 0.904404, acc: 0.144531]\n",
"752: [D loss: 0.662074, acc: 0.619141] [A loss: 0.862767, acc: 0.207031]\n",
"753: [D loss: 0.673403, acc: 0.582031] [A loss: 0.906890, acc: 0.109375]\n",
"754: [D loss: 0.668976, acc: 0.578125] [A loss: 0.840711, acc: 0.238281]\n",
"755: [D loss: 0.666185, acc: 0.570312] [A loss: 1.009950, acc: 0.085938]\n",
"756: [D loss: 0.674022, acc: 0.572266] [A loss: 0.817833, acc: 0.269531]\n",
"757: [D loss: 0.666438, acc: 0.582031] [A loss: 0.942574, acc: 0.070312]\n",
"758: [D loss: 0.658923, acc: 0.582031] [A loss: 0.839257, acc: 0.257812]\n",
"759: [D loss: 0.664691, acc: 0.568359] [A loss: 1.126942, acc: 0.023438]\n",
"760: [D loss: 0.667016, acc: 0.587891] [A loss: 0.724362, acc: 0.453125]\n",
"761: [D loss: 0.686831, acc: 0.546875] [A loss: 1.017092, acc: 0.078125]\n",
"762: [D loss: 0.658443, acc: 0.603516] [A loss: 0.803998, acc: 0.304688]\n",
"763: [D loss: 0.682865, acc: 0.570312] [A loss: 1.000941, acc: 0.078125]\n",
"764: [D loss: 0.680882, acc: 0.582031] [A loss: 0.801565, acc: 0.296875]\n",
"765: [D loss: 0.683510, acc: 0.562500] [A loss: 0.940715, acc: 0.109375]\n",
"766: [D loss: 0.665451, acc: 0.603516] [A loss: 0.806833, acc: 0.281250]\n",
"767: [D loss: 0.664859, acc: 0.589844] [A loss: 0.966950, acc: 0.097656]\n",
"768: [D loss: 0.648029, acc: 0.636719] [A loss: 0.835092, acc: 0.226562]\n",
"769: [D loss: 0.683824, acc: 0.589844] [A loss: 0.929243, acc: 0.093750]\n",
"770: [D loss: 0.662123, acc: 0.572266] [A loss: 0.868453, acc: 0.203125]\n",
"771: [D loss: 0.674439, acc: 0.570312] [A loss: 0.996542, acc: 0.070312]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"772: [D loss: 0.667714, acc: 0.580078] [A loss: 0.806237, acc: 0.285156]\n",
"773: [D loss: 0.706325, acc: 0.523438] [A loss: 0.934117, acc: 0.117188]\n",
"774: [D loss: 0.670168, acc: 0.582031] [A loss: 0.833996, acc: 0.222656]\n",
"775: [D loss: 0.676111, acc: 0.589844] [A loss: 0.911525, acc: 0.128906]\n",
"776: [D loss: 0.664288, acc: 0.585938] [A loss: 0.847807, acc: 0.195312]\n",
"777: [D loss: 0.673117, acc: 0.562500] [A loss: 0.940273, acc: 0.089844]\n",
"778: [D loss: 0.663326, acc: 0.603516] [A loss: 0.839589, acc: 0.226562]\n",
"779: [D loss: 0.663348, acc: 0.611328] [A loss: 0.908671, acc: 0.117188]\n",
"780: [D loss: 0.666128, acc: 0.585938] [A loss: 0.887653, acc: 0.175781]\n",
"781: [D loss: 0.665345, acc: 0.603516] [A loss: 0.932396, acc: 0.101562]\n",
"782: [D loss: 0.663197, acc: 0.603516] [A loss: 0.794637, acc: 0.316406]\n",
"783: [D loss: 0.695074, acc: 0.554688] [A loss: 1.018517, acc: 0.042969]\n",
"784: [D loss: 0.659430, acc: 0.626953] [A loss: 0.786107, acc: 0.343750]\n",
"785: [D loss: 0.693204, acc: 0.541016] [A loss: 1.039463, acc: 0.027344]\n",
"786: [D loss: 0.667529, acc: 0.576172] [A loss: 0.765022, acc: 0.375000]\n",
"787: [D loss: 0.694525, acc: 0.548828] [A loss: 1.024998, acc: 0.046875]\n",
"788: [D loss: 0.670205, acc: 0.591797] [A loss: 0.750407, acc: 0.347656]\n",
"789: [D loss: 0.687933, acc: 0.554688] [A loss: 1.006544, acc: 0.042969]\n",
"790: [D loss: 0.649582, acc: 0.625000] [A loss: 0.742768, acc: 0.382812]\n",
"791: [D loss: 0.697748, acc: 0.541016] [A loss: 0.993937, acc: 0.058594]\n",
"792: [D loss: 0.675712, acc: 0.582031] [A loss: 0.731952, acc: 0.425781]\n",
"793: [D loss: 0.676518, acc: 0.552734] [A loss: 0.951885, acc: 0.078125]\n",
"794: [D loss: 0.667576, acc: 0.572266] [A loss: 0.802493, acc: 0.300781]\n",
"795: [D loss: 0.678848, acc: 0.574219] [A loss: 0.936111, acc: 0.093750]\n",
"796: [D loss: 0.660319, acc: 0.593750] [A loss: 0.826694, acc: 0.269531]\n",
"797: [D loss: 0.669515, acc: 0.568359] [A loss: 0.895683, acc: 0.128906]\n",
"798: [D loss: 0.662550, acc: 0.576172] [A loss: 0.897688, acc: 0.160156]\n",
"799: [D loss: 0.693055, acc: 0.548828] [A loss: 1.022057, acc: 0.062500]\n",
"800: [D loss: 0.673631, acc: 0.574219] [A loss: 0.722104, acc: 0.433594]\n",
"801: [D loss: 0.677701, acc: 0.566406] [A loss: 0.923390, acc: 0.113281]\n",
"802: [D loss: 0.664123, acc: 0.597656] [A loss: 0.825973, acc: 0.250000]\n",
"803: [D loss: 0.676732, acc: 0.550781] [A loss: 0.926649, acc: 0.156250]\n",
"804: [D loss: 0.664960, acc: 0.597656] [A loss: 0.830077, acc: 0.222656]\n",
"805: [D loss: 0.672881, acc: 0.568359] [A loss: 0.858352, acc: 0.164062]\n",
"806: [D loss: 0.672301, acc: 0.570312] [A loss: 0.864176, acc: 0.164062]\n",
"807: [D loss: 0.677304, acc: 0.554688] [A loss: 0.873980, acc: 0.164062]\n",
"808: [D loss: 0.663341, acc: 0.580078] [A loss: 0.852542, acc: 0.203125]\n",
"809: [D loss: 0.664861, acc: 0.578125] [A loss: 0.896938, acc: 0.171875]\n",
"810: [D loss: 0.670907, acc: 0.568359] [A loss: 0.867067, acc: 0.175781]\n",
"811: [D loss: 0.668748, acc: 0.595703] [A loss: 0.896648, acc: 0.156250]\n",
"812: [D loss: 0.667478, acc: 0.582031] [A loss: 0.870230, acc: 0.199219]\n",
"813: [D loss: 0.657304, acc: 0.617188] [A loss: 0.861501, acc: 0.179688]\n",
"814: [D loss: 0.684373, acc: 0.552734] [A loss: 0.957355, acc: 0.070312]\n",
"815: [D loss: 0.662198, acc: 0.626953] [A loss: 0.759022, acc: 0.375000]\n",
"816: [D loss: 0.692093, acc: 0.521484] [A loss: 1.031718, acc: 0.035156]\n",
"817: [D loss: 0.666705, acc: 0.625000] [A loss: 0.764989, acc: 0.367188]\n",
"818: [D loss: 0.689483, acc: 0.539062] [A loss: 0.982052, acc: 0.062500]\n",
"819: [D loss: 0.651487, acc: 0.644531] [A loss: 0.828336, acc: 0.257812]\n",
"820: [D loss: 0.698332, acc: 0.517578] [A loss: 1.023420, acc: 0.058594]\n",
"821: [D loss: 0.664294, acc: 0.593750] [A loss: 0.734594, acc: 0.449219]\n",
"822: [D loss: 0.690161, acc: 0.552734] [A loss: 0.969595, acc: 0.085938]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"823: [D loss: 0.651719, acc: 0.615234] [A loss: 0.778030, acc: 0.308594]\n",
"824: [D loss: 0.658417, acc: 0.591797] [A loss: 0.969493, acc: 0.105469]\n",
"825: [D loss: 0.679150, acc: 0.558594] [A loss: 0.877564, acc: 0.167969]\n",
"826: [D loss: 0.648105, acc: 0.638672] [A loss: 0.871466, acc: 0.214844]\n",
"827: [D loss: 0.681982, acc: 0.568359] [A loss: 0.951116, acc: 0.101562]\n",
"828: [D loss: 0.659170, acc: 0.580078] [A loss: 0.805246, acc: 0.269531]\n",
"829: [D loss: 0.675848, acc: 0.558594] [A loss: 0.952744, acc: 0.085938]\n",
"830: [D loss: 0.652692, acc: 0.619141] [A loss: 0.770628, acc: 0.398438]\n",
"831: [D loss: 0.685766, acc: 0.599609] [A loss: 0.989257, acc: 0.117188]\n",
"832: [D loss: 0.643555, acc: 0.654297] [A loss: 0.815348, acc: 0.281250]\n",
"833: [D loss: 0.689946, acc: 0.570312] [A loss: 0.947623, acc: 0.113281]\n",
"834: [D loss: 0.672344, acc: 0.589844] [A loss: 0.856479, acc: 0.222656]\n",
"835: [D loss: 0.674714, acc: 0.568359] [A loss: 0.907977, acc: 0.144531]\n",
"836: [D loss: 0.668678, acc: 0.605469] [A loss: 0.843459, acc: 0.218750]\n",
"837: [D loss: 0.672210, acc: 0.591797] [A loss: 0.894316, acc: 0.128906]\n",
"838: [D loss: 0.676245, acc: 0.556641] [A loss: 0.872411, acc: 0.160156]\n",
"839: [D loss: 0.673869, acc: 0.593750] [A loss: 0.871683, acc: 0.183594]\n",
"840: [D loss: 0.660809, acc: 0.595703] [A loss: 0.885525, acc: 0.171875]\n",
"841: [D loss: 0.673582, acc: 0.558594] [A loss: 0.838934, acc: 0.257812]\n",
"842: [D loss: 0.670572, acc: 0.583984] [A loss: 0.899244, acc: 0.144531]\n",
"843: [D loss: 0.664442, acc: 0.599609] [A loss: 0.882579, acc: 0.179688]\n",
"844: [D loss: 0.661349, acc: 0.591797] [A loss: 0.975833, acc: 0.109375]\n",
"845: [D loss: 0.665816, acc: 0.601562] [A loss: 0.817836, acc: 0.304688]\n",
"846: [D loss: 0.691730, acc: 0.539062] [A loss: 1.099432, acc: 0.019531]\n",
"847: [D loss: 0.668443, acc: 0.583984] [A loss: 0.699558, acc: 0.570312]\n",
"848: [D loss: 0.719652, acc: 0.517578] [A loss: 1.025479, acc: 0.066406]\n",
"849: [D loss: 0.668956, acc: 0.568359] [A loss: 0.741443, acc: 0.457031]\n",
"850: [D loss: 0.696207, acc: 0.564453] [A loss: 0.972837, acc: 0.082031]\n",
"851: [D loss: 0.662851, acc: 0.587891] [A loss: 0.825952, acc: 0.218750]\n",
"852: [D loss: 0.671030, acc: 0.560547] [A loss: 0.939425, acc: 0.097656]\n",
"853: [D loss: 0.656080, acc: 0.595703] [A loss: 0.834376, acc: 0.238281]\n",
"854: [D loss: 0.681513, acc: 0.572266] [A loss: 0.918485, acc: 0.093750]\n",
"855: [D loss: 0.664832, acc: 0.585938] [A loss: 0.775656, acc: 0.367188]\n",
"856: [D loss: 0.674143, acc: 0.552734] [A loss: 0.963802, acc: 0.105469]\n",
"857: [D loss: 0.656042, acc: 0.615234] [A loss: 0.821851, acc: 0.250000]\n",
"858: [D loss: 0.689494, acc: 0.582031] [A loss: 0.887831, acc: 0.167969]\n",
"859: [D loss: 0.663529, acc: 0.589844] [A loss: 0.857707, acc: 0.222656]\n",
"860: [D loss: 0.666573, acc: 0.603516] [A loss: 0.859971, acc: 0.195312]\n",
"861: [D loss: 0.676342, acc: 0.544922] [A loss: 0.904656, acc: 0.132812]\n",
"862: [D loss: 0.664339, acc: 0.587891] [A loss: 0.880054, acc: 0.179688]\n",
"863: [D loss: 0.673518, acc: 0.578125] [A loss: 0.936101, acc: 0.113281]\n",
"864: [D loss: 0.657854, acc: 0.611328] [A loss: 0.903345, acc: 0.152344]\n",
"865: [D loss: 0.683101, acc: 0.552734] [A loss: 0.885188, acc: 0.195312]\n",
"866: [D loss: 0.667428, acc: 0.619141] [A loss: 0.905711, acc: 0.171875]\n",
"867: [D loss: 0.668314, acc: 0.566406] [A loss: 0.899498, acc: 0.167969]\n",
"868: [D loss: 0.667337, acc: 0.566406] [A loss: 0.906833, acc: 0.148438]\n",
"869: [D loss: 0.679435, acc: 0.580078] [A loss: 0.893669, acc: 0.171875]\n",
"870: [D loss: 0.670910, acc: 0.572266] [A loss: 0.960716, acc: 0.085938]\n",
"871: [D loss: 0.679092, acc: 0.568359] [A loss: 0.829026, acc: 0.222656]\n",
"872: [D loss: 0.654708, acc: 0.613281] [A loss: 0.956434, acc: 0.101562]\n",
"873: [D loss: 0.681227, acc: 0.566406] [A loss: 0.971845, acc: 0.093750]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"874: [D loss: 0.676086, acc: 0.591797] [A loss: 0.810704, acc: 0.265625]\n",
"875: [D loss: 0.669079, acc: 0.558594] [A loss: 0.989927, acc: 0.089844]\n",
"876: [D loss: 0.673611, acc: 0.576172] [A loss: 0.865497, acc: 0.195312]\n",
"877: [D loss: 0.669829, acc: 0.589844] [A loss: 1.028751, acc: 0.058594]\n",
"878: [D loss: 0.674626, acc: 0.570312] [A loss: 0.844110, acc: 0.218750]\n",
"879: [D loss: 0.659686, acc: 0.623047] [A loss: 0.927641, acc: 0.144531]\n",
"880: [D loss: 0.684300, acc: 0.548828] [A loss: 0.864854, acc: 0.191406]\n",
"881: [D loss: 0.683785, acc: 0.570312] [A loss: 0.963748, acc: 0.074219]\n",
"882: [D loss: 0.673924, acc: 0.611328] [A loss: 0.819147, acc: 0.234375]\n",
"883: [D loss: 0.680795, acc: 0.552734] [A loss: 0.995213, acc: 0.066406]\n",
"884: [D loss: 0.666738, acc: 0.609375] [A loss: 0.821094, acc: 0.234375]\n",
"885: [D loss: 0.681163, acc: 0.562500] [A loss: 1.044202, acc: 0.042969]\n",
"886: [D loss: 0.660746, acc: 0.597656] [A loss: 0.725260, acc: 0.457031]\n",
"887: [D loss: 0.694964, acc: 0.537109] [A loss: 1.071127, acc: 0.019531]\n",
"888: [D loss: 0.665429, acc: 0.597656] [A loss: 0.689129, acc: 0.542969]\n",
"889: [D loss: 0.705352, acc: 0.541016] [A loss: 1.063419, acc: 0.011719]\n",
"890: [D loss: 0.665029, acc: 0.603516] [A loss: 0.715668, acc: 0.453125]\n",
"891: [D loss: 0.695974, acc: 0.542969] [A loss: 1.026516, acc: 0.097656]\n",
"892: [D loss: 0.677105, acc: 0.572266] [A loss: 0.814032, acc: 0.269531]\n",
"893: [D loss: 0.672275, acc: 0.589844] [A loss: 0.912532, acc: 0.113281]\n",
"894: [D loss: 0.669615, acc: 0.583984] [A loss: 0.852723, acc: 0.226562]\n",
"895: [D loss: 0.680952, acc: 0.568359] [A loss: 0.885455, acc: 0.125000]\n",
"896: [D loss: 0.670270, acc: 0.580078] [A loss: 0.800184, acc: 0.292969]\n",
"897: [D loss: 0.678607, acc: 0.566406] [A loss: 0.947525, acc: 0.085938]\n",
"898: [D loss: 0.671179, acc: 0.589844] [A loss: 0.812271, acc: 0.296875]\n",
"899: [D loss: 0.665668, acc: 0.585938] [A loss: 0.936609, acc: 0.128906]\n",
"900: [D loss: 0.659989, acc: 0.609375] [A loss: 0.815771, acc: 0.281250]\n",
"901: [D loss: 0.689885, acc: 0.539062] [A loss: 1.067970, acc: 0.035156]\n",
"902: [D loss: 0.671197, acc: 0.566406] [A loss: 0.739023, acc: 0.453125]\n",
"903: [D loss: 0.692572, acc: 0.541016] [A loss: 1.040157, acc: 0.054688]\n",
"904: [D loss: 0.660304, acc: 0.613281] [A loss: 0.763687, acc: 0.378906]\n",
"905: [D loss: 0.678061, acc: 0.550781] [A loss: 0.942029, acc: 0.117188]\n",
"906: [D loss: 0.663806, acc: 0.625000] [A loss: 0.805736, acc: 0.308594]\n",
"907: [D loss: 0.675416, acc: 0.599609] [A loss: 0.959069, acc: 0.093750]\n",
"908: [D loss: 0.674215, acc: 0.587891] [A loss: 0.838850, acc: 0.246094]\n",
"909: [D loss: 0.674398, acc: 0.580078] [A loss: 0.931846, acc: 0.113281]\n",
"910: [D loss: 0.674280, acc: 0.585938] [A loss: 0.908378, acc: 0.164062]\n",
"911: [D loss: 0.671761, acc: 0.570312] [A loss: 0.821210, acc: 0.269531]\n",
"912: [D loss: 0.675456, acc: 0.556641] [A loss: 0.972771, acc: 0.058594]\n",
"913: [D loss: 0.667837, acc: 0.595703] [A loss: 0.785589, acc: 0.343750]\n",
"914: [D loss: 0.682472, acc: 0.548828] [A loss: 0.952752, acc: 0.089844]\n",
"915: [D loss: 0.668991, acc: 0.574219] [A loss: 0.768631, acc: 0.335938]\n",
"916: [D loss: 0.689050, acc: 0.560547] [A loss: 0.972326, acc: 0.097656]\n",
"917: [D loss: 0.656820, acc: 0.603516] [A loss: 0.829842, acc: 0.234375]\n",
"918: [D loss: 0.691730, acc: 0.552734] [A loss: 0.953135, acc: 0.089844]\n",
"919: [D loss: 0.664076, acc: 0.585938] [A loss: 0.839082, acc: 0.230469]\n",
"920: [D loss: 0.677213, acc: 0.580078] [A loss: 0.973677, acc: 0.089844]\n",
"921: [D loss: 0.672090, acc: 0.572266] [A loss: 0.833535, acc: 0.226562]\n",
"922: [D loss: 0.674341, acc: 0.582031] [A loss: 0.941968, acc: 0.136719]\n",
"923: [D loss: 0.658202, acc: 0.587891] [A loss: 0.834695, acc: 0.226562]\n",
"924: [D loss: 0.673855, acc: 0.566406] [A loss: 0.994715, acc: 0.058594]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"925: [D loss: 0.673940, acc: 0.585938] [A loss: 0.744885, acc: 0.398438]\n",
"926: [D loss: 0.682575, acc: 0.542969] [A loss: 1.032346, acc: 0.027344]\n",
"927: [D loss: 0.677410, acc: 0.566406] [A loss: 0.795449, acc: 0.312500]\n",
"928: [D loss: 0.687872, acc: 0.560547] [A loss: 0.980461, acc: 0.074219]\n",
"929: [D loss: 0.681831, acc: 0.562500] [A loss: 0.769503, acc: 0.320312]\n",
"930: [D loss: 0.673218, acc: 0.570312] [A loss: 0.854046, acc: 0.218750]\n",
"931: [D loss: 0.671433, acc: 0.566406] [A loss: 0.891275, acc: 0.148438]\n",
"932: [D loss: 0.694593, acc: 0.541016] [A loss: 0.847159, acc: 0.210938]\n",
"933: [D loss: 0.670769, acc: 0.587891] [A loss: 0.873700, acc: 0.203125]\n",
"934: [D loss: 0.667728, acc: 0.595703] [A loss: 0.809035, acc: 0.296875]\n",
"935: [D loss: 0.684030, acc: 0.560547] [A loss: 0.949592, acc: 0.105469]\n",
"936: [D loss: 0.661199, acc: 0.607422] [A loss: 0.834052, acc: 0.242188]\n",
"937: [D loss: 0.677371, acc: 0.574219] [A loss: 0.937393, acc: 0.132812]\n",
"938: [D loss: 0.658937, acc: 0.613281] [A loss: 0.877811, acc: 0.195312]\n",
"939: [D loss: 0.680791, acc: 0.585938] [A loss: 0.940252, acc: 0.093750]\n",
"940: [D loss: 0.674004, acc: 0.599609] [A loss: 0.799764, acc: 0.296875]\n",
"941: [D loss: 0.688032, acc: 0.560547] [A loss: 0.987975, acc: 0.074219]\n",
"942: [D loss: 0.658444, acc: 0.619141] [A loss: 0.790730, acc: 0.328125]\n",
"943: [D loss: 0.703286, acc: 0.535156] [A loss: 1.035679, acc: 0.039062]\n",
"944: [D loss: 0.679647, acc: 0.574219] [A loss: 0.693197, acc: 0.527344]\n",
"945: [D loss: 0.696741, acc: 0.535156] [A loss: 0.985629, acc: 0.042969]\n",
"946: [D loss: 0.678546, acc: 0.580078] [A loss: 0.794106, acc: 0.265625]\n",
"947: [D loss: 0.680993, acc: 0.541016] [A loss: 0.946168, acc: 0.121094]\n",
"948: [D loss: 0.663060, acc: 0.595703] [A loss: 0.843856, acc: 0.187500]\n",
"949: [D loss: 0.664454, acc: 0.589844] [A loss: 0.852853, acc: 0.246094]\n",
"950: [D loss: 0.692147, acc: 0.566406] [A loss: 0.959631, acc: 0.070312]\n",
"951: [D loss: 0.686464, acc: 0.519531] [A loss: 0.794842, acc: 0.285156]\n",
"952: [D loss: 0.663712, acc: 0.605469] [A loss: 0.947732, acc: 0.121094]\n",
"953: [D loss: 0.678985, acc: 0.560547] [A loss: 0.833406, acc: 0.226562]\n",
"954: [D loss: 0.685408, acc: 0.564453] [A loss: 0.875639, acc: 0.164062]\n",
"955: [D loss: 0.674371, acc: 0.578125] [A loss: 0.883582, acc: 0.214844]\n",
"956: [D loss: 0.667757, acc: 0.578125] [A loss: 0.938001, acc: 0.132812]\n",
"957: [D loss: 0.664654, acc: 0.605469] [A loss: 0.901076, acc: 0.125000]\n",
"958: [D loss: 0.681292, acc: 0.583984] [A loss: 0.907112, acc: 0.113281]\n",
"959: [D loss: 0.672296, acc: 0.589844] [A loss: 0.849847, acc: 0.187500]\n",
"960: [D loss: 0.652503, acc: 0.623047] [A loss: 0.887373, acc: 0.175781]\n",
"961: [D loss: 0.680014, acc: 0.572266] [A loss: 0.866020, acc: 0.183594]\n",
"962: [D loss: 0.669195, acc: 0.568359] [A loss: 0.929317, acc: 0.121094]\n",
"963: [D loss: 0.675533, acc: 0.583984] [A loss: 0.892191, acc: 0.175781]\n",
"964: [D loss: 0.665064, acc: 0.593750] [A loss: 0.829426, acc: 0.242188]\n",
"965: [D loss: 0.684676, acc: 0.562500] [A loss: 1.001942, acc: 0.078125]\n",
"966: [D loss: 0.656803, acc: 0.599609] [A loss: 0.823200, acc: 0.281250]\n",
"967: [D loss: 0.705071, acc: 0.541016] [A loss: 1.070756, acc: 0.039062]\n",
"968: [D loss: 0.690998, acc: 0.523438] [A loss: 0.740237, acc: 0.437500]\n",
"969: [D loss: 0.692486, acc: 0.521484] [A loss: 1.031091, acc: 0.035156]\n",
"970: [D loss: 0.657626, acc: 0.605469] [A loss: 0.797454, acc: 0.281250]\n",
"971: [D loss: 0.679863, acc: 0.554688] [A loss: 1.092260, acc: 0.039062]\n",
"972: [D loss: 0.676008, acc: 0.609375] [A loss: 0.736137, acc: 0.433594]\n",
"973: [D loss: 0.705818, acc: 0.515625] [A loss: 1.047202, acc: 0.015625]\n",
"974: [D loss: 0.677754, acc: 0.562500] [A loss: 0.716497, acc: 0.480469]\n",
"975: [D loss: 0.675457, acc: 0.576172] [A loss: 0.907186, acc: 0.144531]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"976: [D loss: 0.671065, acc: 0.591797] [A loss: 0.866266, acc: 0.179688]\n",
"977: [D loss: 0.685403, acc: 0.552734] [A loss: 0.856172, acc: 0.171875]\n",
"978: [D loss: 0.665186, acc: 0.591797] [A loss: 0.835344, acc: 0.226562]\n",
"979: [D loss: 0.673962, acc: 0.583984] [A loss: 0.935603, acc: 0.101562]\n",
"980: [D loss: 0.672127, acc: 0.576172] [A loss: 0.823923, acc: 0.281250]\n",
"981: [D loss: 0.690278, acc: 0.542969] [A loss: 0.928965, acc: 0.132812]\n",
"982: [D loss: 0.667572, acc: 0.578125] [A loss: 0.866165, acc: 0.230469]\n",
"983: [D loss: 0.679492, acc: 0.583984] [A loss: 0.861952, acc: 0.199219]\n",
"984: [D loss: 0.665579, acc: 0.595703] [A loss: 0.887848, acc: 0.136719]\n",
"985: [D loss: 0.679682, acc: 0.568359] [A loss: 0.902720, acc: 0.164062]\n",
"986: [D loss: 0.673766, acc: 0.564453] [A loss: 0.945302, acc: 0.101562]\n",
"987: [D loss: 0.671651, acc: 0.568359] [A loss: 0.864427, acc: 0.171875]\n",
"988: [D loss: 0.683702, acc: 0.587891] [A loss: 0.922598, acc: 0.128906]\n",
"989: [D loss: 0.655236, acc: 0.628906] [A loss: 0.847626, acc: 0.257812]\n",
"990: [D loss: 0.690314, acc: 0.558594] [A loss: 1.029750, acc: 0.039062]\n",
"991: [D loss: 0.676517, acc: 0.562500] [A loss: 0.733341, acc: 0.402344]\n",
"992: [D loss: 0.700056, acc: 0.537109] [A loss: 0.987268, acc: 0.054688]\n",
"993: [D loss: 0.662532, acc: 0.623047] [A loss: 0.771078, acc: 0.382812]\n",
"994: [D loss: 0.683792, acc: 0.566406] [A loss: 1.001359, acc: 0.085938]\n",
"995: [D loss: 0.682804, acc: 0.552734] [A loss: 0.771166, acc: 0.332031]\n",
"996: [D loss: 0.676307, acc: 0.558594] [A loss: 0.994992, acc: 0.070312]\n",
"997: [D loss: 0.671517, acc: 0.572266] [A loss: 0.810434, acc: 0.277344]\n",
"998: [D loss: 0.696599, acc: 0.542969] [A loss: 1.030623, acc: 0.046875]\n",
"999: [D loss: 0.670218, acc: 0.582031] [A loss: 0.781887, acc: 0.304688]\n",
"1000: [D loss: 0.685934, acc: 0.552734] [A loss: 0.966509, acc: 0.089844]\n",
"1001: [D loss: 0.655200, acc: 0.636719] [A loss: 0.807447, acc: 0.308594]\n",
"1002: [D loss: 0.692694, acc: 0.556641] [A loss: 0.987758, acc: 0.058594]\n",
"1003: [D loss: 0.674175, acc: 0.574219] [A loss: 0.800590, acc: 0.289062]\n",
"1004: [D loss: 0.714893, acc: 0.521484] [A loss: 1.005843, acc: 0.050781]\n",
"1005: [D loss: 0.671261, acc: 0.599609] [A loss: 0.752481, acc: 0.410156]\n",
"1006: [D loss: 0.696108, acc: 0.544922] [A loss: 1.031590, acc: 0.085938]\n",
"1007: [D loss: 0.669084, acc: 0.607422] [A loss: 0.716915, acc: 0.468750]\n",
"1008: [D loss: 0.698408, acc: 0.531250] [A loss: 1.034129, acc: 0.046875]\n",
"1009: [D loss: 0.673226, acc: 0.566406] [A loss: 0.753134, acc: 0.355469]\n",
"1010: [D loss: 0.679592, acc: 0.554688] [A loss: 0.864242, acc: 0.183594]\n",
"1011: [D loss: 0.655361, acc: 0.597656] [A loss: 0.875555, acc: 0.191406]\n",
"1012: [D loss: 0.674334, acc: 0.580078] [A loss: 0.851998, acc: 0.203125]\n",
"1013: [D loss: 0.660328, acc: 0.599609] [A loss: 0.842623, acc: 0.210938]\n",
"1014: [D loss: 0.685486, acc: 0.558594] [A loss: 0.901873, acc: 0.148438]\n",
"1015: [D loss: 0.670721, acc: 0.593750] [A loss: 0.883265, acc: 0.179688]\n",
"1016: [D loss: 0.672472, acc: 0.591797] [A loss: 0.911146, acc: 0.132812]\n",
"1017: [D loss: 0.683297, acc: 0.558594] [A loss: 0.804537, acc: 0.277344]\n",
"1018: [D loss: 0.671108, acc: 0.570312] [A loss: 0.974663, acc: 0.070312]\n",
"1019: [D loss: 0.674350, acc: 0.572266] [A loss: 0.885802, acc: 0.160156]\n",
"1020: [D loss: 0.688232, acc: 0.544922] [A loss: 0.981426, acc: 0.074219]\n",
"1021: [D loss: 0.677567, acc: 0.574219] [A loss: 0.785937, acc: 0.324219]\n",
"1022: [D loss: 0.693663, acc: 0.550781] [A loss: 0.954445, acc: 0.085938]\n",
"1023: [D loss: 0.678735, acc: 0.525391] [A loss: 0.794227, acc: 0.316406]\n",
"1024: [D loss: 0.694259, acc: 0.550781] [A loss: 0.945369, acc: 0.101562]\n",
"1025: [D loss: 0.673462, acc: 0.560547] [A loss: 0.801786, acc: 0.277344]\n",
"1026: [D loss: 0.677694, acc: 0.578125] [A loss: 0.973457, acc: 0.078125]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1027: [D loss: 0.667265, acc: 0.593750] [A loss: 0.782779, acc: 0.339844]\n",
"1028: [D loss: 0.692508, acc: 0.562500] [A loss: 1.035104, acc: 0.050781]\n",
"1029: [D loss: 0.694186, acc: 0.533203] [A loss: 0.753083, acc: 0.359375]\n",
"1030: [D loss: 0.693063, acc: 0.542969] [A loss: 1.032328, acc: 0.031250]\n",
"1031: [D loss: 0.669125, acc: 0.591797] [A loss: 0.727157, acc: 0.453125]\n",
"1032: [D loss: 0.700124, acc: 0.523438] [A loss: 1.004749, acc: 0.058594]\n",
"1033: [D loss: 0.683876, acc: 0.554688] [A loss: 0.734043, acc: 0.445312]\n",
"1034: [D loss: 0.703490, acc: 0.541016] [A loss: 0.945888, acc: 0.085938]\n",
"1035: [D loss: 0.678999, acc: 0.566406] [A loss: 0.825688, acc: 0.226562]\n",
"1036: [D loss: 0.678489, acc: 0.554688] [A loss: 0.826816, acc: 0.222656]\n",
"1037: [D loss: 0.681367, acc: 0.572266] [A loss: 0.875321, acc: 0.152344]\n",
"1038: [D loss: 0.687553, acc: 0.556641] [A loss: 0.866022, acc: 0.199219]\n",
"1039: [D loss: 0.693539, acc: 0.515625] [A loss: 0.872231, acc: 0.164062]\n",
"1040: [D loss: 0.671115, acc: 0.609375] [A loss: 0.845638, acc: 0.238281]\n",
"1041: [D loss: 0.684648, acc: 0.554688] [A loss: 0.848574, acc: 0.226562]\n",
"1042: [D loss: 0.672687, acc: 0.587891] [A loss: 0.849925, acc: 0.171875]\n",
"1043: [D loss: 0.688447, acc: 0.554688] [A loss: 0.901279, acc: 0.125000]\n",
"1044: [D loss: 0.672807, acc: 0.597656] [A loss: 0.869312, acc: 0.175781]\n",
"1045: [D loss: 0.676499, acc: 0.546875] [A loss: 0.872615, acc: 0.191406]\n",
"1046: [D loss: 0.659222, acc: 0.587891] [A loss: 0.892154, acc: 0.199219]\n",
"1047: [D loss: 0.693051, acc: 0.552734] [A loss: 0.974067, acc: 0.093750]\n",
"1048: [D loss: 0.676514, acc: 0.562500] [A loss: 0.838304, acc: 0.261719]\n",
"1049: [D loss: 0.685847, acc: 0.582031] [A loss: 0.981346, acc: 0.070312]\n",
"1050: [D loss: 0.678967, acc: 0.576172] [A loss: 0.741703, acc: 0.425781]\n",
"1051: [D loss: 0.703574, acc: 0.525391] [A loss: 1.031731, acc: 0.039062]\n",
"1052: [D loss: 0.675982, acc: 0.562500] [A loss: 0.708802, acc: 0.511719]\n",
"1053: [D loss: 0.693238, acc: 0.539062] [A loss: 0.992012, acc: 0.058594]\n",
"1054: [D loss: 0.663247, acc: 0.611328] [A loss: 0.774558, acc: 0.351562]\n",
"1055: [D loss: 0.699157, acc: 0.542969] [A loss: 0.978405, acc: 0.066406]\n",
"1056: [D loss: 0.676109, acc: 0.597656] [A loss: 0.756437, acc: 0.398438]\n",
"1057: [D loss: 0.691629, acc: 0.552734] [A loss: 0.957681, acc: 0.101562]\n",
"1058: [D loss: 0.666307, acc: 0.615234] [A loss: 0.796143, acc: 0.296875]\n",
"1059: [D loss: 0.683157, acc: 0.552734] [A loss: 0.929013, acc: 0.121094]\n",
"1060: [D loss: 0.673766, acc: 0.580078] [A loss: 0.809767, acc: 0.281250]\n",
"1061: [D loss: 0.680631, acc: 0.562500] [A loss: 0.922781, acc: 0.136719]\n",
"1062: [D loss: 0.666428, acc: 0.601562] [A loss: 0.785283, acc: 0.324219]\n",
"1063: [D loss: 0.677673, acc: 0.552734] [A loss: 0.947856, acc: 0.117188]\n",
"1064: [D loss: 0.671544, acc: 0.601562] [A loss: 0.798371, acc: 0.296875]\n",
"1065: [D loss: 0.691943, acc: 0.544922] [A loss: 0.963044, acc: 0.058594]\n",
"1066: [D loss: 0.677904, acc: 0.595703] [A loss: 0.759467, acc: 0.359375]\n",
"1067: [D loss: 0.681011, acc: 0.552734] [A loss: 0.953339, acc: 0.128906]\n",
"1068: [D loss: 0.690518, acc: 0.544922] [A loss: 0.847631, acc: 0.175781]\n",
"1069: [D loss: 0.669491, acc: 0.570312] [A loss: 0.866895, acc: 0.230469]\n",
"1070: [D loss: 0.696006, acc: 0.533203] [A loss: 0.881952, acc: 0.164062]\n",
"1071: [D loss: 0.673034, acc: 0.591797] [A loss: 0.872959, acc: 0.160156]\n",
"1072: [D loss: 0.673443, acc: 0.585938] [A loss: 0.868454, acc: 0.183594]\n",
"1073: [D loss: 0.690601, acc: 0.556641] [A loss: 0.892378, acc: 0.140625]\n",
"1074: [D loss: 0.680614, acc: 0.560547] [A loss: 0.862571, acc: 0.210938]\n",
"1075: [D loss: 0.688735, acc: 0.535156] [A loss: 0.927575, acc: 0.144531]\n",
"1076: [D loss: 0.675918, acc: 0.572266] [A loss: 0.874046, acc: 0.207031]\n",
"1077: [D loss: 0.666796, acc: 0.599609] [A loss: 0.920076, acc: 0.117188]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1078: [D loss: 0.677943, acc: 0.568359] [A loss: 0.855421, acc: 0.187500]\n",
"1079: [D loss: 0.683582, acc: 0.558594] [A loss: 0.980242, acc: 0.093750]\n",
"1080: [D loss: 0.670381, acc: 0.552734] [A loss: 0.793820, acc: 0.304688]\n",
"1081: [D loss: 0.689660, acc: 0.537109] [A loss: 1.028217, acc: 0.058594]\n",
"1082: [D loss: 0.673311, acc: 0.613281] [A loss: 0.713691, acc: 0.464844]\n",
"1083: [D loss: 0.700467, acc: 0.570312] [A loss: 1.074924, acc: 0.031250]\n",
"1084: [D loss: 0.686630, acc: 0.554688] [A loss: 0.716697, acc: 0.484375]\n",
"1085: [D loss: 0.682315, acc: 0.552734] [A loss: 1.005162, acc: 0.054688]\n",
"1086: [D loss: 0.687557, acc: 0.562500] [A loss: 0.781650, acc: 0.347656]\n",
"1087: [D loss: 0.688220, acc: 0.574219] [A loss: 0.929883, acc: 0.097656]\n",
"1088: [D loss: 0.678713, acc: 0.562500] [A loss: 0.733291, acc: 0.417969]\n",
"1089: [D loss: 0.681854, acc: 0.558594] [A loss: 0.941975, acc: 0.160156]\n",
"1090: [D loss: 0.693915, acc: 0.544922] [A loss: 0.843431, acc: 0.214844]\n",
"1091: [D loss: 0.679717, acc: 0.556641] [A loss: 0.920645, acc: 0.125000]\n",
"1092: [D loss: 0.673111, acc: 0.589844] [A loss: 0.752271, acc: 0.402344]\n",
"1093: [D loss: 0.687783, acc: 0.566406] [A loss: 0.925423, acc: 0.164062]\n",
"1094: [D loss: 0.663420, acc: 0.605469] [A loss: 0.852133, acc: 0.261719]\n",
"1095: [D loss: 0.709872, acc: 0.509766] [A loss: 0.964414, acc: 0.062500]\n",
"1096: [D loss: 0.675501, acc: 0.570312] [A loss: 0.799538, acc: 0.281250]\n",
"1097: [D loss: 0.686197, acc: 0.562500] [A loss: 0.947903, acc: 0.109375]\n",
"1098: [D loss: 0.679109, acc: 0.560547] [A loss: 0.776032, acc: 0.316406]\n",
"1099: [D loss: 0.685494, acc: 0.535156] [A loss: 0.975623, acc: 0.062500]\n",
"1100: [D loss: 0.675284, acc: 0.556641] [A loss: 0.812319, acc: 0.277344]\n",
"1101: [D loss: 0.687036, acc: 0.558594] [A loss: 0.968888, acc: 0.121094]\n",
"1102: [D loss: 0.658035, acc: 0.587891] [A loss: 0.737679, acc: 0.437500]\n",
"1103: [D loss: 0.698985, acc: 0.556641] [A loss: 1.056513, acc: 0.050781]\n",
"1104: [D loss: 0.674904, acc: 0.576172] [A loss: 0.740710, acc: 0.441406]\n",
"1105: [D loss: 0.706705, acc: 0.535156] [A loss: 0.994114, acc: 0.074219]\n",
"1106: [D loss: 0.676698, acc: 0.537109] [A loss: 0.735451, acc: 0.417969]\n",
"1107: [D loss: 0.700530, acc: 0.519531] [A loss: 0.933384, acc: 0.136719]\n",
"1108: [D loss: 0.670679, acc: 0.570312] [A loss: 0.798909, acc: 0.289062]\n",
"1109: [D loss: 0.683384, acc: 0.574219] [A loss: 0.850156, acc: 0.230469]\n",
"1110: [D loss: 0.681893, acc: 0.535156] [A loss: 0.835699, acc: 0.269531]\n",
"1111: [D loss: 0.689390, acc: 0.537109] [A loss: 0.916872, acc: 0.101562]\n",
"1112: [D loss: 0.678501, acc: 0.531250] [A loss: 0.804301, acc: 0.253906]\n",
"1113: [D loss: 0.678272, acc: 0.570312] [A loss: 0.864263, acc: 0.214844]\n",
"1114: [D loss: 0.682453, acc: 0.550781] [A loss: 0.880791, acc: 0.179688]\n",
"1115: [D loss: 0.687356, acc: 0.546875] [A loss: 0.863242, acc: 0.167969]\n",
"1116: [D loss: 0.677927, acc: 0.560547] [A loss: 0.891838, acc: 0.160156]\n",
"1117: [D loss: 0.673294, acc: 0.582031] [A loss: 0.886890, acc: 0.175781]\n",
"1118: [D loss: 0.684010, acc: 0.542969] [A loss: 0.879895, acc: 0.195312]\n",
"1119: [D loss: 0.671862, acc: 0.568359] [A loss: 0.856879, acc: 0.218750]\n",
"1120: [D loss: 0.671827, acc: 0.572266] [A loss: 0.904429, acc: 0.160156]\n",
"1121: [D loss: 0.686750, acc: 0.535156] [A loss: 0.917022, acc: 0.144531]\n",
"1122: [D loss: 0.670272, acc: 0.572266] [A loss: 0.823438, acc: 0.281250]\n",
"1123: [D loss: 0.725968, acc: 0.521484] [A loss: 1.024332, acc: 0.035156]\n",
"1124: [D loss: 0.682536, acc: 0.582031] [A loss: 0.762891, acc: 0.359375]\n",
"1125: [D loss: 0.700487, acc: 0.531250] [A loss: 1.035388, acc: 0.054688]\n",
"1126: [D loss: 0.664069, acc: 0.578125] [A loss: 0.713184, acc: 0.468750]\n",
"1127: [D loss: 0.710622, acc: 0.519531] [A loss: 1.070289, acc: 0.027344]\n",
"1128: [D loss: 0.694870, acc: 0.539062] [A loss: 0.727801, acc: 0.457031]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1129: [D loss: 0.700831, acc: 0.533203] [A loss: 0.944835, acc: 0.078125]\n",
"1130: [D loss: 0.665834, acc: 0.593750] [A loss: 0.777362, acc: 0.390625]\n",
"1131: [D loss: 0.693112, acc: 0.539062] [A loss: 0.954208, acc: 0.113281]\n",
"1132: [D loss: 0.679360, acc: 0.572266] [A loss: 0.782603, acc: 0.347656]\n",
"1133: [D loss: 0.683773, acc: 0.568359] [A loss: 0.974037, acc: 0.097656]\n",
"1134: [D loss: 0.677299, acc: 0.580078] [A loss: 0.774733, acc: 0.312500]\n",
"1135: [D loss: 0.678980, acc: 0.568359] [A loss: 0.958220, acc: 0.101562]\n",
"1136: [D loss: 0.684219, acc: 0.550781] [A loss: 0.847017, acc: 0.242188]\n",
"1137: [D loss: 0.685058, acc: 0.572266] [A loss: 0.959771, acc: 0.074219]\n",
"1138: [D loss: 0.680136, acc: 0.564453] [A loss: 0.777472, acc: 0.347656]\n",
"1139: [D loss: 0.687226, acc: 0.537109] [A loss: 0.972440, acc: 0.078125]\n",
"1140: [D loss: 0.683876, acc: 0.550781] [A loss: 0.780544, acc: 0.339844]\n",
"1141: [D loss: 0.680240, acc: 0.548828] [A loss: 0.908160, acc: 0.152344]\n",
"1142: [D loss: 0.674335, acc: 0.562500] [A loss: 0.913401, acc: 0.152344]\n",
"1143: [D loss: 0.675237, acc: 0.564453] [A loss: 0.831063, acc: 0.218750]\n",
"1144: [D loss: 0.679407, acc: 0.556641] [A loss: 0.905789, acc: 0.167969]\n",
"1145: [D loss: 0.670299, acc: 0.580078] [A loss: 0.823775, acc: 0.230469]\n",
"1146: [D loss: 0.688225, acc: 0.554688] [A loss: 0.951445, acc: 0.097656]\n",
"1147: [D loss: 0.678986, acc: 0.585938] [A loss: 0.781525, acc: 0.347656]\n",
"1148: [D loss: 0.699406, acc: 0.535156] [A loss: 1.075374, acc: 0.027344]\n",
"1149: [D loss: 0.682163, acc: 0.550781] [A loss: 0.685149, acc: 0.535156]\n",
"1150: [D loss: 0.698741, acc: 0.552734] [A loss: 1.005914, acc: 0.062500]\n",
"1151: [D loss: 0.679123, acc: 0.552734] [A loss: 0.708870, acc: 0.496094]\n",
"1152: [D loss: 0.705705, acc: 0.529297] [A loss: 0.957990, acc: 0.085938]\n",
"1153: [D loss: 0.678728, acc: 0.578125] [A loss: 0.806130, acc: 0.292969]\n",
"1154: [D loss: 0.684733, acc: 0.566406] [A loss: 0.904851, acc: 0.148438]\n",
"1155: [D loss: 0.682723, acc: 0.576172] [A loss: 0.793897, acc: 0.285156]\n",
"1156: [D loss: 0.671304, acc: 0.587891] [A loss: 0.892468, acc: 0.175781]\n",
"1157: [D loss: 0.684226, acc: 0.546875] [A loss: 0.860554, acc: 0.183594]\n",
"1158: [D loss: 0.678062, acc: 0.558594] [A loss: 0.878391, acc: 0.160156]\n",
"1159: [D loss: 0.699960, acc: 0.539062] [A loss: 0.904855, acc: 0.136719]\n",
"1160: [D loss: 0.677679, acc: 0.580078] [A loss: 0.878626, acc: 0.187500]\n",
"1161: [D loss: 0.673805, acc: 0.580078] [A loss: 0.896819, acc: 0.144531]\n",
"1162: [D loss: 0.674974, acc: 0.578125] [A loss: 0.849577, acc: 0.238281]\n",
"1163: [D loss: 0.684870, acc: 0.556641] [A loss: 0.916940, acc: 0.140625]\n",
"1164: [D loss: 0.668473, acc: 0.560547] [A loss: 0.928055, acc: 0.148438]\n",
"1165: [D loss: 0.687731, acc: 0.546875] [A loss: 0.911701, acc: 0.175781]\n",
"1166: [D loss: 0.675951, acc: 0.582031] [A loss: 0.835509, acc: 0.257812]\n",
"1167: [D loss: 0.676805, acc: 0.582031] [A loss: 0.940022, acc: 0.121094]\n",
"1168: [D loss: 0.662660, acc: 0.609375] [A loss: 0.792254, acc: 0.332031]\n",
"1169: [D loss: 0.693704, acc: 0.552734] [A loss: 1.108163, acc: 0.054688]\n",
"1170: [D loss: 0.680320, acc: 0.548828] [A loss: 0.698161, acc: 0.488281]\n",
"1171: [D loss: 0.708273, acc: 0.515625] [A loss: 1.050660, acc: 0.050781]\n",
"1172: [D loss: 0.689986, acc: 0.548828] [A loss: 0.693934, acc: 0.542969]\n",
"1173: [D loss: 0.698034, acc: 0.535156] [A loss: 1.030590, acc: 0.058594]\n",
"1174: [D loss: 0.673400, acc: 0.574219] [A loss: 0.727260, acc: 0.449219]\n",
"1175: [D loss: 0.683898, acc: 0.566406] [A loss: 0.998530, acc: 0.066406]\n",
"1176: [D loss: 0.662752, acc: 0.613281] [A loss: 0.796711, acc: 0.304688]\n",
"1177: [D loss: 0.706820, acc: 0.533203] [A loss: 0.946387, acc: 0.078125]\n",
"1178: [D loss: 0.679211, acc: 0.593750] [A loss: 0.800492, acc: 0.273438]\n",
"1179: [D loss: 0.677273, acc: 0.544922] [A loss: 0.932386, acc: 0.152344]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1180: [D loss: 0.689707, acc: 0.562500] [A loss: 0.850475, acc: 0.203125]\n",
"1181: [D loss: 0.687475, acc: 0.542969] [A loss: 0.873566, acc: 0.195312]\n",
"1182: [D loss: 0.685596, acc: 0.558594] [A loss: 0.868572, acc: 0.175781]\n",
"1183: [D loss: 0.671129, acc: 0.595703] [A loss: 0.835114, acc: 0.234375]\n",
"1184: [D loss: 0.681424, acc: 0.558594] [A loss: 0.905692, acc: 0.140625]\n",
"1185: [D loss: 0.669340, acc: 0.623047] [A loss: 0.817904, acc: 0.277344]\n",
"1186: [D loss: 0.700784, acc: 0.523438] [A loss: 0.962976, acc: 0.089844]\n",
"1187: [D loss: 0.678340, acc: 0.570312] [A loss: 0.779210, acc: 0.320312]\n",
"1188: [D loss: 0.676945, acc: 0.570312] [A loss: 0.959327, acc: 0.101562]\n",
"1189: [D loss: 0.666198, acc: 0.574219] [A loss: 0.776165, acc: 0.320312]\n",
"1190: [D loss: 0.685718, acc: 0.572266] [A loss: 1.011695, acc: 0.082031]\n",
"1191: [D loss: 0.678060, acc: 0.560547] [A loss: 0.732664, acc: 0.429688]\n",
"1192: [D loss: 0.699568, acc: 0.539062] [A loss: 0.986321, acc: 0.082031]\n",
"1193: [D loss: 0.681685, acc: 0.574219] [A loss: 0.749600, acc: 0.406250]\n",
"1194: [D loss: 0.694031, acc: 0.539062] [A loss: 1.021689, acc: 0.039062]\n",
"1195: [D loss: 0.674314, acc: 0.554688] [A loss: 0.752050, acc: 0.402344]\n",
"1196: [D loss: 0.688969, acc: 0.562500] [A loss: 0.958923, acc: 0.113281]\n",
"1197: [D loss: 0.683791, acc: 0.558594] [A loss: 0.744064, acc: 0.390625]\n",
"1198: [D loss: 0.712587, acc: 0.546875] [A loss: 0.962695, acc: 0.085938]\n",
"1199: [D loss: 0.671436, acc: 0.570312] [A loss: 0.788416, acc: 0.316406]\n",
"1200: [D loss: 0.689357, acc: 0.537109] [A loss: 0.914480, acc: 0.132812]\n",
"1201: [D loss: 0.674910, acc: 0.548828] [A loss: 0.815980, acc: 0.281250]\n",
"1202: [D loss: 0.694010, acc: 0.537109] [A loss: 0.928653, acc: 0.128906]\n",
"1203: [D loss: 0.677901, acc: 0.576172] [A loss: 0.842453, acc: 0.246094]\n",
"1204: [D loss: 0.694340, acc: 0.523438] [A loss: 0.941498, acc: 0.144531]\n",
"1205: [D loss: 0.680404, acc: 0.560547] [A loss: 0.822072, acc: 0.250000]\n",
"1206: [D loss: 0.689070, acc: 0.542969] [A loss: 0.965054, acc: 0.101562]\n",
"1207: [D loss: 0.674432, acc: 0.580078] [A loss: 0.845930, acc: 0.230469]\n",
"1208: [D loss: 0.681710, acc: 0.568359] [A loss: 0.880643, acc: 0.183594]\n",
"1209: [D loss: 0.677555, acc: 0.568359] [A loss: 0.880570, acc: 0.199219]\n",
"1210: [D loss: 0.693430, acc: 0.533203] [A loss: 0.914071, acc: 0.121094]\n",
"1211: [D loss: 0.689235, acc: 0.539062] [A loss: 0.855841, acc: 0.230469]\n",
"1212: [D loss: 0.681676, acc: 0.583984] [A loss: 0.974201, acc: 0.070312]\n",
"1213: [D loss: 0.678191, acc: 0.597656] [A loss: 0.774513, acc: 0.304688]\n",
"1214: [D loss: 0.694564, acc: 0.554688] [A loss: 0.975948, acc: 0.089844]\n",
"1215: [D loss: 0.678631, acc: 0.576172] [A loss: 0.749027, acc: 0.398438]\n",
"1216: [D loss: 0.704479, acc: 0.535156] [A loss: 1.042989, acc: 0.058594]\n",
"1217: [D loss: 0.673442, acc: 0.576172] [A loss: 0.706657, acc: 0.519531]\n",
"1218: [D loss: 0.706703, acc: 0.523438] [A loss: 1.042921, acc: 0.039062]\n",
"1219: [D loss: 0.673137, acc: 0.572266] [A loss: 0.710355, acc: 0.476562]\n",
"1220: [D loss: 0.708978, acc: 0.533203] [A loss: 0.978670, acc: 0.046875]\n",
"1221: [D loss: 0.678995, acc: 0.570312] [A loss: 0.764931, acc: 0.355469]\n",
"1222: [D loss: 0.673815, acc: 0.578125] [A loss: 0.889035, acc: 0.160156]\n",
"1223: [D loss: 0.682976, acc: 0.570312] [A loss: 0.856962, acc: 0.167969]\n",
"1224: [D loss: 0.685933, acc: 0.574219] [A loss: 0.918730, acc: 0.105469]\n",
"1225: [D loss: 0.670783, acc: 0.599609] [A loss: 0.846768, acc: 0.207031]\n",
"1226: [D loss: 0.680499, acc: 0.566406] [A loss: 0.878211, acc: 0.183594]\n",
"1227: [D loss: 0.685459, acc: 0.568359] [A loss: 0.852990, acc: 0.234375]\n",
"1228: [D loss: 0.685809, acc: 0.548828] [A loss: 0.888689, acc: 0.179688]\n",
"1229: [D loss: 0.700143, acc: 0.525391] [A loss: 0.873194, acc: 0.175781]\n",
"1230: [D loss: 0.666733, acc: 0.593750] [A loss: 0.846929, acc: 0.218750]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1231: [D loss: 0.672400, acc: 0.585938] [A loss: 0.864669, acc: 0.199219]\n",
"1232: [D loss: 0.683221, acc: 0.542969] [A loss: 0.879855, acc: 0.175781]\n",
"1233: [D loss: 0.677249, acc: 0.572266] [A loss: 0.843351, acc: 0.214844]\n",
"1234: [D loss: 0.691274, acc: 0.531250] [A loss: 0.859728, acc: 0.179688]\n",
"1235: [D loss: 0.672630, acc: 0.615234] [A loss: 0.874499, acc: 0.195312]\n",
"1236: [D loss: 0.687049, acc: 0.550781] [A loss: 0.913624, acc: 0.156250]\n",
"1237: [D loss: 0.689294, acc: 0.554688] [A loss: 0.856650, acc: 0.246094]\n",
"1238: [D loss: 0.680370, acc: 0.570312] [A loss: 0.942518, acc: 0.105469]\n",
"1239: [D loss: 0.663651, acc: 0.617188] [A loss: 0.872813, acc: 0.207031]\n",
"1240: [D loss: 0.680514, acc: 0.574219] [A loss: 0.979556, acc: 0.113281]\n",
"1241: [D loss: 0.675129, acc: 0.580078] [A loss: 0.751036, acc: 0.375000]\n",
"1242: [D loss: 0.704832, acc: 0.523438] [A loss: 1.080645, acc: 0.031250]\n",
"1243: [D loss: 0.680504, acc: 0.580078] [A loss: 0.681295, acc: 0.566406]\n",
"1244: [D loss: 0.703542, acc: 0.533203] [A loss: 1.090467, acc: 0.019531]\n",
"1245: [D loss: 0.691642, acc: 0.535156] [A loss: 0.707656, acc: 0.507812]\n",
"1246: [D loss: 0.693277, acc: 0.537109] [A loss: 0.980119, acc: 0.074219]\n",
"1247: [D loss: 0.659351, acc: 0.603516] [A loss: 0.778611, acc: 0.324219]\n",
"1248: [D loss: 0.695610, acc: 0.546875] [A loss: 0.943330, acc: 0.125000]\n",
"1249: [D loss: 0.676956, acc: 0.558594] [A loss: 0.756524, acc: 0.378906]\n",
"1250: [D loss: 0.704851, acc: 0.539062] [A loss: 0.955072, acc: 0.140625]\n",
"1251: [D loss: 0.678011, acc: 0.587891] [A loss: 0.776053, acc: 0.328125]\n",
"1252: [D loss: 0.688427, acc: 0.550781] [A loss: 0.954005, acc: 0.105469]\n",
"1253: [D loss: 0.669300, acc: 0.578125] [A loss: 0.760762, acc: 0.394531]\n",
"1254: [D loss: 0.704982, acc: 0.542969] [A loss: 0.986638, acc: 0.101562]\n",
"1255: [D loss: 0.686251, acc: 0.560547] [A loss: 0.760568, acc: 0.382812]\n",
"1256: [D loss: 0.716423, acc: 0.544922] [A loss: 0.965019, acc: 0.089844]\n",
"1257: [D loss: 0.680564, acc: 0.570312] [A loss: 0.769031, acc: 0.355469]\n",
"1258: [D loss: 0.694640, acc: 0.529297] [A loss: 0.952658, acc: 0.109375]\n",
"1259: [D loss: 0.676494, acc: 0.574219] [A loss: 0.740047, acc: 0.402344]\n",
"1260: [D loss: 0.713998, acc: 0.517578] [A loss: 0.964913, acc: 0.089844]\n",
"1261: [D loss: 0.657667, acc: 0.611328] [A loss: 0.757081, acc: 0.355469]\n",
"1262: [D loss: 0.699120, acc: 0.537109] [A loss: 0.952694, acc: 0.117188]\n",
"1263: [D loss: 0.676206, acc: 0.564453] [A loss: 0.786363, acc: 0.335938]\n",
"1264: [D loss: 0.681334, acc: 0.552734] [A loss: 0.909820, acc: 0.164062]\n",
"1265: [D loss: 0.686623, acc: 0.552734] [A loss: 0.790505, acc: 0.296875]\n",
"1266: [D loss: 0.683608, acc: 0.552734] [A loss: 0.936189, acc: 0.156250]\n",
"1267: [D loss: 0.690430, acc: 0.541016] [A loss: 0.851401, acc: 0.242188]\n",
"1268: [D loss: 0.678769, acc: 0.562500] [A loss: 0.863813, acc: 0.199219]\n",
"1269: [D loss: 0.682932, acc: 0.535156] [A loss: 0.853670, acc: 0.226562]\n",
"1270: [D loss: 0.679610, acc: 0.578125] [A loss: 0.918420, acc: 0.156250]\n",
"1271: [D loss: 0.682880, acc: 0.558594] [A loss: 0.879251, acc: 0.183594]\n",
"1272: [D loss: 0.684918, acc: 0.554688] [A loss: 0.908580, acc: 0.101562]\n",
"1273: [D loss: 0.684265, acc: 0.556641] [A loss: 0.807005, acc: 0.296875]\n",
"1274: [D loss: 0.684934, acc: 0.533203] [A loss: 0.940584, acc: 0.066406]\n",
"1275: [D loss: 0.686466, acc: 0.548828] [A loss: 0.800080, acc: 0.281250]\n",
"1276: [D loss: 0.690705, acc: 0.537109] [A loss: 0.983962, acc: 0.085938]\n",
"1277: [D loss: 0.680277, acc: 0.558594] [A loss: 0.840231, acc: 0.226562]\n",
"1278: [D loss: 0.673130, acc: 0.574219] [A loss: 0.916922, acc: 0.144531]\n",
"1279: [D loss: 0.681319, acc: 0.558594] [A loss: 0.854663, acc: 0.234375]\n",
"1280: [D loss: 0.681746, acc: 0.554688] [A loss: 0.942172, acc: 0.128906]\n",
"1281: [D loss: 0.668503, acc: 0.613281] [A loss: 0.780106, acc: 0.363281]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1282: [D loss: 0.714220, acc: 0.513672] [A loss: 0.942433, acc: 0.125000]\n",
"1283: [D loss: 0.683827, acc: 0.537109] [A loss: 0.822872, acc: 0.261719]\n",
"1284: [D loss: 0.683545, acc: 0.558594] [A loss: 0.953449, acc: 0.109375]\n",
"1285: [D loss: 0.688008, acc: 0.558594] [A loss: 0.834956, acc: 0.230469]\n",
"1286: [D loss: 0.688294, acc: 0.560547] [A loss: 0.950657, acc: 0.128906]\n",
"1287: [D loss: 0.671888, acc: 0.562500] [A loss: 0.864954, acc: 0.187500]\n",
"1288: [D loss: 0.686314, acc: 0.552734] [A loss: 0.897248, acc: 0.152344]\n",
"1289: [D loss: 0.691454, acc: 0.529297] [A loss: 0.890691, acc: 0.175781]\n",
"1290: [D loss: 0.682673, acc: 0.544922] [A loss: 0.859976, acc: 0.214844]\n",
"1291: [D loss: 0.667629, acc: 0.607422] [A loss: 0.937101, acc: 0.148438]\n",
"1292: [D loss: 0.679196, acc: 0.566406] [A loss: 0.823053, acc: 0.285156]\n",
"1293: [D loss: 0.705197, acc: 0.511719] [A loss: 1.090763, acc: 0.031250]\n",
"1294: [D loss: 0.680196, acc: 0.587891] [A loss: 0.688823, acc: 0.519531]\n",
"1295: [D loss: 0.701697, acc: 0.523438] [A loss: 1.054330, acc: 0.042969]\n",
"1296: [D loss: 0.663792, acc: 0.595703] [A loss: 0.741587, acc: 0.410156]\n",
"1297: [D loss: 0.695760, acc: 0.564453] [A loss: 0.929226, acc: 0.128906]\n",
"1298: [D loss: 0.680490, acc: 0.552734] [A loss: 0.781068, acc: 0.343750]\n",
"1299: [D loss: 0.682290, acc: 0.550781] [A loss: 0.973806, acc: 0.113281]\n",
"1300: [D loss: 0.684167, acc: 0.550781] [A loss: 0.816289, acc: 0.308594]\n",
"1301: [D loss: 0.685506, acc: 0.554688] [A loss: 0.915341, acc: 0.144531]\n",
"1302: [D loss: 0.686512, acc: 0.562500] [A loss: 0.817021, acc: 0.281250]\n",
"1303: [D loss: 0.684309, acc: 0.535156] [A loss: 0.871795, acc: 0.187500]\n",
"1304: [D loss: 0.687106, acc: 0.566406] [A loss: 0.849064, acc: 0.187500]\n",
"1305: [D loss: 0.675153, acc: 0.583984] [A loss: 0.968207, acc: 0.113281]\n",
"1306: [D loss: 0.672755, acc: 0.572266] [A loss: 0.806096, acc: 0.308594]\n",
"1307: [D loss: 0.703171, acc: 0.535156] [A loss: 0.976987, acc: 0.085938]\n",
"1308: [D loss: 0.682338, acc: 0.566406] [A loss: 0.773108, acc: 0.328125]\n",
"1309: [D loss: 0.685702, acc: 0.539062] [A loss: 0.967595, acc: 0.140625]\n",
"1310: [D loss: 0.699322, acc: 0.533203] [A loss: 0.839638, acc: 0.226562]\n",
"1311: [D loss: 0.669483, acc: 0.595703] [A loss: 0.903268, acc: 0.160156]\n",
"1312: [D loss: 0.675987, acc: 0.566406] [A loss: 0.856402, acc: 0.238281]\n",
"1313: [D loss: 0.671986, acc: 0.568359] [A loss: 0.955359, acc: 0.113281]\n",
"1314: [D loss: 0.665053, acc: 0.599609] [A loss: 0.841715, acc: 0.253906]\n",
"1315: [D loss: 0.681496, acc: 0.576172] [A loss: 0.985713, acc: 0.085938]\n",
"1316: [D loss: 0.675368, acc: 0.566406] [A loss: 0.782705, acc: 0.390625]\n",
"1317: [D loss: 0.699648, acc: 0.552734] [A loss: 1.000106, acc: 0.054688]\n",
"1318: [D loss: 0.685776, acc: 0.552734] [A loss: 0.716286, acc: 0.492188]\n",
"1319: [D loss: 0.707598, acc: 0.531250] [A loss: 1.069589, acc: 0.035156]\n",
"1320: [D loss: 0.680401, acc: 0.570312] [A loss: 0.738186, acc: 0.421875]\n",
"1321: [D loss: 0.694232, acc: 0.548828] [A loss: 0.998440, acc: 0.066406]\n",
"1322: [D loss: 0.692292, acc: 0.537109] [A loss: 0.721415, acc: 0.472656]\n",
"1323: [D loss: 0.703482, acc: 0.535156] [A loss: 1.015861, acc: 0.062500]\n",
"1324: [D loss: 0.670642, acc: 0.582031] [A loss: 0.732665, acc: 0.441406]\n",
"1325: [D loss: 0.686706, acc: 0.562500] [A loss: 0.955292, acc: 0.093750]\n",
"1326: [D loss: 0.681003, acc: 0.568359] [A loss: 0.753697, acc: 0.429688]\n",
"1327: [D loss: 0.685827, acc: 0.544922] [A loss: 0.990838, acc: 0.082031]\n",
"1328: [D loss: 0.677511, acc: 0.574219] [A loss: 0.732822, acc: 0.441406]\n",
"1329: [D loss: 0.700973, acc: 0.515625] [A loss: 0.975707, acc: 0.117188]\n",
"1330: [D loss: 0.692729, acc: 0.539062] [A loss: 0.864944, acc: 0.183594]\n",
"1331: [D loss: 0.681967, acc: 0.562500] [A loss: 0.894125, acc: 0.125000]\n",
"1332: [D loss: 0.681732, acc: 0.535156] [A loss: 0.861911, acc: 0.195312]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1333: [D loss: 0.680428, acc: 0.558594] [A loss: 0.845362, acc: 0.226562]\n",
"1334: [D loss: 0.675302, acc: 0.585938] [A loss: 0.965085, acc: 0.093750]\n",
"1335: [D loss: 0.682842, acc: 0.558594] [A loss: 0.809298, acc: 0.285156]\n",
"1336: [D loss: 0.686448, acc: 0.566406] [A loss: 0.941859, acc: 0.121094]\n",
"1337: [D loss: 0.679875, acc: 0.552734] [A loss: 0.848006, acc: 0.230469]\n",
"1338: [D loss: 0.675642, acc: 0.607422] [A loss: 0.902603, acc: 0.167969]\n",
"1339: [D loss: 0.685062, acc: 0.554688] [A loss: 0.854235, acc: 0.246094]\n",
"1340: [D loss: 0.677056, acc: 0.562500] [A loss: 0.945139, acc: 0.113281]\n",
"1341: [D loss: 0.669702, acc: 0.582031] [A loss: 0.845903, acc: 0.257812]\n",
"1342: [D loss: 0.683804, acc: 0.554688] [A loss: 0.979949, acc: 0.121094]\n",
"1343: [D loss: 0.679218, acc: 0.564453] [A loss: 0.845982, acc: 0.226562]\n",
"1344: [D loss: 0.707293, acc: 0.533203] [A loss: 0.994034, acc: 0.070312]\n",
"1345: [D loss: 0.674554, acc: 0.576172] [A loss: 0.725286, acc: 0.425781]\n",
"1346: [D loss: 0.709619, acc: 0.531250] [A loss: 1.112225, acc: 0.023438]\n",
"1347: [D loss: 0.679164, acc: 0.574219] [A loss: 0.699444, acc: 0.527344]\n",
"1348: [D loss: 0.729211, acc: 0.529297] [A loss: 1.029326, acc: 0.042969]\n",
"1349: [D loss: 0.675216, acc: 0.566406] [A loss: 0.723811, acc: 0.476562]\n",
"1350: [D loss: 0.683378, acc: 0.542969] [A loss: 0.958398, acc: 0.136719]\n",
"1351: [D loss: 0.666086, acc: 0.570312] [A loss: 0.765468, acc: 0.367188]\n",
"1352: [D loss: 0.692454, acc: 0.548828] [A loss: 0.979925, acc: 0.113281]\n",
"1353: [D loss: 0.670704, acc: 0.593750] [A loss: 0.758502, acc: 0.406250]\n",
"1354: [D loss: 0.737267, acc: 0.515625] [A loss: 0.998958, acc: 0.066406]\n",
"1355: [D loss: 0.682439, acc: 0.548828] [A loss: 0.792472, acc: 0.312500]\n",
"1356: [D loss: 0.688032, acc: 0.544922] [A loss: 0.953297, acc: 0.109375]\n",
"1357: [D loss: 0.676330, acc: 0.576172] [A loss: 0.828900, acc: 0.230469]\n",
"1358: [D loss: 0.683268, acc: 0.550781] [A loss: 0.854021, acc: 0.191406]\n",
"1359: [D loss: 0.684534, acc: 0.550781] [A loss: 0.791074, acc: 0.296875]\n",
"1360: [D loss: 0.688967, acc: 0.558594] [A loss: 0.916817, acc: 0.148438]\n",
"1361: [D loss: 0.687019, acc: 0.550781] [A loss: 0.747160, acc: 0.402344]\n",
"1362: [D loss: 0.686508, acc: 0.539062] [A loss: 0.994920, acc: 0.085938]\n",
"1363: [D loss: 0.687459, acc: 0.560547] [A loss: 0.797987, acc: 0.300781]\n",
"1364: [D loss: 0.692127, acc: 0.527344] [A loss: 0.987105, acc: 0.109375]\n",
"1365: [D loss: 0.670243, acc: 0.578125] [A loss: 0.762259, acc: 0.343750]\n",
"1366: [D loss: 0.703790, acc: 0.533203] [A loss: 0.937486, acc: 0.074219]\n",
"1367: [D loss: 0.690291, acc: 0.550781] [A loss: 0.821997, acc: 0.269531]\n",
"1368: [D loss: 0.685418, acc: 0.533203] [A loss: 0.896966, acc: 0.152344]\n",
"1369: [D loss: 0.684076, acc: 0.574219] [A loss: 0.855798, acc: 0.234375]\n",
"1370: [D loss: 0.677924, acc: 0.570312] [A loss: 0.871040, acc: 0.191406]\n",
"1371: [D loss: 0.680582, acc: 0.562500] [A loss: 0.866735, acc: 0.210938]\n",
"1372: [D loss: 0.689437, acc: 0.535156] [A loss: 0.906064, acc: 0.199219]\n",
"1373: [D loss: 0.685401, acc: 0.552734] [A loss: 0.865638, acc: 0.222656]\n",
"1374: [D loss: 0.702693, acc: 0.523438] [A loss: 0.936421, acc: 0.121094]\n",
"1375: [D loss: 0.666238, acc: 0.572266] [A loss: 0.772883, acc: 0.355469]\n",
"1376: [D loss: 0.684360, acc: 0.558594] [A loss: 0.991803, acc: 0.082031]\n",
"1377: [D loss: 0.681071, acc: 0.560547] [A loss: 0.760361, acc: 0.398438]\n",
"1378: [D loss: 0.693084, acc: 0.550781] [A loss: 1.009928, acc: 0.109375]\n",
"1379: [D loss: 0.702080, acc: 0.537109] [A loss: 0.688759, acc: 0.542969]\n",
"1380: [D loss: 0.723869, acc: 0.525391] [A loss: 1.049233, acc: 0.054688]\n",
"1381: [D loss: 0.686914, acc: 0.554688] [A loss: 0.715478, acc: 0.476562]\n",
"1382: [D loss: 0.708771, acc: 0.533203] [A loss: 0.962812, acc: 0.097656]\n",
"1383: [D loss: 0.671473, acc: 0.580078] [A loss: 0.756403, acc: 0.351562]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1384: [D loss: 0.690368, acc: 0.556641] [A loss: 0.911640, acc: 0.144531]\n",
"1385: [D loss: 0.684061, acc: 0.548828] [A loss: 0.796839, acc: 0.320312]\n",
"1386: [D loss: 0.694243, acc: 0.525391] [A loss: 0.899639, acc: 0.105469]\n",
"1387: [D loss: 0.681645, acc: 0.544922] [A loss: 0.829227, acc: 0.242188]\n",
"1388: [D loss: 0.689745, acc: 0.535156] [A loss: 0.884362, acc: 0.175781]\n",
"1389: [D loss: 0.689313, acc: 0.527344] [A loss: 0.828010, acc: 0.242188]\n",
"1390: [D loss: 0.672492, acc: 0.585938] [A loss: 0.886430, acc: 0.167969]\n",
"1391: [D loss: 0.681296, acc: 0.558594] [A loss: 0.877771, acc: 0.203125]\n",
"1392: [D loss: 0.691584, acc: 0.529297] [A loss: 0.894842, acc: 0.183594]\n",
"1393: [D loss: 0.680700, acc: 0.578125] [A loss: 0.857185, acc: 0.207031]\n",
"1394: [D loss: 0.681466, acc: 0.558594] [A loss: 0.852989, acc: 0.214844]\n",
"1395: [D loss: 0.678924, acc: 0.554688] [A loss: 0.938470, acc: 0.140625]\n",
"1396: [D loss: 0.682377, acc: 0.554688] [A loss: 0.820635, acc: 0.285156]\n",
"1397: [D loss: 0.700509, acc: 0.546875] [A loss: 1.046132, acc: 0.046875]\n",
"1398: [D loss: 0.688401, acc: 0.515625] [A loss: 0.770365, acc: 0.335938]\n",
"1399: [D loss: 0.701816, acc: 0.535156] [A loss: 1.032041, acc: 0.042969]\n",
"1400: [D loss: 0.683706, acc: 0.558594] [A loss: 0.706843, acc: 0.523438]\n",
"1401: [D loss: 0.709754, acc: 0.511719] [A loss: 1.014928, acc: 0.054688]\n",
"1402: [D loss: 0.677711, acc: 0.580078] [A loss: 0.703192, acc: 0.484375]\n",
"1403: [D loss: 0.711204, acc: 0.525391] [A loss: 1.018620, acc: 0.050781]\n",
"1404: [D loss: 0.680664, acc: 0.542969] [A loss: 0.693277, acc: 0.507812]\n",
"1405: [D loss: 0.701721, acc: 0.541016] [A loss: 0.924081, acc: 0.109375]\n",
"1406: [D loss: 0.680881, acc: 0.566406] [A loss: 0.822865, acc: 0.277344]\n",
"1407: [D loss: 0.684263, acc: 0.560547] [A loss: 0.868461, acc: 0.253906]\n",
"1408: [D loss: 0.687580, acc: 0.570312] [A loss: 0.805217, acc: 0.273438]\n",
"1409: [D loss: 0.691661, acc: 0.541016] [A loss: 0.897346, acc: 0.195312]\n",
"1410: [D loss: 0.689138, acc: 0.525391] [A loss: 0.849652, acc: 0.230469]\n",
"1411: [D loss: 0.680606, acc: 0.556641] [A loss: 0.865184, acc: 0.230469]\n",
"1412: [D loss: 0.689758, acc: 0.519531] [A loss: 0.887471, acc: 0.171875]\n",
"1413: [D loss: 0.691512, acc: 0.539062] [A loss: 0.784730, acc: 0.308594]\n",
"1414: [D loss: 0.678902, acc: 0.583984] [A loss: 0.942747, acc: 0.097656]\n",
"1415: [D loss: 0.678407, acc: 0.574219] [A loss: 0.789424, acc: 0.320312]\n",
"1416: [D loss: 0.690016, acc: 0.542969] [A loss: 0.934054, acc: 0.125000]\n",
"1417: [D loss: 0.680465, acc: 0.562500] [A loss: 0.779014, acc: 0.332031]\n",
"1418: [D loss: 0.695683, acc: 0.527344] [A loss: 1.027205, acc: 0.050781]\n",
"1419: [D loss: 0.681087, acc: 0.542969] [A loss: 0.771824, acc: 0.320312]\n",
"1420: [D loss: 0.690640, acc: 0.533203] [A loss: 0.933602, acc: 0.140625]\n",
"1421: [D loss: 0.681739, acc: 0.552734] [A loss: 0.795928, acc: 0.281250]\n",
"1422: [D loss: 0.689998, acc: 0.570312] [A loss: 0.900994, acc: 0.152344]\n",
"1423: [D loss: 0.672987, acc: 0.570312] [A loss: 0.853505, acc: 0.234375]\n",
"1424: [D loss: 0.699051, acc: 0.515625] [A loss: 0.863188, acc: 0.218750]\n",
"1425: [D loss: 0.691634, acc: 0.535156] [A loss: 0.875309, acc: 0.179688]\n",
"1426: [D loss: 0.675875, acc: 0.574219] [A loss: 0.856870, acc: 0.218750]\n",
"1427: [D loss: 0.695253, acc: 0.560547] [A loss: 0.876323, acc: 0.156250]\n",
"1428: [D loss: 0.667550, acc: 0.599609] [A loss: 0.867643, acc: 0.230469]\n",
"1429: [D loss: 0.680031, acc: 0.578125] [A loss: 0.860936, acc: 0.218750]\n",
"1430: [D loss: 0.672268, acc: 0.607422] [A loss: 0.894192, acc: 0.156250]\n",
"1431: [D loss: 0.665964, acc: 0.621094] [A loss: 0.951120, acc: 0.125000]\n",
"1432: [D loss: 0.694968, acc: 0.535156] [A loss: 0.801813, acc: 0.289062]\n",
"1433: [D loss: 0.691097, acc: 0.544922] [A loss: 0.965982, acc: 0.109375]\n",
"1434: [D loss: 0.686982, acc: 0.568359] [A loss: 0.735734, acc: 0.460938]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1435: [D loss: 0.705790, acc: 0.537109] [A loss: 1.043381, acc: 0.070312]\n",
"1436: [D loss: 0.697942, acc: 0.539062] [A loss: 0.670314, acc: 0.558594]\n",
"1437: [D loss: 0.711590, acc: 0.546875] [A loss: 1.010039, acc: 0.054688]\n",
"1438: [D loss: 0.688803, acc: 0.550781] [A loss: 0.732676, acc: 0.480469]\n",
"1439: [D loss: 0.687649, acc: 0.558594] [A loss: 0.915057, acc: 0.144531]\n",
"1440: [D loss: 0.675647, acc: 0.578125] [A loss: 0.760677, acc: 0.355469]\n",
"1441: [D loss: 0.695549, acc: 0.531250] [A loss: 0.932372, acc: 0.136719]\n",
"1442: [D loss: 0.685279, acc: 0.537109] [A loss: 0.785809, acc: 0.289062]\n",
"1443: [D loss: 0.714338, acc: 0.537109] [A loss: 0.938313, acc: 0.113281]\n",
"1444: [D loss: 0.689828, acc: 0.544922] [A loss: 0.809042, acc: 0.281250]\n",
"1445: [D loss: 0.687189, acc: 0.580078] [A loss: 0.941099, acc: 0.105469]\n",
"1446: [D loss: 0.694390, acc: 0.541016] [A loss: 0.768837, acc: 0.339844]\n",
"1447: [D loss: 0.687559, acc: 0.542969] [A loss: 0.871814, acc: 0.171875]\n",
"1448: [D loss: 0.687727, acc: 0.531250] [A loss: 0.793578, acc: 0.300781]\n",
"1449: [D loss: 0.688495, acc: 0.546875] [A loss: 0.954632, acc: 0.093750]\n",
"1450: [D loss: 0.683011, acc: 0.550781] [A loss: 0.764640, acc: 0.355469]\n",
"1451: [D loss: 0.690532, acc: 0.558594] [A loss: 0.925405, acc: 0.125000]\n",
"1452: [D loss: 0.679970, acc: 0.560547] [A loss: 0.801227, acc: 0.304688]\n",
"1453: [D loss: 0.691339, acc: 0.525391] [A loss: 0.896740, acc: 0.164062]\n",
"1454: [D loss: 0.684444, acc: 0.541016] [A loss: 0.818332, acc: 0.222656]\n",
"1455: [D loss: 0.675421, acc: 0.580078] [A loss: 0.933947, acc: 0.148438]\n",
"1456: [D loss: 0.688858, acc: 0.533203] [A loss: 0.828777, acc: 0.265625]\n",
"1457: [D loss: 0.690317, acc: 0.539062] [A loss: 0.896162, acc: 0.164062]\n",
"1458: [D loss: 0.672464, acc: 0.572266] [A loss: 0.832716, acc: 0.238281]\n",
"1459: [D loss: 0.687963, acc: 0.537109] [A loss: 0.904266, acc: 0.164062]\n",
"1460: [D loss: 0.706074, acc: 0.517578] [A loss: 0.900147, acc: 0.156250]\n",
"1461: [D loss: 0.685900, acc: 0.570312] [A loss: 0.847050, acc: 0.226562]\n",
"1462: [D loss: 0.693250, acc: 0.535156] [A loss: 0.894220, acc: 0.152344]\n",
"1463: [D loss: 0.677041, acc: 0.566406] [A loss: 0.825356, acc: 0.250000]\n",
"1464: [D loss: 0.701685, acc: 0.529297] [A loss: 0.929042, acc: 0.125000]\n",
"1465: [D loss: 0.688109, acc: 0.544922] [A loss: 0.806350, acc: 0.285156]\n",
"1466: [D loss: 0.684903, acc: 0.546875] [A loss: 1.037941, acc: 0.089844]\n",
"1467: [D loss: 0.690106, acc: 0.525391] [A loss: 0.661779, acc: 0.613281]\n",
"1468: [D loss: 0.731755, acc: 0.513672] [A loss: 1.049011, acc: 0.058594]\n",
"1469: [D loss: 0.697328, acc: 0.531250] [A loss: 0.678619, acc: 0.546875]\n",
"1470: [D loss: 0.705559, acc: 0.552734] [A loss: 0.967964, acc: 0.093750]\n",
"1471: [D loss: 0.688338, acc: 0.558594] [A loss: 0.754815, acc: 0.386719]\n",
"1472: [D loss: 0.687606, acc: 0.541016] [A loss: 0.906167, acc: 0.156250]\n",
"1473: [D loss: 0.696459, acc: 0.535156] [A loss: 0.826095, acc: 0.257812]\n",
"1474: [D loss: 0.685465, acc: 0.564453] [A loss: 0.842245, acc: 0.203125]\n",
"1475: [D loss: 0.691395, acc: 0.542969] [A loss: 0.851689, acc: 0.226562]\n",
"1476: [D loss: 0.688377, acc: 0.550781] [A loss: 0.841665, acc: 0.230469]\n",
"1477: [D loss: 0.677719, acc: 0.582031] [A loss: 0.860724, acc: 0.214844]\n",
"1478: [D loss: 0.682299, acc: 0.568359] [A loss: 0.875799, acc: 0.152344]\n",
"1479: [D loss: 0.693547, acc: 0.525391] [A loss: 0.828799, acc: 0.261719]\n",
"1480: [D loss: 0.703648, acc: 0.511719] [A loss: 0.912331, acc: 0.093750]\n",
"1481: [D loss: 0.677328, acc: 0.578125] [A loss: 0.756095, acc: 0.382812]\n",
"1482: [D loss: 0.704890, acc: 0.533203] [A loss: 1.000448, acc: 0.058594]\n",
"1483: [D loss: 0.678818, acc: 0.578125] [A loss: 0.738554, acc: 0.414062]\n",
"1484: [D loss: 0.707371, acc: 0.539062] [A loss: 0.977276, acc: 0.082031]\n",
"1485: [D loss: 0.680261, acc: 0.558594] [A loss: 0.736122, acc: 0.417969]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1486: [D loss: 0.699059, acc: 0.548828] [A loss: 0.927256, acc: 0.121094]\n",
"1487: [D loss: 0.681676, acc: 0.566406] [A loss: 0.754882, acc: 0.359375]\n",
"1488: [D loss: 0.687574, acc: 0.572266] [A loss: 0.974137, acc: 0.109375]\n",
"1489: [D loss: 0.678019, acc: 0.558594] [A loss: 0.741126, acc: 0.394531]\n",
"1490: [D loss: 0.703975, acc: 0.517578] [A loss: 0.992583, acc: 0.066406]\n",
"1491: [D loss: 0.685848, acc: 0.548828] [A loss: 0.797362, acc: 0.316406]\n",
"1492: [D loss: 0.687167, acc: 0.566406] [A loss: 0.899618, acc: 0.128906]\n",
"1493: [D loss: 0.677633, acc: 0.564453] [A loss: 0.793071, acc: 0.273438]\n",
"1494: [D loss: 0.692556, acc: 0.558594] [A loss: 0.945507, acc: 0.089844]\n",
"1495: [D loss: 0.684597, acc: 0.574219] [A loss: 0.824213, acc: 0.277344]\n",
"1496: [D loss: 0.693714, acc: 0.515625] [A loss: 0.812864, acc: 0.296875]\n",
"1497: [D loss: 0.676887, acc: 0.566406] [A loss: 0.862143, acc: 0.210938]\n",
"1498: [D loss: 0.695843, acc: 0.517578] [A loss: 0.898831, acc: 0.152344]\n",
"1499: [D loss: 0.693181, acc: 0.544922] [A loss: 0.796052, acc: 0.296875]\n",
"1500: [D loss: 0.681300, acc: 0.556641] [A loss: 0.906457, acc: 0.183594]\n",
"1501: [D loss: 0.677593, acc: 0.587891] [A loss: 0.838713, acc: 0.250000]\n",
"1502: [D loss: 0.690063, acc: 0.527344] [A loss: 0.844491, acc: 0.226562]\n",
"1503: [D loss: 0.679626, acc: 0.570312] [A loss: 0.864772, acc: 0.191406]\n",
"1504: [D loss: 0.676058, acc: 0.574219] [A loss: 0.862211, acc: 0.207031]\n",
"1505: [D loss: 0.702673, acc: 0.550781] [A loss: 0.834066, acc: 0.250000]\n",
"1506: [D loss: 0.692751, acc: 0.531250] [A loss: 0.831944, acc: 0.230469]\n",
"1507: [D loss: 0.696879, acc: 0.548828] [A loss: 0.912311, acc: 0.132812]\n",
"1508: [D loss: 0.685166, acc: 0.552734] [A loss: 0.796599, acc: 0.289062]\n",
"1509: [D loss: 0.694121, acc: 0.521484] [A loss: 0.951495, acc: 0.101562]\n",
"1510: [D loss: 0.680583, acc: 0.589844] [A loss: 0.760761, acc: 0.398438]\n",
"1511: [D loss: 0.685259, acc: 0.537109] [A loss: 0.985680, acc: 0.105469]\n",
"1512: [D loss: 0.685958, acc: 0.572266] [A loss: 0.750081, acc: 0.390625]\n",
"1513: [D loss: 0.704576, acc: 0.533203] [A loss: 1.003852, acc: 0.058594]\n",
"1514: [D loss: 0.682105, acc: 0.556641] [A loss: 0.680046, acc: 0.550781]\n",
"1515: [D loss: 0.709990, acc: 0.511719] [A loss: 1.084857, acc: 0.042969]\n",
"1516: [D loss: 0.683513, acc: 0.564453] [A loss: 0.707737, acc: 0.480469]\n",
"1517: [D loss: 0.700943, acc: 0.537109] [A loss: 0.959002, acc: 0.125000]\n",
"1518: [D loss: 0.699924, acc: 0.533203] [A loss: 0.771550, acc: 0.371094]\n",
"1519: [D loss: 0.703995, acc: 0.521484] [A loss: 0.903787, acc: 0.105469]\n",
"1520: [D loss: 0.678125, acc: 0.564453] [A loss: 0.779300, acc: 0.328125]\n",
"1521: [D loss: 0.690797, acc: 0.548828] [A loss: 0.889953, acc: 0.187500]\n",
"1522: [D loss: 0.691736, acc: 0.552734] [A loss: 0.768044, acc: 0.363281]\n",
"1523: [D loss: 0.703722, acc: 0.511719] [A loss: 0.880499, acc: 0.144531]\n",
"1524: [D loss: 0.684119, acc: 0.574219] [A loss: 0.842865, acc: 0.246094]\n",
"1525: [D loss: 0.685846, acc: 0.560547] [A loss: 0.858787, acc: 0.175781]\n",
"1526: [D loss: 0.685242, acc: 0.585938] [A loss: 0.847382, acc: 0.269531]\n",
"1527: [D loss: 0.676400, acc: 0.587891] [A loss: 0.888415, acc: 0.164062]\n",
"1528: [D loss: 0.681918, acc: 0.562500] [A loss: 0.809154, acc: 0.281250]\n",
"1529: [D loss: 0.690848, acc: 0.535156] [A loss: 0.888975, acc: 0.144531]\n",
"1530: [D loss: 0.691706, acc: 0.527344] [A loss: 0.827256, acc: 0.230469]\n",
"1531: [D loss: 0.698150, acc: 0.515625] [A loss: 0.872242, acc: 0.187500]\n",
"1532: [D loss: 0.684797, acc: 0.527344] [A loss: 0.851099, acc: 0.242188]\n",
"1533: [D loss: 0.679375, acc: 0.558594] [A loss: 0.836439, acc: 0.273438]\n",
"1534: [D loss: 0.693956, acc: 0.544922] [A loss: 0.942061, acc: 0.082031]\n",
"1535: [D loss: 0.684434, acc: 0.556641] [A loss: 0.796686, acc: 0.292969]\n",
"1536: [D loss: 0.687622, acc: 0.562500] [A loss: 0.933267, acc: 0.109375]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1537: [D loss: 0.685996, acc: 0.550781] [A loss: 0.815844, acc: 0.292969]\n",
"1538: [D loss: 0.684187, acc: 0.556641] [A loss: 0.872226, acc: 0.195312]\n",
"1539: [D loss: 0.674181, acc: 0.587891] [A loss: 0.844418, acc: 0.195312]\n",
"1540: [D loss: 0.695027, acc: 0.554688] [A loss: 0.830900, acc: 0.261719]\n",
"1541: [D loss: 0.694259, acc: 0.550781] [A loss: 0.934874, acc: 0.121094]\n",
"1542: [D loss: 0.682255, acc: 0.572266] [A loss: 0.829398, acc: 0.265625]\n",
"1543: [D loss: 0.693046, acc: 0.533203] [A loss: 0.869728, acc: 0.207031]\n",
"1544: [D loss: 0.692125, acc: 0.550781] [A loss: 0.848983, acc: 0.234375]\n",
"1545: [D loss: 0.685024, acc: 0.572266] [A loss: 0.917399, acc: 0.148438]\n",
"1546: [D loss: 0.685763, acc: 0.525391] [A loss: 0.901537, acc: 0.164062]\n",
"1547: [D loss: 0.675876, acc: 0.578125] [A loss: 0.939976, acc: 0.175781]\n",
"1548: [D loss: 0.697375, acc: 0.527344] [A loss: 0.967823, acc: 0.121094]\n",
"1549: [D loss: 0.672337, acc: 0.572266] [A loss: 0.779519, acc: 0.335938]\n",
"1550: [D loss: 0.701446, acc: 0.515625] [A loss: 1.038918, acc: 0.054688]\n",
"1551: [D loss: 0.696568, acc: 0.515625] [A loss: 0.705341, acc: 0.503906]\n",
"1552: [D loss: 0.719964, acc: 0.521484] [A loss: 1.051846, acc: 0.050781]\n",
"1553: [D loss: 0.676277, acc: 0.554688] [A loss: 0.667793, acc: 0.589844]\n",
"1554: [D loss: 0.715114, acc: 0.558594] [A loss: 1.036093, acc: 0.046875]\n",
"1555: [D loss: 0.684191, acc: 0.554688] [A loss: 0.704026, acc: 0.460938]\n",
"1556: [D loss: 0.715416, acc: 0.486328] [A loss: 0.942562, acc: 0.101562]\n",
"1557: [D loss: 0.684625, acc: 0.550781] [A loss: 0.738554, acc: 0.402344]\n",
"1558: [D loss: 0.703562, acc: 0.523438] [A loss: 0.857686, acc: 0.187500]\n",
"1559: [D loss: 0.689311, acc: 0.560547] [A loss: 0.756836, acc: 0.375000]\n",
"1560: [D loss: 0.672646, acc: 0.597656] [A loss: 0.857910, acc: 0.167969]\n",
"1561: [D loss: 0.678710, acc: 0.580078] [A loss: 0.786787, acc: 0.328125]\n",
"1562: [D loss: 0.704016, acc: 0.531250] [A loss: 0.901755, acc: 0.128906]\n",
"1563: [D loss: 0.696550, acc: 0.519531] [A loss: 0.798480, acc: 0.308594]\n",
"1564: [D loss: 0.683838, acc: 0.568359] [A loss: 0.878059, acc: 0.183594]\n",
"1565: [D loss: 0.694705, acc: 0.537109] [A loss: 0.821387, acc: 0.277344]\n",
"1566: [D loss: 0.669485, acc: 0.582031] [A loss: 0.836516, acc: 0.246094]\n",
"1567: [D loss: 0.673197, acc: 0.578125] [A loss: 0.857568, acc: 0.246094]\n",
"1568: [D loss: 0.674785, acc: 0.580078] [A loss: 0.856733, acc: 0.175781]\n",
"1569: [D loss: 0.683950, acc: 0.554688] [A loss: 0.869377, acc: 0.195312]\n",
"1570: [D loss: 0.683514, acc: 0.564453] [A loss: 0.845819, acc: 0.199219]\n",
"1571: [D loss: 0.684462, acc: 0.564453] [A loss: 0.901200, acc: 0.160156]\n",
"1572: [D loss: 0.694002, acc: 0.539062] [A loss: 0.859823, acc: 0.171875]\n",
"1573: [D loss: 0.702782, acc: 0.486328] [A loss: 0.891087, acc: 0.148438]\n",
"1574: [D loss: 0.689897, acc: 0.554688] [A loss: 0.839352, acc: 0.242188]\n",
"1575: [D loss: 0.698506, acc: 0.529297] [A loss: 0.928612, acc: 0.105469]\n",
"1576: [D loss: 0.681834, acc: 0.591797] [A loss: 0.812636, acc: 0.289062]\n",
"1577: [D loss: 0.697607, acc: 0.531250] [A loss: 0.912916, acc: 0.132812]\n",
"1578: [D loss: 0.684140, acc: 0.552734] [A loss: 0.829298, acc: 0.242188]\n",
"1579: [D loss: 0.692500, acc: 0.570312] [A loss: 0.940705, acc: 0.105469]\n",
"1580: [D loss: 0.703599, acc: 0.537109] [A loss: 0.812227, acc: 0.273438]\n",
"1581: [D loss: 0.699099, acc: 0.572266] [A loss: 0.962655, acc: 0.070312]\n",
"1582: [D loss: 0.678149, acc: 0.542969] [A loss: 0.779846, acc: 0.335938]\n",
"1583: [D loss: 0.690250, acc: 0.554688] [A loss: 0.889659, acc: 0.160156]\n",
"1584: [D loss: 0.696281, acc: 0.546875] [A loss: 0.895047, acc: 0.148438]\n",
"1585: [D loss: 0.678939, acc: 0.566406] [A loss: 0.856616, acc: 0.199219]\n",
"1586: [D loss: 0.698059, acc: 0.525391] [A loss: 0.825626, acc: 0.257812]\n",
"1587: [D loss: 0.688630, acc: 0.554688] [A loss: 0.959206, acc: 0.097656]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1588: [D loss: 0.671978, acc: 0.570312] [A loss: 0.778734, acc: 0.355469]\n",
"1589: [D loss: 0.685926, acc: 0.556641] [A loss: 0.982897, acc: 0.109375]\n",
"1590: [D loss: 0.683190, acc: 0.566406] [A loss: 0.712341, acc: 0.500000]\n",
"1591: [D loss: 0.704472, acc: 0.535156] [A loss: 1.056172, acc: 0.042969]\n",
"1592: [D loss: 0.685883, acc: 0.560547] [A loss: 0.667386, acc: 0.617188]\n",
"1593: [D loss: 0.725929, acc: 0.531250] [A loss: 1.015168, acc: 0.078125]\n",
"1594: [D loss: 0.682435, acc: 0.566406] [A loss: 0.693139, acc: 0.515625]\n",
"1595: [D loss: 0.710345, acc: 0.552734] [A loss: 0.895474, acc: 0.113281]\n",
"1596: [D loss: 0.675090, acc: 0.609375] [A loss: 0.747956, acc: 0.421875]\n",
"1597: [D loss: 0.700533, acc: 0.517578] [A loss: 0.907088, acc: 0.117188]\n",
"1598: [D loss: 0.687752, acc: 0.570312] [A loss: 0.766297, acc: 0.378906]\n",
"1599: [D loss: 0.702611, acc: 0.541016] [A loss: 0.876758, acc: 0.187500]\n",
"1600: [D loss: 0.687367, acc: 0.548828] [A loss: 0.769726, acc: 0.339844]\n",
"1601: [D loss: 0.700952, acc: 0.542969] [A loss: 0.882267, acc: 0.156250]\n",
"1602: [D loss: 0.693241, acc: 0.529297] [A loss: 0.836197, acc: 0.242188]\n",
"1603: [D loss: 0.701373, acc: 0.515625] [A loss: 0.868425, acc: 0.191406]\n",
"1604: [D loss: 0.695072, acc: 0.539062] [A loss: 0.845491, acc: 0.207031]\n",
"1605: [D loss: 0.685498, acc: 0.542969] [A loss: 0.853856, acc: 0.226562]\n",
"1606: [D loss: 0.682201, acc: 0.574219] [A loss: 0.929210, acc: 0.109375]\n",
"1607: [D loss: 0.685761, acc: 0.544922] [A loss: 0.784844, acc: 0.324219]\n",
"1608: [D loss: 0.703579, acc: 0.558594] [A loss: 0.926800, acc: 0.125000]\n",
"1609: [D loss: 0.683812, acc: 0.535156] [A loss: 0.780920, acc: 0.363281]\n",
"1610: [D loss: 0.687716, acc: 0.562500] [A loss: 0.891812, acc: 0.160156]\n",
"1611: [D loss: 0.677544, acc: 0.585938] [A loss: 0.810284, acc: 0.292969]\n",
"1612: [D loss: 0.680756, acc: 0.574219] [A loss: 0.854092, acc: 0.199219]\n",
"1613: [D loss: 0.685635, acc: 0.548828] [A loss: 0.874699, acc: 0.179688]\n",
"1614: [D loss: 0.675571, acc: 0.599609] [A loss: 0.805748, acc: 0.316406]\n",
"1615: [D loss: 0.682152, acc: 0.566406] [A loss: 0.908612, acc: 0.132812]\n",
"1616: [D loss: 0.688344, acc: 0.541016] [A loss: 0.863639, acc: 0.234375]\n",
"1617: [D loss: 0.682838, acc: 0.578125] [A loss: 0.829542, acc: 0.242188]\n",
"1618: [D loss: 0.697134, acc: 0.533203] [A loss: 0.916436, acc: 0.144531]\n",
"1619: [D loss: 0.678367, acc: 0.576172] [A loss: 0.749976, acc: 0.371094]\n",
"1620: [D loss: 0.698725, acc: 0.539062] [A loss: 1.059403, acc: 0.066406]\n",
"1621: [D loss: 0.675748, acc: 0.587891] [A loss: 0.649876, acc: 0.640625]\n",
"1622: [D loss: 0.709885, acc: 0.509766] [A loss: 1.045175, acc: 0.027344]\n",
"1623: [D loss: 0.708770, acc: 0.515625] [A loss: 0.691153, acc: 0.507812]\n",
"1624: [D loss: 0.714253, acc: 0.525391] [A loss: 0.927726, acc: 0.152344]\n",
"1625: [D loss: 0.693400, acc: 0.544922] [A loss: 0.778914, acc: 0.308594]\n",
"1626: [D loss: 0.690315, acc: 0.531250] [A loss: 0.829366, acc: 0.234375]\n",
"1627: [D loss: 0.682147, acc: 0.576172] [A loss: 0.843967, acc: 0.218750]\n",
"1628: [D loss: 0.673201, acc: 0.572266] [A loss: 0.876155, acc: 0.144531]\n",
"1629: [D loss: 0.685888, acc: 0.542969] [A loss: 0.827670, acc: 0.246094]\n",
"1630: [D loss: 0.706030, acc: 0.546875] [A loss: 0.808063, acc: 0.242188]\n",
"1631: [D loss: 0.691514, acc: 0.572266] [A loss: 0.871776, acc: 0.210938]\n",
"1632: [D loss: 0.690962, acc: 0.529297] [A loss: 0.851846, acc: 0.203125]\n",
"1633: [D loss: 0.694492, acc: 0.531250] [A loss: 0.814879, acc: 0.273438]\n",
"1634: [D loss: 0.686337, acc: 0.550781] [A loss: 0.828562, acc: 0.222656]\n",
"1635: [D loss: 0.687946, acc: 0.554688] [A loss: 0.834642, acc: 0.234375]\n",
"1636: [D loss: 0.687636, acc: 0.568359] [A loss: 0.848636, acc: 0.207031]\n",
"1637: [D loss: 0.685749, acc: 0.558594] [A loss: 0.836365, acc: 0.214844]\n",
"1638: [D loss: 0.693885, acc: 0.523438] [A loss: 0.866811, acc: 0.195312]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1639: [D loss: 0.690664, acc: 0.535156] [A loss: 0.881214, acc: 0.195312]\n",
"1640: [D loss: 0.679772, acc: 0.564453] [A loss: 0.829868, acc: 0.234375]\n",
"1641: [D loss: 0.691146, acc: 0.542969] [A loss: 0.913030, acc: 0.140625]\n",
"1642: [D loss: 0.689582, acc: 0.531250] [A loss: 0.785830, acc: 0.324219]\n",
"1643: [D loss: 0.697476, acc: 0.537109] [A loss: 0.977357, acc: 0.089844]\n",
"1644: [D loss: 0.689909, acc: 0.558594] [A loss: 0.741197, acc: 0.394531]\n",
"1645: [D loss: 0.694078, acc: 0.556641] [A loss: 0.878213, acc: 0.132812]\n",
"1646: [D loss: 0.689237, acc: 0.554688] [A loss: 0.811135, acc: 0.265625]\n",
"1647: [D loss: 0.693459, acc: 0.531250] [A loss: 0.908154, acc: 0.156250]\n",
"1648: [D loss: 0.688668, acc: 0.539062] [A loss: 0.780841, acc: 0.332031]\n",
"1649: [D loss: 0.699293, acc: 0.535156] [A loss: 0.972554, acc: 0.085938]\n",
"1650: [D loss: 0.678554, acc: 0.578125] [A loss: 0.776413, acc: 0.367188]\n",
"1651: [D loss: 0.689598, acc: 0.552734] [A loss: 0.922656, acc: 0.152344]\n",
"1652: [D loss: 0.684279, acc: 0.556641] [A loss: 0.726265, acc: 0.457031]\n",
"1653: [D loss: 0.716354, acc: 0.525391] [A loss: 0.945826, acc: 0.089844]\n",
"1654: [D loss: 0.692309, acc: 0.552734] [A loss: 0.798385, acc: 0.261719]\n",
"1655: [D loss: 0.685902, acc: 0.568359] [A loss: 0.859851, acc: 0.246094]\n",
"1656: [D loss: 0.689849, acc: 0.564453] [A loss: 0.848958, acc: 0.199219]\n",
"1657: [D loss: 0.687366, acc: 0.550781] [A loss: 0.857255, acc: 0.234375]\n",
"1658: [D loss: 0.694189, acc: 0.554688] [A loss: 0.862146, acc: 0.160156]\n",
"1659: [D loss: 0.681444, acc: 0.574219] [A loss: 0.873290, acc: 0.191406]\n",
"1660: [D loss: 0.668834, acc: 0.595703] [A loss: 0.800944, acc: 0.296875]\n",
"1661: [D loss: 0.701856, acc: 0.503906] [A loss: 0.966140, acc: 0.070312]\n",
"1662: [D loss: 0.688315, acc: 0.541016] [A loss: 0.760168, acc: 0.355469]\n",
"1663: [D loss: 0.717431, acc: 0.519531] [A loss: 1.001673, acc: 0.082031]\n",
"1664: [D loss: 0.680666, acc: 0.589844] [A loss: 0.740214, acc: 0.437500]\n",
"1665: [D loss: 0.713292, acc: 0.541016] [A loss: 1.079549, acc: 0.054688]\n",
"1666: [D loss: 0.679270, acc: 0.554688] [A loss: 0.663980, acc: 0.621094]\n",
"1667: [D loss: 0.715195, acc: 0.531250] [A loss: 1.015745, acc: 0.062500]\n",
"1668: [D loss: 0.673748, acc: 0.580078] [A loss: 0.714442, acc: 0.515625]\n",
"1669: [D loss: 0.715488, acc: 0.535156] [A loss: 0.940037, acc: 0.093750]\n",
"1670: [D loss: 0.670512, acc: 0.583984] [A loss: 0.749767, acc: 0.375000]\n",
"1671: [D loss: 0.693793, acc: 0.544922] [A loss: 0.933364, acc: 0.105469]\n",
"1672: [D loss: 0.684758, acc: 0.568359] [A loss: 0.729147, acc: 0.449219]\n",
"1673: [D loss: 0.694390, acc: 0.527344] [A loss: 0.913265, acc: 0.113281]\n",
"1674: [D loss: 0.686450, acc: 0.539062] [A loss: 0.762610, acc: 0.367188]\n",
"1675: [D loss: 0.690389, acc: 0.554688] [A loss: 0.914521, acc: 0.152344]\n",
"1676: [D loss: 0.688449, acc: 0.525391] [A loss: 0.825696, acc: 0.265625]\n",
"1677: [D loss: 0.681495, acc: 0.570312] [A loss: 0.818901, acc: 0.285156]\n",
"1678: [D loss: 0.699588, acc: 0.519531] [A loss: 0.912627, acc: 0.109375]\n",
"1679: [D loss: 0.674945, acc: 0.601562] [A loss: 0.780571, acc: 0.367188]\n",
"1680: [D loss: 0.694939, acc: 0.541016] [A loss: 0.846815, acc: 0.250000]\n",
"1681: [D loss: 0.684662, acc: 0.539062] [A loss: 0.814832, acc: 0.281250]\n",
"1682: [D loss: 0.702670, acc: 0.519531] [A loss: 0.814715, acc: 0.277344]\n",
"1683: [D loss: 0.695666, acc: 0.519531] [A loss: 0.857180, acc: 0.218750]\n",
"1684: [D loss: 0.683561, acc: 0.550781] [A loss: 0.860845, acc: 0.207031]\n",
"1685: [D loss: 0.693187, acc: 0.531250] [A loss: 0.901234, acc: 0.175781]\n",
"1686: [D loss: 0.688731, acc: 0.552734] [A loss: 0.856548, acc: 0.203125]\n",
"1687: [D loss: 0.691262, acc: 0.562500] [A loss: 0.891226, acc: 0.164062]\n",
"1688: [D loss: 0.678239, acc: 0.564453] [A loss: 0.833060, acc: 0.234375]\n",
"1689: [D loss: 0.680906, acc: 0.568359] [A loss: 0.859304, acc: 0.199219]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1690: [D loss: 0.698128, acc: 0.554688] [A loss: 0.922922, acc: 0.128906]\n",
"1691: [D loss: 0.704870, acc: 0.523438] [A loss: 0.779115, acc: 0.343750]\n",
"1692: [D loss: 0.692400, acc: 0.533203] [A loss: 0.922426, acc: 0.093750]\n",
"1693: [D loss: 0.692053, acc: 0.541016] [A loss: 0.758052, acc: 0.402344]\n",
"1694: [D loss: 0.703495, acc: 0.498047] [A loss: 0.978306, acc: 0.121094]\n",
"1695: [D loss: 0.697474, acc: 0.490234] [A loss: 0.744141, acc: 0.394531]\n",
"1696: [D loss: 0.688579, acc: 0.550781] [A loss: 0.949242, acc: 0.132812]\n",
"1697: [D loss: 0.686671, acc: 0.550781] [A loss: 0.760641, acc: 0.371094]\n",
"1698: [D loss: 0.686111, acc: 0.552734] [A loss: 0.937993, acc: 0.117188]\n",
"1699: [D loss: 0.677696, acc: 0.574219] [A loss: 0.733924, acc: 0.425781]\n",
"1700: [D loss: 0.696073, acc: 0.527344] [A loss: 0.927195, acc: 0.113281]\n",
"1701: [D loss: 0.671932, acc: 0.595703] [A loss: 0.810212, acc: 0.265625]\n",
"1702: [D loss: 0.689931, acc: 0.537109] [A loss: 0.851835, acc: 0.214844]\n",
"1703: [D loss: 0.685489, acc: 0.550781] [A loss: 0.852432, acc: 0.199219]\n",
"1704: [D loss: 0.686470, acc: 0.574219] [A loss: 0.898618, acc: 0.199219]\n",
"1705: [D loss: 0.696084, acc: 0.537109] [A loss: 0.832895, acc: 0.250000]\n",
"1706: [D loss: 0.690025, acc: 0.566406] [A loss: 0.861842, acc: 0.187500]\n",
"1707: [D loss: 0.679356, acc: 0.574219] [A loss: 0.867423, acc: 0.191406]\n",
"1708: [D loss: 0.687523, acc: 0.564453] [A loss: 0.896447, acc: 0.140625]\n",
"1709: [D loss: 0.680141, acc: 0.548828] [A loss: 0.813547, acc: 0.277344]\n",
"1710: [D loss: 0.691273, acc: 0.544922] [A loss: 0.910109, acc: 0.148438]\n",
"1711: [D loss: 0.675772, acc: 0.570312] [A loss: 0.780429, acc: 0.328125]\n",
"1712: [D loss: 0.712117, acc: 0.542969] [A loss: 0.996140, acc: 0.062500]\n",
"1713: [D loss: 0.683792, acc: 0.542969] [A loss: 0.703283, acc: 0.480469]\n",
"1714: [D loss: 0.707775, acc: 0.533203] [A loss: 1.123916, acc: 0.058594]\n",
"1715: [D loss: 0.700149, acc: 0.533203] [A loss: 0.670511, acc: 0.578125]\n",
"1716: [D loss: 0.742551, acc: 0.494141] [A loss: 0.932945, acc: 0.128906]\n",
"1717: [D loss: 0.689732, acc: 0.531250] [A loss: 0.791897, acc: 0.300781]\n",
"1718: [D loss: 0.686074, acc: 0.546875] [A loss: 0.857717, acc: 0.242188]\n",
"1719: [D loss: 0.680183, acc: 0.582031] [A loss: 0.833178, acc: 0.250000]\n",
"1720: [D loss: 0.686779, acc: 0.548828] [A loss: 0.922353, acc: 0.121094]\n",
"1721: [D loss: 0.688680, acc: 0.546875] [A loss: 0.824774, acc: 0.285156]\n",
"1722: [D loss: 0.709300, acc: 0.513672] [A loss: 0.921970, acc: 0.121094]\n",
"1723: [D loss: 0.698363, acc: 0.523438] [A loss: 0.783208, acc: 0.324219]\n",
"1724: [D loss: 0.689825, acc: 0.550781] [A loss: 0.939153, acc: 0.109375]\n",
"1725: [D loss: 0.691156, acc: 0.535156] [A loss: 0.736265, acc: 0.421875]\n",
"1726: [D loss: 0.693076, acc: 0.542969] [A loss: 0.914826, acc: 0.121094]\n",
"1727: [D loss: 0.673886, acc: 0.578125] [A loss: 0.752311, acc: 0.414062]\n",
"1728: [D loss: 0.703201, acc: 0.519531] [A loss: 0.971728, acc: 0.105469]\n",
"1729: [D loss: 0.695829, acc: 0.554688] [A loss: 0.729349, acc: 0.410156]\n",
"1730: [D loss: 0.725822, acc: 0.517578] [A loss: 1.018722, acc: 0.058594]\n",
"1731: [D loss: 0.690181, acc: 0.541016] [A loss: 0.747144, acc: 0.375000]\n",
"1732: [D loss: 0.688782, acc: 0.562500] [A loss: 0.915574, acc: 0.121094]\n",
"1733: [D loss: 0.691664, acc: 0.525391] [A loss: 0.775453, acc: 0.351562]\n",
"1734: [D loss: 0.702118, acc: 0.494141] [A loss: 0.856202, acc: 0.234375]\n",
"1735: [D loss: 0.694284, acc: 0.546875] [A loss: 0.879855, acc: 0.164062]\n",
"1736: [D loss: 0.681618, acc: 0.554688] [A loss: 0.806111, acc: 0.312500]\n",
"1737: [D loss: 0.677158, acc: 0.556641] [A loss: 0.894953, acc: 0.187500]\n",
"1738: [D loss: 0.685972, acc: 0.541016] [A loss: 0.802887, acc: 0.277344]\n",
"1739: [D loss: 0.700202, acc: 0.537109] [A loss: 0.858440, acc: 0.191406]\n",
"1740: [D loss: 0.680885, acc: 0.546875] [A loss: 0.928086, acc: 0.152344]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1741: [D loss: 0.695881, acc: 0.513672] [A loss: 0.780842, acc: 0.324219]\n",
"1742: [D loss: 0.697977, acc: 0.539062] [A loss: 1.034566, acc: 0.062500]\n",
"1743: [D loss: 0.685511, acc: 0.552734] [A loss: 0.678327, acc: 0.550781]\n",
"1744: [D loss: 0.701990, acc: 0.544922] [A loss: 0.980737, acc: 0.066406]\n",
"1745: [D loss: 0.685927, acc: 0.558594] [A loss: 0.741404, acc: 0.414062]\n",
"1746: [D loss: 0.712399, acc: 0.544922] [A loss: 0.950830, acc: 0.062500]\n",
"1747: [D loss: 0.690497, acc: 0.542969] [A loss: 0.739617, acc: 0.457031]\n",
"1748: [D loss: 0.701708, acc: 0.529297] [A loss: 0.914899, acc: 0.175781]\n",
"1749: [D loss: 0.682977, acc: 0.550781] [A loss: 0.769417, acc: 0.328125]\n",
"1750: [D loss: 0.691025, acc: 0.523438] [A loss: 0.874757, acc: 0.179688]\n",
"1751: [D loss: 0.677030, acc: 0.601562] [A loss: 0.843774, acc: 0.234375]\n",
"1752: [D loss: 0.687645, acc: 0.527344] [A loss: 0.890744, acc: 0.144531]\n",
"1753: [D loss: 0.693290, acc: 0.539062] [A loss: 0.827972, acc: 0.281250]\n",
"1754: [D loss: 0.685165, acc: 0.541016] [A loss: 0.872900, acc: 0.199219]\n",
"1755: [D loss: 0.696694, acc: 0.542969] [A loss: 0.872314, acc: 0.140625]\n",
"1756: [D loss: 0.697838, acc: 0.511719] [A loss: 0.852944, acc: 0.265625]\n",
"1757: [D loss: 0.694966, acc: 0.533203] [A loss: 0.962437, acc: 0.093750]\n",
"1758: [D loss: 0.687993, acc: 0.533203] [A loss: 0.789218, acc: 0.320312]\n",
"1759: [D loss: 0.687679, acc: 0.562500] [A loss: 0.917848, acc: 0.132812]\n",
"1760: [D loss: 0.679538, acc: 0.548828] [A loss: 0.832702, acc: 0.257812]\n",
"1761: [D loss: 0.685941, acc: 0.583984] [A loss: 0.897031, acc: 0.191406]\n",
"1762: [D loss: 0.693141, acc: 0.527344] [A loss: 0.818328, acc: 0.300781]\n",
"1763: [D loss: 0.696390, acc: 0.539062] [A loss: 0.943962, acc: 0.109375]\n",
"1764: [D loss: 0.705287, acc: 0.511719] [A loss: 0.800858, acc: 0.312500]\n",
"1765: [D loss: 0.689946, acc: 0.564453] [A loss: 0.939879, acc: 0.109375]\n",
"1766: [D loss: 0.682623, acc: 0.552734] [A loss: 0.763153, acc: 0.328125]\n",
"1767: [D loss: 0.696216, acc: 0.539062] [A loss: 0.952705, acc: 0.085938]\n",
"1768: [D loss: 0.700795, acc: 0.500000] [A loss: 0.809731, acc: 0.277344]\n",
"1769: [D loss: 0.703385, acc: 0.498047] [A loss: 0.868606, acc: 0.164062]\n",
"1770: [D loss: 0.683579, acc: 0.572266] [A loss: 0.864333, acc: 0.179688]\n",
"1771: [D loss: 0.692366, acc: 0.529297] [A loss: 0.788869, acc: 0.339844]\n",
"1772: [D loss: 0.729019, acc: 0.507812] [A loss: 0.927109, acc: 0.132812]\n",
"1773: [D loss: 0.675742, acc: 0.578125] [A loss: 0.785483, acc: 0.316406]\n",
"1774: [D loss: 0.706547, acc: 0.490234] [A loss: 0.996199, acc: 0.066406]\n",
"1775: [D loss: 0.685960, acc: 0.541016] [A loss: 0.756850, acc: 0.363281]\n",
"1776: [D loss: 0.712110, acc: 0.542969] [A loss: 0.963935, acc: 0.097656]\n",
"1777: [D loss: 0.681712, acc: 0.566406] [A loss: 0.738571, acc: 0.437500]\n",
"1778: [D loss: 0.699307, acc: 0.548828] [A loss: 0.964953, acc: 0.082031]\n",
"1779: [D loss: 0.687143, acc: 0.552734] [A loss: 0.774660, acc: 0.363281]\n",
"1780: [D loss: 0.701651, acc: 0.511719] [A loss: 0.956207, acc: 0.132812]\n",
"1781: [D loss: 0.678973, acc: 0.550781] [A loss: 0.712954, acc: 0.480469]\n",
"1782: [D loss: 0.702369, acc: 0.531250] [A loss: 1.015382, acc: 0.078125]\n",
"1783: [D loss: 0.687301, acc: 0.562500] [A loss: 0.713398, acc: 0.488281]\n",
"1784: [D loss: 0.713913, acc: 0.535156] [A loss: 1.015936, acc: 0.070312]\n",
"1785: [D loss: 0.692958, acc: 0.535156] [A loss: 0.678934, acc: 0.589844]\n",
"1786: [D loss: 0.717966, acc: 0.527344] [A loss: 0.984279, acc: 0.039062]\n",
"1787: [D loss: 0.688597, acc: 0.556641] [A loss: 0.731635, acc: 0.410156]\n",
"1788: [D loss: 0.706308, acc: 0.531250] [A loss: 0.924232, acc: 0.105469]\n",
"1789: [D loss: 0.682245, acc: 0.556641] [A loss: 0.821447, acc: 0.265625]\n",
"1790: [D loss: 0.679171, acc: 0.601562] [A loss: 0.905274, acc: 0.175781]\n",
"1791: [D loss: 0.696198, acc: 0.523438] [A loss: 0.817345, acc: 0.261719]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1792: [D loss: 0.698551, acc: 0.548828] [A loss: 0.870600, acc: 0.199219]\n",
"1793: [D loss: 0.689131, acc: 0.544922] [A loss: 0.790500, acc: 0.351562]\n",
"1794: [D loss: 0.700602, acc: 0.517578] [A loss: 0.956449, acc: 0.117188]\n",
"1795: [D loss: 0.681396, acc: 0.544922] [A loss: 0.769087, acc: 0.339844]\n",
"1796: [D loss: 0.704896, acc: 0.511719] [A loss: 0.989226, acc: 0.066406]\n",
"1797: [D loss: 0.682613, acc: 0.558594] [A loss: 0.758221, acc: 0.386719]\n",
"1798: [D loss: 0.689850, acc: 0.562500] [A loss: 0.910120, acc: 0.160156]\n",
"1799: [D loss: 0.674431, acc: 0.605469] [A loss: 0.847785, acc: 0.242188]\n",
"1800: [D loss: 0.698166, acc: 0.548828] [A loss: 0.827530, acc: 0.277344]\n",
"1801: [D loss: 0.682721, acc: 0.566406] [A loss: 0.868878, acc: 0.210938]\n",
"1802: [D loss: 0.695121, acc: 0.531250] [A loss: 0.893261, acc: 0.214844]\n",
"1803: [D loss: 0.696302, acc: 0.542969] [A loss: 0.881800, acc: 0.207031]\n",
"1804: [D loss: 0.677354, acc: 0.582031] [A loss: 0.911963, acc: 0.136719]\n",
"1805: [D loss: 0.681530, acc: 0.564453] [A loss: 0.784636, acc: 0.390625]\n",
"1806: [D loss: 0.697894, acc: 0.521484] [A loss: 1.018356, acc: 0.085938]\n",
"1807: [D loss: 0.695586, acc: 0.513672] [A loss: 0.636899, acc: 0.656250]\n",
"1808: [D loss: 0.711974, acc: 0.519531] [A loss: 1.063777, acc: 0.042969]\n",
"1809: [D loss: 0.687279, acc: 0.550781] [A loss: 0.639704, acc: 0.648438]\n",
"1810: [D loss: 0.704082, acc: 0.517578] [A loss: 0.978885, acc: 0.140625]\n",
"1811: [D loss: 0.683524, acc: 0.560547] [A loss: 0.749596, acc: 0.425781]\n",
"1812: [D loss: 0.714166, acc: 0.544922] [A loss: 0.937755, acc: 0.125000]\n",
"1813: [D loss: 0.674398, acc: 0.578125] [A loss: 0.801395, acc: 0.300781]\n",
"1814: [D loss: 0.695078, acc: 0.535156] [A loss: 0.906440, acc: 0.195312]\n",
"1815: [D loss: 0.698127, acc: 0.519531] [A loss: 0.800272, acc: 0.312500]\n",
"1816: [D loss: 0.684400, acc: 0.570312] [A loss: 0.900944, acc: 0.179688]\n",
"1817: [D loss: 0.682860, acc: 0.562500] [A loss: 0.815537, acc: 0.296875]\n",
"1818: [D loss: 0.698509, acc: 0.542969] [A loss: 0.853273, acc: 0.230469]\n",
"1819: [D loss: 0.700538, acc: 0.544922] [A loss: 0.856710, acc: 0.218750]\n",
"1820: [D loss: 0.681171, acc: 0.583984] [A loss: 0.814542, acc: 0.285156]\n",
"1821: [D loss: 0.698589, acc: 0.535156] [A loss: 0.884087, acc: 0.156250]\n",
"1822: [D loss: 0.674907, acc: 0.589844] [A loss: 0.803734, acc: 0.343750]\n",
"1823: [D loss: 0.693545, acc: 0.541016] [A loss: 0.871870, acc: 0.179688]\n",
"1824: [D loss: 0.690145, acc: 0.550781] [A loss: 0.897211, acc: 0.164062]\n",
"1825: [D loss: 0.699111, acc: 0.541016] [A loss: 0.834072, acc: 0.242188]\n",
"1826: [D loss: 0.691956, acc: 0.529297] [A loss: 0.883048, acc: 0.183594]\n",
"1827: [D loss: 0.703155, acc: 0.521484] [A loss: 0.989579, acc: 0.066406]\n",
"1828: [D loss: 0.695685, acc: 0.501953] [A loss: 0.767578, acc: 0.390625]\n",
"1829: [D loss: 0.697424, acc: 0.527344] [A loss: 0.931379, acc: 0.164062]\n",
"1830: [D loss: 0.690566, acc: 0.544922] [A loss: 0.723191, acc: 0.433594]\n",
"1831: [D loss: 0.697726, acc: 0.525391] [A loss: 0.964611, acc: 0.121094]\n",
"1832: [D loss: 0.680981, acc: 0.552734] [A loss: 0.782720, acc: 0.320312]\n",
"1833: [D loss: 0.699120, acc: 0.544922] [A loss: 0.941211, acc: 0.136719]\n",
"1834: [D loss: 0.692114, acc: 0.531250] [A loss: 0.710395, acc: 0.511719]\n",
"1835: [D loss: 0.698507, acc: 0.519531] [A loss: 0.979785, acc: 0.082031]\n",
"1836: [D loss: 0.698881, acc: 0.541016] [A loss: 0.707919, acc: 0.464844]\n",
"1837: [D loss: 0.706183, acc: 0.533203] [A loss: 1.046796, acc: 0.062500]\n",
"1838: [D loss: 0.688066, acc: 0.552734] [A loss: 0.742466, acc: 0.421875]\n",
"1839: [D loss: 0.702442, acc: 0.521484] [A loss: 0.966409, acc: 0.070312]\n",
"1840: [D loss: 0.691339, acc: 0.527344] [A loss: 0.774617, acc: 0.328125]\n",
"1841: [D loss: 0.705849, acc: 0.521484] [A loss: 0.854641, acc: 0.195312]\n",
"1842: [D loss: 0.677438, acc: 0.548828] [A loss: 0.809398, acc: 0.257812]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1843: [D loss: 0.697508, acc: 0.537109] [A loss: 0.861208, acc: 0.214844]\n",
"1844: [D loss: 0.693153, acc: 0.556641] [A loss: 0.893946, acc: 0.148438]\n",
"1845: [D loss: 0.687054, acc: 0.544922] [A loss: 0.813299, acc: 0.269531]\n",
"1846: [D loss: 0.682555, acc: 0.574219] [A loss: 0.899498, acc: 0.152344]\n",
"1847: [D loss: 0.674217, acc: 0.554688] [A loss: 0.807733, acc: 0.257812]\n",
"1848: [D loss: 0.685891, acc: 0.562500] [A loss: 0.904201, acc: 0.164062]\n",
"1849: [D loss: 0.682940, acc: 0.560547] [A loss: 0.822107, acc: 0.273438]\n",
"1850: [D loss: 0.674424, acc: 0.595703] [A loss: 0.877156, acc: 0.175781]\n",
"1851: [D loss: 0.689851, acc: 0.523438] [A loss: 0.839798, acc: 0.214844]\n",
"1852: [D loss: 0.696119, acc: 0.513672] [A loss: 0.866921, acc: 0.210938]\n",
"1853: [D loss: 0.690883, acc: 0.552734] [A loss: 0.847197, acc: 0.265625]\n",
"1854: [D loss: 0.689479, acc: 0.531250] [A loss: 0.880005, acc: 0.214844]\n",
"1855: [D loss: 0.668792, acc: 0.562500] [A loss: 0.800183, acc: 0.308594]\n",
"1856: [D loss: 0.695647, acc: 0.554688] [A loss: 0.917361, acc: 0.148438]\n",
"1857: [D loss: 0.676951, acc: 0.574219] [A loss: 0.815850, acc: 0.265625]\n",
"1858: [D loss: 0.700606, acc: 0.517578] [A loss: 1.024101, acc: 0.050781]\n",
"1859: [D loss: 0.677964, acc: 0.583984] [A loss: 0.735106, acc: 0.449219]\n",
"1860: [D loss: 0.713339, acc: 0.505859] [A loss: 1.044801, acc: 0.074219]\n",
"1861: [D loss: 0.686902, acc: 0.558594] [A loss: 0.708681, acc: 0.476562]\n",
"1862: [D loss: 0.731640, acc: 0.503906] [A loss: 1.063595, acc: 0.066406]\n",
"1863: [D loss: 0.694969, acc: 0.531250] [A loss: 0.696877, acc: 0.527344]\n",
"1864: [D loss: 0.718428, acc: 0.539062] [A loss: 1.010890, acc: 0.097656]\n",
"1865: [D loss: 0.683636, acc: 0.568359] [A loss: 0.713322, acc: 0.460938]\n",
"1866: [D loss: 0.705794, acc: 0.550781] [A loss: 0.906457, acc: 0.128906]\n",
"1867: [D loss: 0.701219, acc: 0.515625] [A loss: 0.759417, acc: 0.410156]\n",
"1868: [D loss: 0.700350, acc: 0.546875] [A loss: 0.869767, acc: 0.214844]\n",
"1869: [D loss: 0.680104, acc: 0.531250] [A loss: 0.807527, acc: 0.277344]\n",
"1870: [D loss: 0.698676, acc: 0.525391] [A loss: 0.902028, acc: 0.214844]\n",
"1871: [D loss: 0.689545, acc: 0.562500] [A loss: 0.840606, acc: 0.265625]\n",
"1872: [D loss: 0.700268, acc: 0.523438] [A loss: 0.941584, acc: 0.128906]\n",
"1873: [D loss: 0.700404, acc: 0.535156] [A loss: 0.790053, acc: 0.296875]\n",
"1874: [D loss: 0.699886, acc: 0.519531] [A loss: 0.910521, acc: 0.160156]\n",
"1875: [D loss: 0.704447, acc: 0.525391] [A loss: 0.805785, acc: 0.292969]\n",
"1876: [D loss: 0.693613, acc: 0.539062] [A loss: 0.883667, acc: 0.167969]\n",
"1877: [D loss: 0.701789, acc: 0.533203] [A loss: 0.909906, acc: 0.140625]\n",
"1878: [D loss: 0.672661, acc: 0.568359] [A loss: 0.808408, acc: 0.300781]\n",
"1879: [D loss: 0.697560, acc: 0.546875] [A loss: 1.028706, acc: 0.050781]\n",
"1880: [D loss: 0.680066, acc: 0.544922] [A loss: 0.739008, acc: 0.421875]\n",
"1881: [D loss: 0.717218, acc: 0.535156] [A loss: 1.003192, acc: 0.054688]\n",
"1882: [D loss: 0.681580, acc: 0.556641] [A loss: 0.733955, acc: 0.460938]\n",
"1883: [D loss: 0.697514, acc: 0.527344] [A loss: 0.970093, acc: 0.066406]\n",
"1884: [D loss: 0.680073, acc: 0.595703] [A loss: 0.775511, acc: 0.375000]\n",
"1885: [D loss: 0.682242, acc: 0.566406] [A loss: 0.981321, acc: 0.097656]\n",
"1886: [D loss: 0.682992, acc: 0.572266] [A loss: 0.784374, acc: 0.335938]\n",
"1887: [D loss: 0.692751, acc: 0.576172] [A loss: 0.976252, acc: 0.089844]\n",
"1888: [D loss: 0.666217, acc: 0.613281] [A loss: 0.833218, acc: 0.234375]\n",
"1889: [D loss: 0.689152, acc: 0.570312] [A loss: 0.938536, acc: 0.167969]\n",
"1890: [D loss: 0.685110, acc: 0.556641] [A loss: 0.830242, acc: 0.273438]\n",
"1891: [D loss: 0.693477, acc: 0.542969] [A loss: 0.854373, acc: 0.207031]\n",
"1892: [D loss: 0.698262, acc: 0.539062] [A loss: 0.822924, acc: 0.261719]\n",
"1893: [D loss: 0.693559, acc: 0.546875] [A loss: 0.890046, acc: 0.148438]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1894: [D loss: 0.678266, acc: 0.570312] [A loss: 0.825223, acc: 0.257812]\n",
"1895: [D loss: 0.709634, acc: 0.535156] [A loss: 0.942493, acc: 0.117188]\n",
"1896: [D loss: 0.688796, acc: 0.550781] [A loss: 0.802732, acc: 0.324219]\n",
"1897: [D loss: 0.698554, acc: 0.550781] [A loss: 0.954762, acc: 0.109375]\n",
"1898: [D loss: 0.688667, acc: 0.542969] [A loss: 0.755302, acc: 0.375000]\n",
"1899: [D loss: 0.705381, acc: 0.527344] [A loss: 1.020820, acc: 0.078125]\n",
"1900: [D loss: 0.687605, acc: 0.550781] [A loss: 0.704062, acc: 0.539062]\n",
"1901: [D loss: 0.718505, acc: 0.523438] [A loss: 1.084566, acc: 0.058594]\n",
"1902: [D loss: 0.692270, acc: 0.552734] [A loss: 0.712058, acc: 0.484375]\n",
"1903: [D loss: 0.709137, acc: 0.533203] [A loss: 0.929361, acc: 0.148438]\n",
"1904: [D loss: 0.687164, acc: 0.546875] [A loss: 0.749107, acc: 0.390625]\n",
"1905: [D loss: 0.707767, acc: 0.529297] [A loss: 0.955603, acc: 0.164062]\n",
"1906: [D loss: 0.688129, acc: 0.554688] [A loss: 0.773071, acc: 0.351562]\n",
"1907: [D loss: 0.703803, acc: 0.494141] [A loss: 0.946954, acc: 0.121094]\n",
"1908: [D loss: 0.686150, acc: 0.558594] [A loss: 0.764191, acc: 0.367188]\n",
"1909: [D loss: 0.695648, acc: 0.570312] [A loss: 0.960931, acc: 0.117188]\n",
"1910: [D loss: 0.682222, acc: 0.580078] [A loss: 0.763851, acc: 0.332031]\n",
"1911: [D loss: 0.702448, acc: 0.541016] [A loss: 0.888611, acc: 0.203125]\n",
"1912: [D loss: 0.674280, acc: 0.580078] [A loss: 0.798314, acc: 0.332031]\n",
"1913: [D loss: 0.697180, acc: 0.541016] [A loss: 0.924812, acc: 0.167969]\n",
"1914: [D loss: 0.688402, acc: 0.550781] [A loss: 0.792755, acc: 0.289062]\n",
"1915: [D loss: 0.682965, acc: 0.544922] [A loss: 0.906909, acc: 0.132812]\n",
"1916: [D loss: 0.682578, acc: 0.533203] [A loss: 0.843643, acc: 0.238281]\n",
"1917: [D loss: 0.678325, acc: 0.568359] [A loss: 0.837440, acc: 0.265625]\n",
"1918: [D loss: 0.691669, acc: 0.546875] [A loss: 0.932299, acc: 0.132812]\n",
"1919: [D loss: 0.664715, acc: 0.599609] [A loss: 0.818193, acc: 0.304688]\n",
"1920: [D loss: 0.695875, acc: 0.525391] [A loss: 0.920488, acc: 0.136719]\n",
"1921: [D loss: 0.692019, acc: 0.539062] [A loss: 0.837617, acc: 0.253906]\n",
"1922: [D loss: 0.715833, acc: 0.537109] [A loss: 0.907437, acc: 0.199219]\n",
"1923: [D loss: 0.708886, acc: 0.541016] [A loss: 0.975387, acc: 0.121094]\n",
"1924: [D loss: 0.697935, acc: 0.513672] [A loss: 0.773653, acc: 0.347656]\n",
"1925: [D loss: 0.704844, acc: 0.525391] [A loss: 1.036329, acc: 0.093750]\n",
"1926: [D loss: 0.685412, acc: 0.570312] [A loss: 0.749335, acc: 0.417969]\n",
"1927: [D loss: 0.708652, acc: 0.535156] [A loss: 1.001128, acc: 0.113281]\n",
"1928: [D loss: 0.677609, acc: 0.546875] [A loss: 0.732334, acc: 0.406250]\n",
"1929: [D loss: 0.697820, acc: 0.544922] [A loss: 1.055133, acc: 0.078125]\n",
"1930: [D loss: 0.686837, acc: 0.562500] [A loss: 0.658543, acc: 0.597656]\n",
"1931: [D loss: 0.726990, acc: 0.513672] [A loss: 1.035694, acc: 0.070312]\n",
"1932: [D loss: 0.681204, acc: 0.576172] [A loss: 0.708561, acc: 0.496094]\n",
"1933: [D loss: 0.721987, acc: 0.521484] [A loss: 0.950138, acc: 0.121094]\n",
"1934: [D loss: 0.681853, acc: 0.542969] [A loss: 0.749537, acc: 0.421875]\n",
"1935: [D loss: 0.692245, acc: 0.566406] [A loss: 0.845897, acc: 0.214844]\n",
"1936: [D loss: 0.706144, acc: 0.496094] [A loss: 0.836411, acc: 0.257812]\n",
"1937: [D loss: 0.682769, acc: 0.541016] [A loss: 0.806686, acc: 0.292969]\n",
"1938: [D loss: 0.691569, acc: 0.531250] [A loss: 0.835501, acc: 0.230469]\n",
"1939: [D loss: 0.689163, acc: 0.548828] [A loss: 0.819890, acc: 0.257812]\n",
"1940: [D loss: 0.693856, acc: 0.517578] [A loss: 0.931610, acc: 0.148438]\n",
"1941: [D loss: 0.687798, acc: 0.544922] [A loss: 0.804680, acc: 0.285156]\n",
"1942: [D loss: 0.702647, acc: 0.525391] [A loss: 0.907093, acc: 0.140625]\n",
"1943: [D loss: 0.685241, acc: 0.544922] [A loss: 0.817012, acc: 0.250000]\n",
"1944: [D loss: 0.687747, acc: 0.544922] [A loss: 0.882003, acc: 0.179688]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1945: [D loss: 0.683313, acc: 0.576172] [A loss: 0.839064, acc: 0.250000]\n",
"1946: [D loss: 0.699612, acc: 0.507812] [A loss: 0.884653, acc: 0.207031]\n",
"1947: [D loss: 0.695650, acc: 0.513672] [A loss: 0.801534, acc: 0.296875]\n",
"1948: [D loss: 0.704071, acc: 0.519531] [A loss: 0.950908, acc: 0.093750]\n",
"1949: [D loss: 0.680801, acc: 0.576172] [A loss: 0.760296, acc: 0.335938]\n",
"1950: [D loss: 0.706521, acc: 0.521484] [A loss: 1.012260, acc: 0.078125]\n",
"1951: [D loss: 0.676680, acc: 0.591797] [A loss: 0.729997, acc: 0.421875]\n",
"1952: [D loss: 0.729420, acc: 0.511719] [A loss: 1.064131, acc: 0.046875]\n",
"1953: [D loss: 0.691641, acc: 0.537109] [A loss: 0.660101, acc: 0.593750]\n",
"1954: [D loss: 0.749346, acc: 0.507812] [A loss: 0.961568, acc: 0.117188]\n",
"1955: [D loss: 0.681651, acc: 0.556641] [A loss: 0.789982, acc: 0.273438]\n",
"1956: [D loss: 0.709458, acc: 0.513672] [A loss: 0.911005, acc: 0.144531]\n",
"1957: [D loss: 0.687032, acc: 0.552734] [A loss: 0.797090, acc: 0.285156]\n",
"1958: [D loss: 0.696799, acc: 0.535156] [A loss: 0.851181, acc: 0.214844]\n",
"1959: [D loss: 0.691753, acc: 0.533203] [A loss: 0.844492, acc: 0.222656]\n",
"1960: [D loss: 0.683230, acc: 0.568359] [A loss: 0.831171, acc: 0.234375]\n",
"1961: [D loss: 0.694260, acc: 0.537109] [A loss: 0.839666, acc: 0.222656]\n",
"1962: [D loss: 0.700586, acc: 0.511719] [A loss: 0.802525, acc: 0.308594]\n",
"1963: [D loss: 0.701828, acc: 0.523438] [A loss: 0.893123, acc: 0.183594]\n",
"1964: [D loss: 0.692838, acc: 0.515625] [A loss: 0.721860, acc: 0.460938]\n",
"1965: [D loss: 0.692423, acc: 0.552734] [A loss: 0.951997, acc: 0.109375]\n",
"1966: [D loss: 0.684889, acc: 0.541016] [A loss: 0.756767, acc: 0.375000]\n",
"1967: [D loss: 0.703682, acc: 0.539062] [A loss: 0.939893, acc: 0.121094]\n",
"1968: [D loss: 0.682851, acc: 0.552734] [A loss: 0.754413, acc: 0.425781]\n",
"1969: [D loss: 0.684976, acc: 0.562500] [A loss: 0.974195, acc: 0.097656]\n",
"1970: [D loss: 0.702167, acc: 0.560547] [A loss: 0.766710, acc: 0.394531]\n",
"1971: [D loss: 0.691953, acc: 0.539062] [A loss: 0.949603, acc: 0.113281]\n",
"1972: [D loss: 0.691125, acc: 0.562500] [A loss: 0.749679, acc: 0.398438]\n",
"1973: [D loss: 0.691497, acc: 0.542969] [A loss: 0.902155, acc: 0.156250]\n",
"1974: [D loss: 0.687661, acc: 0.574219] [A loss: 0.828043, acc: 0.273438]\n",
"1975: [D loss: 0.687197, acc: 0.548828] [A loss: 0.866138, acc: 0.160156]\n",
"1976: [D loss: 0.691417, acc: 0.527344] [A loss: 0.919359, acc: 0.125000]\n",
"1977: [D loss: 0.685214, acc: 0.525391] [A loss: 0.794356, acc: 0.300781]\n",
"1978: [D loss: 0.683040, acc: 0.564453] [A loss: 0.943598, acc: 0.109375]\n",
"1979: [D loss: 0.683850, acc: 0.558594] [A loss: 0.826587, acc: 0.289062]\n",
"1980: [D loss: 0.690518, acc: 0.548828] [A loss: 0.900247, acc: 0.191406]\n",
"1981: [D loss: 0.678942, acc: 0.599609] [A loss: 0.778632, acc: 0.378906]\n",
"1982: [D loss: 0.718838, acc: 0.509766] [A loss: 1.010208, acc: 0.078125]\n",
"1983: [D loss: 0.688510, acc: 0.568359] [A loss: 0.702731, acc: 0.484375]\n",
"1984: [D loss: 0.705204, acc: 0.527344] [A loss: 1.025702, acc: 0.082031]\n",
"1985: [D loss: 0.699639, acc: 0.527344] [A loss: 0.773812, acc: 0.328125]\n",
"1986: [D loss: 0.701902, acc: 0.521484] [A loss: 0.918920, acc: 0.156250]\n",
"1987: [D loss: 0.691349, acc: 0.515625] [A loss: 0.783113, acc: 0.335938]\n",
"1988: [D loss: 0.690676, acc: 0.542969] [A loss: 0.857743, acc: 0.214844]\n",
"1989: [D loss: 0.700986, acc: 0.515625] [A loss: 0.845880, acc: 0.277344]\n",
"1990: [D loss: 0.676086, acc: 0.574219] [A loss: 0.846998, acc: 0.246094]\n",
"1991: [D loss: 0.697244, acc: 0.527344] [A loss: 0.882027, acc: 0.175781]\n",
"1992: [D loss: 0.689077, acc: 0.548828] [A loss: 0.803377, acc: 0.273438]\n",
"1993: [D loss: 0.684771, acc: 0.583984] [A loss: 0.913252, acc: 0.160156]\n",
"1994: [D loss: 0.677616, acc: 0.578125] [A loss: 0.816305, acc: 0.265625]\n",
"1995: [D loss: 0.688963, acc: 0.550781] [A loss: 0.959241, acc: 0.152344]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"1996: [D loss: 0.689491, acc: 0.556641] [A loss: 0.783760, acc: 0.308594]\n",
"1997: [D loss: 0.684714, acc: 0.544922] [A loss: 1.008006, acc: 0.085938]\n",
"1998: [D loss: 0.685898, acc: 0.544922] [A loss: 0.786538, acc: 0.312500]\n",
"1999: [D loss: 0.698475, acc: 0.541016] [A loss: 1.008558, acc: 0.078125]\n",
"2000: [D loss: 0.682161, acc: 0.548828] [A loss: 0.695735, acc: 0.515625]\n",
"2001: [D loss: 0.712185, acc: 0.527344] [A loss: 1.056420, acc: 0.042969]\n",
"2002: [D loss: 0.687017, acc: 0.525391] [A loss: 0.677432, acc: 0.539062]\n",
"2003: [D loss: 0.710696, acc: 0.533203] [A loss: 0.954482, acc: 0.121094]\n",
"2004: [D loss: 0.692395, acc: 0.537109] [A loss: 0.734784, acc: 0.410156]\n",
"2005: [D loss: 0.705966, acc: 0.554688] [A loss: 0.941487, acc: 0.101562]\n",
"2006: [D loss: 0.687803, acc: 0.572266] [A loss: 0.782978, acc: 0.343750]\n",
"2007: [D loss: 0.705850, acc: 0.527344] [A loss: 0.951222, acc: 0.109375]\n",
"2008: [D loss: 0.703111, acc: 0.501953] [A loss: 0.800878, acc: 0.312500]\n",
"2009: [D loss: 0.694639, acc: 0.548828] [A loss: 0.920912, acc: 0.113281]\n",
"2010: [D loss: 0.683615, acc: 0.550781] [A loss: 0.802903, acc: 0.332031]\n",
"2011: [D loss: 0.704574, acc: 0.521484] [A loss: 0.923403, acc: 0.136719]\n",
"2012: [D loss: 0.704321, acc: 0.503906] [A loss: 0.775663, acc: 0.363281]\n",
"2013: [D loss: 0.699981, acc: 0.529297] [A loss: 0.957647, acc: 0.125000]\n",
"2014: [D loss: 0.683529, acc: 0.542969] [A loss: 0.757194, acc: 0.410156]\n",
"2015: [D loss: 0.698106, acc: 0.550781] [A loss: 0.932138, acc: 0.167969]\n",
"2016: [D loss: 0.697710, acc: 0.505859] [A loss: 0.733864, acc: 0.429688]\n",
"2017: [D loss: 0.700103, acc: 0.525391] [A loss: 0.957666, acc: 0.113281]\n",
"2018: [D loss: 0.680151, acc: 0.562500] [A loss: 0.739863, acc: 0.460938]\n",
"2019: [D loss: 0.707686, acc: 0.541016] [A loss: 0.969478, acc: 0.089844]\n",
"2020: [D loss: 0.688408, acc: 0.537109] [A loss: 0.696527, acc: 0.496094]\n",
"2021: [D loss: 0.714582, acc: 0.535156] [A loss: 0.964329, acc: 0.101562]\n",
"2022: [D loss: 0.673423, acc: 0.576172] [A loss: 0.714458, acc: 0.476562]\n",
"2023: [D loss: 0.706868, acc: 0.525391] [A loss: 0.977004, acc: 0.082031]\n",
"2024: [D loss: 0.681415, acc: 0.566406] [A loss: 0.752334, acc: 0.398438]\n",
"2025: [D loss: 0.691661, acc: 0.576172] [A loss: 0.940165, acc: 0.109375]\n",
"2026: [D loss: 0.689929, acc: 0.550781] [A loss: 0.803652, acc: 0.269531]\n",
"2027: [D loss: 0.714914, acc: 0.486328] [A loss: 0.884761, acc: 0.218750]\n",
"2028: [D loss: 0.694774, acc: 0.548828] [A loss: 0.880159, acc: 0.156250]\n",
"2029: [D loss: 0.677388, acc: 0.582031] [A loss: 0.815120, acc: 0.289062]\n",
"2030: [D loss: 0.676976, acc: 0.554688] [A loss: 0.873404, acc: 0.199219]\n",
"2031: [D loss: 0.686967, acc: 0.564453] [A loss: 0.857936, acc: 0.203125]\n",
"2032: [D loss: 0.694080, acc: 0.566406] [A loss: 0.848836, acc: 0.246094]\n",
"2033: [D loss: 0.692336, acc: 0.562500] [A loss: 0.920577, acc: 0.136719]\n",
"2034: [D loss: 0.684594, acc: 0.541016] [A loss: 0.836377, acc: 0.226562]\n",
"2035: [D loss: 0.696052, acc: 0.542969] [A loss: 0.886712, acc: 0.199219]\n",
"2036: [D loss: 0.681742, acc: 0.583984] [A loss: 0.875894, acc: 0.171875]\n",
"2037: [D loss: 0.696349, acc: 0.550781] [A loss: 0.917727, acc: 0.171875]\n",
"2038: [D loss: 0.685179, acc: 0.552734] [A loss: 0.858814, acc: 0.226562]\n",
"2039: [D loss: 0.685702, acc: 0.562500] [A loss: 0.861993, acc: 0.218750]\n",
"2040: [D loss: 0.712684, acc: 0.517578] [A loss: 0.955300, acc: 0.121094]\n",
"2041: [D loss: 0.697311, acc: 0.521484] [A loss: 0.808466, acc: 0.292969]\n",
"2042: [D loss: 0.698139, acc: 0.550781] [A loss: 0.975390, acc: 0.105469]\n",
"2043: [D loss: 0.691282, acc: 0.542969] [A loss: 0.760743, acc: 0.355469]\n",
"2044: [D loss: 0.698376, acc: 0.541016] [A loss: 1.068340, acc: 0.054688]\n",
"2045: [D loss: 0.697222, acc: 0.541016] [A loss: 0.643014, acc: 0.617188]\n",
"2046: [D loss: 0.715459, acc: 0.529297] [A loss: 1.034396, acc: 0.078125]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2047: [D loss: 0.690049, acc: 0.541016] [A loss: 0.724975, acc: 0.425781]\n",
"2048: [D loss: 0.734062, acc: 0.470703] [A loss: 0.890162, acc: 0.187500]\n",
"2049: [D loss: 0.687641, acc: 0.541016] [A loss: 0.830724, acc: 0.246094]\n",
"2050: [D loss: 0.693681, acc: 0.560547] [A loss: 0.780591, acc: 0.355469]\n",
"2051: [D loss: 0.687050, acc: 0.539062] [A loss: 0.886767, acc: 0.187500]\n",
"2052: [D loss: 0.684632, acc: 0.550781] [A loss: 0.810078, acc: 0.250000]\n",
"2053: [D loss: 0.706580, acc: 0.521484] [A loss: 0.907868, acc: 0.171875]\n",
"2054: [D loss: 0.689088, acc: 0.550781] [A loss: 0.797197, acc: 0.289062]\n",
"2055: [D loss: 0.699529, acc: 0.548828] [A loss: 0.854215, acc: 0.222656]\n",
"2056: [D loss: 0.702145, acc: 0.544922] [A loss: 0.868312, acc: 0.203125]\n",
"2057: [D loss: 0.692901, acc: 0.517578] [A loss: 0.838997, acc: 0.238281]\n",
"2058: [D loss: 0.691272, acc: 0.544922] [A loss: 0.903064, acc: 0.140625]\n",
"2059: [D loss: 0.687858, acc: 0.544922] [A loss: 0.780543, acc: 0.375000]\n",
"2060: [D loss: 0.708143, acc: 0.513672] [A loss: 0.919686, acc: 0.152344]\n",
"2061: [D loss: 0.687164, acc: 0.544922] [A loss: 0.832274, acc: 0.257812]\n",
"2062: [D loss: 0.690394, acc: 0.533203] [A loss: 0.917745, acc: 0.132812]\n",
"2063: [D loss: 0.715187, acc: 0.496094] [A loss: 0.774507, acc: 0.351562]\n",
"2064: [D loss: 0.703865, acc: 0.541016] [A loss: 0.920219, acc: 0.183594]\n",
"2065: [D loss: 0.690471, acc: 0.546875] [A loss: 0.838213, acc: 0.253906]\n",
"2066: [D loss: 0.683715, acc: 0.566406] [A loss: 0.771431, acc: 0.332031]\n",
"2067: [D loss: 0.704875, acc: 0.519531] [A loss: 0.938042, acc: 0.089844]\n",
"2068: [D loss: 0.690549, acc: 0.546875] [A loss: 0.779046, acc: 0.300781]\n",
"2069: [D loss: 0.698938, acc: 0.550781] [A loss: 0.939632, acc: 0.132812]\n",
"2070: [D loss: 0.687426, acc: 0.541016] [A loss: 0.758971, acc: 0.386719]\n",
"2071: [D loss: 0.704070, acc: 0.505859] [A loss: 0.988274, acc: 0.074219]\n",
"2072: [D loss: 0.696809, acc: 0.523438] [A loss: 0.680402, acc: 0.539062]\n",
"2073: [D loss: 0.720638, acc: 0.513672] [A loss: 1.074628, acc: 0.054688]\n",
"2074: [D loss: 0.669175, acc: 0.560547] [A loss: 0.685776, acc: 0.554688]\n",
"2075: [D loss: 0.720928, acc: 0.527344] [A loss: 0.886531, acc: 0.187500]\n",
"2076: [D loss: 0.687783, acc: 0.541016] [A loss: 0.836609, acc: 0.250000]\n",
"2077: [D loss: 0.689681, acc: 0.535156] [A loss: 0.850239, acc: 0.222656]\n",
"2078: [D loss: 0.702211, acc: 0.523438] [A loss: 0.902689, acc: 0.132812]\n",
"2079: [D loss: 0.679078, acc: 0.566406] [A loss: 0.786082, acc: 0.332031]\n",
"2080: [D loss: 0.698006, acc: 0.527344] [A loss: 0.836750, acc: 0.246094]\n",
"2081: [D loss: 0.699192, acc: 0.509766] [A loss: 0.914615, acc: 0.164062]\n",
"2082: [D loss: 0.683681, acc: 0.570312] [A loss: 0.806246, acc: 0.277344]\n",
"2083: [D loss: 0.692120, acc: 0.554688] [A loss: 0.973023, acc: 0.097656]\n",
"2084: [D loss: 0.688503, acc: 0.552734] [A loss: 0.819266, acc: 0.320312]\n",
"2085: [D loss: 0.704270, acc: 0.515625] [A loss: 0.911689, acc: 0.207031]\n",
"2086: [D loss: 0.699585, acc: 0.525391] [A loss: 0.786701, acc: 0.320312]\n",
"2087: [D loss: 0.697799, acc: 0.529297] [A loss: 0.854028, acc: 0.207031]\n",
"2088: [D loss: 0.694230, acc: 0.533203] [A loss: 0.808207, acc: 0.273438]\n",
"2089: [D loss: 0.687453, acc: 0.564453] [A loss: 0.907186, acc: 0.160156]\n",
"2090: [D loss: 0.699384, acc: 0.539062] [A loss: 0.907170, acc: 0.152344]\n",
"2091: [D loss: 0.694866, acc: 0.554688] [A loss: 0.825653, acc: 0.253906]\n",
"2092: [D loss: 0.685724, acc: 0.548828] [A loss: 0.857747, acc: 0.167969]\n",
"2093: [D loss: 0.693450, acc: 0.542969] [A loss: 0.842683, acc: 0.210938]\n",
"2094: [D loss: 0.697031, acc: 0.542969] [A loss: 0.856074, acc: 0.246094]\n",
"2095: [D loss: 0.683065, acc: 0.564453] [A loss: 0.845685, acc: 0.253906]\n",
"2096: [D loss: 0.689290, acc: 0.556641] [A loss: 0.894415, acc: 0.183594]\n",
"2097: [D loss: 0.687694, acc: 0.529297] [A loss: 0.780950, acc: 0.347656]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2098: [D loss: 0.714795, acc: 0.515625] [A loss: 0.996111, acc: 0.085938]\n",
"2099: [D loss: 0.684298, acc: 0.546875] [A loss: 0.767728, acc: 0.355469]\n",
"2100: [D loss: 0.703368, acc: 0.535156] [A loss: 0.995337, acc: 0.093750]\n",
"2101: [D loss: 0.693475, acc: 0.531250] [A loss: 0.791064, acc: 0.343750]\n",
"2102: [D loss: 0.696406, acc: 0.537109] [A loss: 0.976625, acc: 0.097656]\n",
"2103: [D loss: 0.695977, acc: 0.535156] [A loss: 0.690524, acc: 0.546875]\n",
"2104: [D loss: 0.717534, acc: 0.517578] [A loss: 1.039950, acc: 0.089844]\n",
"2105: [D loss: 0.687703, acc: 0.531250] [A loss: 0.790781, acc: 0.320312]\n",
"2106: [D loss: 0.684972, acc: 0.548828] [A loss: 0.897465, acc: 0.148438]\n",
"2107: [D loss: 0.674308, acc: 0.576172] [A loss: 0.798327, acc: 0.332031]\n",
"2108: [D loss: 0.695033, acc: 0.560547] [A loss: 0.923151, acc: 0.148438]\n",
"2109: [D loss: 0.679466, acc: 0.558594] [A loss: 0.800614, acc: 0.308594]\n",
"2110: [D loss: 0.696057, acc: 0.546875] [A loss: 0.933456, acc: 0.148438]\n",
"2111: [D loss: 0.693397, acc: 0.544922] [A loss: 0.745584, acc: 0.394531]\n",
"2112: [D loss: 0.712140, acc: 0.515625] [A loss: 1.009936, acc: 0.113281]\n",
"2113: [D loss: 0.686924, acc: 0.546875] [A loss: 0.717825, acc: 0.480469]\n",
"2114: [D loss: 0.708770, acc: 0.533203] [A loss: 0.967121, acc: 0.066406]\n",
"2115: [D loss: 0.694404, acc: 0.542969] [A loss: 0.734723, acc: 0.425781]\n",
"2116: [D loss: 0.701747, acc: 0.523438] [A loss: 0.951634, acc: 0.082031]\n",
"2117: [D loss: 0.689939, acc: 0.539062] [A loss: 0.842649, acc: 0.226562]\n",
"2118: [D loss: 0.681129, acc: 0.560547] [A loss: 0.826019, acc: 0.238281]\n",
"2119: [D loss: 0.687281, acc: 0.566406] [A loss: 0.923003, acc: 0.136719]\n",
"2120: [D loss: 0.681466, acc: 0.591797] [A loss: 0.788098, acc: 0.277344]\n",
"2121: [D loss: 0.704132, acc: 0.523438] [A loss: 0.998359, acc: 0.097656]\n",
"2122: [D loss: 0.683510, acc: 0.576172] [A loss: 0.708377, acc: 0.476562]\n",
"2123: [D loss: 0.716439, acc: 0.523438] [A loss: 1.021981, acc: 0.089844]\n",
"2124: [D loss: 0.677516, acc: 0.593750] [A loss: 0.677907, acc: 0.582031]\n",
"2125: [D loss: 0.714333, acc: 0.531250] [A loss: 0.988172, acc: 0.089844]\n",
"2126: [D loss: 0.692941, acc: 0.558594] [A loss: 0.776198, acc: 0.324219]\n",
"2127: [D loss: 0.687368, acc: 0.552734] [A loss: 0.848719, acc: 0.203125]\n",
"2128: [D loss: 0.691612, acc: 0.558594] [A loss: 0.745527, acc: 0.402344]\n",
"2129: [D loss: 0.688135, acc: 0.568359] [A loss: 0.884228, acc: 0.148438]\n",
"2130: [D loss: 0.688557, acc: 0.564453] [A loss: 0.830411, acc: 0.242188]\n",
"2131: [D loss: 0.712964, acc: 0.498047] [A loss: 0.905604, acc: 0.132812]\n",
"2132: [D loss: 0.695622, acc: 0.548828] [A loss: 0.776176, acc: 0.335938]\n",
"2133: [D loss: 0.702486, acc: 0.509766] [A loss: 0.878673, acc: 0.167969]\n",
"2134: [D loss: 0.682923, acc: 0.583984] [A loss: 0.793403, acc: 0.308594]\n",
"2135: [D loss: 0.706345, acc: 0.546875] [A loss: 0.967260, acc: 0.082031]\n",
"2136: [D loss: 0.689543, acc: 0.535156] [A loss: 0.742055, acc: 0.378906]\n",
"2137: [D loss: 0.691263, acc: 0.562500] [A loss: 0.922010, acc: 0.144531]\n",
"2138: [D loss: 0.683834, acc: 0.548828] [A loss: 0.777446, acc: 0.394531]\n",
"2139: [D loss: 0.687438, acc: 0.535156] [A loss: 0.908727, acc: 0.132812]\n",
"2140: [D loss: 0.697196, acc: 0.535156] [A loss: 0.878681, acc: 0.175781]\n",
"2141: [D loss: 0.696526, acc: 0.546875] [A loss: 0.794632, acc: 0.316406]\n",
"2142: [D loss: 0.704151, acc: 0.537109] [A loss: 1.054472, acc: 0.046875]\n",
"2143: [D loss: 0.705753, acc: 0.523438] [A loss: 0.665372, acc: 0.539062]\n",
"2144: [D loss: 0.727521, acc: 0.490234] [A loss: 0.993587, acc: 0.109375]\n",
"2145: [D loss: 0.698621, acc: 0.517578] [A loss: 0.760266, acc: 0.359375]\n",
"2146: [D loss: 0.714606, acc: 0.511719] [A loss: 0.932030, acc: 0.140625]\n",
"2147: [D loss: 0.680997, acc: 0.578125] [A loss: 0.808026, acc: 0.292969]\n",
"2148: [D loss: 0.690752, acc: 0.509766] [A loss: 0.873649, acc: 0.210938]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2149: [D loss: 0.686426, acc: 0.544922] [A loss: 0.789480, acc: 0.324219]\n",
"2150: [D loss: 0.692316, acc: 0.546875] [A loss: 0.839927, acc: 0.226562]\n",
"2151: [D loss: 0.697910, acc: 0.525391] [A loss: 0.856504, acc: 0.195312]\n",
"2152: [D loss: 0.696717, acc: 0.509766] [A loss: 0.770507, acc: 0.347656]\n",
"2153: [D loss: 0.691858, acc: 0.521484] [A loss: 0.864761, acc: 0.203125]\n",
"2154: [D loss: 0.688055, acc: 0.541016] [A loss: 0.813022, acc: 0.281250]\n",
"2155: [D loss: 0.708760, acc: 0.527344] [A loss: 0.902239, acc: 0.152344]\n",
"2156: [D loss: 0.693294, acc: 0.527344] [A loss: 0.751771, acc: 0.410156]\n",
"2157: [D loss: 0.690387, acc: 0.552734] [A loss: 0.938052, acc: 0.128906]\n",
"2158: [D loss: 0.679669, acc: 0.576172] [A loss: 0.786837, acc: 0.332031]\n",
"2159: [D loss: 0.720060, acc: 0.511719] [A loss: 0.941458, acc: 0.113281]\n",
"2160: [D loss: 0.682968, acc: 0.568359] [A loss: 0.769246, acc: 0.320312]\n",
"2161: [D loss: 0.698593, acc: 0.546875] [A loss: 0.966074, acc: 0.089844]\n",
"2162: [D loss: 0.689421, acc: 0.550781] [A loss: 0.723079, acc: 0.445312]\n",
"2163: [D loss: 0.706119, acc: 0.527344] [A loss: 0.976106, acc: 0.097656]\n",
"2164: [D loss: 0.688777, acc: 0.546875] [A loss: 0.691368, acc: 0.519531]\n",
"2165: [D loss: 0.710469, acc: 0.523438] [A loss: 0.994312, acc: 0.085938]\n",
"2166: [D loss: 0.696689, acc: 0.560547] [A loss: 0.747921, acc: 0.414062]\n",
"2167: [D loss: 0.691888, acc: 0.539062] [A loss: 0.900882, acc: 0.164062]\n",
"2168: [D loss: 0.687239, acc: 0.583984] [A loss: 0.788303, acc: 0.308594]\n",
"2169: [D loss: 0.700545, acc: 0.548828] [A loss: 0.859149, acc: 0.187500]\n",
"2170: [D loss: 0.670662, acc: 0.591797] [A loss: 0.864949, acc: 0.183594]\n",
"2171: [D loss: 0.708072, acc: 0.521484] [A loss: 0.919826, acc: 0.156250]\n",
"2172: [D loss: 0.691060, acc: 0.544922] [A loss: 0.733970, acc: 0.417969]\n",
"2173: [D loss: 0.694070, acc: 0.560547] [A loss: 0.968965, acc: 0.125000]\n",
"2174: [D loss: 0.683642, acc: 0.564453] [A loss: 0.778450, acc: 0.308594]\n",
"2175: [D loss: 0.689589, acc: 0.552734] [A loss: 0.918418, acc: 0.140625]\n",
"2176: [D loss: 0.674347, acc: 0.595703] [A loss: 0.689961, acc: 0.507812]\n",
"2177: [D loss: 0.707706, acc: 0.527344] [A loss: 0.985664, acc: 0.101562]\n",
"2178: [D loss: 0.691050, acc: 0.558594] [A loss: 0.727462, acc: 0.449219]\n",
"2179: [D loss: 0.708791, acc: 0.525391] [A loss: 0.937547, acc: 0.101562]\n",
"2180: [D loss: 0.688756, acc: 0.525391] [A loss: 0.748484, acc: 0.417969]\n",
"2181: [D loss: 0.692196, acc: 0.546875] [A loss: 0.905480, acc: 0.175781]\n",
"2182: [D loss: 0.701304, acc: 0.527344] [A loss: 0.745440, acc: 0.406250]\n",
"2183: [D loss: 0.693846, acc: 0.548828] [A loss: 0.906932, acc: 0.179688]\n",
"2184: [D loss: 0.684447, acc: 0.574219] [A loss: 0.848878, acc: 0.226562]\n",
"2185: [D loss: 0.685368, acc: 0.585938] [A loss: 0.859527, acc: 0.214844]\n",
"2186: [D loss: 0.710795, acc: 0.541016] [A loss: 0.870712, acc: 0.191406]\n",
"2187: [D loss: 0.690177, acc: 0.548828] [A loss: 0.789493, acc: 0.304688]\n",
"2188: [D loss: 0.694203, acc: 0.554688] [A loss: 0.918730, acc: 0.148438]\n",
"2189: [D loss: 0.686722, acc: 0.537109] [A loss: 0.798551, acc: 0.292969]\n",
"2190: [D loss: 0.689350, acc: 0.568359] [A loss: 0.929831, acc: 0.140625]\n",
"2191: [D loss: 0.689777, acc: 0.535156] [A loss: 0.811559, acc: 0.312500]\n",
"2192: [D loss: 0.688841, acc: 0.546875] [A loss: 0.825506, acc: 0.285156]\n",
"2193: [D loss: 0.677483, acc: 0.572266] [A loss: 0.816161, acc: 0.285156]\n",
"2194: [D loss: 0.710530, acc: 0.521484] [A loss: 0.906243, acc: 0.140625]\n",
"2195: [D loss: 0.679248, acc: 0.568359] [A loss: 0.775242, acc: 0.324219]\n",
"2196: [D loss: 0.691918, acc: 0.529297] [A loss: 0.943218, acc: 0.109375]\n",
"2197: [D loss: 0.691746, acc: 0.550781] [A loss: 0.791043, acc: 0.277344]\n",
"2198: [D loss: 0.701492, acc: 0.505859] [A loss: 1.009506, acc: 0.093750]\n",
"2199: [D loss: 0.699022, acc: 0.554688] [A loss: 0.719829, acc: 0.449219]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2200: [D loss: 0.710591, acc: 0.521484] [A loss: 0.994358, acc: 0.105469]\n",
"2201: [D loss: 0.692753, acc: 0.531250] [A loss: 0.706884, acc: 0.480469]\n",
"2202: [D loss: 0.715088, acc: 0.537109] [A loss: 0.921508, acc: 0.109375]\n",
"2203: [D loss: 0.687223, acc: 0.544922] [A loss: 0.755193, acc: 0.390625]\n",
"2204: [D loss: 0.694100, acc: 0.544922] [A loss: 0.874575, acc: 0.214844]\n",
"2205: [D loss: 0.686604, acc: 0.546875] [A loss: 0.788623, acc: 0.308594]\n",
"2206: [D loss: 0.694860, acc: 0.533203] [A loss: 0.886042, acc: 0.125000]\n",
"2207: [D loss: 0.679281, acc: 0.572266] [A loss: 0.813353, acc: 0.300781]\n",
"2208: [D loss: 0.690005, acc: 0.546875] [A loss: 0.906623, acc: 0.140625]\n",
"2209: [D loss: 0.684691, acc: 0.570312] [A loss: 0.819012, acc: 0.257812]\n",
"2210: [D loss: 0.715572, acc: 0.496094] [A loss: 0.976643, acc: 0.074219]\n",
"2211: [D loss: 0.676748, acc: 0.591797] [A loss: 0.736214, acc: 0.402344]\n",
"2212: [D loss: 0.693439, acc: 0.541016] [A loss: 0.952924, acc: 0.140625]\n",
"2213: [D loss: 0.680619, acc: 0.552734] [A loss: 0.752127, acc: 0.378906]\n",
"2214: [D loss: 0.702377, acc: 0.519531] [A loss: 0.951382, acc: 0.085938]\n",
"2215: [D loss: 0.678195, acc: 0.587891] [A loss: 0.771912, acc: 0.363281]\n",
"2216: [D loss: 0.705054, acc: 0.531250] [A loss: 0.928062, acc: 0.160156]\n",
"2217: [D loss: 0.669919, acc: 0.566406] [A loss: 0.780999, acc: 0.328125]\n",
"2218: [D loss: 0.703659, acc: 0.544922] [A loss: 0.913633, acc: 0.097656]\n",
"2219: [D loss: 0.693874, acc: 0.525391] [A loss: 0.780784, acc: 0.359375]\n",
"2220: [D loss: 0.684040, acc: 0.548828] [A loss: 0.844641, acc: 0.179688]\n",
"2221: [D loss: 0.682302, acc: 0.564453] [A loss: 0.802261, acc: 0.277344]\n",
"2222: [D loss: 0.684380, acc: 0.572266] [A loss: 0.869964, acc: 0.183594]\n",
"2223: [D loss: 0.683978, acc: 0.574219] [A loss: 0.837632, acc: 0.269531]\n",
"2224: [D loss: 0.703736, acc: 0.519531] [A loss: 0.913103, acc: 0.160156]\n",
"2225: [D loss: 0.687852, acc: 0.564453] [A loss: 0.717469, acc: 0.496094]\n",
"2226: [D loss: 0.722933, acc: 0.531250] [A loss: 1.011672, acc: 0.062500]\n",
"2227: [D loss: 0.710705, acc: 0.509766] [A loss: 0.714523, acc: 0.425781]\n",
"2228: [D loss: 0.701064, acc: 0.521484] [A loss: 0.918461, acc: 0.125000]\n",
"2229: [D loss: 0.682085, acc: 0.556641] [A loss: 0.747822, acc: 0.437500]\n",
"2230: [D loss: 0.709475, acc: 0.527344] [A loss: 0.865193, acc: 0.183594]\n",
"2231: [D loss: 0.699924, acc: 0.533203] [A loss: 0.798763, acc: 0.312500]\n",
"2232: [D loss: 0.703192, acc: 0.500000] [A loss: 0.853535, acc: 0.203125]\n",
"2233: [D loss: 0.694878, acc: 0.533203] [A loss: 0.854458, acc: 0.210938]\n",
"2234: [D loss: 0.685725, acc: 0.558594] [A loss: 0.830590, acc: 0.214844]\n",
"2235: [D loss: 0.696851, acc: 0.531250] [A loss: 0.831904, acc: 0.242188]\n",
"2236: [D loss: 0.673383, acc: 0.585938] [A loss: 0.813937, acc: 0.273438]\n",
"2237: [D loss: 0.690738, acc: 0.556641] [A loss: 0.805167, acc: 0.285156]\n",
"2238: [D loss: 0.702267, acc: 0.513672] [A loss: 0.919355, acc: 0.128906]\n",
"2239: [D loss: 0.693980, acc: 0.546875] [A loss: 0.904611, acc: 0.132812]\n",
"2240: [D loss: 0.678992, acc: 0.580078] [A loss: 0.723731, acc: 0.433594]\n",
"2241: [D loss: 0.703379, acc: 0.527344] [A loss: 0.983454, acc: 0.105469]\n",
"2242: [D loss: 0.689581, acc: 0.517578] [A loss: 0.743785, acc: 0.414062]\n",
"2243: [D loss: 0.711273, acc: 0.501953] [A loss: 0.930204, acc: 0.132812]\n",
"2244: [D loss: 0.682027, acc: 0.548828] [A loss: 0.726344, acc: 0.453125]\n",
"2245: [D loss: 0.710126, acc: 0.509766] [A loss: 0.979632, acc: 0.070312]\n",
"2246: [D loss: 0.687060, acc: 0.554688] [A loss: 0.706882, acc: 0.484375]\n",
"2247: [D loss: 0.718761, acc: 0.490234] [A loss: 0.929219, acc: 0.152344]\n",
"2248: [D loss: 0.682631, acc: 0.550781] [A loss: 0.769099, acc: 0.351562]\n",
"2249: [D loss: 0.702003, acc: 0.554688] [A loss: 0.958052, acc: 0.144531]\n",
"2250: [D loss: 0.696708, acc: 0.521484] [A loss: 0.731282, acc: 0.457031]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2251: [D loss: 0.714002, acc: 0.521484] [A loss: 0.902338, acc: 0.136719]\n",
"2252: [D loss: 0.695090, acc: 0.542969] [A loss: 0.847560, acc: 0.207031]\n",
"2253: [D loss: 0.688269, acc: 0.550781] [A loss: 0.849612, acc: 0.199219]\n",
"2254: [D loss: 0.693944, acc: 0.537109] [A loss: 0.772028, acc: 0.351562]\n",
"2255: [D loss: 0.702601, acc: 0.513672] [A loss: 0.851803, acc: 0.207031]\n",
"2256: [D loss: 0.698681, acc: 0.544922] [A loss: 0.805041, acc: 0.277344]\n",
"2257: [D loss: 0.688650, acc: 0.576172] [A loss: 0.866949, acc: 0.179688]\n",
"2258: [D loss: 0.674990, acc: 0.578125] [A loss: 0.821052, acc: 0.273438]\n",
"2259: [D loss: 0.708686, acc: 0.521484] [A loss: 0.892478, acc: 0.164062]\n",
"2260: [D loss: 0.689544, acc: 0.541016] [A loss: 0.775156, acc: 0.359375]\n",
"2261: [D loss: 0.698593, acc: 0.521484] [A loss: 0.879649, acc: 0.175781]\n",
"2262: [D loss: 0.699578, acc: 0.529297] [A loss: 0.776370, acc: 0.339844]\n",
"2263: [D loss: 0.686580, acc: 0.578125] [A loss: 0.866823, acc: 0.222656]\n",
"2264: [D loss: 0.688103, acc: 0.568359] [A loss: 0.795140, acc: 0.332031]\n",
"2265: [D loss: 0.692316, acc: 0.544922] [A loss: 0.824692, acc: 0.253906]\n",
"2266: [D loss: 0.693534, acc: 0.541016] [A loss: 0.890235, acc: 0.152344]\n",
"2267: [D loss: 0.687969, acc: 0.558594] [A loss: 0.716371, acc: 0.476562]\n",
"2268: [D loss: 0.709460, acc: 0.492188] [A loss: 1.038121, acc: 0.074219]\n",
"2269: [D loss: 0.687747, acc: 0.546875] [A loss: 0.733485, acc: 0.425781]\n",
"2270: [D loss: 0.715241, acc: 0.501953] [A loss: 0.912547, acc: 0.136719]\n",
"2271: [D loss: 0.681276, acc: 0.572266] [A loss: 0.728639, acc: 0.441406]\n",
"2272: [D loss: 0.700518, acc: 0.523438] [A loss: 0.886314, acc: 0.164062]\n",
"2273: [D loss: 0.690634, acc: 0.531250] [A loss: 0.809057, acc: 0.304688]\n",
"2274: [D loss: 0.717254, acc: 0.515625] [A loss: 0.851993, acc: 0.222656]\n",
"2275: [D loss: 0.693024, acc: 0.541016] [A loss: 0.848948, acc: 0.230469]\n",
"2276: [D loss: 0.689647, acc: 0.537109] [A loss: 0.842214, acc: 0.246094]\n",
"2277: [D loss: 0.692843, acc: 0.574219] [A loss: 0.851111, acc: 0.191406]\n",
"2278: [D loss: 0.688049, acc: 0.544922] [A loss: 0.849325, acc: 0.226562]\n",
"2279: [D loss: 0.697657, acc: 0.535156] [A loss: 0.848937, acc: 0.218750]\n",
"2280: [D loss: 0.696484, acc: 0.560547] [A loss: 0.873751, acc: 0.175781]\n",
"2281: [D loss: 0.691016, acc: 0.542969] [A loss: 0.826136, acc: 0.257812]\n",
"2282: [D loss: 0.696229, acc: 0.537109] [A loss: 0.924848, acc: 0.144531]\n",
"2283: [D loss: 0.685035, acc: 0.558594] [A loss: 0.788721, acc: 0.312500]\n",
"2284: [D loss: 0.701022, acc: 0.542969] [A loss: 0.967912, acc: 0.113281]\n",
"2285: [D loss: 0.679226, acc: 0.578125] [A loss: 0.769232, acc: 0.339844]\n",
"2286: [D loss: 0.705052, acc: 0.519531] [A loss: 0.921736, acc: 0.156250]\n",
"2287: [D loss: 0.685055, acc: 0.572266] [A loss: 0.720162, acc: 0.453125]\n",
"2288: [D loss: 0.696005, acc: 0.542969] [A loss: 0.981473, acc: 0.074219]\n",
"2289: [D loss: 0.673690, acc: 0.578125] [A loss: 0.687832, acc: 0.535156]\n",
"2290: [D loss: 0.720056, acc: 0.525391] [A loss: 0.993017, acc: 0.109375]\n",
"2291: [D loss: 0.687909, acc: 0.546875] [A loss: 0.745993, acc: 0.378906]\n",
"2292: [D loss: 0.699059, acc: 0.560547] [A loss: 0.897856, acc: 0.175781]\n",
"2293: [D loss: 0.681497, acc: 0.583984] [A loss: 0.759504, acc: 0.359375]\n",
"2294: [D loss: 0.695632, acc: 0.572266] [A loss: 0.755696, acc: 0.386719]\n",
"2295: [D loss: 0.694877, acc: 0.525391] [A loss: 0.839629, acc: 0.207031]\n",
"2296: [D loss: 0.692461, acc: 0.531250] [A loss: 0.772301, acc: 0.335938]\n",
"2297: [D loss: 0.694954, acc: 0.552734] [A loss: 0.917158, acc: 0.144531]\n",
"2298: [D loss: 0.691665, acc: 0.564453] [A loss: 0.835814, acc: 0.222656]\n",
"2299: [D loss: 0.705663, acc: 0.546875] [A loss: 0.885766, acc: 0.183594]\n",
"2300: [D loss: 0.705360, acc: 0.507812] [A loss: 0.757220, acc: 0.359375]\n",
"2301: [D loss: 0.710357, acc: 0.539062] [A loss: 0.942031, acc: 0.113281]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2302: [D loss: 0.695370, acc: 0.525391] [A loss: 0.757775, acc: 0.347656]\n",
"2303: [D loss: 0.695328, acc: 0.544922] [A loss: 0.898708, acc: 0.125000]\n",
"2304: [D loss: 0.693415, acc: 0.527344] [A loss: 0.780195, acc: 0.289062]\n",
"2305: [D loss: 0.700626, acc: 0.548828] [A loss: 0.906184, acc: 0.144531]\n",
"2306: [D loss: 0.686362, acc: 0.544922] [A loss: 0.809994, acc: 0.285156]\n",
"2307: [D loss: 0.700056, acc: 0.523438] [A loss: 0.968706, acc: 0.093750]\n",
"2308: [D loss: 0.678180, acc: 0.585938] [A loss: 0.735103, acc: 0.398438]\n",
"2309: [D loss: 0.707121, acc: 0.539062] [A loss: 0.922542, acc: 0.156250]\n",
"2310: [D loss: 0.701682, acc: 0.515625] [A loss: 0.743064, acc: 0.460938]\n",
"2311: [D loss: 0.718024, acc: 0.500000] [A loss: 0.933556, acc: 0.097656]\n",
"2312: [D loss: 0.698404, acc: 0.519531] [A loss: 0.803750, acc: 0.324219]\n",
"2313: [D loss: 0.685300, acc: 0.587891] [A loss: 0.913940, acc: 0.156250]\n",
"2314: [D loss: 0.682440, acc: 0.574219] [A loss: 0.779563, acc: 0.320312]\n",
"2315: [D loss: 0.700630, acc: 0.541016] [A loss: 0.849722, acc: 0.210938]\n",
"2316: [D loss: 0.674860, acc: 0.570312] [A loss: 0.776270, acc: 0.343750]\n",
"2317: [D loss: 0.690591, acc: 0.548828] [A loss: 0.879402, acc: 0.179688]\n",
"2318: [D loss: 0.689692, acc: 0.541016] [A loss: 0.829190, acc: 0.234375]\n",
"2319: [D loss: 0.704350, acc: 0.500000] [A loss: 0.869406, acc: 0.207031]\n",
"2320: [D loss: 0.706528, acc: 0.537109] [A loss: 0.876808, acc: 0.214844]\n",
"2321: [D loss: 0.690987, acc: 0.566406] [A loss: 0.823237, acc: 0.269531]\n",
"2322: [D loss: 0.683140, acc: 0.566406] [A loss: 0.885575, acc: 0.171875]\n",
"2323: [D loss: 0.683144, acc: 0.589844] [A loss: 0.802189, acc: 0.281250]\n",
"2324: [D loss: 0.683984, acc: 0.570312] [A loss: 0.899660, acc: 0.152344]\n",
"2325: [D loss: 0.693793, acc: 0.519531] [A loss: 0.769211, acc: 0.386719]\n",
"2326: [D loss: 0.675538, acc: 0.582031] [A loss: 0.911039, acc: 0.187500]\n",
"2327: [D loss: 0.687254, acc: 0.542969] [A loss: 0.784882, acc: 0.316406]\n",
"2328: [D loss: 0.705562, acc: 0.521484] [A loss: 0.928505, acc: 0.121094]\n",
"2329: [D loss: 0.706259, acc: 0.503906] [A loss: 0.728038, acc: 0.425781]\n",
"2330: [D loss: 0.695123, acc: 0.556641] [A loss: 0.921504, acc: 0.125000]\n",
"2331: [D loss: 0.693468, acc: 0.564453] [A loss: 0.769917, acc: 0.363281]\n",
"2332: [D loss: 0.708011, acc: 0.527344] [A loss: 0.869843, acc: 0.238281]\n",
"2333: [D loss: 0.688932, acc: 0.535156] [A loss: 0.800220, acc: 0.285156]\n",
"2334: [D loss: 0.685711, acc: 0.537109] [A loss: 0.893755, acc: 0.160156]\n",
"2335: [D loss: 0.690586, acc: 0.544922] [A loss: 0.819987, acc: 0.246094]\n",
"2336: [D loss: 0.691538, acc: 0.562500] [A loss: 0.964721, acc: 0.136719]\n",
"2337: [D loss: 0.683310, acc: 0.582031] [A loss: 0.743344, acc: 0.406250]\n",
"2338: [D loss: 0.694647, acc: 0.535156] [A loss: 0.962270, acc: 0.105469]\n",
"2339: [D loss: 0.719240, acc: 0.492188] [A loss: 0.699375, acc: 0.472656]\n",
"2340: [D loss: 0.699440, acc: 0.527344] [A loss: 0.994455, acc: 0.082031]\n",
"2341: [D loss: 0.694407, acc: 0.541016] [A loss: 0.700992, acc: 0.488281]\n",
"2342: [D loss: 0.715012, acc: 0.519531] [A loss: 0.903250, acc: 0.132812]\n",
"2343: [D loss: 0.687157, acc: 0.550781] [A loss: 0.834845, acc: 0.230469]\n",
"2344: [D loss: 0.679683, acc: 0.568359] [A loss: 0.856748, acc: 0.246094]\n",
"2345: [D loss: 0.696341, acc: 0.550781] [A loss: 0.856901, acc: 0.210938]\n",
"2346: [D loss: 0.698111, acc: 0.535156] [A loss: 0.851301, acc: 0.226562]\n",
"2347: [D loss: 0.710775, acc: 0.511719] [A loss: 0.909666, acc: 0.160156]\n",
"2348: [D loss: 0.699413, acc: 0.523438] [A loss: 0.789744, acc: 0.308594]\n",
"2349: [D loss: 0.699655, acc: 0.527344] [A loss: 0.945980, acc: 0.117188]\n",
"2350: [D loss: 0.681313, acc: 0.556641] [A loss: 0.730534, acc: 0.433594]\n",
"2351: [D loss: 0.715172, acc: 0.507812] [A loss: 0.963369, acc: 0.097656]\n",
"2352: [D loss: 0.684349, acc: 0.548828] [A loss: 0.751415, acc: 0.402344]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2353: [D loss: 0.710531, acc: 0.519531] [A loss: 0.995926, acc: 0.066406]\n",
"2354: [D loss: 0.689968, acc: 0.548828] [A loss: 0.722144, acc: 0.414062]\n",
"2355: [D loss: 0.718727, acc: 0.531250] [A loss: 0.932470, acc: 0.113281]\n",
"2356: [D loss: 0.687875, acc: 0.546875] [A loss: 0.752237, acc: 0.359375]\n",
"2357: [D loss: 0.697961, acc: 0.529297] [A loss: 0.881380, acc: 0.164062]\n",
"2358: [D loss: 0.674127, acc: 0.580078] [A loss: 0.802273, acc: 0.277344]\n",
"2359: [D loss: 0.687189, acc: 0.552734] [A loss: 0.868344, acc: 0.210938]\n",
"2360: [D loss: 0.689909, acc: 0.564453] [A loss: 0.849129, acc: 0.218750]\n",
"2361: [D loss: 0.702564, acc: 0.511719] [A loss: 0.811501, acc: 0.281250]\n",
"2362: [D loss: 0.691574, acc: 0.523438] [A loss: 0.887640, acc: 0.148438]\n",
"2363: [D loss: 0.691631, acc: 0.525391] [A loss: 0.741472, acc: 0.414062]\n",
"2364: [D loss: 0.708942, acc: 0.529297] [A loss: 0.983194, acc: 0.109375]\n",
"2365: [D loss: 0.684190, acc: 0.582031] [A loss: 0.713771, acc: 0.472656]\n",
"2366: [D loss: 0.707902, acc: 0.517578] [A loss: 0.945414, acc: 0.121094]\n",
"2367: [D loss: 0.684899, acc: 0.544922] [A loss: 0.770362, acc: 0.386719]\n",
"2368: [D loss: 0.690013, acc: 0.552734] [A loss: 0.935288, acc: 0.132812]\n",
"2369: [D loss: 0.684284, acc: 0.546875] [A loss: 0.719200, acc: 0.464844]\n",
"2370: [D loss: 0.725997, acc: 0.515625] [A loss: 0.953895, acc: 0.113281]\n",
"2371: [D loss: 0.693941, acc: 0.550781] [A loss: 0.741335, acc: 0.398438]\n",
"2372: [D loss: 0.702342, acc: 0.544922] [A loss: 0.888626, acc: 0.175781]\n",
"2373: [D loss: 0.700742, acc: 0.548828] [A loss: 0.858627, acc: 0.175781]\n",
"2374: [D loss: 0.705877, acc: 0.521484] [A loss: 0.884616, acc: 0.203125]\n",
"2375: [D loss: 0.685110, acc: 0.533203] [A loss: 0.790295, acc: 0.308594]\n",
"2376: [D loss: 0.690963, acc: 0.548828] [A loss: 0.871496, acc: 0.195312]\n",
"2377: [D loss: 0.682856, acc: 0.591797] [A loss: 0.833811, acc: 0.246094]\n",
"2378: [D loss: 0.696056, acc: 0.529297] [A loss: 0.852316, acc: 0.187500]\n",
"2379: [D loss: 0.686070, acc: 0.589844] [A loss: 0.779827, acc: 0.273438]\n",
"2380: [D loss: 0.696448, acc: 0.556641] [A loss: 0.836766, acc: 0.230469]\n",
"2381: [D loss: 0.716836, acc: 0.478516] [A loss: 0.807242, acc: 0.289062]\n",
"2382: [D loss: 0.688917, acc: 0.548828] [A loss: 0.883172, acc: 0.167969]\n",
"2383: [D loss: 0.689010, acc: 0.552734] [A loss: 0.797267, acc: 0.273438]\n",
"2384: [D loss: 0.691241, acc: 0.544922] [A loss: 0.844931, acc: 0.218750]\n",
"2385: [D loss: 0.692024, acc: 0.541016] [A loss: 0.685592, acc: 0.574219]\n",
"2386: [D loss: 0.722962, acc: 0.511719] [A loss: 1.038278, acc: 0.050781]\n",
"2387: [D loss: 0.695424, acc: 0.523438] [A loss: 0.707434, acc: 0.488281]\n",
"2388: [D loss: 0.705103, acc: 0.531250] [A loss: 0.926320, acc: 0.128906]\n",
"2389: [D loss: 0.685360, acc: 0.574219] [A loss: 0.764639, acc: 0.339844]\n",
"2390: [D loss: 0.705248, acc: 0.505859] [A loss: 0.913197, acc: 0.171875]\n",
"2391: [D loss: 0.685539, acc: 0.558594] [A loss: 0.776399, acc: 0.417969]\n",
"2392: [D loss: 0.719332, acc: 0.498047] [A loss: 0.928314, acc: 0.156250]\n",
"2393: [D loss: 0.687521, acc: 0.560547] [A loss: 0.811862, acc: 0.285156]\n",
"2394: [D loss: 0.710874, acc: 0.507812] [A loss: 0.880900, acc: 0.183594]\n",
"2395: [D loss: 0.685881, acc: 0.560547] [A loss: 0.807010, acc: 0.296875]\n",
"2396: [D loss: 0.694350, acc: 0.548828] [A loss: 0.910417, acc: 0.167969]\n",
"2397: [D loss: 0.682900, acc: 0.556641] [A loss: 0.730654, acc: 0.433594]\n",
"2398: [D loss: 0.704536, acc: 0.529297] [A loss: 0.928586, acc: 0.125000]\n",
"2399: [D loss: 0.686662, acc: 0.535156] [A loss: 0.803431, acc: 0.316406]\n",
"2400: [D loss: 0.705323, acc: 0.501953] [A loss: 0.909586, acc: 0.171875]\n",
"2401: [D loss: 0.699422, acc: 0.500000] [A loss: 0.767635, acc: 0.390625]\n",
"2402: [D loss: 0.697482, acc: 0.550781] [A loss: 0.897449, acc: 0.191406]\n",
"2403: [D loss: 0.681014, acc: 0.587891] [A loss: 0.823963, acc: 0.238281]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2404: [D loss: 0.693854, acc: 0.541016] [A loss: 0.869025, acc: 0.164062]\n",
"2405: [D loss: 0.689185, acc: 0.558594] [A loss: 0.887183, acc: 0.179688]\n",
"2406: [D loss: 0.703372, acc: 0.523438] [A loss: 0.853477, acc: 0.234375]\n",
"2407: [D loss: 0.678625, acc: 0.556641] [A loss: 0.837211, acc: 0.207031]\n",
"2408: [D loss: 0.696870, acc: 0.535156] [A loss: 0.854439, acc: 0.195312]\n",
"2409: [D loss: 0.693882, acc: 0.513672] [A loss: 0.846414, acc: 0.222656]\n",
"2410: [D loss: 0.702606, acc: 0.523438] [A loss: 0.913626, acc: 0.125000]\n",
"2411: [D loss: 0.681190, acc: 0.542969] [A loss: 0.762071, acc: 0.363281]\n",
"2412: [D loss: 0.705690, acc: 0.513672] [A loss: 1.028763, acc: 0.074219]\n",
"2413: [D loss: 0.687436, acc: 0.541016] [A loss: 0.700680, acc: 0.507812]\n",
"2414: [D loss: 0.712672, acc: 0.523438] [A loss: 1.079639, acc: 0.062500]\n",
"2415: [D loss: 0.698834, acc: 0.513672] [A loss: 0.619385, acc: 0.699219]\n",
"2416: [D loss: 0.721629, acc: 0.513672] [A loss: 1.019433, acc: 0.054688]\n",
"2417: [D loss: 0.700589, acc: 0.505859] [A loss: 0.740210, acc: 0.441406]\n",
"2418: [D loss: 0.712298, acc: 0.509766] [A loss: 0.881671, acc: 0.167969]\n",
"2419: [D loss: 0.692088, acc: 0.527344] [A loss: 0.772164, acc: 0.371094]\n",
"2420: [D loss: 0.703989, acc: 0.507812] [A loss: 0.843028, acc: 0.214844]\n",
"2421: [D loss: 0.696670, acc: 0.525391] [A loss: 0.830227, acc: 0.257812]\n",
"2422: [D loss: 0.700765, acc: 0.539062] [A loss: 0.835167, acc: 0.210938]\n",
"2423: [D loss: 0.691599, acc: 0.554688] [A loss: 0.764245, acc: 0.367188]\n",
"2424: [D loss: 0.700667, acc: 0.505859] [A loss: 0.877480, acc: 0.203125]\n",
"2425: [D loss: 0.699172, acc: 0.523438] [A loss: 0.785888, acc: 0.332031]\n",
"2426: [D loss: 0.713781, acc: 0.517578] [A loss: 0.871022, acc: 0.148438]\n",
"2427: [D loss: 0.686598, acc: 0.574219] [A loss: 0.793163, acc: 0.304688]\n",
"2428: [D loss: 0.699587, acc: 0.544922] [A loss: 0.884897, acc: 0.156250]\n",
"2429: [D loss: 0.672616, acc: 0.583984] [A loss: 0.811204, acc: 0.261719]\n",
"2430: [D loss: 0.699163, acc: 0.548828] [A loss: 0.862344, acc: 0.191406]\n",
"2431: [D loss: 0.678044, acc: 0.572266] [A loss: 0.825831, acc: 0.253906]\n",
"2432: [D loss: 0.699322, acc: 0.539062] [A loss: 0.849734, acc: 0.222656]\n",
"2433: [D loss: 0.684291, acc: 0.552734] [A loss: 0.776276, acc: 0.339844]\n",
"2434: [D loss: 0.698382, acc: 0.546875] [A loss: 0.903490, acc: 0.160156]\n",
"2435: [D loss: 0.703170, acc: 0.503906] [A loss: 0.785837, acc: 0.324219]\n",
"2436: [D loss: 0.697119, acc: 0.539062] [A loss: 0.939782, acc: 0.148438]\n",
"2437: [D loss: 0.681047, acc: 0.552734] [A loss: 0.751524, acc: 0.398438]\n",
"2438: [D loss: 0.706646, acc: 0.519531] [A loss: 0.904231, acc: 0.136719]\n",
"2439: [D loss: 0.694063, acc: 0.513672] [A loss: 0.874081, acc: 0.179688]\n",
"2440: [D loss: 0.700076, acc: 0.527344] [A loss: 0.786968, acc: 0.328125]\n",
"2441: [D loss: 0.704294, acc: 0.509766] [A loss: 1.028191, acc: 0.074219]\n",
"2442: [D loss: 0.681034, acc: 0.546875] [A loss: 0.681977, acc: 0.554688]\n",
"2443: [D loss: 0.703931, acc: 0.542969] [A loss: 0.970656, acc: 0.093750]\n",
"2444: [D loss: 0.693129, acc: 0.519531] [A loss: 0.734771, acc: 0.437500]\n",
"2445: [D loss: 0.708358, acc: 0.537109] [A loss: 0.945270, acc: 0.136719]\n",
"2446: [D loss: 0.697628, acc: 0.523438] [A loss: 0.763566, acc: 0.355469]\n",
"2447: [D loss: 0.694896, acc: 0.539062] [A loss: 0.830019, acc: 0.250000]\n",
"2448: [D loss: 0.684257, acc: 0.572266] [A loss: 0.807155, acc: 0.269531]\n",
"2449: [D loss: 0.685472, acc: 0.568359] [A loss: 0.861081, acc: 0.207031]\n",
"2450: [D loss: 0.684690, acc: 0.562500] [A loss: 0.830036, acc: 0.269531]\n",
"2451: [D loss: 0.709318, acc: 0.519531] [A loss: 0.790459, acc: 0.316406]\n",
"2452: [D loss: 0.697322, acc: 0.554688] [A loss: 0.972448, acc: 0.097656]\n",
"2453: [D loss: 0.699958, acc: 0.548828] [A loss: 0.686671, acc: 0.523438]\n",
"2454: [D loss: 0.720304, acc: 0.517578] [A loss: 1.000154, acc: 0.093750]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2455: [D loss: 0.701834, acc: 0.529297] [A loss: 0.718864, acc: 0.457031]\n",
"2456: [D loss: 0.715739, acc: 0.492188] [A loss: 0.905630, acc: 0.128906]\n",
"2457: [D loss: 0.686739, acc: 0.548828] [A loss: 0.774869, acc: 0.339844]\n",
"2458: [D loss: 0.686789, acc: 0.585938] [A loss: 0.852692, acc: 0.222656]\n",
"2459: [D loss: 0.679903, acc: 0.564453] [A loss: 0.816661, acc: 0.246094]\n",
"2460: [D loss: 0.691587, acc: 0.541016] [A loss: 0.818488, acc: 0.265625]\n",
"2461: [D loss: 0.695544, acc: 0.544922] [A loss: 0.828083, acc: 0.246094]\n",
"2462: [D loss: 0.682665, acc: 0.554688] [A loss: 0.833625, acc: 0.257812]\n",
"2463: [D loss: 0.694997, acc: 0.513672] [A loss: 0.784900, acc: 0.335938]\n",
"2464: [D loss: 0.701487, acc: 0.533203] [A loss: 0.993455, acc: 0.121094]\n",
"2465: [D loss: 0.679189, acc: 0.554688] [A loss: 0.733443, acc: 0.437500]\n",
"2466: [D loss: 0.692469, acc: 0.548828] [A loss: 0.973490, acc: 0.109375]\n",
"2467: [D loss: 0.694207, acc: 0.537109] [A loss: 0.726319, acc: 0.445312]\n",
"2468: [D loss: 0.707903, acc: 0.513672] [A loss: 0.909468, acc: 0.121094]\n",
"2469: [D loss: 0.704334, acc: 0.527344] [A loss: 0.728561, acc: 0.453125]\n",
"2470: [D loss: 0.692701, acc: 0.550781] [A loss: 0.868854, acc: 0.164062]\n",
"2471: [D loss: 0.683609, acc: 0.570312] [A loss: 0.712986, acc: 0.468750]\n",
"2472: [D loss: 0.701561, acc: 0.539062] [A loss: 0.943183, acc: 0.109375]\n",
"2473: [D loss: 0.687732, acc: 0.552734] [A loss: 0.716885, acc: 0.468750]\n",
"2474: [D loss: 0.726460, acc: 0.513672] [A loss: 0.968290, acc: 0.066406]\n",
"2475: [D loss: 0.687646, acc: 0.558594] [A loss: 0.776146, acc: 0.324219]\n",
"2476: [D loss: 0.702362, acc: 0.533203] [A loss: 0.840065, acc: 0.179688]\n",
"2477: [D loss: 0.693860, acc: 0.529297] [A loss: 0.783155, acc: 0.332031]\n",
"2478: [D loss: 0.693368, acc: 0.527344] [A loss: 0.853645, acc: 0.203125]\n",
"2479: [D loss: 0.686322, acc: 0.560547] [A loss: 0.835110, acc: 0.203125]\n",
"2480: [D loss: 0.692767, acc: 0.533203] [A loss: 0.831860, acc: 0.253906]\n",
"2481: [D loss: 0.713308, acc: 0.509766] [A loss: 0.794081, acc: 0.281250]\n",
"2482: [D loss: 0.710549, acc: 0.509766] [A loss: 0.935832, acc: 0.117188]\n",
"2483: [D loss: 0.695277, acc: 0.511719] [A loss: 0.760480, acc: 0.398438]\n",
"2484: [D loss: 0.691971, acc: 0.529297] [A loss: 0.899311, acc: 0.164062]\n",
"2485: [D loss: 0.702970, acc: 0.503906] [A loss: 0.752161, acc: 0.355469]\n",
"2486: [D loss: 0.707851, acc: 0.525391] [A loss: 0.909359, acc: 0.160156]\n",
"2487: [D loss: 0.697369, acc: 0.517578] [A loss: 0.816956, acc: 0.281250]\n",
"2488: [D loss: 0.697052, acc: 0.517578] [A loss: 0.890183, acc: 0.144531]\n",
"2489: [D loss: 0.696183, acc: 0.521484] [A loss: 0.852852, acc: 0.210938]\n",
"2490: [D loss: 0.693710, acc: 0.554688] [A loss: 0.859857, acc: 0.187500]\n",
"2491: [D loss: 0.701392, acc: 0.511719] [A loss: 0.825505, acc: 0.257812]\n",
"2492: [D loss: 0.689953, acc: 0.541016] [A loss: 0.804419, acc: 0.265625]\n",
"2493: [D loss: 0.707191, acc: 0.542969] [A loss: 0.896534, acc: 0.175781]\n",
"2494: [D loss: 0.675305, acc: 0.576172] [A loss: 0.853286, acc: 0.242188]\n",
"2495: [D loss: 0.712156, acc: 0.498047] [A loss: 0.752127, acc: 0.410156]\n",
"2496: [D loss: 0.736468, acc: 0.486328] [A loss: 1.047346, acc: 0.046875]\n",
"2497: [D loss: 0.710445, acc: 0.515625] [A loss: 0.641070, acc: 0.664062]\n",
"2498: [D loss: 0.730258, acc: 0.501953] [A loss: 1.020695, acc: 0.070312]\n",
"2499: [D loss: 0.683611, acc: 0.542969] [A loss: 0.727576, acc: 0.453125]\n",
"2500: [D loss: 0.698206, acc: 0.515625] [A loss: 0.839294, acc: 0.238281]\n",
"2501: [D loss: 0.690676, acc: 0.541016] [A loss: 0.823234, acc: 0.238281]\n",
"2502: [D loss: 0.689901, acc: 0.554688] [A loss: 0.752750, acc: 0.390625]\n",
"2503: [D loss: 0.692262, acc: 0.550781] [A loss: 0.874483, acc: 0.183594]\n",
"2504: [D loss: 0.696676, acc: 0.525391] [A loss: 0.769153, acc: 0.351562]\n",
"2505: [D loss: 0.706495, acc: 0.503906] [A loss: 0.821134, acc: 0.242188]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2506: [D loss: 0.691328, acc: 0.531250] [A loss: 0.809278, acc: 0.273438]\n",
"2507: [D loss: 0.696387, acc: 0.529297] [A loss: 0.859134, acc: 0.187500]\n",
"2508: [D loss: 0.678143, acc: 0.560547] [A loss: 0.829963, acc: 0.261719]\n",
"2509: [D loss: 0.701763, acc: 0.525391] [A loss: 0.870665, acc: 0.179688]\n",
"2510: [D loss: 0.689245, acc: 0.546875] [A loss: 0.841017, acc: 0.250000]\n",
"2511: [D loss: 0.707803, acc: 0.490234] [A loss: 0.834689, acc: 0.222656]\n",
"2512: [D loss: 0.705798, acc: 0.503906] [A loss: 0.879963, acc: 0.152344]\n",
"2513: [D loss: 0.699232, acc: 0.521484] [A loss: 0.851962, acc: 0.199219]\n",
"2514: [D loss: 0.699117, acc: 0.537109] [A loss: 0.774672, acc: 0.316406]\n",
"2515: [D loss: 0.697513, acc: 0.529297] [A loss: 0.938252, acc: 0.113281]\n",
"2516: [D loss: 0.683989, acc: 0.568359] [A loss: 0.709280, acc: 0.488281]\n",
"2517: [D loss: 0.703271, acc: 0.523438] [A loss: 1.001047, acc: 0.066406]\n",
"2518: [D loss: 0.684720, acc: 0.548828] [A loss: 0.698031, acc: 0.492188]\n",
"2519: [D loss: 0.707515, acc: 0.527344] [A loss: 0.962256, acc: 0.097656]\n",
"2520: [D loss: 0.674765, acc: 0.558594] [A loss: 0.713374, acc: 0.460938]\n",
"2521: [D loss: 0.701295, acc: 0.523438] [A loss: 0.959449, acc: 0.105469]\n",
"2522: [D loss: 0.694431, acc: 0.529297] [A loss: 0.713509, acc: 0.472656]\n",
"2523: [D loss: 0.699995, acc: 0.537109] [A loss: 0.903671, acc: 0.136719]\n",
"2524: [D loss: 0.683062, acc: 0.558594] [A loss: 0.836793, acc: 0.257812]\n",
"2525: [D loss: 0.692078, acc: 0.572266] [A loss: 0.818670, acc: 0.261719]\n",
"2526: [D loss: 0.706083, acc: 0.531250] [A loss: 0.858542, acc: 0.218750]\n",
"2527: [D loss: 0.690381, acc: 0.539062] [A loss: 0.837201, acc: 0.242188]\n",
"2528: [D loss: 0.709758, acc: 0.496094] [A loss: 0.820180, acc: 0.246094]\n",
"2529: [D loss: 0.698787, acc: 0.537109] [A loss: 0.887327, acc: 0.210938]\n",
"2530: [D loss: 0.694470, acc: 0.507812] [A loss: 0.824357, acc: 0.300781]\n",
"2531: [D loss: 0.706904, acc: 0.519531] [A loss: 0.826464, acc: 0.261719]\n",
"2532: [D loss: 0.707439, acc: 0.521484] [A loss: 0.929019, acc: 0.136719]\n",
"2533: [D loss: 0.692411, acc: 0.529297] [A loss: 0.774764, acc: 0.332031]\n",
"2534: [D loss: 0.696587, acc: 0.541016] [A loss: 0.913058, acc: 0.140625]\n",
"2535: [D loss: 0.699153, acc: 0.533203] [A loss: 0.763327, acc: 0.351562]\n",
"2536: [D loss: 0.692937, acc: 0.533203] [A loss: 0.951600, acc: 0.097656]\n",
"2537: [D loss: 0.686113, acc: 0.558594] [A loss: 0.724544, acc: 0.457031]\n",
"2538: [D loss: 0.710948, acc: 0.515625] [A loss: 0.898359, acc: 0.171875]\n",
"2539: [D loss: 0.694616, acc: 0.535156] [A loss: 0.824277, acc: 0.250000]\n",
"2540: [D loss: 0.706556, acc: 0.519531] [A loss: 0.841867, acc: 0.203125]\n",
"2541: [D loss: 0.695782, acc: 0.509766] [A loss: 0.862515, acc: 0.207031]\n",
"2542: [D loss: 0.697150, acc: 0.535156] [A loss: 0.765827, acc: 0.332031]\n",
"2543: [D loss: 0.710940, acc: 0.542969] [A loss: 0.944422, acc: 0.089844]\n",
"2544: [D loss: 0.692975, acc: 0.527344] [A loss: 0.736586, acc: 0.441406]\n",
"2545: [D loss: 0.711183, acc: 0.552734] [A loss: 0.950844, acc: 0.125000]\n",
"2546: [D loss: 0.689167, acc: 0.546875] [A loss: 0.719749, acc: 0.425781]\n",
"2547: [D loss: 0.694251, acc: 0.552734] [A loss: 0.879279, acc: 0.167969]\n",
"2548: [D loss: 0.691073, acc: 0.519531] [A loss: 0.709874, acc: 0.476562]\n",
"2549: [D loss: 0.714198, acc: 0.533203] [A loss: 0.893174, acc: 0.132812]\n",
"2550: [D loss: 0.688540, acc: 0.550781] [A loss: 0.746958, acc: 0.402344]\n",
"2551: [D loss: 0.691635, acc: 0.546875] [A loss: 0.834386, acc: 0.238281]\n",
"2552: [D loss: 0.688744, acc: 0.539062] [A loss: 0.875500, acc: 0.171875]\n",
"2553: [D loss: 0.693142, acc: 0.550781] [A loss: 0.789917, acc: 0.320312]\n",
"2554: [D loss: 0.682225, acc: 0.576172] [A loss: 0.853397, acc: 0.250000]\n",
"2555: [D loss: 0.707343, acc: 0.509766] [A loss: 0.780627, acc: 0.351562]\n",
"2556: [D loss: 0.699953, acc: 0.531250] [A loss: 0.876212, acc: 0.164062]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2557: [D loss: 0.690248, acc: 0.542969] [A loss: 0.879618, acc: 0.210938]\n",
"2558: [D loss: 0.689618, acc: 0.548828] [A loss: 0.884434, acc: 0.167969]\n",
"2559: [D loss: 0.679191, acc: 0.568359] [A loss: 0.784506, acc: 0.312500]\n",
"2560: [D loss: 0.697324, acc: 0.527344] [A loss: 0.906669, acc: 0.117188]\n",
"2561: [D loss: 0.681496, acc: 0.568359] [A loss: 0.793063, acc: 0.300781]\n",
"2562: [D loss: 0.707377, acc: 0.501953] [A loss: 0.935792, acc: 0.074219]\n",
"2563: [D loss: 0.682244, acc: 0.546875] [A loss: 0.767283, acc: 0.394531]\n",
"2564: [D loss: 0.708759, acc: 0.515625] [A loss: 1.038873, acc: 0.082031]\n",
"2565: [D loss: 0.703429, acc: 0.531250] [A loss: 0.681871, acc: 0.531250]\n",
"2566: [D loss: 0.711318, acc: 0.515625] [A loss: 0.991128, acc: 0.093750]\n",
"2567: [D loss: 0.712862, acc: 0.501953] [A loss: 0.740313, acc: 0.378906]\n",
"2568: [D loss: 0.702075, acc: 0.500000] [A loss: 0.832286, acc: 0.230469]\n",
"2569: [D loss: 0.686161, acc: 0.560547] [A loss: 0.798116, acc: 0.308594]\n",
"2570: [D loss: 0.690773, acc: 0.541016] [A loss: 0.796526, acc: 0.300781]\n",
"2571: [D loss: 0.695928, acc: 0.558594] [A loss: 0.832341, acc: 0.218750]\n",
"2572: [D loss: 0.688756, acc: 0.562500] [A loss: 0.749768, acc: 0.355469]\n",
"2573: [D loss: 0.694465, acc: 0.550781] [A loss: 0.933097, acc: 0.117188]\n",
"2574: [D loss: 0.702419, acc: 0.515625] [A loss: 0.775434, acc: 0.316406]\n",
"2575: [D loss: 0.712309, acc: 0.494141] [A loss: 0.972716, acc: 0.097656]\n",
"2576: [D loss: 0.693216, acc: 0.533203] [A loss: 0.761287, acc: 0.375000]\n",
"2577: [D loss: 0.716653, acc: 0.498047] [A loss: 0.942736, acc: 0.144531]\n",
"2578: [D loss: 0.702507, acc: 0.494141] [A loss: 0.733942, acc: 0.437500]\n",
"2579: [D loss: 0.702741, acc: 0.525391] [A loss: 0.917185, acc: 0.121094]\n",
"2580: [D loss: 0.687817, acc: 0.566406] [A loss: 0.723794, acc: 0.457031]\n",
"2581: [D loss: 0.708257, acc: 0.517578] [A loss: 0.826376, acc: 0.265625]\n",
"2582: [D loss: 0.692123, acc: 0.529297] [A loss: 0.865682, acc: 0.210938]\n",
"2583: [D loss: 0.691490, acc: 0.531250] [A loss: 0.741225, acc: 0.390625]\n",
"2584: [D loss: 0.702379, acc: 0.523438] [A loss: 0.883209, acc: 0.179688]\n",
"2585: [D loss: 0.693700, acc: 0.535156] [A loss: 0.826940, acc: 0.210938]\n",
"2586: [D loss: 0.696941, acc: 0.544922] [A loss: 0.865349, acc: 0.199219]\n",
"2587: [D loss: 0.691548, acc: 0.544922] [A loss: 0.723650, acc: 0.421875]\n",
"2588: [D loss: 0.716507, acc: 0.519531] [A loss: 1.013957, acc: 0.085938]\n",
"2589: [D loss: 0.687227, acc: 0.539062] [A loss: 0.699346, acc: 0.523438]\n",
"2590: [D loss: 0.714948, acc: 0.519531] [A loss: 0.932413, acc: 0.121094]\n",
"2591: [D loss: 0.699835, acc: 0.546875] [A loss: 0.751086, acc: 0.359375]\n",
"2592: [D loss: 0.713229, acc: 0.500000] [A loss: 0.878810, acc: 0.132812]\n",
"2593: [D loss: 0.678562, acc: 0.568359] [A loss: 0.789848, acc: 0.281250]\n",
"2594: [D loss: 0.696091, acc: 0.542969] [A loss: 0.896251, acc: 0.179688]\n",
"2595: [D loss: 0.715417, acc: 0.478516] [A loss: 0.801490, acc: 0.273438]\n",
"2596: [D loss: 0.697936, acc: 0.525391] [A loss: 0.858946, acc: 0.207031]\n",
"2597: [D loss: 0.675539, acc: 0.599609] [A loss: 0.810263, acc: 0.269531]\n",
"2598: [D loss: 0.697097, acc: 0.554688] [A loss: 0.835580, acc: 0.214844]\n",
"2599: [D loss: 0.688210, acc: 0.529297] [A loss: 0.814129, acc: 0.265625]\n",
"2600: [D loss: 0.692747, acc: 0.531250] [A loss: 0.924277, acc: 0.125000]\n",
"2601: [D loss: 0.698955, acc: 0.542969] [A loss: 0.769438, acc: 0.335938]\n",
"2602: [D loss: 0.680538, acc: 0.537109] [A loss: 0.897263, acc: 0.156250]\n",
"2603: [D loss: 0.681688, acc: 0.556641] [A loss: 0.752685, acc: 0.386719]\n",
"2604: [D loss: 0.707093, acc: 0.548828] [A loss: 0.945055, acc: 0.109375]\n",
"2605: [D loss: 0.675639, acc: 0.580078] [A loss: 0.767742, acc: 0.371094]\n",
"2606: [D loss: 0.706637, acc: 0.517578] [A loss: 0.944900, acc: 0.164062]\n",
"2607: [D loss: 0.702925, acc: 0.494141] [A loss: 0.705680, acc: 0.492188]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2608: [D loss: 0.714671, acc: 0.507812] [A loss: 0.923822, acc: 0.132812]\n",
"2609: [D loss: 0.682379, acc: 0.550781] [A loss: 0.747975, acc: 0.410156]\n",
"2610: [D loss: 0.693952, acc: 0.550781] [A loss: 0.864434, acc: 0.183594]\n",
"2611: [D loss: 0.694050, acc: 0.513672] [A loss: 0.772660, acc: 0.378906]\n",
"2612: [D loss: 0.701860, acc: 0.505859] [A loss: 0.912088, acc: 0.144531]\n",
"2613: [D loss: 0.700968, acc: 0.527344] [A loss: 0.824086, acc: 0.285156]\n",
"2614: [D loss: 0.700857, acc: 0.531250] [A loss: 0.889979, acc: 0.128906]\n",
"2615: [D loss: 0.694975, acc: 0.542969] [A loss: 0.771380, acc: 0.359375]\n",
"2616: [D loss: 0.693680, acc: 0.535156] [A loss: 0.908810, acc: 0.171875]\n",
"2617: [D loss: 0.688262, acc: 0.564453] [A loss: 0.788952, acc: 0.304688]\n",
"2618: [D loss: 0.706737, acc: 0.531250] [A loss: 0.894777, acc: 0.144531]\n",
"2619: [D loss: 0.693635, acc: 0.542969] [A loss: 0.757889, acc: 0.390625]\n",
"2620: [D loss: 0.711056, acc: 0.505859] [A loss: 0.887297, acc: 0.175781]\n",
"2621: [D loss: 0.701065, acc: 0.531250] [A loss: 0.790537, acc: 0.328125]\n",
"2622: [D loss: 0.703629, acc: 0.542969] [A loss: 0.981291, acc: 0.070312]\n",
"2623: [D loss: 0.690762, acc: 0.546875] [A loss: 0.701571, acc: 0.503906]\n",
"2624: [D loss: 0.707960, acc: 0.519531] [A loss: 0.989792, acc: 0.070312]\n",
"2625: [D loss: 0.694417, acc: 0.523438] [A loss: 0.737406, acc: 0.402344]\n",
"2626: [D loss: 0.697181, acc: 0.531250] [A loss: 0.862445, acc: 0.183594]\n",
"2627: [D loss: 0.707030, acc: 0.496094] [A loss: 0.814532, acc: 0.246094]\n",
"2628: [D loss: 0.696745, acc: 0.529297] [A loss: 0.788352, acc: 0.328125]\n",
"2629: [D loss: 0.700427, acc: 0.507812] [A loss: 0.884647, acc: 0.144531]\n",
"2630: [D loss: 0.693750, acc: 0.529297] [A loss: 0.837089, acc: 0.257812]\n",
"2631: [D loss: 0.696251, acc: 0.535156] [A loss: 0.847446, acc: 0.234375]\n",
"2632: [D loss: 0.686437, acc: 0.539062] [A loss: 0.817309, acc: 0.281250]\n",
"2633: [D loss: 0.687783, acc: 0.529297] [A loss: 0.855476, acc: 0.187500]\n",
"2634: [D loss: 0.700316, acc: 0.513672] [A loss: 0.851239, acc: 0.203125]\n",
"2635: [D loss: 0.684740, acc: 0.548828] [A loss: 0.854928, acc: 0.234375]\n",
"2636: [D loss: 0.697862, acc: 0.539062] [A loss: 0.865786, acc: 0.218750]\n",
"2637: [D loss: 0.689090, acc: 0.541016] [A loss: 0.841828, acc: 0.242188]\n",
"2638: [D loss: 0.692532, acc: 0.542969] [A loss: 0.810285, acc: 0.277344]\n",
"2639: [D loss: 0.703834, acc: 0.507812] [A loss: 0.872287, acc: 0.214844]\n",
"2640: [D loss: 0.695776, acc: 0.546875] [A loss: 0.805278, acc: 0.281250]\n",
"2641: [D loss: 0.692187, acc: 0.542969] [A loss: 0.868696, acc: 0.199219]\n",
"2642: [D loss: 0.693309, acc: 0.541016] [A loss: 0.872879, acc: 0.160156]\n",
"2643: [D loss: 0.713607, acc: 0.505859] [A loss: 0.866715, acc: 0.214844]\n",
"2644: [D loss: 0.684107, acc: 0.593750] [A loss: 0.802376, acc: 0.292969]\n",
"2645: [D loss: 0.703441, acc: 0.548828] [A loss: 0.949215, acc: 0.121094]\n",
"2646: [D loss: 0.684678, acc: 0.578125] [A loss: 0.725638, acc: 0.457031]\n",
"2647: [D loss: 0.705291, acc: 0.505859] [A loss: 0.983820, acc: 0.101562]\n",
"2648: [D loss: 0.694488, acc: 0.554688] [A loss: 0.658047, acc: 0.636719]\n",
"2649: [D loss: 0.758851, acc: 0.513672] [A loss: 1.032478, acc: 0.070312]\n",
"2650: [D loss: 0.718817, acc: 0.500000] [A loss: 0.713125, acc: 0.484375]\n",
"2651: [D loss: 0.715689, acc: 0.509766] [A loss: 0.831099, acc: 0.234375]\n",
"2652: [D loss: 0.705719, acc: 0.511719] [A loss: 0.799962, acc: 0.296875]\n",
"2653: [D loss: 0.708158, acc: 0.500000] [A loss: 0.862503, acc: 0.171875]\n",
"2654: [D loss: 0.691866, acc: 0.531250] [A loss: 0.767508, acc: 0.328125]\n",
"2655: [D loss: 0.694718, acc: 0.546875] [A loss: 0.863517, acc: 0.179688]\n",
"2656: [D loss: 0.697733, acc: 0.509766] [A loss: 0.778129, acc: 0.308594]\n",
"2657: [D loss: 0.692578, acc: 0.521484] [A loss: 0.887482, acc: 0.144531]\n",
"2658: [D loss: 0.681050, acc: 0.546875] [A loss: 0.751922, acc: 0.363281]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2659: [D loss: 0.703244, acc: 0.537109] [A loss: 0.882999, acc: 0.203125]\n",
"2660: [D loss: 0.682624, acc: 0.548828] [A loss: 0.799685, acc: 0.265625]\n",
"2661: [D loss: 0.703786, acc: 0.523438] [A loss: 0.908939, acc: 0.195312]\n",
"2662: [D loss: 0.689839, acc: 0.531250] [A loss: 0.811740, acc: 0.234375]\n",
"2663: [D loss: 0.691924, acc: 0.519531] [A loss: 0.866206, acc: 0.179688]\n",
"2664: [D loss: 0.682538, acc: 0.548828] [A loss: 0.806128, acc: 0.269531]\n",
"2665: [D loss: 0.704179, acc: 0.509766] [A loss: 0.952984, acc: 0.117188]\n",
"2666: [D loss: 0.696527, acc: 0.556641] [A loss: 0.744990, acc: 0.382812]\n",
"2667: [D loss: 0.694851, acc: 0.564453] [A loss: 0.888882, acc: 0.160156]\n",
"2668: [D loss: 0.692698, acc: 0.535156] [A loss: 0.827078, acc: 0.250000]\n",
"2669: [D loss: 0.692780, acc: 0.544922] [A loss: 0.862425, acc: 0.250000]\n",
"2670: [D loss: 0.706290, acc: 0.503906] [A loss: 0.915411, acc: 0.187500]\n",
"2671: [D loss: 0.714731, acc: 0.494141] [A loss: 0.812406, acc: 0.253906]\n",
"2672: [D loss: 0.709899, acc: 0.480469] [A loss: 0.911301, acc: 0.136719]\n",
"2673: [D loss: 0.705594, acc: 0.517578] [A loss: 0.774145, acc: 0.359375]\n",
"2674: [D loss: 0.704314, acc: 0.511719] [A loss: 0.921012, acc: 0.144531]\n",
"2675: [D loss: 0.697831, acc: 0.529297] [A loss: 0.865432, acc: 0.187500]\n",
"2676: [D loss: 0.695709, acc: 0.548828] [A loss: 0.841289, acc: 0.226562]\n",
"2677: [D loss: 0.681942, acc: 0.552734] [A loss: 0.842860, acc: 0.242188]\n",
"2678: [D loss: 0.680447, acc: 0.570312] [A loss: 0.814965, acc: 0.253906]\n",
"2679: [D loss: 0.716143, acc: 0.490234] [A loss: 0.827456, acc: 0.265625]\n",
"2680: [D loss: 0.675437, acc: 0.589844] [A loss: 0.853223, acc: 0.199219]\n",
"2681: [D loss: 0.699452, acc: 0.515625] [A loss: 0.837184, acc: 0.230469]\n",
"2682: [D loss: 0.674529, acc: 0.582031] [A loss: 0.888082, acc: 0.171875]\n",
"2683: [D loss: 0.693660, acc: 0.554688] [A loss: 0.807676, acc: 0.257812]\n",
"2684: [D loss: 0.702801, acc: 0.535156] [A loss: 0.912060, acc: 0.164062]\n",
"2685: [D loss: 0.689159, acc: 0.552734] [A loss: 0.733910, acc: 0.417969]\n",
"2686: [D loss: 0.707603, acc: 0.533203] [A loss: 0.976903, acc: 0.085938]\n",
"2687: [D loss: 0.678432, acc: 0.582031] [A loss: 0.682572, acc: 0.558594]\n",
"2688: [D loss: 0.714183, acc: 0.513672] [A loss: 0.919766, acc: 0.113281]\n",
"2689: [D loss: 0.700904, acc: 0.546875] [A loss: 0.734203, acc: 0.417969]\n",
"2690: [D loss: 0.696709, acc: 0.541016] [A loss: 0.902547, acc: 0.144531]\n",
"2691: [D loss: 0.684710, acc: 0.541016] [A loss: 0.765296, acc: 0.343750]\n",
"2692: [D loss: 0.699432, acc: 0.544922] [A loss: 0.944345, acc: 0.167969]\n",
"2693: [D loss: 0.703089, acc: 0.519531] [A loss: 0.725446, acc: 0.437500]\n",
"2694: [D loss: 0.705136, acc: 0.529297] [A loss: 1.000546, acc: 0.085938]\n",
"2695: [D loss: 0.693283, acc: 0.521484] [A loss: 0.710124, acc: 0.527344]\n",
"2696: [D loss: 0.709331, acc: 0.521484] [A loss: 0.976755, acc: 0.117188]\n",
"2697: [D loss: 0.695702, acc: 0.542969] [A loss: 0.755660, acc: 0.351562]\n",
"2698: [D loss: 0.704285, acc: 0.542969] [A loss: 0.916002, acc: 0.136719]\n",
"2699: [D loss: 0.698317, acc: 0.521484] [A loss: 0.785820, acc: 0.285156]\n",
"2700: [D loss: 0.688899, acc: 0.564453] [A loss: 0.835370, acc: 0.250000]\n",
"2701: [D loss: 0.687702, acc: 0.525391] [A loss: 0.865488, acc: 0.191406]\n",
"2702: [D loss: 0.696101, acc: 0.513672] [A loss: 0.898802, acc: 0.128906]\n",
"2703: [D loss: 0.686872, acc: 0.541016] [A loss: 0.797939, acc: 0.316406]\n",
"2704: [D loss: 0.695668, acc: 0.539062] [A loss: 0.928419, acc: 0.152344]\n",
"2705: [D loss: 0.689018, acc: 0.535156] [A loss: 0.767545, acc: 0.355469]\n",
"2706: [D loss: 0.698462, acc: 0.539062] [A loss: 0.894448, acc: 0.156250]\n",
"2707: [D loss: 0.698018, acc: 0.515625] [A loss: 0.803269, acc: 0.308594]\n",
"2708: [D loss: 0.720095, acc: 0.503906] [A loss: 0.985285, acc: 0.093750]\n",
"2709: [D loss: 0.690620, acc: 0.568359] [A loss: 0.672693, acc: 0.554688]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2710: [D loss: 0.721831, acc: 0.494141] [A loss: 0.941926, acc: 0.109375]\n",
"2711: [D loss: 0.706393, acc: 0.498047] [A loss: 0.781433, acc: 0.339844]\n",
"2712: [D loss: 0.708199, acc: 0.500000] [A loss: 0.894551, acc: 0.171875]\n",
"2713: [D loss: 0.704962, acc: 0.515625] [A loss: 0.791719, acc: 0.304688]\n",
"2714: [D loss: 0.703287, acc: 0.525391] [A loss: 0.847596, acc: 0.214844]\n",
"2715: [D loss: 0.694447, acc: 0.523438] [A loss: 0.780899, acc: 0.320312]\n",
"2716: [D loss: 0.687952, acc: 0.550781] [A loss: 0.856080, acc: 0.203125]\n",
"2717: [D loss: 0.704610, acc: 0.523438] [A loss: 0.925112, acc: 0.148438]\n",
"2718: [D loss: 0.689144, acc: 0.537109] [A loss: 0.759465, acc: 0.351562]\n",
"2719: [D loss: 0.698861, acc: 0.535156] [A loss: 0.918604, acc: 0.109375]\n",
"2720: [D loss: 0.704759, acc: 0.521484] [A loss: 0.733419, acc: 0.441406]\n",
"2721: [D loss: 0.706322, acc: 0.519531] [A loss: 0.913476, acc: 0.160156]\n",
"2722: [D loss: 0.688608, acc: 0.546875] [A loss: 0.701335, acc: 0.457031]\n",
"2723: [D loss: 0.715234, acc: 0.515625] [A loss: 0.942025, acc: 0.105469]\n",
"2724: [D loss: 0.701785, acc: 0.531250] [A loss: 0.804947, acc: 0.300781]\n",
"2725: [D loss: 0.699393, acc: 0.519531] [A loss: 0.787595, acc: 0.332031]\n",
"2726: [D loss: 0.696050, acc: 0.533203] [A loss: 0.767254, acc: 0.367188]\n",
"2727: [D loss: 0.708106, acc: 0.519531] [A loss: 0.924023, acc: 0.136719]\n",
"2728: [D loss: 0.678080, acc: 0.552734] [A loss: 0.761591, acc: 0.394531]\n",
"2729: [D loss: 0.707250, acc: 0.541016] [A loss: 0.940004, acc: 0.140625]\n",
"2730: [D loss: 0.696657, acc: 0.509766] [A loss: 0.722386, acc: 0.414062]\n",
"2731: [D loss: 0.729984, acc: 0.482422] [A loss: 0.893392, acc: 0.167969]\n",
"2732: [D loss: 0.672117, acc: 0.560547] [A loss: 0.777168, acc: 0.339844]\n",
"2733: [D loss: 0.718240, acc: 0.515625] [A loss: 0.982205, acc: 0.074219]\n",
"2734: [D loss: 0.709524, acc: 0.515625] [A loss: 0.702442, acc: 0.500000]\n",
"2735: [D loss: 0.719073, acc: 0.509766] [A loss: 0.944142, acc: 0.125000]\n",
"2736: [D loss: 0.678495, acc: 0.568359] [A loss: 0.776978, acc: 0.324219]\n",
"2737: [D loss: 0.696078, acc: 0.558594] [A loss: 0.838973, acc: 0.246094]\n",
"2738: [D loss: 0.691123, acc: 0.541016] [A loss: 0.805801, acc: 0.292969]\n",
"2739: [D loss: 0.715083, acc: 0.513672] [A loss: 0.867933, acc: 0.187500]\n",
"2740: [D loss: 0.684989, acc: 0.542969] [A loss: 0.796091, acc: 0.304688]\n",
"2741: [D loss: 0.697149, acc: 0.507812] [A loss: 0.814723, acc: 0.281250]\n",
"2742: [D loss: 0.694428, acc: 0.552734] [A loss: 0.801892, acc: 0.281250]\n",
"2743: [D loss: 0.717766, acc: 0.503906] [A loss: 0.833390, acc: 0.238281]\n",
"2744: [D loss: 0.697445, acc: 0.525391] [A loss: 0.814477, acc: 0.269531]\n",
"2745: [D loss: 0.713424, acc: 0.498047] [A loss: 0.899491, acc: 0.128906]\n",
"2746: [D loss: 0.698953, acc: 0.525391] [A loss: 0.833442, acc: 0.214844]\n",
"2747: [D loss: 0.699573, acc: 0.503906] [A loss: 0.880524, acc: 0.179688]\n",
"2748: [D loss: 0.691767, acc: 0.531250] [A loss: 0.806093, acc: 0.292969]\n",
"2749: [D loss: 0.680913, acc: 0.574219] [A loss: 0.910731, acc: 0.175781]\n",
"2750: [D loss: 0.686721, acc: 0.568359] [A loss: 0.766966, acc: 0.363281]\n",
"2751: [D loss: 0.710936, acc: 0.507812] [A loss: 1.026409, acc: 0.074219]\n",
"2752: [D loss: 0.693091, acc: 0.556641] [A loss: 0.659154, acc: 0.582031]\n",
"2753: [D loss: 0.739393, acc: 0.503906] [A loss: 0.879570, acc: 0.183594]\n",
"2754: [D loss: 0.692342, acc: 0.507812] [A loss: 0.840271, acc: 0.238281]\n",
"2755: [D loss: 0.725980, acc: 0.498047] [A loss: 0.892944, acc: 0.136719]\n",
"2756: [D loss: 0.692897, acc: 0.556641] [A loss: 0.755376, acc: 0.375000]\n",
"2757: [D loss: 0.691166, acc: 0.517578] [A loss: 0.926985, acc: 0.148438]\n",
"2758: [D loss: 0.695347, acc: 0.521484] [A loss: 0.727305, acc: 0.464844]\n",
"2759: [D loss: 0.722853, acc: 0.531250] [A loss: 1.044084, acc: 0.078125]\n",
"2760: [D loss: 0.704932, acc: 0.505859] [A loss: 0.713705, acc: 0.476562]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2761: [D loss: 0.712920, acc: 0.509766] [A loss: 0.918482, acc: 0.152344]\n",
"2762: [D loss: 0.698735, acc: 0.523438] [A loss: 0.756583, acc: 0.367188]\n",
"2763: [D loss: 0.720329, acc: 0.488281] [A loss: 0.889411, acc: 0.183594]\n",
"2764: [D loss: 0.703405, acc: 0.507812] [A loss: 0.754708, acc: 0.378906]\n",
"2765: [D loss: 0.716457, acc: 0.496094] [A loss: 0.859241, acc: 0.207031]\n",
"2766: [D loss: 0.703037, acc: 0.517578] [A loss: 0.825521, acc: 0.273438]\n",
"2767: [D loss: 0.691293, acc: 0.544922] [A loss: 0.790517, acc: 0.257812]\n",
"2768: [D loss: 0.691460, acc: 0.527344] [A loss: 0.848990, acc: 0.214844]\n",
"2769: [D loss: 0.694625, acc: 0.527344] [A loss: 0.833597, acc: 0.230469]\n",
"2770: [D loss: 0.696594, acc: 0.517578] [A loss: 0.836014, acc: 0.246094]\n",
"2771: [D loss: 0.693917, acc: 0.527344] [A loss: 0.822098, acc: 0.269531]\n",
"2772: [D loss: 0.699135, acc: 0.513672] [A loss: 0.816404, acc: 0.304688]\n",
"2773: [D loss: 0.694085, acc: 0.544922] [A loss: 0.799883, acc: 0.320312]\n",
"2774: [D loss: 0.697164, acc: 0.515625] [A loss: 0.796348, acc: 0.312500]\n",
"2775: [D loss: 0.692861, acc: 0.564453] [A loss: 0.825675, acc: 0.269531]\n",
"2776: [D loss: 0.694866, acc: 0.527344] [A loss: 0.874121, acc: 0.179688]\n",
"2777: [D loss: 0.686811, acc: 0.535156] [A loss: 0.887340, acc: 0.191406]\n",
"2778: [D loss: 0.698510, acc: 0.482422] [A loss: 0.829497, acc: 0.273438]\n",
"2779: [D loss: 0.707781, acc: 0.523438] [A loss: 0.876271, acc: 0.187500]\n",
"2780: [D loss: 0.696360, acc: 0.556641] [A loss: 0.927027, acc: 0.136719]\n",
"2781: [D loss: 0.688515, acc: 0.560547] [A loss: 0.767985, acc: 0.359375]\n",
"2782: [D loss: 0.712596, acc: 0.496094] [A loss: 1.096957, acc: 0.046875]\n",
"2783: [D loss: 0.687933, acc: 0.539062] [A loss: 0.677299, acc: 0.550781]\n",
"2784: [D loss: 0.731486, acc: 0.515625] [A loss: 0.973029, acc: 0.097656]\n",
"2785: [D loss: 0.690178, acc: 0.550781] [A loss: 0.687421, acc: 0.531250]\n",
"2786: [D loss: 0.730015, acc: 0.503906] [A loss: 1.030843, acc: 0.039062]\n",
"2787: [D loss: 0.697688, acc: 0.537109] [A loss: 0.692598, acc: 0.468750]\n",
"2788: [D loss: 0.708758, acc: 0.517578] [A loss: 0.878517, acc: 0.167969]\n",
"2789: [D loss: 0.698812, acc: 0.529297] [A loss: 0.735602, acc: 0.460938]\n",
"2790: [D loss: 0.715857, acc: 0.503906] [A loss: 0.871625, acc: 0.199219]\n",
"2791: [D loss: 0.683022, acc: 0.556641] [A loss: 0.797705, acc: 0.289062]\n",
"2792: [D loss: 0.692633, acc: 0.539062] [A loss: 0.772596, acc: 0.351562]\n",
"2793: [D loss: 0.721605, acc: 0.474609] [A loss: 0.846280, acc: 0.238281]\n",
"2794: [D loss: 0.679884, acc: 0.570312] [A loss: 0.834993, acc: 0.253906]\n",
"2795: [D loss: 0.688478, acc: 0.548828] [A loss: 0.786733, acc: 0.316406]\n",
"2796: [D loss: 0.708478, acc: 0.501953] [A loss: 0.863644, acc: 0.218750]\n",
"2797: [D loss: 0.694543, acc: 0.519531] [A loss: 0.811639, acc: 0.277344]\n",
"2798: [D loss: 0.682244, acc: 0.544922] [A loss: 0.783185, acc: 0.312500]\n",
"2799: [D loss: 0.711116, acc: 0.507812] [A loss: 0.824002, acc: 0.242188]\n",
"2800: [D loss: 0.706901, acc: 0.519531] [A loss: 0.840885, acc: 0.226562]\n",
"2801: [D loss: 0.704568, acc: 0.519531] [A loss: 0.860361, acc: 0.246094]\n",
"2802: [D loss: 0.701936, acc: 0.505859] [A loss: 0.838010, acc: 0.250000]\n",
"2803: [D loss: 0.693704, acc: 0.572266] [A loss: 0.880042, acc: 0.195312]\n",
"2804: [D loss: 0.687926, acc: 0.560547] [A loss: 0.838847, acc: 0.199219]\n",
"2805: [D loss: 0.710321, acc: 0.507812] [A loss: 0.861795, acc: 0.214844]\n",
"2806: [D loss: 0.697746, acc: 0.539062] [A loss: 0.846067, acc: 0.222656]\n",
"2807: [D loss: 0.699635, acc: 0.523438] [A loss: 0.827977, acc: 0.308594]\n",
"2808: [D loss: 0.705069, acc: 0.513672] [A loss: 0.901155, acc: 0.167969]\n",
"2809: [D loss: 0.694122, acc: 0.535156] [A loss: 0.771596, acc: 0.339844]\n",
"2810: [D loss: 0.709126, acc: 0.525391] [A loss: 0.991882, acc: 0.070312]\n",
"2811: [D loss: 0.689174, acc: 0.544922] [A loss: 0.700268, acc: 0.515625]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2812: [D loss: 0.715851, acc: 0.517578] [A loss: 0.994947, acc: 0.109375]\n",
"2813: [D loss: 0.703146, acc: 0.513672] [A loss: 0.748118, acc: 0.410156]\n",
"2814: [D loss: 0.708010, acc: 0.539062] [A loss: 0.893235, acc: 0.152344]\n",
"2815: [D loss: 0.709835, acc: 0.496094] [A loss: 0.765859, acc: 0.347656]\n",
"2816: [D loss: 0.699163, acc: 0.552734] [A loss: 0.904809, acc: 0.152344]\n",
"2817: [D loss: 0.686345, acc: 0.544922] [A loss: 0.795461, acc: 0.308594]\n",
"2818: [D loss: 0.710669, acc: 0.501953] [A loss: 0.852306, acc: 0.203125]\n",
"2819: [D loss: 0.700851, acc: 0.525391] [A loss: 0.761289, acc: 0.375000]\n",
"2820: [D loss: 0.693840, acc: 0.550781] [A loss: 0.939975, acc: 0.125000]\n",
"2821: [D loss: 0.705647, acc: 0.496094] [A loss: 0.744039, acc: 0.359375]\n",
"2822: [D loss: 0.712595, acc: 0.535156] [A loss: 0.934142, acc: 0.113281]\n",
"2823: [D loss: 0.708088, acc: 0.511719] [A loss: 0.729768, acc: 0.449219]\n",
"2824: [D loss: 0.723455, acc: 0.490234] [A loss: 0.965507, acc: 0.101562]\n",
"2825: [D loss: 0.695300, acc: 0.515625] [A loss: 0.697434, acc: 0.531250]\n",
"2826: [D loss: 0.717694, acc: 0.507812] [A loss: 0.927670, acc: 0.140625]\n",
"2827: [D loss: 0.690446, acc: 0.583984] [A loss: 0.752389, acc: 0.367188]\n",
"2828: [D loss: 0.703625, acc: 0.529297] [A loss: 0.860797, acc: 0.218750]\n",
"2829: [D loss: 0.708075, acc: 0.521484] [A loss: 0.736074, acc: 0.441406]\n",
"2830: [D loss: 0.703722, acc: 0.546875] [A loss: 0.852647, acc: 0.210938]\n",
"2831: [D loss: 0.689260, acc: 0.531250] [A loss: 0.816777, acc: 0.289062]\n",
"2832: [D loss: 0.691140, acc: 0.533203] [A loss: 0.860311, acc: 0.218750]\n",
"2833: [D loss: 0.688725, acc: 0.548828] [A loss: 0.859532, acc: 0.203125]\n",
"2834: [D loss: 0.688930, acc: 0.539062] [A loss: 0.787906, acc: 0.328125]\n",
"2835: [D loss: 0.690664, acc: 0.515625] [A loss: 0.807059, acc: 0.324219]\n",
"2836: [D loss: 0.706028, acc: 0.527344] [A loss: 0.853409, acc: 0.218750]\n",
"2837: [D loss: 0.681499, acc: 0.548828] [A loss: 0.767603, acc: 0.367188]\n",
"2838: [D loss: 0.713166, acc: 0.492188] [A loss: 0.959736, acc: 0.093750]\n",
"2839: [D loss: 0.685861, acc: 0.568359] [A loss: 0.763828, acc: 0.417969]\n",
"2840: [D loss: 0.724669, acc: 0.500000] [A loss: 0.941838, acc: 0.117188]\n",
"2841: [D loss: 0.682604, acc: 0.539062] [A loss: 0.746001, acc: 0.378906]\n",
"2842: [D loss: 0.697719, acc: 0.541016] [A loss: 1.043798, acc: 0.058594]\n",
"2843: [D loss: 0.696925, acc: 0.539062] [A loss: 0.668846, acc: 0.562500]\n",
"2844: [D loss: 0.726033, acc: 0.501953] [A loss: 1.021949, acc: 0.117188]\n",
"2845: [D loss: 0.705832, acc: 0.519531] [A loss: 0.695604, acc: 0.562500]\n",
"2846: [D loss: 0.715774, acc: 0.517578] [A loss: 0.906099, acc: 0.171875]\n",
"2847: [D loss: 0.691074, acc: 0.556641] [A loss: 0.751592, acc: 0.390625]\n",
"2848: [D loss: 0.702096, acc: 0.546875] [A loss: 0.852551, acc: 0.195312]\n",
"2849: [D loss: 0.690407, acc: 0.570312] [A loss: 0.783789, acc: 0.343750]\n",
"2850: [D loss: 0.717929, acc: 0.507812] [A loss: 0.875151, acc: 0.179688]\n",
"2851: [D loss: 0.687845, acc: 0.556641] [A loss: 0.794876, acc: 0.304688]\n",
"2852: [D loss: 0.702514, acc: 0.537109] [A loss: 0.862296, acc: 0.210938]\n",
"2853: [D loss: 0.699787, acc: 0.523438] [A loss: 0.763426, acc: 0.386719]\n",
"2854: [D loss: 0.702931, acc: 0.529297] [A loss: 0.882490, acc: 0.171875]\n",
"2855: [D loss: 0.703495, acc: 0.517578] [A loss: 0.781446, acc: 0.285156]\n",
"2856: [D loss: 0.717203, acc: 0.503906] [A loss: 0.898022, acc: 0.160156]\n",
"2857: [D loss: 0.683322, acc: 0.550781] [A loss: 0.800589, acc: 0.304688]\n",
"2858: [D loss: 0.695319, acc: 0.546875] [A loss: 0.822112, acc: 0.246094]\n",
"2859: [D loss: 0.704252, acc: 0.515625] [A loss: 0.812204, acc: 0.281250]\n",
"2860: [D loss: 0.693425, acc: 0.541016] [A loss: 0.819607, acc: 0.265625]\n",
"2861: [D loss: 0.687801, acc: 0.542969] [A loss: 0.884273, acc: 0.164062]\n",
"2862: [D loss: 0.688492, acc: 0.556641] [A loss: 0.851079, acc: 0.238281]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2863: [D loss: 0.703885, acc: 0.527344] [A loss: 0.821462, acc: 0.269531]\n",
"2864: [D loss: 0.693781, acc: 0.537109] [A loss: 0.853057, acc: 0.195312]\n",
"2865: [D loss: 0.722968, acc: 0.500000] [A loss: 0.901862, acc: 0.191406]\n",
"2866: [D loss: 0.689926, acc: 0.539062] [A loss: 0.810465, acc: 0.292969]\n",
"2867: [D loss: 0.694049, acc: 0.535156] [A loss: 0.919110, acc: 0.179688]\n",
"2868: [D loss: 0.703237, acc: 0.492188] [A loss: 0.752821, acc: 0.398438]\n",
"2869: [D loss: 0.718682, acc: 0.527344] [A loss: 0.892317, acc: 0.187500]\n",
"2870: [D loss: 0.687469, acc: 0.568359] [A loss: 0.758303, acc: 0.335938]\n",
"2871: [D loss: 0.696796, acc: 0.542969] [A loss: 0.896391, acc: 0.160156]\n",
"2872: [D loss: 0.699861, acc: 0.515625] [A loss: 0.832737, acc: 0.281250]\n",
"2873: [D loss: 0.691109, acc: 0.552734] [A loss: 0.840872, acc: 0.234375]\n",
"2874: [D loss: 0.699165, acc: 0.519531] [A loss: 0.888492, acc: 0.183594]\n",
"2875: [D loss: 0.706159, acc: 0.511719] [A loss: 0.799924, acc: 0.273438]\n",
"2876: [D loss: 0.712708, acc: 0.500000] [A loss: 0.892855, acc: 0.140625]\n",
"2877: [D loss: 0.694670, acc: 0.533203] [A loss: 0.827563, acc: 0.285156]\n",
"2878: [D loss: 0.703794, acc: 0.531250] [A loss: 0.912654, acc: 0.160156]\n",
"2879: [D loss: 0.690987, acc: 0.552734] [A loss: 0.716574, acc: 0.457031]\n",
"2880: [D loss: 0.709142, acc: 0.533203] [A loss: 1.003371, acc: 0.089844]\n",
"2881: [D loss: 0.703642, acc: 0.511719] [A loss: 0.719135, acc: 0.464844]\n",
"2882: [D loss: 0.733799, acc: 0.531250] [A loss: 0.990522, acc: 0.113281]\n",
"2883: [D loss: 0.710063, acc: 0.515625] [A loss: 0.747863, acc: 0.355469]\n",
"2884: [D loss: 0.697731, acc: 0.552734] [A loss: 0.857888, acc: 0.218750]\n",
"2885: [D loss: 0.693760, acc: 0.554688] [A loss: 0.774937, acc: 0.312500]\n",
"2886: [D loss: 0.714879, acc: 0.484375] [A loss: 0.939883, acc: 0.097656]\n",
"2887: [D loss: 0.699735, acc: 0.511719] [A loss: 0.705745, acc: 0.464844]\n",
"2888: [D loss: 0.711561, acc: 0.511719] [A loss: 0.858733, acc: 0.214844]\n",
"2889: [D loss: 0.698503, acc: 0.501953] [A loss: 0.755563, acc: 0.371094]\n",
"2890: [D loss: 0.719128, acc: 0.472656] [A loss: 0.912058, acc: 0.156250]\n",
"2891: [D loss: 0.709785, acc: 0.505859] [A loss: 0.812564, acc: 0.261719]\n",
"2892: [D loss: 0.700535, acc: 0.546875] [A loss: 0.882439, acc: 0.167969]\n",
"2893: [D loss: 0.695443, acc: 0.542969] [A loss: 0.851538, acc: 0.238281]\n",
"2894: [D loss: 0.700095, acc: 0.535156] [A loss: 0.725005, acc: 0.468750]\n",
"2895: [D loss: 0.695721, acc: 0.539062] [A loss: 0.896069, acc: 0.175781]\n",
"2896: [D loss: 0.689420, acc: 0.554688] [A loss: 0.749109, acc: 0.441406]\n",
"2897: [D loss: 0.711901, acc: 0.525391] [A loss: 0.956749, acc: 0.105469]\n",
"2898: [D loss: 0.699124, acc: 0.523438] [A loss: 0.750611, acc: 0.375000]\n",
"2899: [D loss: 0.711864, acc: 0.521484] [A loss: 0.927244, acc: 0.128906]\n",
"2900: [D loss: 0.684670, acc: 0.578125] [A loss: 0.757147, acc: 0.386719]\n",
"2901: [D loss: 0.695543, acc: 0.537109] [A loss: 0.904435, acc: 0.175781]\n",
"2902: [D loss: 0.693858, acc: 0.541016] [A loss: 0.728280, acc: 0.457031]\n",
"2903: [D loss: 0.693244, acc: 0.539062] [A loss: 0.876268, acc: 0.167969]\n",
"2904: [D loss: 0.712965, acc: 0.474609] [A loss: 0.799662, acc: 0.300781]\n",
"2905: [D loss: 0.698659, acc: 0.525391] [A loss: 0.880875, acc: 0.218750]\n",
"2906: [D loss: 0.698660, acc: 0.507812] [A loss: 0.762633, acc: 0.343750]\n",
"2907: [D loss: 0.696656, acc: 0.537109] [A loss: 0.940430, acc: 0.105469]\n",
"2908: [D loss: 0.695368, acc: 0.505859] [A loss: 0.728558, acc: 0.457031]\n",
"2909: [D loss: 0.715716, acc: 0.501953] [A loss: 1.020293, acc: 0.085938]\n",
"2910: [D loss: 0.687892, acc: 0.521484] [A loss: 0.662065, acc: 0.613281]\n",
"2911: [D loss: 0.733958, acc: 0.501953] [A loss: 0.968283, acc: 0.082031]\n",
"2912: [D loss: 0.697212, acc: 0.519531] [A loss: 0.712561, acc: 0.468750]\n",
"2913: [D loss: 0.726366, acc: 0.511719] [A loss: 0.883301, acc: 0.183594]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2914: [D loss: 0.694335, acc: 0.533203] [A loss: 0.749806, acc: 0.371094]\n",
"2915: [D loss: 0.712837, acc: 0.521484] [A loss: 0.943806, acc: 0.128906]\n",
"2916: [D loss: 0.690987, acc: 0.537109] [A loss: 0.733877, acc: 0.433594]\n",
"2917: [D loss: 0.712053, acc: 0.541016] [A loss: 0.897714, acc: 0.136719]\n",
"2918: [D loss: 0.707338, acc: 0.519531] [A loss: 0.761081, acc: 0.363281]\n",
"2919: [D loss: 0.713592, acc: 0.496094] [A loss: 0.820589, acc: 0.210938]\n",
"2920: [D loss: 0.707207, acc: 0.501953] [A loss: 0.866791, acc: 0.210938]\n",
"2921: [D loss: 0.699968, acc: 0.515625] [A loss: 0.817466, acc: 0.246094]\n",
"2922: [D loss: 0.689575, acc: 0.527344] [A loss: 0.840770, acc: 0.238281]\n",
"2923: [D loss: 0.692632, acc: 0.552734] [A loss: 0.850496, acc: 0.222656]\n",
"2924: [D loss: 0.687222, acc: 0.574219] [A loss: 0.837237, acc: 0.285156]\n",
"2925: [D loss: 0.688698, acc: 0.560547] [A loss: 0.851894, acc: 0.261719]\n",
"2926: [D loss: 0.684864, acc: 0.550781] [A loss: 0.792983, acc: 0.320312]\n",
"2927: [D loss: 0.706469, acc: 0.527344] [A loss: 0.958254, acc: 0.121094]\n",
"2928: [D loss: 0.686793, acc: 0.566406] [A loss: 0.660432, acc: 0.597656]\n",
"2929: [D loss: 0.730386, acc: 0.525391] [A loss: 1.011847, acc: 0.070312]\n",
"2930: [D loss: 0.707849, acc: 0.511719] [A loss: 0.704586, acc: 0.445312]\n",
"2931: [D loss: 0.714190, acc: 0.519531] [A loss: 0.883325, acc: 0.144531]\n",
"2932: [D loss: 0.689533, acc: 0.531250] [A loss: 0.758285, acc: 0.367188]\n",
"2933: [D loss: 0.706094, acc: 0.517578] [A loss: 0.874443, acc: 0.226562]\n",
"2934: [D loss: 0.682699, acc: 0.533203] [A loss: 0.745424, acc: 0.429688]\n",
"2935: [D loss: 0.716392, acc: 0.503906] [A loss: 0.864354, acc: 0.214844]\n",
"2936: [D loss: 0.705745, acc: 0.542969] [A loss: 0.816367, acc: 0.261719]\n",
"2937: [D loss: 0.714665, acc: 0.511719] [A loss: 0.816965, acc: 0.246094]\n",
"2938: [D loss: 0.709403, acc: 0.505859] [A loss: 0.857587, acc: 0.179688]\n",
"2939: [D loss: 0.700739, acc: 0.515625] [A loss: 0.800734, acc: 0.300781]\n",
"2940: [D loss: 0.703392, acc: 0.519531] [A loss: 0.868245, acc: 0.191406]\n",
"2941: [D loss: 0.697340, acc: 0.517578] [A loss: 0.800528, acc: 0.292969]\n",
"2942: [D loss: 0.686852, acc: 0.564453] [A loss: 0.928722, acc: 0.121094]\n",
"2943: [D loss: 0.685331, acc: 0.560547] [A loss: 0.794844, acc: 0.335938]\n",
"2944: [D loss: 0.701762, acc: 0.529297] [A loss: 0.867432, acc: 0.179688]\n",
"2945: [D loss: 0.691022, acc: 0.550781] [A loss: 0.866222, acc: 0.253906]\n",
"2946: [D loss: 0.703174, acc: 0.539062] [A loss: 0.875847, acc: 0.199219]\n",
"2947: [D loss: 0.706709, acc: 0.517578] [A loss: 0.820718, acc: 0.253906]\n",
"2948: [D loss: 0.699146, acc: 0.544922] [A loss: 0.824807, acc: 0.261719]\n",
"2949: [D loss: 0.684298, acc: 0.542969] [A loss: 0.808838, acc: 0.296875]\n",
"2950: [D loss: 0.698345, acc: 0.523438] [A loss: 0.905515, acc: 0.144531]\n",
"2951: [D loss: 0.689704, acc: 0.552734] [A loss: 0.779430, acc: 0.339844]\n",
"2952: [D loss: 0.708390, acc: 0.513672] [A loss: 0.949730, acc: 0.105469]\n",
"2953: [D loss: 0.702991, acc: 0.501953] [A loss: 0.702475, acc: 0.511719]\n",
"2954: [D loss: 0.726728, acc: 0.521484] [A loss: 1.043992, acc: 0.062500]\n",
"2955: [D loss: 0.707563, acc: 0.494141] [A loss: 0.713124, acc: 0.496094]\n",
"2956: [D loss: 0.718590, acc: 0.521484] [A loss: 0.920801, acc: 0.132812]\n",
"2957: [D loss: 0.704098, acc: 0.498047] [A loss: 0.735881, acc: 0.417969]\n",
"2958: [D loss: 0.696828, acc: 0.562500] [A loss: 0.894297, acc: 0.164062]\n",
"2959: [D loss: 0.678388, acc: 0.578125] [A loss: 0.737707, acc: 0.410156]\n",
"2960: [D loss: 0.706974, acc: 0.542969] [A loss: 0.866932, acc: 0.167969]\n",
"2961: [D loss: 0.697908, acc: 0.519531] [A loss: 0.788440, acc: 0.308594]\n",
"2962: [D loss: 0.701789, acc: 0.529297] [A loss: 0.923925, acc: 0.136719]\n",
"2963: [D loss: 0.704615, acc: 0.537109] [A loss: 0.766239, acc: 0.359375]\n",
"2964: [D loss: 0.695473, acc: 0.542969] [A loss: 0.884110, acc: 0.164062]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"2965: [D loss: 0.693761, acc: 0.525391] [A loss: 0.801204, acc: 0.269531]\n",
"2966: [D loss: 0.696410, acc: 0.535156] [A loss: 0.873187, acc: 0.183594]\n",
"2967: [D loss: 0.701425, acc: 0.531250] [A loss: 0.763043, acc: 0.375000]\n",
"2968: [D loss: 0.712169, acc: 0.503906] [A loss: 0.955999, acc: 0.136719]\n",
"2969: [D loss: 0.699004, acc: 0.494141] [A loss: 0.735431, acc: 0.398438]\n",
"2970: [D loss: 0.710456, acc: 0.503906] [A loss: 0.910172, acc: 0.156250]\n",
"2971: [D loss: 0.685185, acc: 0.556641] [A loss: 0.807708, acc: 0.292969]\n",
"2972: [D loss: 0.724338, acc: 0.486328] [A loss: 0.908381, acc: 0.152344]\n",
"2973: [D loss: 0.693254, acc: 0.523438] [A loss: 0.834141, acc: 0.230469]\n",
"2974: [D loss: 0.711029, acc: 0.500000] [A loss: 0.809763, acc: 0.281250]\n",
"2975: [D loss: 0.716204, acc: 0.488281] [A loss: 0.864604, acc: 0.195312]\n",
"2976: [D loss: 0.687157, acc: 0.560547] [A loss: 0.847435, acc: 0.207031]\n",
"2977: [D loss: 0.679713, acc: 0.568359] [A loss: 0.824985, acc: 0.292969]\n",
"2978: [D loss: 0.692988, acc: 0.533203] [A loss: 0.915962, acc: 0.152344]\n",
"2979: [D loss: 0.688379, acc: 0.552734] [A loss: 0.764510, acc: 0.339844]\n",
"2980: [D loss: 0.718941, acc: 0.515625] [A loss: 0.997909, acc: 0.078125]\n",
"2981: [D loss: 0.707056, acc: 0.498047] [A loss: 0.648260, acc: 0.636719]\n",
"2982: [D loss: 0.721430, acc: 0.525391] [A loss: 0.969411, acc: 0.117188]\n",
"2983: [D loss: 0.700384, acc: 0.509766] [A loss: 0.697679, acc: 0.519531]\n",
"2984: [D loss: 0.719873, acc: 0.505859] [A loss: 0.923338, acc: 0.105469]\n",
"2985: [D loss: 0.684804, acc: 0.564453] [A loss: 0.756183, acc: 0.367188]\n",
"2986: [D loss: 0.703065, acc: 0.539062] [A loss: 0.896513, acc: 0.175781]\n",
"2987: [D loss: 0.688246, acc: 0.535156] [A loss: 0.743964, acc: 0.437500]\n",
"2988: [D loss: 0.719676, acc: 0.513672] [A loss: 0.875332, acc: 0.199219]\n",
"2989: [D loss: 0.716068, acc: 0.496094] [A loss: 0.763166, acc: 0.324219]\n",
"2990: [D loss: 0.709079, acc: 0.513672] [A loss: 0.993427, acc: 0.054688]\n",
"2991: [D loss: 0.698250, acc: 0.517578] [A loss: 0.732915, acc: 0.464844]\n",
"2992: [D loss: 0.712105, acc: 0.552734] [A loss: 0.888811, acc: 0.179688]\n",
"2993: [D loss: 0.701498, acc: 0.498047] [A loss: 0.797453, acc: 0.308594]\n",
"2994: [D loss: 0.697538, acc: 0.509766] [A loss: 0.886331, acc: 0.175781]\n",
"2995: [D loss: 0.691224, acc: 0.541016] [A loss: 0.811007, acc: 0.277344]\n",
"2996: [D loss: 0.705216, acc: 0.517578] [A loss: 0.841244, acc: 0.242188]\n",
"2997: [D loss: 0.703915, acc: 0.523438] [A loss: 0.814942, acc: 0.261719]\n",
"2998: [D loss: 0.705722, acc: 0.533203] [A loss: 0.887531, acc: 0.160156]\n",
"2999: [D loss: 0.693295, acc: 0.537109] [A loss: 0.724695, acc: 0.421875]\n",
"3000: [D loss: 0.717284, acc: 0.496094] [A loss: 0.941258, acc: 0.140625]\n",
"3001: [D loss: 0.697317, acc: 0.533203] [A loss: 0.803824, acc: 0.296875]\n",
"3002: [D loss: 0.695328, acc: 0.525391] [A loss: 0.868428, acc: 0.195312]\n",
"3003: [D loss: 0.706794, acc: 0.507812] [A loss: 0.796914, acc: 0.281250]\n",
"3004: [D loss: 0.712948, acc: 0.527344] [A loss: 0.953248, acc: 0.136719]\n",
"3005: [D loss: 0.684769, acc: 0.541016] [A loss: 0.716121, acc: 0.449219]\n",
"3006: [D loss: 0.717104, acc: 0.501953] [A loss: 0.975770, acc: 0.105469]\n",
"3007: [D loss: 0.712228, acc: 0.498047] [A loss: 0.754453, acc: 0.398438]\n",
"3008: [D loss: 0.694980, acc: 0.537109] [A loss: 0.859933, acc: 0.195312]\n",
"3009: [D loss: 0.705815, acc: 0.496094] [A loss: 0.814169, acc: 0.277344]\n",
"3010: [D loss: 0.699469, acc: 0.519531] [A loss: 0.830572, acc: 0.238281]\n",
"3011: [D loss: 0.696739, acc: 0.511719] [A loss: 0.929095, acc: 0.128906]\n",
"3012: [D loss: 0.697500, acc: 0.501953] [A loss: 0.811055, acc: 0.300781]\n",
"3013: [D loss: 0.699381, acc: 0.521484] [A loss: 0.878841, acc: 0.195312]\n",
"3014: [D loss: 0.704731, acc: 0.513672] [A loss: 0.821603, acc: 0.253906]\n",
"3015: [D loss: 0.692469, acc: 0.533203] [A loss: 0.943291, acc: 0.117188]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3016: [D loss: 0.706940, acc: 0.501953] [A loss: 0.789458, acc: 0.308594]\n",
"3017: [D loss: 0.703823, acc: 0.519531] [A loss: 0.989789, acc: 0.117188]\n",
"3018: [D loss: 0.695807, acc: 0.533203] [A loss: 0.672620, acc: 0.562500]\n",
"3019: [D loss: 0.721934, acc: 0.500000] [A loss: 1.034729, acc: 0.046875]\n",
"3020: [D loss: 0.695339, acc: 0.527344] [A loss: 0.652604, acc: 0.648438]\n",
"3021: [D loss: 0.720682, acc: 0.517578] [A loss: 0.984716, acc: 0.078125]\n",
"3022: [D loss: 0.694726, acc: 0.525391] [A loss: 0.685754, acc: 0.496094]\n",
"3023: [D loss: 0.726887, acc: 0.515625] [A loss: 0.906236, acc: 0.160156]\n",
"3024: [D loss: 0.683908, acc: 0.562500] [A loss: 0.794846, acc: 0.355469]\n",
"3025: [D loss: 0.703424, acc: 0.517578] [A loss: 0.952038, acc: 0.097656]\n",
"3026: [D loss: 0.713138, acc: 0.486328] [A loss: 0.716696, acc: 0.472656]\n",
"3027: [D loss: 0.718094, acc: 0.503906] [A loss: 0.898157, acc: 0.136719]\n",
"3028: [D loss: 0.698229, acc: 0.511719] [A loss: 0.754499, acc: 0.433594]\n",
"3029: [D loss: 0.702657, acc: 0.511719] [A loss: 0.934829, acc: 0.132812]\n",
"3030: [D loss: 0.686096, acc: 0.556641] [A loss: 0.757185, acc: 0.378906]\n",
"3031: [D loss: 0.701648, acc: 0.517578] [A loss: 0.906834, acc: 0.183594]\n",
"3032: [D loss: 0.685972, acc: 0.562500] [A loss: 0.821413, acc: 0.308594]\n",
"3033: [D loss: 0.682207, acc: 0.556641] [A loss: 0.849119, acc: 0.246094]\n",
"3034: [D loss: 0.703088, acc: 0.531250] [A loss: 0.786508, acc: 0.316406]\n",
"3035: [D loss: 0.707999, acc: 0.539062] [A loss: 0.903051, acc: 0.183594]\n",
"3036: [D loss: 0.692503, acc: 0.523438] [A loss: 0.785419, acc: 0.328125]\n",
"3037: [D loss: 0.706026, acc: 0.509766] [A loss: 0.894310, acc: 0.191406]\n",
"3038: [D loss: 0.698452, acc: 0.541016] [A loss: 0.808887, acc: 0.234375]\n",
"3039: [D loss: 0.703025, acc: 0.531250] [A loss: 0.880111, acc: 0.187500]\n",
"3040: [D loss: 0.673885, acc: 0.589844] [A loss: 0.819593, acc: 0.261719]\n",
"3041: [D loss: 0.705319, acc: 0.525391] [A loss: 0.846331, acc: 0.222656]\n",
"3042: [D loss: 0.694404, acc: 0.523438] [A loss: 0.874162, acc: 0.210938]\n",
"3043: [D loss: 0.705008, acc: 0.541016] [A loss: 0.811066, acc: 0.234375]\n",
"3044: [D loss: 0.706448, acc: 0.507812] [A loss: 0.895053, acc: 0.144531]\n",
"3045: [D loss: 0.690731, acc: 0.529297] [A loss: 0.758694, acc: 0.398438]\n",
"3046: [D loss: 0.706922, acc: 0.507812] [A loss: 1.057281, acc: 0.046875]\n",
"3047: [D loss: 0.693791, acc: 0.539062] [A loss: 0.701713, acc: 0.519531]\n",
"3048: [D loss: 0.710488, acc: 0.517578] [A loss: 0.956774, acc: 0.109375]\n",
"3049: [D loss: 0.687594, acc: 0.548828] [A loss: 0.705862, acc: 0.476562]\n",
"3050: [D loss: 0.707960, acc: 0.509766] [A loss: 0.909118, acc: 0.164062]\n",
"3051: [D loss: 0.697666, acc: 0.519531] [A loss: 0.778672, acc: 0.355469]\n",
"3052: [D loss: 0.698423, acc: 0.533203] [A loss: 0.852622, acc: 0.218750]\n",
"3053: [D loss: 0.707092, acc: 0.515625] [A loss: 0.850303, acc: 0.199219]\n",
"3054: [D loss: 0.689673, acc: 0.525391] [A loss: 0.838641, acc: 0.242188]\n",
"3055: [D loss: 0.689791, acc: 0.533203] [A loss: 0.890046, acc: 0.179688]\n",
"3056: [D loss: 0.699624, acc: 0.542969] [A loss: 0.860364, acc: 0.230469]\n",
"3057: [D loss: 0.697530, acc: 0.507812] [A loss: 0.830677, acc: 0.234375]\n",
"3058: [D loss: 0.711865, acc: 0.527344] [A loss: 0.872127, acc: 0.210938]\n",
"3059: [D loss: 0.694880, acc: 0.550781] [A loss: 0.776238, acc: 0.347656]\n",
"3060: [D loss: 0.711697, acc: 0.511719] [A loss: 1.065984, acc: 0.039062]\n",
"3061: [D loss: 0.703004, acc: 0.542969] [A loss: 0.670345, acc: 0.554688]\n",
"3062: [D loss: 0.704383, acc: 0.521484] [A loss: 0.932183, acc: 0.148438]\n",
"3063: [D loss: 0.698005, acc: 0.541016] [A loss: 0.767851, acc: 0.347656]\n",
"3064: [D loss: 0.710021, acc: 0.513672] [A loss: 0.925678, acc: 0.121094]\n",
"3065: [D loss: 0.692134, acc: 0.539062] [A loss: 0.761670, acc: 0.339844]\n",
"3066: [D loss: 0.700365, acc: 0.503906] [A loss: 0.883352, acc: 0.148438]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3067: [D loss: 0.707568, acc: 0.500000] [A loss: 0.817903, acc: 0.300781]\n",
"3068: [D loss: 0.702428, acc: 0.527344] [A loss: 0.852489, acc: 0.214844]\n",
"3069: [D loss: 0.691285, acc: 0.544922] [A loss: 0.851031, acc: 0.226562]\n",
"3070: [D loss: 0.696320, acc: 0.519531] [A loss: 0.825754, acc: 0.242188]\n",
"3071: [D loss: 0.698373, acc: 0.537109] [A loss: 0.865114, acc: 0.199219]\n",
"3072: [D loss: 0.696614, acc: 0.533203] [A loss: 0.829115, acc: 0.269531]\n",
"3073: [D loss: 0.706279, acc: 0.539062] [A loss: 0.849790, acc: 0.222656]\n",
"3074: [D loss: 0.697423, acc: 0.546875] [A loss: 0.858649, acc: 0.218750]\n",
"3075: [D loss: 0.695795, acc: 0.523438] [A loss: 0.799429, acc: 0.289062]\n",
"3076: [D loss: 0.699034, acc: 0.519531] [A loss: 0.952596, acc: 0.089844]\n",
"3077: [D loss: 0.703627, acc: 0.511719] [A loss: 0.753902, acc: 0.335938]\n",
"3078: [D loss: 0.728558, acc: 0.488281] [A loss: 1.080783, acc: 0.035156]\n",
"3079: [D loss: 0.695656, acc: 0.527344] [A loss: 0.654864, acc: 0.648438]\n",
"3080: [D loss: 0.733803, acc: 0.511719] [A loss: 1.062333, acc: 0.050781]\n",
"3081: [D loss: 0.694293, acc: 0.533203] [A loss: 0.699988, acc: 0.500000]\n",
"3082: [D loss: 0.716364, acc: 0.492188] [A loss: 0.923764, acc: 0.121094]\n",
"3083: [D loss: 0.692938, acc: 0.546875] [A loss: 0.746648, acc: 0.378906]\n",
"3084: [D loss: 0.702025, acc: 0.541016] [A loss: 0.904207, acc: 0.140625]\n",
"3085: [D loss: 0.689505, acc: 0.535156] [A loss: 0.724884, acc: 0.445312]\n",
"3086: [D loss: 0.717346, acc: 0.517578] [A loss: 0.898991, acc: 0.156250]\n",
"3087: [D loss: 0.693267, acc: 0.544922] [A loss: 0.757428, acc: 0.398438]\n",
"3088: [D loss: 0.697301, acc: 0.550781] [A loss: 0.906937, acc: 0.199219]\n",
"3089: [D loss: 0.692879, acc: 0.519531] [A loss: 0.801429, acc: 0.355469]\n",
"3090: [D loss: 0.730583, acc: 0.480469] [A loss: 0.998795, acc: 0.070312]\n",
"3091: [D loss: 0.701183, acc: 0.527344] [A loss: 0.788883, acc: 0.335938]\n",
"3092: [D loss: 0.711106, acc: 0.515625] [A loss: 0.917176, acc: 0.113281]\n",
"3093: [D loss: 0.693152, acc: 0.535156] [A loss: 0.791372, acc: 0.355469]\n",
"3094: [D loss: 0.712799, acc: 0.494141] [A loss: 0.837026, acc: 0.230469]\n",
"3095: [D loss: 0.705917, acc: 0.531250] [A loss: 0.837138, acc: 0.210938]\n",
"3096: [D loss: 0.696172, acc: 0.542969] [A loss: 0.867539, acc: 0.226562]\n",
"3097: [D loss: 0.700718, acc: 0.513672] [A loss: 0.844353, acc: 0.238281]\n",
"3098: [D loss: 0.719158, acc: 0.474609] [A loss: 0.889435, acc: 0.167969]\n",
"3099: [D loss: 0.698792, acc: 0.525391] [A loss: 0.869824, acc: 0.222656]\n",
"3100: [D loss: 0.701858, acc: 0.533203] [A loss: 0.900842, acc: 0.171875]\n",
"3101: [D loss: 0.713138, acc: 0.498047] [A loss: 0.855417, acc: 0.214844]\n",
"3102: [D loss: 0.684678, acc: 0.542969] [A loss: 0.849666, acc: 0.242188]\n",
"3103: [D loss: 0.702794, acc: 0.501953] [A loss: 0.864334, acc: 0.199219]\n",
"3104: [D loss: 0.710406, acc: 0.519531] [A loss: 0.887164, acc: 0.148438]\n",
"3105: [D loss: 0.699181, acc: 0.529297] [A loss: 0.804714, acc: 0.281250]\n",
"3106: [D loss: 0.712437, acc: 0.515625] [A loss: 1.061239, acc: 0.050781]\n",
"3107: [D loss: 0.704114, acc: 0.505859] [A loss: 0.635490, acc: 0.687500]\n",
"3108: [D loss: 0.745443, acc: 0.486328] [A loss: 1.119289, acc: 0.039062]\n",
"3109: [D loss: 0.704740, acc: 0.531250] [A loss: 0.644530, acc: 0.625000]\n",
"3110: [D loss: 0.745971, acc: 0.484375] [A loss: 0.922177, acc: 0.105469]\n",
"3111: [D loss: 0.687152, acc: 0.541016] [A loss: 0.761802, acc: 0.386719]\n",
"3112: [D loss: 0.711108, acc: 0.482422] [A loss: 0.826485, acc: 0.230469]\n",
"3113: [D loss: 0.708039, acc: 0.503906] [A loss: 0.845089, acc: 0.250000]\n",
"3114: [D loss: 0.699812, acc: 0.515625] [A loss: 0.814109, acc: 0.273438]\n",
"3115: [D loss: 0.702189, acc: 0.519531] [A loss: 0.878016, acc: 0.187500]\n",
"3116: [D loss: 0.690778, acc: 0.552734] [A loss: 0.818945, acc: 0.273438]\n",
"3117: [D loss: 0.701606, acc: 0.529297] [A loss: 0.879099, acc: 0.171875]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3118: [D loss: 0.699335, acc: 0.507812] [A loss: 0.856511, acc: 0.218750]\n",
"3119: [D loss: 0.710691, acc: 0.494141] [A loss: 0.788282, acc: 0.296875]\n",
"3120: [D loss: 0.713629, acc: 0.507812] [A loss: 0.865374, acc: 0.195312]\n",
"3121: [D loss: 0.686740, acc: 0.578125] [A loss: 0.796772, acc: 0.308594]\n",
"3122: [D loss: 0.697018, acc: 0.503906] [A loss: 0.934300, acc: 0.152344]\n",
"3123: [D loss: 0.698489, acc: 0.490234] [A loss: 0.832336, acc: 0.265625]\n",
"3124: [D loss: 0.694479, acc: 0.554688] [A loss: 0.871823, acc: 0.191406]\n",
"3125: [D loss: 0.699589, acc: 0.539062] [A loss: 0.818477, acc: 0.292969]\n",
"3126: [D loss: 0.709554, acc: 0.531250] [A loss: 0.872650, acc: 0.191406]\n",
"3127: [D loss: 0.695283, acc: 0.535156] [A loss: 0.870062, acc: 0.195312]\n",
"3128: [D loss: 0.703785, acc: 0.558594] [A loss: 0.983370, acc: 0.078125]\n",
"3129: [D loss: 0.699034, acc: 0.527344] [A loss: 0.728801, acc: 0.453125]\n",
"3130: [D loss: 0.687273, acc: 0.562500] [A loss: 1.050438, acc: 0.062500]\n",
"3131: [D loss: 0.690751, acc: 0.539062] [A loss: 0.632525, acc: 0.648438]\n",
"3132: [D loss: 0.747274, acc: 0.509766] [A loss: 1.095396, acc: 0.035156]\n",
"3133: [D loss: 0.707291, acc: 0.515625] [A loss: 0.710581, acc: 0.480469]\n",
"3134: [D loss: 0.718481, acc: 0.521484] [A loss: 0.857193, acc: 0.191406]\n",
"3135: [D loss: 0.696042, acc: 0.548828] [A loss: 0.819822, acc: 0.277344]\n",
"3136: [D loss: 0.707176, acc: 0.515625] [A loss: 0.898475, acc: 0.175781]\n",
"3137: [D loss: 0.696213, acc: 0.529297] [A loss: 0.787105, acc: 0.332031]\n",
"3138: [D loss: 0.698550, acc: 0.535156] [A loss: 0.849694, acc: 0.203125]\n",
"3139: [D loss: 0.698894, acc: 0.507812] [A loss: 0.830377, acc: 0.214844]\n",
"3140: [D loss: 0.712771, acc: 0.486328] [A loss: 0.866242, acc: 0.191406]\n",
"3141: [D loss: 0.697149, acc: 0.531250] [A loss: 0.797257, acc: 0.289062]\n",
"3142: [D loss: 0.699950, acc: 0.509766] [A loss: 0.810791, acc: 0.304688]\n",
"3143: [D loss: 0.713170, acc: 0.500000] [A loss: 0.864548, acc: 0.257812]\n",
"3144: [D loss: 0.687443, acc: 0.550781] [A loss: 0.773631, acc: 0.339844]\n",
"3145: [D loss: 0.696282, acc: 0.517578] [A loss: 0.915015, acc: 0.144531]\n",
"3146: [D loss: 0.690464, acc: 0.556641] [A loss: 0.785375, acc: 0.328125]\n",
"3147: [D loss: 0.713437, acc: 0.492188] [A loss: 0.941288, acc: 0.128906]\n",
"3148: [D loss: 0.677444, acc: 0.564453] [A loss: 0.870627, acc: 0.199219]\n",
"3149: [D loss: 0.711561, acc: 0.492188] [A loss: 0.892494, acc: 0.160156]\n",
"3150: [D loss: 0.691914, acc: 0.533203] [A loss: 0.873359, acc: 0.222656]\n",
"3151: [D loss: 0.702902, acc: 0.548828] [A loss: 0.834787, acc: 0.261719]\n",
"3152: [D loss: 0.701065, acc: 0.527344] [A loss: 0.904554, acc: 0.160156]\n",
"3153: [D loss: 0.709358, acc: 0.490234] [A loss: 0.824394, acc: 0.261719]\n",
"3154: [D loss: 0.710420, acc: 0.511719] [A loss: 0.853343, acc: 0.207031]\n",
"3155: [D loss: 0.701952, acc: 0.525391] [A loss: 0.925731, acc: 0.136719]\n",
"3156: [D loss: 0.702824, acc: 0.496094] [A loss: 0.750541, acc: 0.421875]\n",
"3157: [D loss: 0.703951, acc: 0.533203] [A loss: 0.896294, acc: 0.175781]\n",
"3158: [D loss: 0.697654, acc: 0.537109] [A loss: 0.822084, acc: 0.238281]\n",
"3159: [D loss: 0.712835, acc: 0.498047] [A loss: 1.005021, acc: 0.089844]\n",
"3160: [D loss: 0.696740, acc: 0.531250] [A loss: 0.663278, acc: 0.609375]\n",
"3161: [D loss: 0.729460, acc: 0.500000] [A loss: 1.021737, acc: 0.074219]\n",
"3162: [D loss: 0.707091, acc: 0.527344] [A loss: 0.636951, acc: 0.660156]\n",
"3163: [D loss: 0.733846, acc: 0.513672] [A loss: 0.976649, acc: 0.101562]\n",
"3164: [D loss: 0.698556, acc: 0.498047] [A loss: 0.746320, acc: 0.445312]\n",
"3165: [D loss: 0.705337, acc: 0.515625] [A loss: 0.906824, acc: 0.144531]\n",
"3166: [D loss: 0.706644, acc: 0.511719] [A loss: 0.778180, acc: 0.347656]\n",
"3167: [D loss: 0.715085, acc: 0.505859] [A loss: 0.913440, acc: 0.148438]\n",
"3168: [D loss: 0.692106, acc: 0.542969] [A loss: 0.783126, acc: 0.335938]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3169: [D loss: 0.700840, acc: 0.539062] [A loss: 0.876822, acc: 0.164062]\n",
"3170: [D loss: 0.697307, acc: 0.523438] [A loss: 0.833904, acc: 0.289062]\n",
"3171: [D loss: 0.696819, acc: 0.513672] [A loss: 0.901818, acc: 0.152344]\n",
"3172: [D loss: 0.684433, acc: 0.550781] [A loss: 0.817792, acc: 0.265625]\n",
"3173: [D loss: 0.694010, acc: 0.535156] [A loss: 0.911394, acc: 0.136719]\n",
"3174: [D loss: 0.692148, acc: 0.531250] [A loss: 0.803229, acc: 0.296875]\n",
"3175: [D loss: 0.721020, acc: 0.488281] [A loss: 0.845323, acc: 0.250000]\n",
"3176: [D loss: 0.689752, acc: 0.531250] [A loss: 0.823541, acc: 0.277344]\n",
"3177: [D loss: 0.701844, acc: 0.513672] [A loss: 0.855937, acc: 0.210938]\n",
"3178: [D loss: 0.696865, acc: 0.548828] [A loss: 0.768215, acc: 0.378906]\n",
"3179: [D loss: 0.721125, acc: 0.501953] [A loss: 0.911323, acc: 0.144531]\n",
"3180: [D loss: 0.691633, acc: 0.552734] [A loss: 0.796588, acc: 0.332031]\n",
"3181: [D loss: 0.699362, acc: 0.539062] [A loss: 1.006989, acc: 0.070312]\n",
"3182: [D loss: 0.682604, acc: 0.554688] [A loss: 0.703624, acc: 0.519531]\n",
"3183: [D loss: 0.726564, acc: 0.511719] [A loss: 1.051761, acc: 0.054688]\n",
"3184: [D loss: 0.699308, acc: 0.550781] [A loss: 0.704781, acc: 0.496094]\n",
"3185: [D loss: 0.695296, acc: 0.541016] [A loss: 0.885314, acc: 0.218750]\n",
"3186: [D loss: 0.707039, acc: 0.525391] [A loss: 0.775464, acc: 0.324219]\n",
"3187: [D loss: 0.716095, acc: 0.509766] [A loss: 0.959431, acc: 0.093750]\n",
"3188: [D loss: 0.695213, acc: 0.550781] [A loss: 0.763247, acc: 0.355469]\n",
"3189: [D loss: 0.713825, acc: 0.525391] [A loss: 0.882012, acc: 0.203125]\n",
"3190: [D loss: 0.707407, acc: 0.498047] [A loss: 0.717569, acc: 0.437500]\n",
"3191: [D loss: 0.729395, acc: 0.527344] [A loss: 1.015062, acc: 0.082031]\n",
"3192: [D loss: 0.696298, acc: 0.529297] [A loss: 0.695760, acc: 0.511719]\n",
"3193: [D loss: 0.720278, acc: 0.509766] [A loss: 0.954231, acc: 0.085938]\n",
"3194: [D loss: 0.692285, acc: 0.523438] [A loss: 0.783920, acc: 0.378906]\n",
"3195: [D loss: 0.713249, acc: 0.500000] [A loss: 0.811659, acc: 0.238281]\n",
"3196: [D loss: 0.701124, acc: 0.494141] [A loss: 0.878298, acc: 0.179688]\n",
"3197: [D loss: 0.694896, acc: 0.527344] [A loss: 0.818040, acc: 0.281250]\n",
"3198: [D loss: 0.704366, acc: 0.498047] [A loss: 0.838963, acc: 0.250000]\n",
"3199: [D loss: 0.697435, acc: 0.531250] [A loss: 0.801337, acc: 0.281250]\n",
"3200: [D loss: 0.704412, acc: 0.523438] [A loss: 0.918410, acc: 0.156250]\n",
"3201: [D loss: 0.686216, acc: 0.529297] [A loss: 0.777234, acc: 0.359375]\n",
"3202: [D loss: 0.714731, acc: 0.486328] [A loss: 0.902047, acc: 0.167969]\n",
"3203: [D loss: 0.691755, acc: 0.535156] [A loss: 0.700519, acc: 0.492188]\n",
"3204: [D loss: 0.704215, acc: 0.541016] [A loss: 0.958547, acc: 0.101562]\n",
"3205: [D loss: 0.688684, acc: 0.542969] [A loss: 0.721751, acc: 0.449219]\n",
"3206: [D loss: 0.709711, acc: 0.507812] [A loss: 0.958695, acc: 0.152344]\n",
"3207: [D loss: 0.700889, acc: 0.501953] [A loss: 0.705189, acc: 0.488281]\n",
"3208: [D loss: 0.726916, acc: 0.494141] [A loss: 0.984040, acc: 0.074219]\n",
"3209: [D loss: 0.697935, acc: 0.542969] [A loss: 0.704902, acc: 0.535156]\n",
"3210: [D loss: 0.721274, acc: 0.525391] [A loss: 0.974548, acc: 0.125000]\n",
"3211: [D loss: 0.696793, acc: 0.511719] [A loss: 0.723496, acc: 0.437500]\n",
"3212: [D loss: 0.721078, acc: 0.501953] [A loss: 0.881023, acc: 0.164062]\n",
"3213: [D loss: 0.698374, acc: 0.511719] [A loss: 0.728122, acc: 0.433594]\n",
"3214: [D loss: 0.693924, acc: 0.533203] [A loss: 0.827881, acc: 0.257812]\n",
"3215: [D loss: 0.700921, acc: 0.505859] [A loss: 0.819846, acc: 0.253906]\n",
"3216: [D loss: 0.693773, acc: 0.544922] [A loss: 0.813545, acc: 0.253906]\n",
"3217: [D loss: 0.701816, acc: 0.515625] [A loss: 0.794974, acc: 0.300781]\n",
"3218: [D loss: 0.691113, acc: 0.542969] [A loss: 0.841200, acc: 0.273438]\n",
"3219: [D loss: 0.708325, acc: 0.494141] [A loss: 0.754817, acc: 0.402344]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3220: [D loss: 0.697638, acc: 0.537109] [A loss: 0.901242, acc: 0.148438]\n",
"3221: [D loss: 0.702229, acc: 0.527344] [A loss: 0.726153, acc: 0.441406]\n",
"3222: [D loss: 0.728250, acc: 0.496094] [A loss: 1.048783, acc: 0.050781]\n",
"3223: [D loss: 0.700037, acc: 0.529297] [A loss: 0.670598, acc: 0.605469]\n",
"3224: [D loss: 0.736977, acc: 0.503906] [A loss: 0.882944, acc: 0.136719]\n",
"3225: [D loss: 0.697284, acc: 0.552734] [A loss: 0.755975, acc: 0.378906]\n",
"3226: [D loss: 0.710429, acc: 0.501953] [A loss: 0.891738, acc: 0.191406]\n",
"3227: [D loss: 0.694567, acc: 0.527344] [A loss: 0.703646, acc: 0.503906]\n",
"3228: [D loss: 0.713424, acc: 0.533203] [A loss: 0.906205, acc: 0.144531]\n",
"3229: [D loss: 0.697158, acc: 0.529297] [A loss: 0.774983, acc: 0.339844]\n",
"3230: [D loss: 0.710157, acc: 0.517578] [A loss: 0.904705, acc: 0.136719]\n",
"3231: [D loss: 0.695070, acc: 0.531250] [A loss: 0.767788, acc: 0.351562]\n",
"3232: [D loss: 0.716529, acc: 0.500000] [A loss: 0.821835, acc: 0.246094]\n",
"3233: [D loss: 0.700000, acc: 0.521484] [A loss: 0.816191, acc: 0.269531]\n",
"3234: [D loss: 0.694674, acc: 0.525391] [A loss: 0.865395, acc: 0.175781]\n",
"3235: [D loss: 0.692717, acc: 0.544922] [A loss: 0.799088, acc: 0.292969]\n",
"3236: [D loss: 0.697851, acc: 0.537109] [A loss: 0.854718, acc: 0.207031]\n",
"3237: [D loss: 0.703316, acc: 0.527344] [A loss: 0.781226, acc: 0.347656]\n",
"3238: [D loss: 0.695823, acc: 0.537109] [A loss: 0.898213, acc: 0.171875]\n",
"3239: [D loss: 0.692536, acc: 0.537109] [A loss: 0.736873, acc: 0.417969]\n",
"3240: [D loss: 0.711316, acc: 0.525391] [A loss: 0.982253, acc: 0.089844]\n",
"3241: [D loss: 0.706025, acc: 0.519531] [A loss: 0.721115, acc: 0.410156]\n",
"3242: [D loss: 0.730023, acc: 0.476562] [A loss: 0.870145, acc: 0.207031]\n",
"3243: [D loss: 0.694358, acc: 0.542969] [A loss: 0.886068, acc: 0.160156]\n",
"3244: [D loss: 0.701239, acc: 0.521484] [A loss: 0.879164, acc: 0.160156]\n",
"3245: [D loss: 0.708387, acc: 0.511719] [A loss: 0.849616, acc: 0.207031]\n",
"3246: [D loss: 0.697756, acc: 0.537109] [A loss: 0.832195, acc: 0.246094]\n",
"3247: [D loss: 0.693223, acc: 0.550781] [A loss: 0.920218, acc: 0.109375]\n",
"3248: [D loss: 0.692383, acc: 0.548828] [A loss: 0.746320, acc: 0.386719]\n",
"3249: [D loss: 0.717443, acc: 0.542969] [A loss: 1.047513, acc: 0.074219]\n",
"3250: [D loss: 0.711554, acc: 0.533203] [A loss: 0.620592, acc: 0.675781]\n",
"3251: [D loss: 0.724718, acc: 0.519531] [A loss: 0.921106, acc: 0.183594]\n",
"3252: [D loss: 0.707489, acc: 0.501953] [A loss: 0.737229, acc: 0.429688]\n",
"3253: [D loss: 0.713417, acc: 0.529297] [A loss: 0.937192, acc: 0.109375]\n",
"3254: [D loss: 0.692287, acc: 0.533203] [A loss: 0.723896, acc: 0.453125]\n",
"3255: [D loss: 0.712489, acc: 0.527344] [A loss: 0.894830, acc: 0.164062]\n",
"3256: [D loss: 0.708765, acc: 0.492188] [A loss: 0.716652, acc: 0.460938]\n",
"3257: [D loss: 0.737780, acc: 0.488281] [A loss: 0.975496, acc: 0.097656]\n",
"3258: [D loss: 0.704018, acc: 0.513672] [A loss: 0.673247, acc: 0.558594]\n",
"3259: [D loss: 0.716836, acc: 0.548828] [A loss: 0.864675, acc: 0.175781]\n",
"3260: [D loss: 0.701135, acc: 0.517578] [A loss: 0.738870, acc: 0.402344]\n",
"3261: [D loss: 0.691409, acc: 0.546875] [A loss: 0.797849, acc: 0.308594]\n",
"3262: [D loss: 0.695633, acc: 0.535156] [A loss: 0.836204, acc: 0.253906]\n",
"3263: [D loss: 0.713803, acc: 0.509766] [A loss: 0.886783, acc: 0.187500]\n",
"3264: [D loss: 0.686295, acc: 0.560547] [A loss: 0.796896, acc: 0.324219]\n",
"3265: [D loss: 0.716630, acc: 0.519531] [A loss: 0.788844, acc: 0.320312]\n",
"3266: [D loss: 0.707290, acc: 0.513672] [A loss: 0.859202, acc: 0.207031]\n",
"3267: [D loss: 0.706458, acc: 0.521484] [A loss: 0.789653, acc: 0.332031]\n",
"3268: [D loss: 0.682105, acc: 0.539062] [A loss: 0.849645, acc: 0.226562]\n",
"3269: [D loss: 0.711295, acc: 0.500000] [A loss: 0.841740, acc: 0.214844]\n",
"3270: [D loss: 0.707852, acc: 0.523438] [A loss: 0.839117, acc: 0.234375]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3271: [D loss: 0.710932, acc: 0.507812] [A loss: 0.835024, acc: 0.230469]\n",
"3272: [D loss: 0.689967, acc: 0.558594] [A loss: 0.798530, acc: 0.308594]\n",
"3273: [D loss: 0.723899, acc: 0.498047] [A loss: 0.940482, acc: 0.113281]\n",
"3274: [D loss: 0.709383, acc: 0.494141] [A loss: 0.788646, acc: 0.320312]\n",
"3275: [D loss: 0.692101, acc: 0.550781] [A loss: 0.892466, acc: 0.207031]\n",
"3276: [D loss: 0.697214, acc: 0.541016] [A loss: 0.767849, acc: 0.359375]\n",
"3277: [D loss: 0.714737, acc: 0.505859] [A loss: 0.884061, acc: 0.187500]\n",
"3278: [D loss: 0.706218, acc: 0.505859] [A loss: 0.740344, acc: 0.437500]\n",
"3279: [D loss: 0.704109, acc: 0.494141] [A loss: 0.998048, acc: 0.097656]\n",
"3280: [D loss: 0.703268, acc: 0.511719] [A loss: 0.669083, acc: 0.585938]\n",
"3281: [D loss: 0.741690, acc: 0.515625] [A loss: 1.053635, acc: 0.050781]\n",
"3282: [D loss: 0.704576, acc: 0.511719] [A loss: 0.695955, acc: 0.515625]\n",
"3283: [D loss: 0.713870, acc: 0.541016] [A loss: 0.883316, acc: 0.152344]\n",
"3284: [D loss: 0.699226, acc: 0.541016] [A loss: 0.778308, acc: 0.328125]\n",
"3285: [D loss: 0.717891, acc: 0.511719] [A loss: 0.818555, acc: 0.273438]\n",
"3286: [D loss: 0.689749, acc: 0.531250] [A loss: 0.839887, acc: 0.210938]\n",
"3287: [D loss: 0.697320, acc: 0.539062] [A loss: 0.894698, acc: 0.191406]\n",
"3288: [D loss: 0.705730, acc: 0.511719] [A loss: 0.760088, acc: 0.375000]\n",
"3289: [D loss: 0.719354, acc: 0.513672] [A loss: 0.972325, acc: 0.101562]\n",
"3290: [D loss: 0.707192, acc: 0.482422] [A loss: 0.731576, acc: 0.437500]\n",
"3291: [D loss: 0.711764, acc: 0.523438] [A loss: 0.965196, acc: 0.136719]\n",
"3292: [D loss: 0.706358, acc: 0.507812] [A loss: 0.752345, acc: 0.414062]\n",
"3293: [D loss: 0.710745, acc: 0.503906] [A loss: 0.861326, acc: 0.164062]\n",
"3294: [D loss: 0.689190, acc: 0.548828] [A loss: 0.811181, acc: 0.316406]\n",
"3295: [D loss: 0.700187, acc: 0.527344] [A loss: 0.832187, acc: 0.242188]\n",
"3296: [D loss: 0.700394, acc: 0.515625] [A loss: 0.865685, acc: 0.183594]\n",
"3297: [D loss: 0.685274, acc: 0.544922] [A loss: 0.856131, acc: 0.214844]\n",
"3298: [D loss: 0.705251, acc: 0.513672] [A loss: 0.875328, acc: 0.191406]\n",
"3299: [D loss: 0.689834, acc: 0.552734] [A loss: 0.819516, acc: 0.281250]\n",
"3300: [D loss: 0.702669, acc: 0.521484] [A loss: 0.921810, acc: 0.113281]\n",
"3301: [D loss: 0.696471, acc: 0.541016] [A loss: 0.764701, acc: 0.371094]\n",
"3302: [D loss: 0.707175, acc: 0.500000] [A loss: 0.921673, acc: 0.128906]\n",
"3303: [D loss: 0.707302, acc: 0.511719] [A loss: 0.738152, acc: 0.433594]\n",
"3304: [D loss: 0.721499, acc: 0.492188] [A loss: 0.956867, acc: 0.089844]\n",
"3305: [D loss: 0.704138, acc: 0.523438] [A loss: 0.711202, acc: 0.457031]\n",
"3306: [D loss: 0.712864, acc: 0.537109] [A loss: 0.993188, acc: 0.074219]\n",
"3307: [D loss: 0.698784, acc: 0.535156] [A loss: 0.672689, acc: 0.597656]\n",
"3308: [D loss: 0.717031, acc: 0.503906] [A loss: 0.836735, acc: 0.246094]\n",
"3309: [D loss: 0.692196, acc: 0.554688] [A loss: 0.814581, acc: 0.230469]\n",
"3310: [D loss: 0.702066, acc: 0.527344] [A loss: 0.802947, acc: 0.296875]\n",
"3311: [D loss: 0.702120, acc: 0.525391] [A loss: 0.821388, acc: 0.269531]\n",
"3312: [D loss: 0.714219, acc: 0.492188] [A loss: 0.878306, acc: 0.152344]\n",
"3313: [D loss: 0.678770, acc: 0.576172] [A loss: 0.755808, acc: 0.371094]\n",
"3314: [D loss: 0.701846, acc: 0.529297] [A loss: 0.920860, acc: 0.144531]\n",
"3315: [D loss: 0.701369, acc: 0.525391] [A loss: 0.705528, acc: 0.468750]\n",
"3316: [D loss: 0.717030, acc: 0.515625] [A loss: 0.979225, acc: 0.054688]\n",
"3317: [D loss: 0.689267, acc: 0.562500] [A loss: 0.712690, acc: 0.464844]\n",
"3318: [D loss: 0.712172, acc: 0.533203] [A loss: 0.888740, acc: 0.167969]\n",
"3319: [D loss: 0.698016, acc: 0.527344] [A loss: 0.764202, acc: 0.343750]\n",
"3320: [D loss: 0.693605, acc: 0.552734] [A loss: 0.830110, acc: 0.253906]\n",
"3321: [D loss: 0.704681, acc: 0.484375] [A loss: 0.828543, acc: 0.273438]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3322: [D loss: 0.696467, acc: 0.525391] [A loss: 0.784886, acc: 0.332031]\n",
"3323: [D loss: 0.702937, acc: 0.529297] [A loss: 0.925557, acc: 0.140625]\n",
"3324: [D loss: 0.703285, acc: 0.505859] [A loss: 0.786973, acc: 0.300781]\n",
"3325: [D loss: 0.707136, acc: 0.527344] [A loss: 0.900280, acc: 0.144531]\n",
"3326: [D loss: 0.690489, acc: 0.525391] [A loss: 0.770577, acc: 0.367188]\n",
"3327: [D loss: 0.710674, acc: 0.500000] [A loss: 0.892362, acc: 0.160156]\n",
"3328: [D loss: 0.697724, acc: 0.525391] [A loss: 0.744808, acc: 0.386719]\n",
"3329: [D loss: 0.705954, acc: 0.517578] [A loss: 0.989920, acc: 0.097656]\n",
"3330: [D loss: 0.702680, acc: 0.507812] [A loss: 0.682527, acc: 0.519531]\n",
"3331: [D loss: 0.731495, acc: 0.505859] [A loss: 0.972575, acc: 0.082031]\n",
"3332: [D loss: 0.691337, acc: 0.544922] [A loss: 0.678681, acc: 0.582031]\n",
"3333: [D loss: 0.712187, acc: 0.503906] [A loss: 0.971129, acc: 0.132812]\n",
"3334: [D loss: 0.705996, acc: 0.509766] [A loss: 0.789168, acc: 0.304688]\n",
"3335: [D loss: 0.692725, acc: 0.556641] [A loss: 0.830144, acc: 0.246094]\n",
"3336: [D loss: 0.713840, acc: 0.494141] [A loss: 0.801367, acc: 0.285156]\n",
"3337: [D loss: 0.709310, acc: 0.501953] [A loss: 0.858381, acc: 0.187500]\n",
"3338: [D loss: 0.682900, acc: 0.578125] [A loss: 0.734998, acc: 0.429688]\n",
"3339: [D loss: 0.695112, acc: 0.550781] [A loss: 0.893475, acc: 0.160156]\n",
"3340: [D loss: 0.700278, acc: 0.542969] [A loss: 0.779203, acc: 0.335938]\n",
"3341: [D loss: 0.716246, acc: 0.478516] [A loss: 0.913801, acc: 0.128906]\n",
"3342: [D loss: 0.698736, acc: 0.533203] [A loss: 0.771651, acc: 0.335938]\n",
"3343: [D loss: 0.709555, acc: 0.507812] [A loss: 0.876965, acc: 0.164062]\n",
"3344: [D loss: 0.685214, acc: 0.548828] [A loss: 0.785998, acc: 0.347656]\n",
"3345: [D loss: 0.705121, acc: 0.507812] [A loss: 0.965044, acc: 0.105469]\n",
"3346: [D loss: 0.690363, acc: 0.535156] [A loss: 0.687161, acc: 0.492188]\n",
"3347: [D loss: 0.713549, acc: 0.503906] [A loss: 0.928210, acc: 0.113281]\n",
"3348: [D loss: 0.706810, acc: 0.529297] [A loss: 0.730423, acc: 0.414062]\n",
"3349: [D loss: 0.716057, acc: 0.521484] [A loss: 1.024437, acc: 0.027344]\n",
"3350: [D loss: 0.700845, acc: 0.529297] [A loss: 0.703841, acc: 0.468750]\n",
"3351: [D loss: 0.711377, acc: 0.527344] [A loss: 0.866082, acc: 0.199219]\n",
"3352: [D loss: 0.698376, acc: 0.537109] [A loss: 0.741702, acc: 0.437500]\n",
"3353: [D loss: 0.708861, acc: 0.517578] [A loss: 0.926784, acc: 0.136719]\n",
"3354: [D loss: 0.703505, acc: 0.480469] [A loss: 0.755754, acc: 0.386719]\n",
"3355: [D loss: 0.722063, acc: 0.494141] [A loss: 0.873484, acc: 0.175781]\n",
"3356: [D loss: 0.694415, acc: 0.550781] [A loss: 0.747347, acc: 0.417969]\n",
"3357: [D loss: 0.706447, acc: 0.507812] [A loss: 0.876097, acc: 0.171875]\n",
"3358: [D loss: 0.686976, acc: 0.546875] [A loss: 0.714174, acc: 0.453125]\n",
"3359: [D loss: 0.726744, acc: 0.507812] [A loss: 0.941686, acc: 0.105469]\n",
"3360: [D loss: 0.693059, acc: 0.541016] [A loss: 0.778702, acc: 0.324219]\n",
"3361: [D loss: 0.703124, acc: 0.537109] [A loss: 0.842600, acc: 0.218750]\n",
"3362: [D loss: 0.688586, acc: 0.539062] [A loss: 0.826088, acc: 0.257812]\n",
"3363: [D loss: 0.704466, acc: 0.517578] [A loss: 0.767533, acc: 0.367188]\n",
"3364: [D loss: 0.708715, acc: 0.521484] [A loss: 0.800624, acc: 0.285156]\n",
"3365: [D loss: 0.685640, acc: 0.546875] [A loss: 0.810282, acc: 0.332031]\n",
"3366: [D loss: 0.694251, acc: 0.537109] [A loss: 0.836903, acc: 0.199219]\n",
"3367: [D loss: 0.698144, acc: 0.531250] [A loss: 0.848952, acc: 0.214844]\n",
"3368: [D loss: 0.702093, acc: 0.511719] [A loss: 0.757628, acc: 0.363281]\n",
"3369: [D loss: 0.706475, acc: 0.521484] [A loss: 0.974476, acc: 0.105469]\n",
"3370: [D loss: 0.696131, acc: 0.546875] [A loss: 0.678766, acc: 0.574219]\n",
"3371: [D loss: 0.714452, acc: 0.500000] [A loss: 0.932269, acc: 0.113281]\n",
"3372: [D loss: 0.708247, acc: 0.535156] [A loss: 0.696256, acc: 0.527344]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3373: [D loss: 0.716966, acc: 0.515625] [A loss: 0.921717, acc: 0.121094]\n",
"3374: [D loss: 0.693065, acc: 0.533203] [A loss: 0.781277, acc: 0.292969]\n",
"3375: [D loss: 0.695194, acc: 0.531250] [A loss: 0.810457, acc: 0.285156]\n",
"3376: [D loss: 0.699882, acc: 0.519531] [A loss: 0.775507, acc: 0.339844]\n",
"3377: [D loss: 0.712968, acc: 0.515625] [A loss: 0.895175, acc: 0.175781]\n",
"3378: [D loss: 0.689106, acc: 0.537109] [A loss: 0.783704, acc: 0.320312]\n",
"3379: [D loss: 0.708463, acc: 0.531250] [A loss: 0.870600, acc: 0.207031]\n",
"3380: [D loss: 0.691246, acc: 0.539062] [A loss: 0.797383, acc: 0.296875]\n",
"3381: [D loss: 0.702995, acc: 0.496094] [A loss: 0.860181, acc: 0.222656]\n",
"3382: [D loss: 0.694970, acc: 0.535156] [A loss: 0.805768, acc: 0.316406]\n",
"3383: [D loss: 0.716779, acc: 0.507812] [A loss: 0.858580, acc: 0.246094]\n",
"3384: [D loss: 0.698539, acc: 0.509766] [A loss: 0.801155, acc: 0.320312]\n",
"3385: [D loss: 0.685677, acc: 0.554688] [A loss: 0.805884, acc: 0.281250]\n",
"3386: [D loss: 0.701497, acc: 0.523438] [A loss: 0.864507, acc: 0.179688]\n",
"3387: [D loss: 0.682951, acc: 0.566406] [A loss: 0.815043, acc: 0.277344]\n",
"3388: [D loss: 0.695881, acc: 0.529297] [A loss: 0.836120, acc: 0.265625]\n",
"3389: [D loss: 0.708172, acc: 0.472656] [A loss: 0.757345, acc: 0.386719]\n",
"3390: [D loss: 0.712132, acc: 0.505859] [A loss: 0.953651, acc: 0.101562]\n",
"3391: [D loss: 0.692252, acc: 0.523438] [A loss: 0.661213, acc: 0.589844]\n",
"3392: [D loss: 0.716121, acc: 0.507812] [A loss: 1.046642, acc: 0.054688]\n",
"3393: [D loss: 0.710706, acc: 0.539062] [A loss: 0.646478, acc: 0.632812]\n",
"3394: [D loss: 0.722341, acc: 0.513672] [A loss: 0.911023, acc: 0.125000]\n",
"3395: [D loss: 0.711459, acc: 0.496094] [A loss: 0.732236, acc: 0.421875]\n",
"3396: [D loss: 0.703826, acc: 0.484375] [A loss: 0.836654, acc: 0.226562]\n",
"3397: [D loss: 0.690409, acc: 0.515625] [A loss: 0.808855, acc: 0.296875]\n",
"3398: [D loss: 0.692349, acc: 0.531250] [A loss: 0.912033, acc: 0.144531]\n",
"3399: [D loss: 0.696659, acc: 0.527344] [A loss: 0.721071, acc: 0.468750]\n",
"3400: [D loss: 0.721429, acc: 0.500000] [A loss: 0.928123, acc: 0.125000]\n",
"3401: [D loss: 0.701959, acc: 0.537109] [A loss: 0.744246, acc: 0.390625]\n",
"3402: [D loss: 0.701561, acc: 0.560547] [A loss: 0.920722, acc: 0.125000]\n",
"3403: [D loss: 0.688578, acc: 0.535156] [A loss: 0.757684, acc: 0.386719]\n",
"3404: [D loss: 0.700448, acc: 0.544922] [A loss: 0.837769, acc: 0.210938]\n",
"3405: [D loss: 0.691771, acc: 0.527344] [A loss: 0.836468, acc: 0.226562]\n",
"3406: [D loss: 0.693543, acc: 0.535156] [A loss: 0.781984, acc: 0.328125]\n",
"3407: [D loss: 0.697347, acc: 0.519531] [A loss: 0.880092, acc: 0.167969]\n",
"3408: [D loss: 0.689190, acc: 0.552734] [A loss: 0.778368, acc: 0.406250]\n",
"3409: [D loss: 0.700885, acc: 0.503906] [A loss: 0.935031, acc: 0.128906]\n",
"3410: [D loss: 0.689345, acc: 0.546875] [A loss: 0.748557, acc: 0.402344]\n",
"3411: [D loss: 0.702018, acc: 0.542969] [A loss: 0.949057, acc: 0.152344]\n",
"3412: [D loss: 0.693834, acc: 0.544922] [A loss: 0.708083, acc: 0.500000]\n",
"3413: [D loss: 0.722830, acc: 0.507812] [A loss: 0.995662, acc: 0.089844]\n",
"3414: [D loss: 0.696223, acc: 0.556641] [A loss: 0.728967, acc: 0.449219]\n",
"3415: [D loss: 0.712301, acc: 0.501953] [A loss: 0.856093, acc: 0.214844]\n",
"3416: [D loss: 0.697559, acc: 0.537109] [A loss: 0.816873, acc: 0.277344]\n",
"3417: [D loss: 0.705318, acc: 0.501953] [A loss: 0.838238, acc: 0.250000]\n",
"3418: [D loss: 0.699223, acc: 0.503906] [A loss: 0.817325, acc: 0.277344]\n",
"3419: [D loss: 0.703948, acc: 0.509766] [A loss: 0.856054, acc: 0.222656]\n",
"3420: [D loss: 0.724445, acc: 0.490234] [A loss: 0.847341, acc: 0.226562]\n",
"3421: [D loss: 0.685946, acc: 0.521484] [A loss: 0.799099, acc: 0.296875]\n",
"3422: [D loss: 0.707382, acc: 0.505859] [A loss: 0.855574, acc: 0.226562]\n",
"3423: [D loss: 0.694825, acc: 0.541016] [A loss: 0.778863, acc: 0.351562]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3424: [D loss: 0.703374, acc: 0.507812] [A loss: 0.844244, acc: 0.218750]\n",
"3425: [D loss: 0.683637, acc: 0.552734] [A loss: 0.766124, acc: 0.378906]\n",
"3426: [D loss: 0.731581, acc: 0.488281] [A loss: 1.027832, acc: 0.074219]\n",
"3427: [D loss: 0.706354, acc: 0.531250] [A loss: 0.657508, acc: 0.628906]\n",
"3428: [D loss: 0.730395, acc: 0.494141] [A loss: 0.865044, acc: 0.210938]\n",
"3429: [D loss: 0.681557, acc: 0.566406] [A loss: 0.784808, acc: 0.359375]\n",
"3430: [D loss: 0.706289, acc: 0.505859] [A loss: 0.983459, acc: 0.066406]\n",
"3431: [D loss: 0.700932, acc: 0.523438] [A loss: 0.664384, acc: 0.605469]\n",
"3432: [D loss: 0.737343, acc: 0.519531] [A loss: 0.994656, acc: 0.089844]\n",
"3433: [D loss: 0.690970, acc: 0.564453] [A loss: 0.722089, acc: 0.472656]\n",
"3434: [D loss: 0.723406, acc: 0.494141] [A loss: 0.892177, acc: 0.203125]\n",
"3435: [D loss: 0.701140, acc: 0.501953] [A loss: 0.754637, acc: 0.378906]\n",
"3436: [D loss: 0.709727, acc: 0.523438] [A loss: 0.894500, acc: 0.175781]\n",
"3437: [D loss: 0.704637, acc: 0.496094] [A loss: 0.735503, acc: 0.445312]\n",
"3438: [D loss: 0.716105, acc: 0.494141] [A loss: 0.947556, acc: 0.121094]\n",
"3439: [D loss: 0.694736, acc: 0.525391] [A loss: 0.755447, acc: 0.390625]\n",
"3440: [D loss: 0.712540, acc: 0.509766] [A loss: 0.883536, acc: 0.210938]\n",
"3441: [D loss: 0.704018, acc: 0.521484] [A loss: 0.780864, acc: 0.378906]\n",
"3442: [D loss: 0.694675, acc: 0.515625] [A loss: 0.856346, acc: 0.214844]\n",
"3443: [D loss: 0.688672, acc: 0.537109] [A loss: 0.825831, acc: 0.261719]\n",
"3444: [D loss: 0.698308, acc: 0.535156] [A loss: 0.836662, acc: 0.273438]\n",
"3445: [D loss: 0.709759, acc: 0.509766] [A loss: 0.820025, acc: 0.253906]\n",
"3446: [D loss: 0.709155, acc: 0.503906] [A loss: 0.883663, acc: 0.156250]\n",
"3447: [D loss: 0.691190, acc: 0.541016] [A loss: 0.835484, acc: 0.261719]\n",
"3448: [D loss: 0.707921, acc: 0.517578] [A loss: 0.825344, acc: 0.250000]\n",
"3449: [D loss: 0.692843, acc: 0.548828] [A loss: 0.849785, acc: 0.199219]\n",
"3450: [D loss: 0.706121, acc: 0.517578] [A loss: 0.834610, acc: 0.238281]\n",
"3451: [D loss: 0.685342, acc: 0.548828] [A loss: 0.808695, acc: 0.285156]\n",
"3452: [D loss: 0.704046, acc: 0.527344] [A loss: 0.879939, acc: 0.167969]\n",
"3453: [D loss: 0.692551, acc: 0.550781] [A loss: 0.905263, acc: 0.156250]\n",
"3454: [D loss: 0.692340, acc: 0.548828] [A loss: 0.784070, acc: 0.324219]\n",
"3455: [D loss: 0.699821, acc: 0.546875] [A loss: 0.902649, acc: 0.160156]\n",
"3456: [D loss: 0.686453, acc: 0.544922] [A loss: 0.708024, acc: 0.488281]\n",
"3457: [D loss: 0.729888, acc: 0.501953] [A loss: 1.040285, acc: 0.054688]\n",
"3458: [D loss: 0.700069, acc: 0.539062] [A loss: 0.703553, acc: 0.507812]\n",
"3459: [D loss: 0.732875, acc: 0.511719] [A loss: 1.026790, acc: 0.070312]\n",
"3460: [D loss: 0.692624, acc: 0.544922] [A loss: 0.661714, acc: 0.589844]\n",
"3461: [D loss: 0.730682, acc: 0.494141] [A loss: 0.925943, acc: 0.125000]\n",
"3462: [D loss: 0.697381, acc: 0.548828] [A loss: 0.683356, acc: 0.554688]\n",
"3463: [D loss: 0.718222, acc: 0.509766] [A loss: 0.829687, acc: 0.238281]\n",
"3464: [D loss: 0.707694, acc: 0.490234] [A loss: 0.723254, acc: 0.468750]\n",
"3465: [D loss: 0.724571, acc: 0.505859] [A loss: 0.952954, acc: 0.128906]\n",
"3466: [D loss: 0.710736, acc: 0.486328] [A loss: 0.789199, acc: 0.320312]\n",
"3467: [D loss: 0.700370, acc: 0.531250] [A loss: 0.884391, acc: 0.175781]\n",
"3468: [D loss: 0.706863, acc: 0.472656] [A loss: 0.769543, acc: 0.347656]\n",
"3469: [D loss: 0.706263, acc: 0.521484] [A loss: 0.840096, acc: 0.265625]\n",
"3470: [D loss: 0.693707, acc: 0.548828] [A loss: 0.726791, acc: 0.449219]\n",
"3471: [D loss: 0.698390, acc: 0.541016] [A loss: 0.902298, acc: 0.171875]\n",
"3472: [D loss: 0.687106, acc: 0.566406] [A loss: 0.736650, acc: 0.468750]\n",
"3473: [D loss: 0.720912, acc: 0.505859] [A loss: 0.909615, acc: 0.093750]\n",
"3474: [D loss: 0.692301, acc: 0.539062] [A loss: 0.783173, acc: 0.316406]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3475: [D loss: 0.709654, acc: 0.501953] [A loss: 0.924642, acc: 0.101562]\n",
"3476: [D loss: 0.690153, acc: 0.527344] [A loss: 0.726682, acc: 0.457031]\n",
"3477: [D loss: 0.722277, acc: 0.494141] [A loss: 1.021166, acc: 0.089844]\n",
"3478: [D loss: 0.693064, acc: 0.542969] [A loss: 0.664669, acc: 0.601562]\n",
"3479: [D loss: 0.736005, acc: 0.500000] [A loss: 0.909576, acc: 0.183594]\n",
"3480: [D loss: 0.704630, acc: 0.515625] [A loss: 0.779536, acc: 0.335938]\n",
"3481: [D loss: 0.692167, acc: 0.556641] [A loss: 0.867487, acc: 0.199219]\n",
"3482: [D loss: 0.677420, acc: 0.585938] [A loss: 0.746217, acc: 0.425781]\n",
"3483: [D loss: 0.714831, acc: 0.507812] [A loss: 0.872578, acc: 0.210938]\n",
"3484: [D loss: 0.699542, acc: 0.511719] [A loss: 0.817353, acc: 0.242188]\n",
"3485: [D loss: 0.676229, acc: 0.583984] [A loss: 0.782604, acc: 0.351562]\n",
"3486: [D loss: 0.710083, acc: 0.484375] [A loss: 0.804253, acc: 0.285156]\n",
"3487: [D loss: 0.700556, acc: 0.529297] [A loss: 0.939034, acc: 0.132812]\n",
"3488: [D loss: 0.701340, acc: 0.511719] [A loss: 0.750907, acc: 0.386719]\n",
"3489: [D loss: 0.694926, acc: 0.535156] [A loss: 0.854832, acc: 0.183594]\n",
"3490: [D loss: 0.695842, acc: 0.541016] [A loss: 0.808145, acc: 0.316406]\n",
"3491: [D loss: 0.691946, acc: 0.517578] [A loss: 0.757847, acc: 0.375000]\n",
"3492: [D loss: 0.706837, acc: 0.519531] [A loss: 0.893940, acc: 0.171875]\n",
"3493: [D loss: 0.700050, acc: 0.501953] [A loss: 0.829630, acc: 0.265625]\n",
"3494: [D loss: 0.712734, acc: 0.486328] [A loss: 0.971925, acc: 0.097656]\n",
"3495: [D loss: 0.704535, acc: 0.511719] [A loss: 0.757013, acc: 0.398438]\n",
"3496: [D loss: 0.715548, acc: 0.503906] [A loss: 0.909572, acc: 0.175781]\n",
"3497: [D loss: 0.704101, acc: 0.519531] [A loss: 0.696458, acc: 0.519531]\n",
"3498: [D loss: 0.712071, acc: 0.501953] [A loss: 0.940351, acc: 0.109375]\n",
"3499: [D loss: 0.695290, acc: 0.533203] [A loss: 0.692807, acc: 0.550781]\n",
"3500: [D loss: 0.714121, acc: 0.548828] [A loss: 1.022050, acc: 0.082031]\n",
"3501: [D loss: 0.711572, acc: 0.498047] [A loss: 0.653982, acc: 0.636719]\n",
"3502: [D loss: 0.732388, acc: 0.503906] [A loss: 0.994391, acc: 0.070312]\n",
"3503: [D loss: 0.708165, acc: 0.484375] [A loss: 0.747133, acc: 0.378906]\n",
"3504: [D loss: 0.706920, acc: 0.525391] [A loss: 0.849805, acc: 0.250000]\n",
"3505: [D loss: 0.689993, acc: 0.537109] [A loss: 0.807234, acc: 0.296875]\n",
"3506: [D loss: 0.713885, acc: 0.507812] [A loss: 0.806000, acc: 0.253906]\n",
"3507: [D loss: 0.708279, acc: 0.511719] [A loss: 0.835857, acc: 0.234375]\n",
"3508: [D loss: 0.695526, acc: 0.560547] [A loss: 0.820774, acc: 0.265625]\n",
"3509: [D loss: 0.699068, acc: 0.546875] [A loss: 0.777460, acc: 0.351562]\n",
"3510: [D loss: 0.701708, acc: 0.552734] [A loss: 1.052204, acc: 0.078125]\n",
"3511: [D loss: 0.710781, acc: 0.519531] [A loss: 0.662028, acc: 0.617188]\n",
"3512: [D loss: 0.723334, acc: 0.496094] [A loss: 0.929487, acc: 0.128906]\n",
"3513: [D loss: 0.705164, acc: 0.515625] [A loss: 0.724016, acc: 0.429688]\n",
"3514: [D loss: 0.708600, acc: 0.511719] [A loss: 0.886843, acc: 0.175781]\n",
"3515: [D loss: 0.708175, acc: 0.503906] [A loss: 0.712252, acc: 0.468750]\n",
"3516: [D loss: 0.717786, acc: 0.519531] [A loss: 0.940942, acc: 0.121094]\n",
"3517: [D loss: 0.684358, acc: 0.566406] [A loss: 0.706758, acc: 0.484375]\n",
"3518: [D loss: 0.699999, acc: 0.529297] [A loss: 0.890161, acc: 0.164062]\n",
"3519: [D loss: 0.704227, acc: 0.509766] [A loss: 0.789101, acc: 0.265625]\n",
"3520: [D loss: 0.692045, acc: 0.539062] [A loss: 0.877643, acc: 0.179688]\n",
"3521: [D loss: 0.698036, acc: 0.546875] [A loss: 0.768809, acc: 0.335938]\n",
"3522: [D loss: 0.711590, acc: 0.513672] [A loss: 0.913013, acc: 0.136719]\n",
"3523: [D loss: 0.699705, acc: 0.498047] [A loss: 0.743350, acc: 0.437500]\n",
"3524: [D loss: 0.728187, acc: 0.527344] [A loss: 1.030400, acc: 0.085938]\n",
"3525: [D loss: 0.695971, acc: 0.542969] [A loss: 0.679555, acc: 0.566406]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3526: [D loss: 0.714973, acc: 0.511719] [A loss: 0.910014, acc: 0.148438]\n",
"3527: [D loss: 0.679674, acc: 0.562500] [A loss: 0.736301, acc: 0.429688]\n",
"3528: [D loss: 0.711250, acc: 0.513672] [A loss: 0.895926, acc: 0.164062]\n",
"3529: [D loss: 0.692490, acc: 0.548828] [A loss: 0.735673, acc: 0.441406]\n",
"3530: [D loss: 0.707826, acc: 0.486328] [A loss: 0.922717, acc: 0.125000]\n",
"3531: [D loss: 0.690783, acc: 0.556641] [A loss: 0.649578, acc: 0.625000]\n",
"3532: [D loss: 0.730303, acc: 0.509766] [A loss: 1.007034, acc: 0.062500]\n",
"3533: [D loss: 0.687959, acc: 0.535156] [A loss: 0.705638, acc: 0.492188]\n",
"3534: [D loss: 0.710872, acc: 0.509766] [A loss: 0.824511, acc: 0.269531]\n",
"3535: [D loss: 0.706883, acc: 0.525391] [A loss: 0.728057, acc: 0.437500]\n",
"3536: [D loss: 0.706064, acc: 0.509766] [A loss: 0.851320, acc: 0.261719]\n",
"3537: [D loss: 0.705313, acc: 0.478516] [A loss: 0.776716, acc: 0.351562]\n",
"3538: [D loss: 0.703538, acc: 0.525391] [A loss: 0.877158, acc: 0.167969]\n",
"3539: [D loss: 0.686660, acc: 0.558594] [A loss: 0.799397, acc: 0.289062]\n",
"3540: [D loss: 0.697907, acc: 0.542969] [A loss: 0.825029, acc: 0.246094]\n",
"3541: [D loss: 0.689361, acc: 0.539062] [A loss: 0.761915, acc: 0.355469]\n",
"3542: [D loss: 0.696977, acc: 0.560547] [A loss: 0.879682, acc: 0.191406]\n",
"3543: [D loss: 0.716684, acc: 0.500000] [A loss: 0.819987, acc: 0.281250]\n",
"3544: [D loss: 0.686157, acc: 0.548828] [A loss: 0.812329, acc: 0.292969]\n",
"3545: [D loss: 0.701176, acc: 0.542969] [A loss: 0.847337, acc: 0.222656]\n",
"3546: [D loss: 0.699771, acc: 0.523438] [A loss: 0.872898, acc: 0.179688]\n",
"3547: [D loss: 0.705407, acc: 0.517578] [A loss: 0.827012, acc: 0.281250]\n",
"3548: [D loss: 0.680997, acc: 0.546875] [A loss: 0.882682, acc: 0.164062]\n",
"3549: [D loss: 0.693224, acc: 0.525391] [A loss: 0.805280, acc: 0.273438]\n",
"3550: [D loss: 0.695721, acc: 0.541016] [A loss: 0.910474, acc: 0.183594]\n",
"3551: [D loss: 0.706154, acc: 0.535156] [A loss: 0.727736, acc: 0.429688]\n",
"3552: [D loss: 0.723247, acc: 0.500000] [A loss: 0.935524, acc: 0.117188]\n",
"3553: [D loss: 0.687814, acc: 0.570312] [A loss: 0.723282, acc: 0.480469]\n",
"3554: [D loss: 0.725680, acc: 0.505859] [A loss: 0.988824, acc: 0.066406]\n",
"3555: [D loss: 0.696524, acc: 0.527344] [A loss: 0.655775, acc: 0.625000]\n",
"3556: [D loss: 0.732467, acc: 0.509766] [A loss: 1.033028, acc: 0.062500]\n",
"3557: [D loss: 0.698187, acc: 0.519531] [A loss: 0.670844, acc: 0.546875]\n",
"3558: [D loss: 0.737701, acc: 0.503906] [A loss: 0.908656, acc: 0.167969]\n",
"3559: [D loss: 0.685368, acc: 0.560547] [A loss: 0.760533, acc: 0.363281]\n",
"3560: [D loss: 0.701276, acc: 0.515625] [A loss: 0.844178, acc: 0.210938]\n",
"3561: [D loss: 0.697152, acc: 0.550781] [A loss: 0.804955, acc: 0.257812]\n",
"3562: [D loss: 0.703851, acc: 0.521484] [A loss: 0.813463, acc: 0.269531]\n",
"3563: [D loss: 0.703678, acc: 0.517578] [A loss: 0.837295, acc: 0.226562]\n",
"3564: [D loss: 0.701611, acc: 0.500000] [A loss: 0.805759, acc: 0.285156]\n",
"3565: [D loss: 0.695212, acc: 0.539062] [A loss: 0.861678, acc: 0.191406]\n",
"3566: [D loss: 0.700937, acc: 0.519531] [A loss: 0.856899, acc: 0.187500]\n",
"3567: [D loss: 0.696949, acc: 0.527344] [A loss: 0.805835, acc: 0.308594]\n",
"3568: [D loss: 0.692312, acc: 0.537109] [A loss: 0.794651, acc: 0.328125]\n",
"3569: [D loss: 0.707965, acc: 0.513672] [A loss: 0.962082, acc: 0.097656]\n",
"3570: [D loss: 0.685722, acc: 0.554688] [A loss: 0.704277, acc: 0.472656]\n",
"3571: [D loss: 0.715602, acc: 0.529297] [A loss: 1.027728, acc: 0.085938]\n",
"3572: [D loss: 0.713835, acc: 0.523438] [A loss: 0.671364, acc: 0.570312]\n",
"3573: [D loss: 0.734432, acc: 0.500000] [A loss: 1.010217, acc: 0.089844]\n",
"3574: [D loss: 0.701930, acc: 0.509766] [A loss: 0.675270, acc: 0.589844]\n",
"3575: [D loss: 0.717510, acc: 0.511719] [A loss: 0.895083, acc: 0.210938]\n",
"3576: [D loss: 0.699181, acc: 0.509766] [A loss: 0.731422, acc: 0.445312]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3577: [D loss: 0.722164, acc: 0.511719] [A loss: 0.912695, acc: 0.121094]\n",
"3578: [D loss: 0.697376, acc: 0.531250] [A loss: 0.727021, acc: 0.453125]\n",
"3579: [D loss: 0.715341, acc: 0.519531] [A loss: 0.906551, acc: 0.125000]\n",
"3580: [D loss: 0.687680, acc: 0.541016] [A loss: 0.752895, acc: 0.359375]\n",
"3581: [D loss: 0.700964, acc: 0.515625] [A loss: 0.891166, acc: 0.164062]\n",
"3582: [D loss: 0.694728, acc: 0.525391] [A loss: 0.747936, acc: 0.402344]\n",
"3583: [D loss: 0.710317, acc: 0.523438] [A loss: 0.820680, acc: 0.300781]\n",
"3584: [D loss: 0.691822, acc: 0.539062] [A loss: 0.801575, acc: 0.296875]\n",
"3585: [D loss: 0.702852, acc: 0.529297] [A loss: 0.891600, acc: 0.144531]\n",
"3586: [D loss: 0.674687, acc: 0.591797] [A loss: 0.771954, acc: 0.328125]\n",
"3587: [D loss: 0.710040, acc: 0.511719] [A loss: 0.924399, acc: 0.121094]\n",
"3588: [D loss: 0.700251, acc: 0.517578] [A loss: 0.726312, acc: 0.453125]\n",
"3589: [D loss: 0.719287, acc: 0.496094] [A loss: 0.974879, acc: 0.074219]\n",
"3590: [D loss: 0.710322, acc: 0.511719] [A loss: 0.724043, acc: 0.457031]\n",
"3591: [D loss: 0.701495, acc: 0.496094] [A loss: 0.894250, acc: 0.140625]\n",
"3592: [D loss: 0.690124, acc: 0.537109] [A loss: 0.762935, acc: 0.351562]\n",
"3593: [D loss: 0.702950, acc: 0.560547] [A loss: 0.858414, acc: 0.222656]\n",
"3594: [D loss: 0.694023, acc: 0.541016] [A loss: 0.808533, acc: 0.304688]\n",
"3595: [D loss: 0.687761, acc: 0.568359] [A loss: 0.878456, acc: 0.230469]\n",
"3596: [D loss: 0.692474, acc: 0.562500] [A loss: 0.775685, acc: 0.328125]\n",
"3597: [D loss: 0.694712, acc: 0.539062] [A loss: 0.832938, acc: 0.273438]\n",
"3598: [D loss: 0.692041, acc: 0.548828] [A loss: 0.854356, acc: 0.199219]\n",
"3599: [D loss: 0.702214, acc: 0.541016] [A loss: 0.806302, acc: 0.308594]\n",
"3600: [D loss: 0.706474, acc: 0.533203] [A loss: 0.827856, acc: 0.257812]\n",
"3601: [D loss: 0.705081, acc: 0.500000] [A loss: 0.766307, acc: 0.328125]\n",
"3602: [D loss: 0.700231, acc: 0.531250] [A loss: 1.003200, acc: 0.046875]\n",
"3603: [D loss: 0.686259, acc: 0.572266] [A loss: 0.644757, acc: 0.636719]\n",
"3604: [D loss: 0.727559, acc: 0.498047] [A loss: 1.039462, acc: 0.074219]\n",
"3605: [D loss: 0.701872, acc: 0.525391] [A loss: 0.693035, acc: 0.511719]\n",
"3606: [D loss: 0.714230, acc: 0.519531] [A loss: 0.916019, acc: 0.136719]\n",
"3607: [D loss: 0.695717, acc: 0.511719] [A loss: 0.783284, acc: 0.382812]\n",
"3608: [D loss: 0.699419, acc: 0.535156] [A loss: 0.846899, acc: 0.222656]\n",
"3609: [D loss: 0.695260, acc: 0.511719] [A loss: 0.774116, acc: 0.367188]\n",
"3610: [D loss: 0.682558, acc: 0.552734] [A loss: 0.870211, acc: 0.187500]\n",
"3611: [D loss: 0.688985, acc: 0.523438] [A loss: 0.739094, acc: 0.410156]\n",
"3612: [D loss: 0.723345, acc: 0.509766] [A loss: 0.935964, acc: 0.128906]\n",
"3613: [D loss: 0.700990, acc: 0.521484] [A loss: 0.732482, acc: 0.410156]\n",
"3614: [D loss: 0.709475, acc: 0.513672] [A loss: 0.890512, acc: 0.171875]\n",
"3615: [D loss: 0.684197, acc: 0.554688] [A loss: 0.701680, acc: 0.468750]\n",
"3616: [D loss: 0.720338, acc: 0.505859] [A loss: 1.005457, acc: 0.078125]\n",
"3617: [D loss: 0.681682, acc: 0.576172] [A loss: 0.677513, acc: 0.558594]\n",
"3618: [D loss: 0.720756, acc: 0.517578] [A loss: 0.976589, acc: 0.113281]\n",
"3619: [D loss: 0.698967, acc: 0.517578] [A loss: 0.680660, acc: 0.539062]\n",
"3620: [D loss: 0.707174, acc: 0.500000] [A loss: 0.914749, acc: 0.156250]\n",
"3621: [D loss: 0.699399, acc: 0.544922] [A loss: 0.764524, acc: 0.343750]\n",
"3622: [D loss: 0.709491, acc: 0.529297] [A loss: 0.843306, acc: 0.214844]\n",
"3623: [D loss: 0.698516, acc: 0.548828] [A loss: 0.810465, acc: 0.277344]\n",
"3624: [D loss: 0.705497, acc: 0.507812] [A loss: 0.834970, acc: 0.210938]\n",
"3625: [D loss: 0.701534, acc: 0.513672] [A loss: 0.832924, acc: 0.257812]\n",
"3626: [D loss: 0.707180, acc: 0.521484] [A loss: 0.860627, acc: 0.207031]\n",
"3627: [D loss: 0.716285, acc: 0.470703] [A loss: 0.841145, acc: 0.226562]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3628: [D loss: 0.703873, acc: 0.505859] [A loss: 0.901739, acc: 0.144531]\n",
"3629: [D loss: 0.693369, acc: 0.523438] [A loss: 0.743744, acc: 0.414062]\n",
"3630: [D loss: 0.715634, acc: 0.517578] [A loss: 1.025369, acc: 0.074219]\n",
"3631: [D loss: 0.686719, acc: 0.537109] [A loss: 0.727297, acc: 0.402344]\n",
"3632: [D loss: 0.702187, acc: 0.533203] [A loss: 0.902389, acc: 0.164062]\n",
"3633: [D loss: 0.698351, acc: 0.523438] [A loss: 0.742768, acc: 0.429688]\n",
"3634: [D loss: 0.721890, acc: 0.500000] [A loss: 0.914263, acc: 0.140625]\n",
"3635: [D loss: 0.675954, acc: 0.562500] [A loss: 0.700464, acc: 0.503906]\n",
"3636: [D loss: 0.730301, acc: 0.521484] [A loss: 0.916810, acc: 0.121094]\n",
"3637: [D loss: 0.678587, acc: 0.591797] [A loss: 0.755647, acc: 0.378906]\n",
"3638: [D loss: 0.703281, acc: 0.548828] [A loss: 0.944423, acc: 0.148438]\n",
"3639: [D loss: 0.691335, acc: 0.542969] [A loss: 0.723628, acc: 0.429688]\n",
"3640: [D loss: 0.705934, acc: 0.531250] [A loss: 0.869022, acc: 0.195312]\n",
"3641: [D loss: 0.699423, acc: 0.541016] [A loss: 0.759900, acc: 0.375000]\n",
"3642: [D loss: 0.691875, acc: 0.531250] [A loss: 0.879222, acc: 0.144531]\n",
"3643: [D loss: 0.696411, acc: 0.517578] [A loss: 0.790155, acc: 0.277344]\n",
"3644: [D loss: 0.691119, acc: 0.537109] [A loss: 0.810120, acc: 0.304688]\n",
"3645: [D loss: 0.703409, acc: 0.535156] [A loss: 0.867604, acc: 0.207031]\n",
"3646: [D loss: 0.700203, acc: 0.521484] [A loss: 0.800595, acc: 0.304688]\n",
"3647: [D loss: 0.716893, acc: 0.511719] [A loss: 0.945325, acc: 0.148438]\n",
"3648: [D loss: 0.696319, acc: 0.533203] [A loss: 0.709590, acc: 0.527344]\n",
"3649: [D loss: 0.728268, acc: 0.501953] [A loss: 0.999067, acc: 0.074219]\n",
"3650: [D loss: 0.708506, acc: 0.519531] [A loss: 0.702953, acc: 0.531250]\n",
"3651: [D loss: 0.708028, acc: 0.525391] [A loss: 0.945023, acc: 0.136719]\n",
"3652: [D loss: 0.681796, acc: 0.566406] [A loss: 0.716358, acc: 0.460938]\n",
"3653: [D loss: 0.716878, acc: 0.496094] [A loss: 0.927061, acc: 0.128906]\n",
"3654: [D loss: 0.700245, acc: 0.521484] [A loss: 0.705268, acc: 0.500000]\n",
"3655: [D loss: 0.721862, acc: 0.494141] [A loss: 0.930836, acc: 0.136719]\n",
"3656: [D loss: 0.695285, acc: 0.525391] [A loss: 0.712007, acc: 0.496094]\n",
"3657: [D loss: 0.721581, acc: 0.511719] [A loss: 0.932806, acc: 0.121094]\n",
"3658: [D loss: 0.688994, acc: 0.554688] [A loss: 0.706384, acc: 0.527344]\n",
"3659: [D loss: 0.710725, acc: 0.529297] [A loss: 0.836697, acc: 0.261719]\n",
"3660: [D loss: 0.701265, acc: 0.509766] [A loss: 0.738856, acc: 0.406250]\n",
"3661: [D loss: 0.702787, acc: 0.546875] [A loss: 0.838088, acc: 0.250000]\n",
"3662: [D loss: 0.694929, acc: 0.535156] [A loss: 0.816923, acc: 0.257812]\n",
"3663: [D loss: 0.695662, acc: 0.544922] [A loss: 0.844091, acc: 0.261719]\n",
"3664: [D loss: 0.698869, acc: 0.523438] [A loss: 0.793780, acc: 0.355469]\n",
"3665: [D loss: 0.697470, acc: 0.509766] [A loss: 0.861631, acc: 0.203125]\n",
"3666: [D loss: 0.702177, acc: 0.500000] [A loss: 0.793595, acc: 0.343750]\n",
"3667: [D loss: 0.702603, acc: 0.531250] [A loss: 0.827120, acc: 0.269531]\n",
"3668: [D loss: 0.692409, acc: 0.562500] [A loss: 0.832040, acc: 0.289062]\n",
"3669: [D loss: 0.705651, acc: 0.509766] [A loss: 0.835093, acc: 0.222656]\n",
"3670: [D loss: 0.702805, acc: 0.531250] [A loss: 1.025929, acc: 0.070312]\n",
"3671: [D loss: 0.685690, acc: 0.558594] [A loss: 0.634096, acc: 0.660156]\n",
"3672: [D loss: 0.712526, acc: 0.515625] [A loss: 1.034095, acc: 0.082031]\n",
"3673: [D loss: 0.728131, acc: 0.464844] [A loss: 0.786932, acc: 0.320312]\n",
"3674: [D loss: 0.691496, acc: 0.558594] [A loss: 0.817124, acc: 0.242188]\n",
"3675: [D loss: 0.692368, acc: 0.548828] [A loss: 0.817795, acc: 0.265625]\n",
"3676: [D loss: 0.693510, acc: 0.544922] [A loss: 0.811676, acc: 0.296875]\n",
"3677: [D loss: 0.694600, acc: 0.535156] [A loss: 0.810611, acc: 0.269531]\n",
"3678: [D loss: 0.687536, acc: 0.531250] [A loss: 0.831452, acc: 0.277344]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3679: [D loss: 0.706195, acc: 0.484375] [A loss: 0.829374, acc: 0.292969]\n",
"3680: [D loss: 0.696168, acc: 0.531250] [A loss: 0.796947, acc: 0.316406]\n",
"3681: [D loss: 0.708799, acc: 0.519531] [A loss: 0.926935, acc: 0.140625]\n",
"3682: [D loss: 0.696737, acc: 0.517578] [A loss: 0.701925, acc: 0.511719]\n",
"3683: [D loss: 0.705763, acc: 0.523438] [A loss: 0.985795, acc: 0.117188]\n",
"3684: [D loss: 0.695122, acc: 0.527344] [A loss: 0.670340, acc: 0.585938]\n",
"3685: [D loss: 0.708143, acc: 0.533203] [A loss: 0.871970, acc: 0.195312]\n",
"3686: [D loss: 0.701916, acc: 0.535156] [A loss: 0.754879, acc: 0.417969]\n",
"3687: [D loss: 0.710099, acc: 0.519531] [A loss: 0.956784, acc: 0.085938]\n",
"3688: [D loss: 0.700809, acc: 0.496094] [A loss: 0.706827, acc: 0.476562]\n",
"3689: [D loss: 0.730466, acc: 0.503906] [A loss: 0.962538, acc: 0.101562]\n",
"3690: [D loss: 0.704227, acc: 0.501953] [A loss: 0.723640, acc: 0.437500]\n",
"3691: [D loss: 0.720008, acc: 0.529297] [A loss: 0.913736, acc: 0.171875]\n",
"3692: [D loss: 0.698038, acc: 0.505859] [A loss: 0.790289, acc: 0.328125]\n",
"3693: [D loss: 0.703323, acc: 0.517578] [A loss: 0.801462, acc: 0.265625]\n",
"3694: [D loss: 0.700387, acc: 0.523438] [A loss: 0.881144, acc: 0.152344]\n",
"3695: [D loss: 0.679857, acc: 0.572266] [A loss: 0.734233, acc: 0.449219]\n",
"3696: [D loss: 0.704263, acc: 0.527344] [A loss: 0.857242, acc: 0.203125]\n",
"3697: [D loss: 0.708937, acc: 0.505859] [A loss: 0.743707, acc: 0.421875]\n",
"3698: [D loss: 0.700003, acc: 0.564453] [A loss: 0.947092, acc: 0.117188]\n",
"3699: [D loss: 0.690772, acc: 0.521484] [A loss: 0.779989, acc: 0.335938]\n",
"3700: [D loss: 0.697859, acc: 0.515625] [A loss: 0.794051, acc: 0.300781]\n",
"3701: [D loss: 0.689701, acc: 0.541016] [A loss: 0.842534, acc: 0.218750]\n",
"3702: [D loss: 0.687144, acc: 0.544922] [A loss: 0.837613, acc: 0.250000]\n",
"3703: [D loss: 0.703586, acc: 0.523438] [A loss: 0.931448, acc: 0.167969]\n",
"3704: [D loss: 0.707455, acc: 0.498047] [A loss: 0.729256, acc: 0.460938]\n",
"3705: [D loss: 0.714761, acc: 0.511719] [A loss: 0.988898, acc: 0.066406]\n",
"3706: [D loss: 0.702670, acc: 0.525391] [A loss: 0.699963, acc: 0.468750]\n",
"3707: [D loss: 0.731899, acc: 0.509766] [A loss: 1.136192, acc: 0.035156]\n",
"3708: [D loss: 0.705117, acc: 0.529297] [A loss: 0.680354, acc: 0.578125]\n",
"3709: [D loss: 0.717912, acc: 0.519531] [A loss: 0.906471, acc: 0.148438]\n",
"3710: [D loss: 0.681324, acc: 0.580078] [A loss: 0.724118, acc: 0.449219]\n",
"3711: [D loss: 0.709151, acc: 0.539062] [A loss: 0.889508, acc: 0.167969]\n",
"3712: [D loss: 0.694971, acc: 0.517578] [A loss: 0.768962, acc: 0.371094]\n",
"3713: [D loss: 0.719070, acc: 0.507812] [A loss: 0.881552, acc: 0.152344]\n",
"3714: [D loss: 0.690515, acc: 0.535156] [A loss: 0.750720, acc: 0.425781]\n",
"3715: [D loss: 0.722512, acc: 0.482422] [A loss: 0.786932, acc: 0.335938]\n",
"3716: [D loss: 0.690622, acc: 0.556641] [A loss: 0.879984, acc: 0.175781]\n",
"3717: [D loss: 0.700358, acc: 0.544922] [A loss: 0.785451, acc: 0.312500]\n",
"3718: [D loss: 0.702492, acc: 0.541016] [A loss: 0.961327, acc: 0.089844]\n",
"3719: [D loss: 0.686625, acc: 0.533203] [A loss: 0.690331, acc: 0.539062]\n",
"3720: [D loss: 0.731101, acc: 0.496094] [A loss: 0.946779, acc: 0.117188]\n",
"3721: [D loss: 0.689824, acc: 0.544922] [A loss: 0.736890, acc: 0.398438]\n",
"3722: [D loss: 0.709220, acc: 0.496094] [A loss: 0.856393, acc: 0.203125]\n",
"3723: [D loss: 0.707790, acc: 0.542969] [A loss: 0.745811, acc: 0.437500]\n",
"3724: [D loss: 0.697106, acc: 0.529297] [A loss: 0.857003, acc: 0.238281]\n",
"3725: [D loss: 0.693099, acc: 0.539062] [A loss: 0.750455, acc: 0.402344]\n",
"3726: [D loss: 0.707214, acc: 0.492188] [A loss: 0.886501, acc: 0.175781]\n",
"3727: [D loss: 0.690206, acc: 0.544922] [A loss: 0.728830, acc: 0.445312]\n",
"3728: [D loss: 0.710248, acc: 0.501953] [A loss: 0.941228, acc: 0.156250]\n",
"3729: [D loss: 0.686778, acc: 0.546875] [A loss: 0.716058, acc: 0.500000]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3730: [D loss: 0.723305, acc: 0.484375] [A loss: 0.935046, acc: 0.105469]\n",
"3731: [D loss: 0.686420, acc: 0.582031] [A loss: 0.709092, acc: 0.511719]\n",
"3732: [D loss: 0.709022, acc: 0.539062] [A loss: 0.951107, acc: 0.117188]\n",
"3733: [D loss: 0.694324, acc: 0.548828] [A loss: 0.748050, acc: 0.386719]\n",
"3734: [D loss: 0.701506, acc: 0.535156] [A loss: 0.907295, acc: 0.148438]\n",
"3735: [D loss: 0.701215, acc: 0.513672] [A loss: 0.706835, acc: 0.503906]\n",
"3736: [D loss: 0.724344, acc: 0.531250] [A loss: 0.986112, acc: 0.089844]\n",
"3737: [D loss: 0.708169, acc: 0.513672] [A loss: 0.708527, acc: 0.488281]\n",
"3738: [D loss: 0.702584, acc: 0.529297] [A loss: 0.856664, acc: 0.222656]\n",
"3739: [D loss: 0.698064, acc: 0.503906] [A loss: 0.842882, acc: 0.218750]\n",
"3740: [D loss: 0.693414, acc: 0.544922] [A loss: 0.859275, acc: 0.210938]\n",
"3741: [D loss: 0.706875, acc: 0.519531] [A loss: 0.802333, acc: 0.253906]\n",
"3742: [D loss: 0.714179, acc: 0.492188] [A loss: 0.841882, acc: 0.246094]\n",
"3743: [D loss: 0.685546, acc: 0.535156] [A loss: 0.820810, acc: 0.296875]\n",
"3744: [D loss: 0.705451, acc: 0.533203] [A loss: 0.879343, acc: 0.195312]\n",
"3745: [D loss: 0.683133, acc: 0.554688] [A loss: 0.735096, acc: 0.414062]\n",
"3746: [D loss: 0.692196, acc: 0.548828] [A loss: 0.908551, acc: 0.136719]\n",
"3747: [D loss: 0.694916, acc: 0.531250] [A loss: 0.750277, acc: 0.406250]\n",
"3748: [D loss: 0.700172, acc: 0.519531] [A loss: 0.956528, acc: 0.132812]\n",
"3749: [D loss: 0.697468, acc: 0.517578] [A loss: 0.671439, acc: 0.578125]\n",
"3750: [D loss: 0.714174, acc: 0.527344] [A loss: 0.899310, acc: 0.148438]\n",
"3751: [D loss: 0.697275, acc: 0.529297] [A loss: 0.732801, acc: 0.410156]\n",
"3752: [D loss: 0.714103, acc: 0.525391] [A loss: 0.985793, acc: 0.082031]\n",
"3753: [D loss: 0.708222, acc: 0.498047] [A loss: 0.704448, acc: 0.527344]\n",
"3754: [D loss: 0.713485, acc: 0.519531] [A loss: 0.957399, acc: 0.117188]\n",
"3755: [D loss: 0.702773, acc: 0.527344] [A loss: 0.689845, acc: 0.562500]\n",
"3756: [D loss: 0.724983, acc: 0.472656] [A loss: 0.936391, acc: 0.109375]\n",
"3757: [D loss: 0.694903, acc: 0.533203] [A loss: 0.693283, acc: 0.539062]\n",
"3758: [D loss: 0.707391, acc: 0.525391] [A loss: 0.836527, acc: 0.234375]\n",
"3759: [D loss: 0.699184, acc: 0.507812] [A loss: 0.772202, acc: 0.332031]\n",
"3760: [D loss: 0.704239, acc: 0.521484] [A loss: 0.860572, acc: 0.183594]\n",
"3761: [D loss: 0.679996, acc: 0.576172] [A loss: 0.775990, acc: 0.332031]\n",
"3762: [D loss: 0.712742, acc: 0.507812] [A loss: 0.863472, acc: 0.167969]\n",
"3763: [D loss: 0.687978, acc: 0.560547] [A loss: 0.811223, acc: 0.269531]\n",
"3764: [D loss: 0.690297, acc: 0.544922] [A loss: 0.880914, acc: 0.203125]\n",
"3765: [D loss: 0.691909, acc: 0.535156] [A loss: 0.785195, acc: 0.328125]\n",
"3766: [D loss: 0.699618, acc: 0.539062] [A loss: 0.836015, acc: 0.214844]\n",
"3767: [D loss: 0.705181, acc: 0.517578] [A loss: 0.774707, acc: 0.343750]\n",
"3768: [D loss: 0.708922, acc: 0.523438] [A loss: 0.835809, acc: 0.246094]\n",
"3769: [D loss: 0.693991, acc: 0.531250] [A loss: 0.758597, acc: 0.398438]\n",
"3770: [D loss: 0.710311, acc: 0.527344] [A loss: 0.999696, acc: 0.093750]\n",
"3771: [D loss: 0.702844, acc: 0.515625] [A loss: 0.695525, acc: 0.539062]\n",
"3772: [D loss: 0.729258, acc: 0.509766] [A loss: 0.998607, acc: 0.074219]\n",
"3773: [D loss: 0.697091, acc: 0.548828] [A loss: 0.713749, acc: 0.453125]\n",
"3774: [D loss: 0.704729, acc: 0.505859] [A loss: 0.896518, acc: 0.195312]\n",
"3775: [D loss: 0.704497, acc: 0.486328] [A loss: 0.712729, acc: 0.437500]\n",
"3776: [D loss: 0.717395, acc: 0.523438] [A loss: 0.954843, acc: 0.097656]\n",
"3777: [D loss: 0.701517, acc: 0.519531] [A loss: 0.711946, acc: 0.488281]\n",
"3778: [D loss: 0.709622, acc: 0.505859] [A loss: 0.899330, acc: 0.132812]\n",
"3779: [D loss: 0.693215, acc: 0.535156] [A loss: 0.739971, acc: 0.410156]\n",
"3780: [D loss: 0.705279, acc: 0.513672] [A loss: 0.905884, acc: 0.156250]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3781: [D loss: 0.688335, acc: 0.544922] [A loss: 0.798832, acc: 0.320312]\n",
"3782: [D loss: 0.702879, acc: 0.531250] [A loss: 0.862181, acc: 0.226562]\n",
"3783: [D loss: 0.693630, acc: 0.539062] [A loss: 0.752983, acc: 0.371094]\n",
"3784: [D loss: 0.687556, acc: 0.580078] [A loss: 0.877054, acc: 0.195312]\n",
"3785: [D loss: 0.694678, acc: 0.541016] [A loss: 0.768785, acc: 0.382812]\n",
"3786: [D loss: 0.712051, acc: 0.503906] [A loss: 0.930874, acc: 0.128906]\n",
"3787: [D loss: 0.689066, acc: 0.558594] [A loss: 0.746374, acc: 0.402344]\n",
"3788: [D loss: 0.722843, acc: 0.525391] [A loss: 1.043888, acc: 0.070312]\n",
"3789: [D loss: 0.707592, acc: 0.535156] [A loss: 0.669987, acc: 0.535156]\n",
"3790: [D loss: 0.729807, acc: 0.515625] [A loss: 0.984887, acc: 0.109375]\n",
"3791: [D loss: 0.700247, acc: 0.525391] [A loss: 0.763649, acc: 0.382812]\n",
"3792: [D loss: 0.702579, acc: 0.544922] [A loss: 0.823982, acc: 0.320312]\n",
"3793: [D loss: 0.691320, acc: 0.533203] [A loss: 0.792673, acc: 0.316406]\n",
"3794: [D loss: 0.713439, acc: 0.492188] [A loss: 0.782420, acc: 0.285156]\n",
"3795: [D loss: 0.705486, acc: 0.503906] [A loss: 0.811132, acc: 0.308594]\n",
"3796: [D loss: 0.700359, acc: 0.525391] [A loss: 0.792584, acc: 0.292969]\n",
"3797: [D loss: 0.693922, acc: 0.529297] [A loss: 0.908811, acc: 0.160156]\n",
"3798: [D loss: 0.696966, acc: 0.552734] [A loss: 0.730501, acc: 0.457031]\n",
"3799: [D loss: 0.713524, acc: 0.505859] [A loss: 0.957372, acc: 0.101562]\n",
"3800: [D loss: 0.698349, acc: 0.531250] [A loss: 0.669481, acc: 0.582031]\n",
"3801: [D loss: 0.720872, acc: 0.527344] [A loss: 0.926179, acc: 0.128906]\n",
"3802: [D loss: 0.700697, acc: 0.527344] [A loss: 0.721543, acc: 0.421875]\n",
"3803: [D loss: 0.712994, acc: 0.492188] [A loss: 0.917118, acc: 0.121094]\n",
"3804: [D loss: 0.700667, acc: 0.498047] [A loss: 0.708717, acc: 0.507812]\n",
"3805: [D loss: 0.710583, acc: 0.529297] [A loss: 0.895263, acc: 0.152344]\n",
"3806: [D loss: 0.685564, acc: 0.587891] [A loss: 0.724331, acc: 0.433594]\n",
"3807: [D loss: 0.701918, acc: 0.525391] [A loss: 0.853993, acc: 0.238281]\n",
"3808: [D loss: 0.680706, acc: 0.566406] [A loss: 0.784481, acc: 0.335938]\n",
"3809: [D loss: 0.703066, acc: 0.533203] [A loss: 0.848827, acc: 0.199219]\n",
"3810: [D loss: 0.693019, acc: 0.541016] [A loss: 0.731807, acc: 0.394531]\n",
"3811: [D loss: 0.696355, acc: 0.533203] [A loss: 0.953117, acc: 0.089844]\n",
"3812: [D loss: 0.700099, acc: 0.515625] [A loss: 0.696642, acc: 0.519531]\n",
"3813: [D loss: 0.731017, acc: 0.503906] [A loss: 0.976231, acc: 0.082031]\n",
"3814: [D loss: 0.700637, acc: 0.503906] [A loss: 0.676103, acc: 0.597656]\n",
"3815: [D loss: 0.708659, acc: 0.505859] [A loss: 0.800786, acc: 0.253906]\n",
"3816: [D loss: 0.681231, acc: 0.544922] [A loss: 0.768247, acc: 0.347656]\n",
"3817: [D loss: 0.686063, acc: 0.552734] [A loss: 0.827022, acc: 0.234375]\n",
"3818: [D loss: 0.703088, acc: 0.501953] [A loss: 0.802352, acc: 0.308594]\n",
"3819: [D loss: 0.702062, acc: 0.527344] [A loss: 0.854398, acc: 0.218750]\n",
"3820: [D loss: 0.694526, acc: 0.541016] [A loss: 0.723887, acc: 0.445312]\n",
"3821: [D loss: 0.710941, acc: 0.503906] [A loss: 0.969530, acc: 0.093750]\n",
"3822: [D loss: 0.693873, acc: 0.556641] [A loss: 0.699280, acc: 0.511719]\n",
"3823: [D loss: 0.714234, acc: 0.505859] [A loss: 0.939206, acc: 0.097656]\n",
"3824: [D loss: 0.687624, acc: 0.558594] [A loss: 0.707427, acc: 0.511719]\n",
"3825: [D loss: 0.711567, acc: 0.531250] [A loss: 0.952911, acc: 0.097656]\n",
"3826: [D loss: 0.684344, acc: 0.556641] [A loss: 0.701878, acc: 0.527344]\n",
"3827: [D loss: 0.716595, acc: 0.507812] [A loss: 0.853498, acc: 0.210938]\n",
"3828: [D loss: 0.686763, acc: 0.542969] [A loss: 0.783580, acc: 0.320312]\n",
"3829: [D loss: 0.707524, acc: 0.542969] [A loss: 0.821563, acc: 0.273438]\n",
"3830: [D loss: 0.701339, acc: 0.544922] [A loss: 0.810511, acc: 0.257812]\n",
"3831: [D loss: 0.711272, acc: 0.492188] [A loss: 0.823645, acc: 0.261719]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3832: [D loss: 0.693215, acc: 0.537109] [A loss: 0.799774, acc: 0.316406]\n",
"3833: [D loss: 0.695342, acc: 0.539062] [A loss: 0.836950, acc: 0.222656]\n",
"3834: [D loss: 0.707900, acc: 0.509766] [A loss: 0.834932, acc: 0.253906]\n",
"3835: [D loss: 0.705067, acc: 0.521484] [A loss: 0.932567, acc: 0.121094]\n",
"3836: [D loss: 0.686347, acc: 0.556641] [A loss: 0.768165, acc: 0.363281]\n",
"3837: [D loss: 0.702118, acc: 0.537109] [A loss: 0.880656, acc: 0.171875]\n",
"3838: [D loss: 0.707740, acc: 0.490234] [A loss: 0.703761, acc: 0.488281]\n",
"3839: [D loss: 0.734356, acc: 0.500000] [A loss: 1.066485, acc: 0.035156]\n",
"3840: [D loss: 0.701789, acc: 0.500000] [A loss: 0.687552, acc: 0.535156]\n",
"3841: [D loss: 0.718320, acc: 0.505859] [A loss: 0.895964, acc: 0.160156]\n",
"3842: [D loss: 0.685984, acc: 0.554688] [A loss: 0.748299, acc: 0.378906]\n",
"3843: [D loss: 0.702928, acc: 0.539062] [A loss: 0.837965, acc: 0.242188]\n",
"3844: [D loss: 0.697436, acc: 0.541016] [A loss: 0.779491, acc: 0.332031]\n",
"3845: [D loss: 0.705337, acc: 0.515625] [A loss: 0.823798, acc: 0.277344]\n",
"3846: [D loss: 0.696809, acc: 0.533203] [A loss: 0.771413, acc: 0.343750]\n",
"3847: [D loss: 0.695959, acc: 0.535156] [A loss: 0.916747, acc: 0.105469]\n",
"3848: [D loss: 0.700065, acc: 0.494141] [A loss: 0.697357, acc: 0.523438]\n",
"3849: [D loss: 0.708016, acc: 0.531250] [A loss: 0.976727, acc: 0.089844]\n",
"3850: [D loss: 0.697928, acc: 0.537109] [A loss: 0.679433, acc: 0.535156]\n",
"3851: [D loss: 0.710081, acc: 0.511719] [A loss: 0.911344, acc: 0.144531]\n",
"3852: [D loss: 0.694991, acc: 0.527344] [A loss: 0.729175, acc: 0.425781]\n",
"3853: [D loss: 0.711636, acc: 0.501953] [A loss: 0.897511, acc: 0.160156]\n",
"3854: [D loss: 0.694802, acc: 0.527344] [A loss: 0.704991, acc: 0.488281]\n",
"3855: [D loss: 0.725530, acc: 0.511719] [A loss: 0.967441, acc: 0.066406]\n",
"3856: [D loss: 0.687893, acc: 0.539062] [A loss: 0.728816, acc: 0.460938]\n",
"3857: [D loss: 0.712654, acc: 0.529297] [A loss: 0.867743, acc: 0.164062]\n",
"3858: [D loss: 0.696429, acc: 0.527344] [A loss: 0.892610, acc: 0.167969]\n",
"3859: [D loss: 0.701296, acc: 0.509766] [A loss: 0.774748, acc: 0.339844]\n",
"3860: [D loss: 0.713977, acc: 0.484375] [A loss: 0.947815, acc: 0.128906]\n",
"3861: [D loss: 0.697622, acc: 0.541016] [A loss: 0.708945, acc: 0.496094]\n",
"3862: [D loss: 0.710278, acc: 0.519531] [A loss: 0.890660, acc: 0.148438]\n",
"3863: [D loss: 0.691125, acc: 0.527344] [A loss: 0.803766, acc: 0.265625]\n",
"3864: [D loss: 0.705942, acc: 0.517578] [A loss: 0.859736, acc: 0.175781]\n",
"3865: [D loss: 0.691131, acc: 0.521484] [A loss: 0.808179, acc: 0.257812]\n",
"3866: [D loss: 0.699443, acc: 0.533203] [A loss: 0.861721, acc: 0.203125]\n",
"3867: [D loss: 0.707878, acc: 0.490234] [A loss: 0.760176, acc: 0.371094]\n",
"3868: [D loss: 0.709806, acc: 0.548828] [A loss: 0.862468, acc: 0.179688]\n",
"3869: [D loss: 0.706497, acc: 0.496094] [A loss: 0.788009, acc: 0.312500]\n",
"3870: [D loss: 0.694119, acc: 0.560547] [A loss: 0.869878, acc: 0.203125]\n",
"3871: [D loss: 0.696258, acc: 0.546875] [A loss: 0.727368, acc: 0.410156]\n",
"3872: [D loss: 0.717162, acc: 0.509766] [A loss: 1.039315, acc: 0.054688]\n",
"3873: [D loss: 0.695061, acc: 0.542969] [A loss: 0.638165, acc: 0.632812]\n",
"3874: [D loss: 0.721770, acc: 0.521484] [A loss: 0.945826, acc: 0.062500]\n",
"3875: [D loss: 0.694368, acc: 0.525391] [A loss: 0.726546, acc: 0.425781]\n",
"3876: [D loss: 0.710585, acc: 0.482422] [A loss: 0.833034, acc: 0.246094]\n",
"3877: [D loss: 0.685570, acc: 0.572266] [A loss: 0.755164, acc: 0.359375]\n",
"3878: [D loss: 0.690215, acc: 0.548828] [A loss: 0.854713, acc: 0.218750]\n",
"3879: [D loss: 0.688193, acc: 0.546875] [A loss: 0.792526, acc: 0.332031]\n",
"3880: [D loss: 0.697091, acc: 0.523438] [A loss: 0.855580, acc: 0.203125]\n",
"3881: [D loss: 0.688906, acc: 0.558594] [A loss: 0.759453, acc: 0.386719]\n",
"3882: [D loss: 0.711102, acc: 0.507812] [A loss: 0.851338, acc: 0.242188]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3883: [D loss: 0.690695, acc: 0.558594] [A loss: 0.837951, acc: 0.218750]\n",
"3884: [D loss: 0.685638, acc: 0.554688] [A loss: 0.793200, acc: 0.339844]\n",
"3885: [D loss: 0.696943, acc: 0.546875] [A loss: 0.946820, acc: 0.089844]\n",
"3886: [D loss: 0.710979, acc: 0.490234] [A loss: 0.762802, acc: 0.363281]\n",
"3887: [D loss: 0.708542, acc: 0.515625] [A loss: 0.974388, acc: 0.070312]\n",
"3888: [D loss: 0.699516, acc: 0.515625] [A loss: 0.751750, acc: 0.386719]\n",
"3889: [D loss: 0.706301, acc: 0.496094] [A loss: 0.905460, acc: 0.136719]\n",
"3890: [D loss: 0.696491, acc: 0.537109] [A loss: 0.790106, acc: 0.328125]\n",
"3891: [D loss: 0.713117, acc: 0.494141] [A loss: 0.849267, acc: 0.210938]\n",
"3892: [D loss: 0.694066, acc: 0.523438] [A loss: 0.893394, acc: 0.156250]\n",
"3893: [D loss: 0.699506, acc: 0.513672] [A loss: 0.796595, acc: 0.289062]\n",
"3894: [D loss: 0.708059, acc: 0.521484] [A loss: 0.892567, acc: 0.160156]\n",
"3895: [D loss: 0.697330, acc: 0.550781] [A loss: 0.725526, acc: 0.449219]\n",
"3896: [D loss: 0.714150, acc: 0.525391] [A loss: 1.140855, acc: 0.027344]\n",
"3897: [D loss: 0.712868, acc: 0.486328] [A loss: 0.685122, acc: 0.535156]\n",
"3898: [D loss: 0.717889, acc: 0.527344] [A loss: 0.872079, acc: 0.152344]\n",
"3899: [D loss: 0.687754, acc: 0.560547] [A loss: 0.751157, acc: 0.410156]\n",
"3900: [D loss: 0.692032, acc: 0.529297] [A loss: 0.866523, acc: 0.183594]\n",
"3901: [D loss: 0.700650, acc: 0.550781] [A loss: 0.753397, acc: 0.378906]\n",
"3902: [D loss: 0.701536, acc: 0.521484] [A loss: 0.815380, acc: 0.277344]\n",
"3903: [D loss: 0.702781, acc: 0.505859] [A loss: 0.810372, acc: 0.238281]\n",
"3904: [D loss: 0.695544, acc: 0.576172] [A loss: 0.820024, acc: 0.246094]\n",
"3905: [D loss: 0.702147, acc: 0.535156] [A loss: 0.829641, acc: 0.234375]\n",
"3906: [D loss: 0.696375, acc: 0.519531] [A loss: 0.830794, acc: 0.246094]\n",
"3907: [D loss: 0.685762, acc: 0.570312] [A loss: 0.835999, acc: 0.234375]\n",
"3908: [D loss: 0.710474, acc: 0.513672] [A loss: 0.879416, acc: 0.183594]\n",
"3909: [D loss: 0.715189, acc: 0.476562] [A loss: 0.837628, acc: 0.246094]\n",
"3910: [D loss: 0.692767, acc: 0.542969] [A loss: 0.801276, acc: 0.300781]\n",
"3911: [D loss: 0.710618, acc: 0.498047] [A loss: 0.950404, acc: 0.105469]\n",
"3912: [D loss: 0.706246, acc: 0.509766] [A loss: 0.739663, acc: 0.402344]\n",
"3913: [D loss: 0.717360, acc: 0.523438] [A loss: 0.994531, acc: 0.042969]\n",
"3914: [D loss: 0.696465, acc: 0.523438] [A loss: 0.661875, acc: 0.593750]\n",
"3915: [D loss: 0.734143, acc: 0.529297] [A loss: 1.053607, acc: 0.050781]\n",
"3916: [D loss: 0.696274, acc: 0.529297] [A loss: 0.661368, acc: 0.605469]\n",
"3917: [D loss: 0.728381, acc: 0.505859] [A loss: 0.922470, acc: 0.117188]\n",
"3918: [D loss: 0.693607, acc: 0.507812] [A loss: 0.748341, acc: 0.398438]\n",
"3919: [D loss: 0.702396, acc: 0.546875] [A loss: 0.861298, acc: 0.187500]\n",
"3920: [D loss: 0.688188, acc: 0.546875] [A loss: 0.747361, acc: 0.382812]\n",
"3921: [D loss: 0.703809, acc: 0.523438] [A loss: 0.917214, acc: 0.140625]\n",
"3922: [D loss: 0.688691, acc: 0.542969] [A loss: 0.786354, acc: 0.285156]\n",
"3923: [D loss: 0.685676, acc: 0.558594] [A loss: 0.922906, acc: 0.148438]\n",
"3924: [D loss: 0.696814, acc: 0.548828] [A loss: 0.752600, acc: 0.398438]\n",
"3925: [D loss: 0.710597, acc: 0.531250] [A loss: 1.037638, acc: 0.039062]\n",
"3926: [D loss: 0.699531, acc: 0.505859] [A loss: 0.749183, acc: 0.390625]\n",
"3927: [D loss: 0.699651, acc: 0.523438] [A loss: 0.939869, acc: 0.109375]\n",
"3928: [D loss: 0.698879, acc: 0.531250] [A loss: 0.749278, acc: 0.398438]\n",
"3929: [D loss: 0.712374, acc: 0.529297] [A loss: 0.902307, acc: 0.125000]\n",
"3930: [D loss: 0.687068, acc: 0.542969] [A loss: 0.675268, acc: 0.558594]\n",
"3931: [D loss: 0.728331, acc: 0.511719] [A loss: 1.007384, acc: 0.058594]\n",
"3932: [D loss: 0.689694, acc: 0.546875] [A loss: 0.683584, acc: 0.539062]\n",
"3933: [D loss: 0.714491, acc: 0.529297] [A loss: 0.855624, acc: 0.207031]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3934: [D loss: 0.698658, acc: 0.521484] [A loss: 0.773996, acc: 0.355469]\n",
"3935: [D loss: 0.713812, acc: 0.507812] [A loss: 0.862845, acc: 0.207031]\n",
"3936: [D loss: 0.683348, acc: 0.564453] [A loss: 0.749564, acc: 0.367188]\n",
"3937: [D loss: 0.701014, acc: 0.544922] [A loss: 0.860660, acc: 0.218750]\n",
"3938: [D loss: 0.698572, acc: 0.527344] [A loss: 0.781253, acc: 0.316406]\n",
"3939: [D loss: 0.702924, acc: 0.527344] [A loss: 0.849758, acc: 0.167969]\n",
"3940: [D loss: 0.691916, acc: 0.529297] [A loss: 0.804021, acc: 0.253906]\n",
"3941: [D loss: 0.701520, acc: 0.511719] [A loss: 0.859097, acc: 0.191406]\n",
"3942: [D loss: 0.698559, acc: 0.527344] [A loss: 0.773174, acc: 0.351562]\n",
"3943: [D loss: 0.704198, acc: 0.492188] [A loss: 0.871803, acc: 0.187500]\n",
"3944: [D loss: 0.693090, acc: 0.519531] [A loss: 0.784974, acc: 0.308594]\n",
"3945: [D loss: 0.690579, acc: 0.552734] [A loss: 0.998708, acc: 0.089844]\n",
"3946: [D loss: 0.686415, acc: 0.546875] [A loss: 0.779078, acc: 0.332031]\n",
"3947: [D loss: 0.703411, acc: 0.539062] [A loss: 0.981287, acc: 0.089844]\n",
"3948: [D loss: 0.706863, acc: 0.496094] [A loss: 0.684912, acc: 0.535156]\n",
"3949: [D loss: 0.714627, acc: 0.519531] [A loss: 1.000363, acc: 0.074219]\n",
"3950: [D loss: 0.700172, acc: 0.537109] [A loss: 0.684572, acc: 0.500000]\n",
"3951: [D loss: 0.715690, acc: 0.519531] [A loss: 0.919130, acc: 0.113281]\n",
"3952: [D loss: 0.702012, acc: 0.498047] [A loss: 0.723994, acc: 0.445312]\n",
"3953: [D loss: 0.711144, acc: 0.507812] [A loss: 0.873065, acc: 0.199219]\n",
"3954: [D loss: 0.708283, acc: 0.482422] [A loss: 0.802964, acc: 0.285156]\n",
"3955: [D loss: 0.699158, acc: 0.525391] [A loss: 0.861982, acc: 0.234375]\n",
"3956: [D loss: 0.692838, acc: 0.580078] [A loss: 0.760763, acc: 0.417969]\n",
"3957: [D loss: 0.713676, acc: 0.486328] [A loss: 0.848966, acc: 0.210938]\n",
"3958: [D loss: 0.692848, acc: 0.537109] [A loss: 0.745968, acc: 0.437500]\n",
"3959: [D loss: 0.713989, acc: 0.501953] [A loss: 0.891369, acc: 0.156250]\n",
"3960: [D loss: 0.714100, acc: 0.511719] [A loss: 0.760348, acc: 0.347656]\n",
"3961: [D loss: 0.711047, acc: 0.501953] [A loss: 0.881868, acc: 0.207031]\n",
"3962: [D loss: 0.722944, acc: 0.490234] [A loss: 0.816190, acc: 0.210938]\n",
"3963: [D loss: 0.707274, acc: 0.507812] [A loss: 0.814068, acc: 0.285156]\n",
"3964: [D loss: 0.686476, acc: 0.552734] [A loss: 0.874046, acc: 0.179688]\n",
"3965: [D loss: 0.706246, acc: 0.519531] [A loss: 0.786241, acc: 0.316406]\n",
"3966: [D loss: 0.707388, acc: 0.513672] [A loss: 0.956038, acc: 0.132812]\n",
"3967: [D loss: 0.694940, acc: 0.537109] [A loss: 0.680961, acc: 0.546875]\n",
"3968: [D loss: 0.721657, acc: 0.494141] [A loss: 0.969274, acc: 0.085938]\n",
"3969: [D loss: 0.704163, acc: 0.515625] [A loss: 0.661787, acc: 0.621094]\n",
"3970: [D loss: 0.718331, acc: 0.511719] [A loss: 0.953438, acc: 0.101562]\n",
"3971: [D loss: 0.695449, acc: 0.527344] [A loss: 0.731867, acc: 0.445312]\n",
"3972: [D loss: 0.718172, acc: 0.494141] [A loss: 0.924395, acc: 0.117188]\n",
"3973: [D loss: 0.688741, acc: 0.533203] [A loss: 0.706793, acc: 0.460938]\n",
"3974: [D loss: 0.700687, acc: 0.552734] [A loss: 0.930888, acc: 0.121094]\n",
"3975: [D loss: 0.683130, acc: 0.566406] [A loss: 0.737337, acc: 0.464844]\n",
"3976: [D loss: 0.706848, acc: 0.525391] [A loss: 0.855636, acc: 0.187500]\n",
"3977: [D loss: 0.694322, acc: 0.515625] [A loss: 0.765650, acc: 0.363281]\n",
"3978: [D loss: 0.709665, acc: 0.513672] [A loss: 0.845100, acc: 0.226562]\n",
"3979: [D loss: 0.692571, acc: 0.537109] [A loss: 0.847895, acc: 0.242188]\n",
"3980: [D loss: 0.689513, acc: 0.562500] [A loss: 0.818446, acc: 0.238281]\n",
"3981: [D loss: 0.694881, acc: 0.505859] [A loss: 0.766006, acc: 0.332031]\n",
"3982: [D loss: 0.705844, acc: 0.539062] [A loss: 0.987076, acc: 0.074219]\n",
"3983: [D loss: 0.692060, acc: 0.552734] [A loss: 0.701907, acc: 0.531250]\n",
"3984: [D loss: 0.716809, acc: 0.500000] [A loss: 0.975126, acc: 0.105469]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"3985: [D loss: 0.686870, acc: 0.552734] [A loss: 0.673290, acc: 0.550781]\n",
"3986: [D loss: 0.733145, acc: 0.484375] [A loss: 0.887718, acc: 0.156250]\n",
"3987: [D loss: 0.712982, acc: 0.505859] [A loss: 0.741928, acc: 0.402344]\n",
"3988: [D loss: 0.705517, acc: 0.539062] [A loss: 0.866179, acc: 0.148438]\n",
"3989: [D loss: 0.692950, acc: 0.546875] [A loss: 0.804969, acc: 0.281250]\n",
"3990: [D loss: 0.684014, acc: 0.564453] [A loss: 0.808548, acc: 0.296875]\n",
"3991: [D loss: 0.694056, acc: 0.568359] [A loss: 0.869490, acc: 0.175781]\n",
"3992: [D loss: 0.682440, acc: 0.548828] [A loss: 0.723332, acc: 0.433594]\n",
"3993: [D loss: 0.726686, acc: 0.511719] [A loss: 1.029493, acc: 0.062500]\n",
"3994: [D loss: 0.703196, acc: 0.521484] [A loss: 0.671027, acc: 0.515625]\n",
"3995: [D loss: 0.715897, acc: 0.519531] [A loss: 0.953168, acc: 0.078125]\n",
"3996: [D loss: 0.684416, acc: 0.548828] [A loss: 0.756850, acc: 0.347656]\n",
"3997: [D loss: 0.718249, acc: 0.509766] [A loss: 0.939374, acc: 0.093750]\n",
"3998: [D loss: 0.697006, acc: 0.535156] [A loss: 0.693042, acc: 0.546875]\n",
"3999: [D loss: 0.717310, acc: 0.507812] [A loss: 0.975975, acc: 0.089844]\n",
"4000: [D loss: 0.695192, acc: 0.539062] [A loss: 0.690276, acc: 0.515625]\n",
"4001: [D loss: 0.713954, acc: 0.521484] [A loss: 0.898457, acc: 0.105469]\n",
"4002: [D loss: 0.700941, acc: 0.537109] [A loss: 0.762219, acc: 0.371094]\n",
"4003: [D loss: 0.702448, acc: 0.519531] [A loss: 0.839487, acc: 0.234375]\n",
"4004: [D loss: 0.695074, acc: 0.544922] [A loss: 0.739965, acc: 0.398438]\n",
"4005: [D loss: 0.710988, acc: 0.492188] [A loss: 0.956641, acc: 0.109375]\n",
"4006: [D loss: 0.693922, acc: 0.523438] [A loss: 0.721602, acc: 0.464844]\n",
"4007: [D loss: 0.730114, acc: 0.505859] [A loss: 0.870138, acc: 0.183594]\n",
"4008: [D loss: 0.698198, acc: 0.523438] [A loss: 0.715966, acc: 0.468750]\n",
"4009: [D loss: 0.696005, acc: 0.548828] [A loss: 0.887188, acc: 0.136719]\n",
"4010: [D loss: 0.687467, acc: 0.531250] [A loss: 0.766567, acc: 0.375000]\n",
"4011: [D loss: 0.694567, acc: 0.556641] [A loss: 0.915995, acc: 0.113281]\n",
"4012: [D loss: 0.698500, acc: 0.505859] [A loss: 0.746858, acc: 0.394531]\n",
"4013: [D loss: 0.725038, acc: 0.496094] [A loss: 0.858877, acc: 0.167969]\n",
"4014: [D loss: 0.692364, acc: 0.566406] [A loss: 0.816909, acc: 0.250000]\n",
"4015: [D loss: 0.704039, acc: 0.519531] [A loss: 0.803852, acc: 0.285156]\n",
"4016: [D loss: 0.680970, acc: 0.566406] [A loss: 0.840685, acc: 0.226562]\n",
"4017: [D loss: 0.707330, acc: 0.496094] [A loss: 0.835565, acc: 0.203125]\n",
"4018: [D loss: 0.699116, acc: 0.527344] [A loss: 0.791002, acc: 0.320312]\n",
"4019: [D loss: 0.693491, acc: 0.560547] [A loss: 0.911969, acc: 0.132812]\n",
"4020: [D loss: 0.684969, acc: 0.548828] [A loss: 0.697765, acc: 0.507812]\n",
"4021: [D loss: 0.733081, acc: 0.490234] [A loss: 1.042772, acc: 0.054688]\n",
"4022: [D loss: 0.704199, acc: 0.525391] [A loss: 0.686794, acc: 0.511719]\n",
"4023: [D loss: 0.707940, acc: 0.505859] [A loss: 0.878742, acc: 0.148438]\n",
"4024: [D loss: 0.688586, acc: 0.535156] [A loss: 0.734136, acc: 0.382812]\n",
"4025: [D loss: 0.689706, acc: 0.537109] [A loss: 0.847553, acc: 0.183594]\n",
"4026: [D loss: 0.704579, acc: 0.500000] [A loss: 0.883232, acc: 0.136719]\n",
"4027: [D loss: 0.703580, acc: 0.498047] [A loss: 0.826141, acc: 0.250000]\n",
"4028: [D loss: 0.696419, acc: 0.527344] [A loss: 0.744468, acc: 0.382812]\n",
"4029: [D loss: 0.704048, acc: 0.550781] [A loss: 0.996151, acc: 0.101562]\n",
"4030: [D loss: 0.706719, acc: 0.494141] [A loss: 0.726083, acc: 0.433594]\n",
"4031: [D loss: 0.713915, acc: 0.501953] [A loss: 0.925270, acc: 0.109375]\n",
"4032: [D loss: 0.699091, acc: 0.525391] [A loss: 0.735398, acc: 0.437500]\n",
"4033: [D loss: 0.708042, acc: 0.509766] [A loss: 0.969637, acc: 0.062500]\n",
"4034: [D loss: 0.698718, acc: 0.531250] [A loss: 0.762454, acc: 0.359375]\n",
"4035: [D loss: 0.702334, acc: 0.525391] [A loss: 0.884946, acc: 0.156250]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4036: [D loss: 0.695622, acc: 0.519531] [A loss: 0.751547, acc: 0.386719]\n",
"4037: [D loss: 0.706654, acc: 0.527344] [A loss: 0.883389, acc: 0.164062]\n",
"4038: [D loss: 0.696405, acc: 0.523438] [A loss: 0.775687, acc: 0.328125]\n",
"4039: [D loss: 0.693922, acc: 0.539062] [A loss: 0.876236, acc: 0.199219]\n",
"4040: [D loss: 0.691105, acc: 0.501953] [A loss: 0.790765, acc: 0.269531]\n",
"4041: [D loss: 0.709550, acc: 0.498047] [A loss: 0.922442, acc: 0.097656]\n",
"4042: [D loss: 0.698762, acc: 0.515625] [A loss: 0.727478, acc: 0.449219]\n",
"4043: [D loss: 0.707811, acc: 0.511719] [A loss: 0.886374, acc: 0.144531]\n",
"4044: [D loss: 0.692941, acc: 0.537109] [A loss: 0.740917, acc: 0.433594]\n",
"4045: [D loss: 0.702863, acc: 0.533203] [A loss: 0.992934, acc: 0.105469]\n",
"4046: [D loss: 0.692156, acc: 0.552734] [A loss: 0.672463, acc: 0.601562]\n",
"4047: [D loss: 0.719966, acc: 0.517578] [A loss: 1.030742, acc: 0.097656]\n",
"4048: [D loss: 0.688254, acc: 0.566406] [A loss: 0.741650, acc: 0.402344]\n",
"4049: [D loss: 0.722135, acc: 0.505859] [A loss: 0.868770, acc: 0.164062]\n",
"4050: [D loss: 0.689593, acc: 0.554688] [A loss: 0.720228, acc: 0.453125]\n",
"4051: [D loss: 0.716494, acc: 0.486328] [A loss: 0.960352, acc: 0.085938]\n",
"4052: [D loss: 0.687777, acc: 0.537109] [A loss: 0.667223, acc: 0.566406]\n",
"4053: [D loss: 0.713128, acc: 0.535156] [A loss: 0.949894, acc: 0.085938]\n",
"4054: [D loss: 0.700854, acc: 0.527344] [A loss: 0.675489, acc: 0.582031]\n",
"4055: [D loss: 0.732448, acc: 0.480469] [A loss: 0.891481, acc: 0.171875]\n",
"4056: [D loss: 0.694124, acc: 0.525391] [A loss: 0.717980, acc: 0.468750]\n",
"4057: [D loss: 0.714392, acc: 0.523438] [A loss: 0.899689, acc: 0.144531]\n",
"4058: [D loss: 0.698840, acc: 0.511719] [A loss: 0.785641, acc: 0.332031]\n",
"4059: [D loss: 0.697886, acc: 0.527344] [A loss: 0.847204, acc: 0.230469]\n",
"4060: [D loss: 0.692500, acc: 0.548828] [A loss: 0.793290, acc: 0.292969]\n",
"4061: [D loss: 0.707097, acc: 0.523438] [A loss: 0.917510, acc: 0.132812]\n",
"4062: [D loss: 0.692900, acc: 0.513672] [A loss: 0.729509, acc: 0.421875]\n",
"4063: [D loss: 0.712423, acc: 0.505859] [A loss: 0.870159, acc: 0.164062]\n",
"4064: [D loss: 0.687217, acc: 0.570312] [A loss: 0.733717, acc: 0.429688]\n",
"4065: [D loss: 0.709183, acc: 0.519531] [A loss: 0.918706, acc: 0.152344]\n",
"4066: [D loss: 0.689262, acc: 0.564453] [A loss: 0.750777, acc: 0.378906]\n",
"4067: [D loss: 0.707492, acc: 0.519531] [A loss: 0.886194, acc: 0.164062]\n",
"4068: [D loss: 0.704655, acc: 0.498047] [A loss: 0.811299, acc: 0.250000]\n",
"4069: [D loss: 0.701337, acc: 0.519531] [A loss: 0.884753, acc: 0.167969]\n",
"4070: [D loss: 0.701270, acc: 0.505859] [A loss: 0.718515, acc: 0.488281]\n",
"4071: [D loss: 0.718926, acc: 0.527344] [A loss: 0.968821, acc: 0.050781]\n",
"4072: [D loss: 0.688256, acc: 0.560547] [A loss: 0.682684, acc: 0.562500]\n",
"4073: [D loss: 0.727958, acc: 0.509766] [A loss: 1.003953, acc: 0.062500]\n",
"4074: [D loss: 0.696269, acc: 0.519531] [A loss: 0.694552, acc: 0.570312]\n",
"4075: [D loss: 0.707634, acc: 0.521484] [A loss: 0.926317, acc: 0.093750]\n",
"4076: [D loss: 0.692961, acc: 0.546875] [A loss: 0.770003, acc: 0.363281]\n",
"4077: [D loss: 0.698195, acc: 0.511719] [A loss: 0.925529, acc: 0.113281]\n",
"4078: [D loss: 0.705022, acc: 0.482422] [A loss: 0.704273, acc: 0.492188]\n",
"4079: [D loss: 0.717283, acc: 0.509766] [A loss: 0.963534, acc: 0.078125]\n",
"4080: [D loss: 0.701635, acc: 0.523438] [A loss: 0.721178, acc: 0.476562]\n",
"4081: [D loss: 0.714057, acc: 0.533203] [A loss: 0.959140, acc: 0.105469]\n",
"4082: [D loss: 0.696041, acc: 0.533203] [A loss: 0.698688, acc: 0.523438]\n",
"4083: [D loss: 0.715357, acc: 0.507812] [A loss: 0.907018, acc: 0.105469]\n",
"4084: [D loss: 0.689132, acc: 0.531250] [A loss: 0.700504, acc: 0.480469]\n",
"4085: [D loss: 0.706702, acc: 0.513672] [A loss: 0.904536, acc: 0.132812]\n",
"4086: [D loss: 0.704300, acc: 0.507812] [A loss: 0.756022, acc: 0.367188]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4087: [D loss: 0.701811, acc: 0.541016] [A loss: 0.838895, acc: 0.246094]\n",
"4088: [D loss: 0.705600, acc: 0.496094] [A loss: 0.779055, acc: 0.312500]\n",
"4089: [D loss: 0.710647, acc: 0.494141] [A loss: 0.836304, acc: 0.246094]\n",
"4090: [D loss: 0.689054, acc: 0.558594] [A loss: 0.864964, acc: 0.195312]\n",
"4091: [D loss: 0.711047, acc: 0.501953] [A loss: 0.799674, acc: 0.296875]\n",
"4092: [D loss: 0.707965, acc: 0.513672] [A loss: 0.888149, acc: 0.183594]\n",
"4093: [D loss: 0.685001, acc: 0.556641] [A loss: 0.718089, acc: 0.488281]\n",
"4094: [D loss: 0.724057, acc: 0.500000] [A loss: 0.969151, acc: 0.097656]\n",
"4095: [D loss: 0.687905, acc: 0.556641] [A loss: 0.705339, acc: 0.480469]\n",
"4096: [D loss: 0.710658, acc: 0.527344] [A loss: 0.915068, acc: 0.156250]\n",
"4097: [D loss: 0.707481, acc: 0.492188] [A loss: 0.773045, acc: 0.359375]\n",
"4098: [D loss: 0.700698, acc: 0.531250] [A loss: 0.848668, acc: 0.210938]\n",
"4099: [D loss: 0.698892, acc: 0.531250] [A loss: 0.834788, acc: 0.195312]\n",
"4100: [D loss: 0.695692, acc: 0.535156] [A loss: 0.756094, acc: 0.390625]\n",
"4101: [D loss: 0.705915, acc: 0.542969] [A loss: 0.993173, acc: 0.078125]\n",
"4102: [D loss: 0.695549, acc: 0.533203] [A loss: 0.712931, acc: 0.480469]\n",
"4103: [D loss: 0.724137, acc: 0.511719] [A loss: 0.960556, acc: 0.050781]\n",
"4104: [D loss: 0.683025, acc: 0.541016] [A loss: 0.696751, acc: 0.527344]\n",
"4105: [D loss: 0.728736, acc: 0.486328] [A loss: 1.000096, acc: 0.082031]\n",
"4106: [D loss: 0.688844, acc: 0.556641] [A loss: 0.713744, acc: 0.460938]\n",
"4107: [D loss: 0.708899, acc: 0.531250] [A loss: 0.883663, acc: 0.167969]\n",
"4108: [D loss: 0.692435, acc: 0.525391] [A loss: 0.748866, acc: 0.394531]\n",
"4109: [D loss: 0.694287, acc: 0.554688] [A loss: 0.886556, acc: 0.152344]\n",
"4110: [D loss: 0.686975, acc: 0.550781] [A loss: 0.754369, acc: 0.367188]\n",
"4111: [D loss: 0.703435, acc: 0.498047] [A loss: 0.856317, acc: 0.187500]\n",
"4112: [D loss: 0.690893, acc: 0.546875] [A loss: 0.780757, acc: 0.312500]\n",
"4113: [D loss: 0.705027, acc: 0.509766] [A loss: 0.848840, acc: 0.210938]\n",
"4114: [D loss: 0.693644, acc: 0.527344] [A loss: 0.848553, acc: 0.207031]\n",
"4115: [D loss: 0.685393, acc: 0.554688] [A loss: 0.836432, acc: 0.242188]\n",
"4116: [D loss: 0.716615, acc: 0.480469] [A loss: 0.796387, acc: 0.273438]\n",
"4117: [D loss: 0.701817, acc: 0.500000] [A loss: 0.868033, acc: 0.171875]\n",
"4118: [D loss: 0.711280, acc: 0.490234] [A loss: 0.815912, acc: 0.289062]\n",
"4119: [D loss: 0.703321, acc: 0.529297] [A loss: 0.890954, acc: 0.152344]\n",
"4120: [D loss: 0.690931, acc: 0.541016] [A loss: 0.780349, acc: 0.332031]\n",
"4121: [D loss: 0.709589, acc: 0.509766] [A loss: 0.865018, acc: 0.183594]\n",
"4122: [D loss: 0.699154, acc: 0.535156] [A loss: 0.785914, acc: 0.367188]\n",
"4123: [D loss: 0.693212, acc: 0.554688] [A loss: 0.948646, acc: 0.082031]\n",
"4124: [D loss: 0.705002, acc: 0.507812] [A loss: 0.811930, acc: 0.351562]\n",
"4125: [D loss: 0.706424, acc: 0.523438] [A loss: 0.947015, acc: 0.082031]\n",
"4126: [D loss: 0.693922, acc: 0.517578] [A loss: 0.723226, acc: 0.445312]\n",
"4127: [D loss: 0.715537, acc: 0.539062] [A loss: 0.929005, acc: 0.136719]\n",
"4128: [D loss: 0.701993, acc: 0.511719] [A loss: 0.704417, acc: 0.507812]\n",
"4129: [D loss: 0.735277, acc: 0.494141] [A loss: 0.983770, acc: 0.066406]\n",
"4130: [D loss: 0.690562, acc: 0.535156] [A loss: 0.675557, acc: 0.542969]\n",
"4131: [D loss: 0.712697, acc: 0.513672] [A loss: 1.027301, acc: 0.058594]\n",
"4132: [D loss: 0.703851, acc: 0.527344] [A loss: 0.726449, acc: 0.468750]\n",
"4133: [D loss: 0.715857, acc: 0.503906] [A loss: 0.829252, acc: 0.226562]\n",
"4134: [D loss: 0.688644, acc: 0.542969] [A loss: 0.761855, acc: 0.386719]\n",
"4135: [D loss: 0.713151, acc: 0.527344] [A loss: 0.889875, acc: 0.179688]\n",
"4136: [D loss: 0.688260, acc: 0.556641] [A loss: 0.731302, acc: 0.457031]\n",
"4137: [D loss: 0.709877, acc: 0.535156] [A loss: 0.897841, acc: 0.164062]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4138: [D loss: 0.699664, acc: 0.523438] [A loss: 0.786213, acc: 0.328125]\n",
"4139: [D loss: 0.701318, acc: 0.525391] [A loss: 0.921601, acc: 0.128906]\n",
"4140: [D loss: 0.694213, acc: 0.521484] [A loss: 0.717088, acc: 0.488281]\n",
"4141: [D loss: 0.718441, acc: 0.525391] [A loss: 0.954663, acc: 0.085938]\n",
"4142: [D loss: 0.696250, acc: 0.515625] [A loss: 0.695064, acc: 0.527344]\n",
"4143: [D loss: 0.724551, acc: 0.507812] [A loss: 0.952680, acc: 0.156250]\n",
"4144: [D loss: 0.703046, acc: 0.490234] [A loss: 0.764260, acc: 0.367188]\n",
"4145: [D loss: 0.711035, acc: 0.500000] [A loss: 0.891736, acc: 0.167969]\n",
"4146: [D loss: 0.691580, acc: 0.562500] [A loss: 0.784635, acc: 0.328125]\n",
"4147: [D loss: 0.696033, acc: 0.533203] [A loss: 0.840367, acc: 0.226562]\n",
"4148: [D loss: 0.710427, acc: 0.478516] [A loss: 0.822461, acc: 0.226562]\n",
"4149: [D loss: 0.694156, acc: 0.527344] [A loss: 0.858675, acc: 0.195312]\n",
"4150: [D loss: 0.699908, acc: 0.552734] [A loss: 0.861280, acc: 0.183594]\n",
"4151: [D loss: 0.692075, acc: 0.531250] [A loss: 0.873132, acc: 0.171875]\n",
"4152: [D loss: 0.707107, acc: 0.496094] [A loss: 0.946065, acc: 0.117188]\n",
"4153: [D loss: 0.713068, acc: 0.488281] [A loss: 0.715959, acc: 0.425781]\n",
"4154: [D loss: 0.716493, acc: 0.525391] [A loss: 0.982813, acc: 0.074219]\n",
"4155: [D loss: 0.704328, acc: 0.519531] [A loss: 0.662921, acc: 0.589844]\n",
"4156: [D loss: 0.720671, acc: 0.496094] [A loss: 0.983593, acc: 0.062500]\n",
"4157: [D loss: 0.692869, acc: 0.562500] [A loss: 0.690833, acc: 0.511719]\n",
"4158: [D loss: 0.707547, acc: 0.527344] [A loss: 0.907788, acc: 0.128906]\n",
"4159: [D loss: 0.688205, acc: 0.539062] [A loss: 0.807754, acc: 0.285156]\n",
"4160: [D loss: 0.689951, acc: 0.554688] [A loss: 0.890702, acc: 0.210938]\n",
"4161: [D loss: 0.696880, acc: 0.507812] [A loss: 0.759098, acc: 0.328125]\n",
"4162: [D loss: 0.698017, acc: 0.519531] [A loss: 0.831711, acc: 0.230469]\n",
"4163: [D loss: 0.698472, acc: 0.546875] [A loss: 0.803720, acc: 0.285156]\n",
"4164: [D loss: 0.690142, acc: 0.554688] [A loss: 0.820326, acc: 0.265625]\n",
"4165: [D loss: 0.701190, acc: 0.535156] [A loss: 0.813699, acc: 0.285156]\n",
"4166: [D loss: 0.699356, acc: 0.539062] [A loss: 0.829190, acc: 0.222656]\n",
"4167: [D loss: 0.711200, acc: 0.482422] [A loss: 0.835779, acc: 0.304688]\n",
"4168: [D loss: 0.707516, acc: 0.480469] [A loss: 0.924713, acc: 0.113281]\n",
"4169: [D loss: 0.686509, acc: 0.542969] [A loss: 0.816513, acc: 0.250000]\n",
"4170: [D loss: 0.701603, acc: 0.533203] [A loss: 0.885524, acc: 0.164062]\n",
"4171: [D loss: 0.687255, acc: 0.560547] [A loss: 0.752556, acc: 0.378906]\n",
"4172: [D loss: 0.702725, acc: 0.531250] [A loss: 1.004750, acc: 0.054688]\n",
"4173: [D loss: 0.696102, acc: 0.523438] [A loss: 0.718714, acc: 0.457031]\n",
"4174: [D loss: 0.723826, acc: 0.498047] [A loss: 0.971841, acc: 0.093750]\n",
"4175: [D loss: 0.697497, acc: 0.521484] [A loss: 0.676536, acc: 0.554688]\n",
"4176: [D loss: 0.708305, acc: 0.541016] [A loss: 0.952055, acc: 0.117188]\n",
"4177: [D loss: 0.683315, acc: 0.552734] [A loss: 0.702689, acc: 0.511719]\n",
"4178: [D loss: 0.711743, acc: 0.533203] [A loss: 0.942099, acc: 0.113281]\n",
"4179: [D loss: 0.702328, acc: 0.494141] [A loss: 0.705335, acc: 0.472656]\n",
"4180: [D loss: 0.734042, acc: 0.511719] [A loss: 0.892709, acc: 0.171875]\n",
"4181: [D loss: 0.703820, acc: 0.507812] [A loss: 0.814124, acc: 0.222656]\n",
"4182: [D loss: 0.719895, acc: 0.486328] [A loss: 0.883693, acc: 0.152344]\n",
"4183: [D loss: 0.690882, acc: 0.560547] [A loss: 0.703897, acc: 0.515625]\n",
"4184: [D loss: 0.714100, acc: 0.498047] [A loss: 0.934671, acc: 0.089844]\n",
"4185: [D loss: 0.690877, acc: 0.541016] [A loss: 0.729919, acc: 0.445312]\n",
"4186: [D loss: 0.706195, acc: 0.525391] [A loss: 0.895745, acc: 0.167969]\n",
"4187: [D loss: 0.703475, acc: 0.498047] [A loss: 0.745195, acc: 0.398438]\n",
"4188: [D loss: 0.723192, acc: 0.513672] [A loss: 0.895974, acc: 0.144531]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4189: [D loss: 0.688145, acc: 0.542969] [A loss: 0.711805, acc: 0.437500]\n",
"4190: [D loss: 0.734604, acc: 0.492188] [A loss: 0.951364, acc: 0.117188]\n",
"4191: [D loss: 0.711127, acc: 0.488281] [A loss: 0.680201, acc: 0.570312]\n",
"4192: [D loss: 0.700059, acc: 0.523438] [A loss: 0.930713, acc: 0.105469]\n",
"4193: [D loss: 0.706042, acc: 0.494141] [A loss: 0.760594, acc: 0.351562]\n",
"4194: [D loss: 0.703705, acc: 0.513672] [A loss: 0.904426, acc: 0.125000]\n",
"4195: [D loss: 0.695188, acc: 0.541016] [A loss: 0.744577, acc: 0.378906]\n",
"4196: [D loss: 0.707990, acc: 0.494141] [A loss: 0.901783, acc: 0.132812]\n",
"4197: [D loss: 0.699825, acc: 0.509766] [A loss: 0.795195, acc: 0.292969]\n",
"4198: [D loss: 0.698359, acc: 0.535156] [A loss: 0.878730, acc: 0.187500]\n",
"4199: [D loss: 0.690819, acc: 0.548828] [A loss: 0.791264, acc: 0.300781]\n",
"4200: [D loss: 0.704407, acc: 0.531250] [A loss: 0.819195, acc: 0.265625]\n",
"4201: [D loss: 0.699583, acc: 0.521484] [A loss: 0.822427, acc: 0.246094]\n",
"4202: [D loss: 0.696104, acc: 0.513672] [A loss: 0.802350, acc: 0.265625]\n",
"4203: [D loss: 0.687462, acc: 0.580078] [A loss: 0.809453, acc: 0.273438]\n",
"4204: [D loss: 0.694561, acc: 0.517578] [A loss: 0.822555, acc: 0.265625]\n",
"4205: [D loss: 0.702151, acc: 0.486328] [A loss: 0.919499, acc: 0.128906]\n",
"4206: [D loss: 0.705518, acc: 0.529297] [A loss: 0.725325, acc: 0.429688]\n",
"4207: [D loss: 0.697128, acc: 0.527344] [A loss: 1.018584, acc: 0.031250]\n",
"4208: [D loss: 0.700465, acc: 0.527344] [A loss: 0.701872, acc: 0.511719]\n",
"4209: [D loss: 0.724734, acc: 0.507812] [A loss: 0.928031, acc: 0.101562]\n",
"4210: [D loss: 0.699787, acc: 0.513672] [A loss: 0.751415, acc: 0.410156]\n",
"4211: [D loss: 0.709867, acc: 0.513672] [A loss: 0.868352, acc: 0.164062]\n",
"4212: [D loss: 0.702055, acc: 0.515625] [A loss: 0.809003, acc: 0.230469]\n",
"4213: [D loss: 0.711417, acc: 0.494141] [A loss: 0.834600, acc: 0.242188]\n",
"4214: [D loss: 0.700987, acc: 0.490234] [A loss: 0.829778, acc: 0.199219]\n",
"4215: [D loss: 0.710078, acc: 0.535156] [A loss: 0.934418, acc: 0.105469]\n",
"4216: [D loss: 0.710295, acc: 0.476562] [A loss: 0.693786, acc: 0.515625]\n",
"4217: [D loss: 0.728039, acc: 0.503906] [A loss: 1.062925, acc: 0.050781]\n",
"4218: [D loss: 0.703849, acc: 0.496094] [A loss: 0.631586, acc: 0.714844]\n",
"4219: [D loss: 0.725006, acc: 0.511719] [A loss: 0.993670, acc: 0.089844]\n",
"4220: [D loss: 0.687397, acc: 0.537109] [A loss: 0.710045, acc: 0.460938]\n",
"4221: [D loss: 0.703922, acc: 0.521484] [A loss: 0.877243, acc: 0.167969]\n",
"4222: [D loss: 0.704272, acc: 0.488281] [A loss: 0.734588, acc: 0.441406]\n",
"4223: [D loss: 0.700144, acc: 0.517578] [A loss: 0.875465, acc: 0.136719]\n",
"4224: [D loss: 0.687119, acc: 0.541016] [A loss: 0.778658, acc: 0.320312]\n",
"4225: [D loss: 0.691671, acc: 0.542969] [A loss: 0.912125, acc: 0.160156]\n",
"4226: [D loss: 0.690849, acc: 0.527344] [A loss: 0.789693, acc: 0.332031]\n",
"4227: [D loss: 0.707170, acc: 0.541016] [A loss: 0.916223, acc: 0.101562]\n",
"4228: [D loss: 0.700765, acc: 0.505859] [A loss: 0.780864, acc: 0.312500]\n",
"4229: [D loss: 0.717523, acc: 0.492188] [A loss: 0.969572, acc: 0.050781]\n",
"4230: [D loss: 0.708281, acc: 0.480469] [A loss: 0.698011, acc: 0.503906]\n",
"4231: [D loss: 0.705440, acc: 0.517578] [A loss: 0.973311, acc: 0.105469]\n",
"4232: [D loss: 0.696580, acc: 0.513672] [A loss: 0.693370, acc: 0.488281]\n",
"4233: [D loss: 0.720054, acc: 0.488281] [A loss: 0.894192, acc: 0.132812]\n",
"4234: [D loss: 0.700119, acc: 0.521484] [A loss: 0.752121, acc: 0.355469]\n",
"4235: [D loss: 0.722054, acc: 0.478516] [A loss: 0.942778, acc: 0.101562]\n",
"4236: [D loss: 0.698743, acc: 0.523438] [A loss: 0.753993, acc: 0.390625]\n",
"4237: [D loss: 0.710639, acc: 0.529297] [A loss: 0.861277, acc: 0.187500]\n",
"4238: [D loss: 0.688562, acc: 0.560547] [A loss: 0.833175, acc: 0.222656]\n",
"4239: [D loss: 0.700822, acc: 0.519531] [A loss: 0.870243, acc: 0.171875]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4240: [D loss: 0.703338, acc: 0.525391] [A loss: 0.773187, acc: 0.347656]\n",
"4241: [D loss: 0.704061, acc: 0.525391] [A loss: 0.901023, acc: 0.199219]\n",
"4242: [D loss: 0.689410, acc: 0.552734] [A loss: 0.713974, acc: 0.500000]\n",
"4243: [D loss: 0.723958, acc: 0.482422] [A loss: 0.900474, acc: 0.121094]\n",
"4244: [D loss: 0.692323, acc: 0.521484] [A loss: 0.732200, acc: 0.382812]\n",
"4245: [D loss: 0.711990, acc: 0.513672] [A loss: 1.004219, acc: 0.074219]\n",
"4246: [D loss: 0.700803, acc: 0.503906] [A loss: 0.685981, acc: 0.531250]\n",
"4247: [D loss: 0.709183, acc: 0.517578] [A loss: 0.892184, acc: 0.156250]\n",
"4248: [D loss: 0.698047, acc: 0.513672] [A loss: 0.737787, acc: 0.417969]\n",
"4249: [D loss: 0.715357, acc: 0.492188] [A loss: 0.909776, acc: 0.132812]\n",
"4250: [D loss: 0.690890, acc: 0.548828] [A loss: 0.724790, acc: 0.453125]\n",
"4251: [D loss: 0.708813, acc: 0.525391] [A loss: 0.823170, acc: 0.265625]\n",
"4252: [D loss: 0.695194, acc: 0.488281] [A loss: 0.795853, acc: 0.269531]\n",
"4253: [D loss: 0.709962, acc: 0.503906] [A loss: 0.822826, acc: 0.238281]\n",
"4254: [D loss: 0.688591, acc: 0.535156] [A loss: 0.815032, acc: 0.273438]\n",
"4255: [D loss: 0.698846, acc: 0.531250] [A loss: 0.964320, acc: 0.093750]\n",
"4256: [D loss: 0.680878, acc: 0.558594] [A loss: 0.698418, acc: 0.531250]\n",
"4257: [D loss: 0.718154, acc: 0.496094] [A loss: 0.915757, acc: 0.097656]\n",
"4258: [D loss: 0.689668, acc: 0.539062] [A loss: 0.741970, acc: 0.414062]\n",
"4259: [D loss: 0.698426, acc: 0.562500] [A loss: 0.887375, acc: 0.199219]\n",
"4260: [D loss: 0.720415, acc: 0.486328] [A loss: 0.770124, acc: 0.328125]\n",
"4261: [D loss: 0.710254, acc: 0.513672] [A loss: 1.030905, acc: 0.042969]\n",
"4262: [D loss: 0.694170, acc: 0.505859] [A loss: 0.650786, acc: 0.609375]\n",
"4263: [D loss: 0.722136, acc: 0.507812] [A loss: 0.993481, acc: 0.101562]\n",
"4264: [D loss: 0.708276, acc: 0.519531] [A loss: 0.707171, acc: 0.472656]\n",
"4265: [D loss: 0.722695, acc: 0.523438] [A loss: 0.884241, acc: 0.144531]\n",
"4266: [D loss: 0.704593, acc: 0.519531] [A loss: 0.771313, acc: 0.343750]\n",
"4267: [D loss: 0.706166, acc: 0.525391] [A loss: 0.810210, acc: 0.304688]\n",
"4268: [D loss: 0.701165, acc: 0.523438] [A loss: 0.804373, acc: 0.304688]\n",
"4269: [D loss: 0.695223, acc: 0.539062] [A loss: 0.856584, acc: 0.160156]\n",
"4270: [D loss: 0.691412, acc: 0.568359] [A loss: 0.795785, acc: 0.277344]\n",
"4271: [D loss: 0.708596, acc: 0.511719] [A loss: 0.888834, acc: 0.171875]\n",
"4272: [D loss: 0.698096, acc: 0.519531] [A loss: 0.828459, acc: 0.242188]\n",
"4273: [D loss: 0.705330, acc: 0.521484] [A loss: 0.827108, acc: 0.246094]\n",
"4274: [D loss: 0.689157, acc: 0.550781] [A loss: 0.843152, acc: 0.226562]\n",
"4275: [D loss: 0.701418, acc: 0.501953] [A loss: 0.774524, acc: 0.308594]\n",
"4276: [D loss: 0.703306, acc: 0.525391] [A loss: 0.875797, acc: 0.191406]\n",
"4277: [D loss: 0.696785, acc: 0.527344] [A loss: 0.727115, acc: 0.449219]\n",
"4278: [D loss: 0.710662, acc: 0.519531] [A loss: 0.990970, acc: 0.085938]\n",
"4279: [D loss: 0.693433, acc: 0.552734] [A loss: 0.697158, acc: 0.523438]\n",
"4280: [D loss: 0.730205, acc: 0.501953] [A loss: 1.014845, acc: 0.039062]\n",
"4281: [D loss: 0.688404, acc: 0.531250] [A loss: 0.633826, acc: 0.671875]\n",
"4282: [D loss: 0.723698, acc: 0.533203] [A loss: 0.990241, acc: 0.070312]\n",
"4283: [D loss: 0.716306, acc: 0.500000] [A loss: 0.662694, acc: 0.605469]\n",
"4284: [D loss: 0.717350, acc: 0.488281] [A loss: 0.855779, acc: 0.210938]\n",
"4285: [D loss: 0.708688, acc: 0.494141] [A loss: 0.776025, acc: 0.347656]\n",
"4286: [D loss: 0.699871, acc: 0.523438] [A loss: 0.820554, acc: 0.222656]\n",
"4287: [D loss: 0.688853, acc: 0.558594] [A loss: 0.831546, acc: 0.218750]\n",
"4288: [D loss: 0.699284, acc: 0.513672] [A loss: 0.768492, acc: 0.359375]\n",
"4289: [D loss: 0.705098, acc: 0.507812] [A loss: 0.795048, acc: 0.281250]\n",
"4290: [D loss: 0.700993, acc: 0.529297] [A loss: 0.833390, acc: 0.203125]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4291: [D loss: 0.701313, acc: 0.517578] [A loss: 0.778635, acc: 0.308594]\n",
"4292: [D loss: 0.706225, acc: 0.509766] [A loss: 0.872809, acc: 0.222656]\n",
"4293: [D loss: 0.700372, acc: 0.513672] [A loss: 0.734202, acc: 0.441406]\n",
"4294: [D loss: 0.704534, acc: 0.542969] [A loss: 0.881171, acc: 0.187500]\n",
"4295: [D loss: 0.685932, acc: 0.554688] [A loss: 0.788357, acc: 0.296875]\n",
"4296: [D loss: 0.717926, acc: 0.498047] [A loss: 0.942834, acc: 0.121094]\n",
"4297: [D loss: 0.698806, acc: 0.539062] [A loss: 0.696434, acc: 0.500000]\n",
"4298: [D loss: 0.722190, acc: 0.500000] [A loss: 0.987048, acc: 0.042969]\n",
"4299: [D loss: 0.686190, acc: 0.564453] [A loss: 0.692190, acc: 0.542969]\n",
"4300: [D loss: 0.708525, acc: 0.513672] [A loss: 0.901586, acc: 0.105469]\n",
"4301: [D loss: 0.695477, acc: 0.515625] [A loss: 0.746869, acc: 0.390625]\n",
"4302: [D loss: 0.712495, acc: 0.515625] [A loss: 0.888807, acc: 0.156250]\n",
"4303: [D loss: 0.695369, acc: 0.525391] [A loss: 0.778605, acc: 0.300781]\n",
"4304: [D loss: 0.696837, acc: 0.541016] [A loss: 0.879035, acc: 0.152344]\n",
"4305: [D loss: 0.693766, acc: 0.535156] [A loss: 0.772488, acc: 0.371094]\n",
"4306: [D loss: 0.707008, acc: 0.519531] [A loss: 0.915811, acc: 0.089844]\n",
"4307: [D loss: 0.688226, acc: 0.542969] [A loss: 0.675261, acc: 0.558594]\n",
"4308: [D loss: 0.717375, acc: 0.517578] [A loss: 0.961092, acc: 0.089844]\n",
"4309: [D loss: 0.685532, acc: 0.554688] [A loss: 0.674741, acc: 0.585938]\n",
"4310: [D loss: 0.716162, acc: 0.529297] [A loss: 0.882000, acc: 0.167969]\n",
"4311: [D loss: 0.692866, acc: 0.515625] [A loss: 0.795736, acc: 0.320312]\n",
"4312: [D loss: 0.704336, acc: 0.527344] [A loss: 0.878656, acc: 0.164062]\n",
"4313: [D loss: 0.693050, acc: 0.546875] [A loss: 0.816765, acc: 0.265625]\n",
"4314: [D loss: 0.712660, acc: 0.511719] [A loss: 0.916472, acc: 0.132812]\n",
"4315: [D loss: 0.694050, acc: 0.556641] [A loss: 0.751703, acc: 0.371094]\n",
"4316: [D loss: 0.702811, acc: 0.548828] [A loss: 0.931569, acc: 0.101562]\n",
"4317: [D loss: 0.696914, acc: 0.496094] [A loss: 0.781325, acc: 0.367188]\n",
"4318: [D loss: 0.702386, acc: 0.515625] [A loss: 0.972248, acc: 0.074219]\n",
"4319: [D loss: 0.693112, acc: 0.539062] [A loss: 0.670047, acc: 0.593750]\n",
"4320: [D loss: 0.727002, acc: 0.505859] [A loss: 1.008483, acc: 0.070312]\n",
"4321: [D loss: 0.707362, acc: 0.509766] [A loss: 0.682313, acc: 0.542969]\n",
"4322: [D loss: 0.711928, acc: 0.509766] [A loss: 0.893071, acc: 0.160156]\n",
"4323: [D loss: 0.712866, acc: 0.492188] [A loss: 0.758246, acc: 0.386719]\n",
"4324: [D loss: 0.707681, acc: 0.529297] [A loss: 0.855273, acc: 0.191406]\n",
"4325: [D loss: 0.687680, acc: 0.541016] [A loss: 0.765828, acc: 0.343750]\n",
"4326: [D loss: 0.714441, acc: 0.498047] [A loss: 0.805877, acc: 0.273438]\n",
"4327: [D loss: 0.707320, acc: 0.505859] [A loss: 0.845181, acc: 0.214844]\n",
"4328: [D loss: 0.705102, acc: 0.523438] [A loss: 0.849683, acc: 0.179688]\n",
"4329: [D loss: 0.698091, acc: 0.519531] [A loss: 0.870887, acc: 0.191406]\n",
"4330: [D loss: 0.713016, acc: 0.496094] [A loss: 0.862898, acc: 0.171875]\n",
"4331: [D loss: 0.706767, acc: 0.515625] [A loss: 0.826337, acc: 0.210938]\n",
"4332: [D loss: 0.694136, acc: 0.539062] [A loss: 0.805130, acc: 0.285156]\n",
"4333: [D loss: 0.706244, acc: 0.500000] [A loss: 0.922402, acc: 0.164062]\n",
"4334: [D loss: 0.705399, acc: 0.509766] [A loss: 0.810202, acc: 0.238281]\n",
"4335: [D loss: 0.703637, acc: 0.523438] [A loss: 0.979557, acc: 0.062500]\n",
"4336: [D loss: 0.695622, acc: 0.511719] [A loss: 0.700562, acc: 0.507812]\n",
"4337: [D loss: 0.708071, acc: 0.529297] [A loss: 0.955575, acc: 0.109375]\n",
"4338: [D loss: 0.709736, acc: 0.482422] [A loss: 0.709816, acc: 0.480469]\n",
"4339: [D loss: 0.715645, acc: 0.533203] [A loss: 0.983799, acc: 0.066406]\n",
"4340: [D loss: 0.698998, acc: 0.541016] [A loss: 0.675591, acc: 0.546875]\n",
"4341: [D loss: 0.742435, acc: 0.498047] [A loss: 0.959812, acc: 0.089844]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4342: [D loss: 0.699462, acc: 0.531250] [A loss: 0.699079, acc: 0.500000]\n",
"4343: [D loss: 0.716020, acc: 0.505859] [A loss: 0.868667, acc: 0.164062]\n",
"4344: [D loss: 0.693834, acc: 0.523438] [A loss: 0.784557, acc: 0.308594]\n",
"4345: [D loss: 0.713073, acc: 0.503906] [A loss: 0.889211, acc: 0.113281]\n",
"4346: [D loss: 0.706781, acc: 0.513672] [A loss: 0.728216, acc: 0.429688]\n",
"4347: [D loss: 0.696505, acc: 0.544922] [A loss: 0.871395, acc: 0.191406]\n",
"4348: [D loss: 0.693324, acc: 0.539062] [A loss: 0.752395, acc: 0.390625]\n",
"4349: [D loss: 0.693777, acc: 0.521484] [A loss: 0.873056, acc: 0.152344]\n",
"4350: [D loss: 0.709428, acc: 0.476562] [A loss: 0.810987, acc: 0.242188]\n",
"4351: [D loss: 0.722605, acc: 0.484375] [A loss: 0.853052, acc: 0.203125]\n",
"4352: [D loss: 0.710487, acc: 0.511719] [A loss: 0.853717, acc: 0.187500]\n",
"4353: [D loss: 0.700280, acc: 0.501953] [A loss: 0.881344, acc: 0.171875]\n",
"4354: [D loss: 0.693395, acc: 0.527344] [A loss: 0.766802, acc: 0.378906]\n",
"4355: [D loss: 0.716902, acc: 0.488281] [A loss: 0.952429, acc: 0.082031]\n",
"4356: [D loss: 0.700897, acc: 0.509766] [A loss: 0.711808, acc: 0.507812]\n",
"4357: [D loss: 0.720252, acc: 0.507812] [A loss: 0.965837, acc: 0.066406]\n",
"4358: [D loss: 0.699461, acc: 0.521484] [A loss: 0.674889, acc: 0.574219]\n",
"4359: [D loss: 0.714972, acc: 0.505859] [A loss: 0.949439, acc: 0.078125]\n",
"4360: [D loss: 0.692400, acc: 0.542969] [A loss: 0.680676, acc: 0.578125]\n",
"4361: [D loss: 0.727716, acc: 0.490234] [A loss: 0.942923, acc: 0.105469]\n",
"4362: [D loss: 0.700476, acc: 0.507812] [A loss: 0.672480, acc: 0.593750]\n",
"4363: [D loss: 0.713149, acc: 0.509766] [A loss: 0.919548, acc: 0.113281]\n",
"4364: [D loss: 0.689057, acc: 0.554688] [A loss: 0.701617, acc: 0.484375]\n",
"4365: [D loss: 0.714727, acc: 0.523438] [A loss: 0.872241, acc: 0.121094]\n",
"4366: [D loss: 0.692005, acc: 0.550781] [A loss: 0.718865, acc: 0.453125]\n",
"4367: [D loss: 0.699090, acc: 0.505859] [A loss: 0.845859, acc: 0.210938]\n",
"4368: [D loss: 0.695174, acc: 0.517578] [A loss: 0.759550, acc: 0.343750]\n",
"4369: [D loss: 0.702906, acc: 0.519531] [A loss: 0.848138, acc: 0.183594]\n",
"4370: [D loss: 0.701390, acc: 0.523438] [A loss: 0.790514, acc: 0.234375]\n",
"4371: [D loss: 0.708341, acc: 0.517578] [A loss: 0.933539, acc: 0.085938]\n",
"4372: [D loss: 0.700384, acc: 0.515625] [A loss: 0.736299, acc: 0.402344]\n",
"4373: [D loss: 0.708720, acc: 0.507812] [A loss: 0.835100, acc: 0.183594]\n",
"4374: [D loss: 0.705003, acc: 0.488281] [A loss: 0.759014, acc: 0.347656]\n",
"4375: [D loss: 0.706422, acc: 0.513672] [A loss: 0.894638, acc: 0.132812]\n",
"4376: [D loss: 0.698676, acc: 0.513672] [A loss: 0.758209, acc: 0.332031]\n",
"4377: [D loss: 0.717910, acc: 0.478516] [A loss: 0.894777, acc: 0.117188]\n",
"4378: [D loss: 0.705563, acc: 0.484375] [A loss: 0.752269, acc: 0.367188]\n",
"4379: [D loss: 0.713677, acc: 0.501953] [A loss: 0.971782, acc: 0.050781]\n",
"4380: [D loss: 0.702677, acc: 0.498047] [A loss: 0.713007, acc: 0.429688]\n",
"4381: [D loss: 0.728171, acc: 0.490234] [A loss: 0.908957, acc: 0.117188]\n",
"4382: [D loss: 0.697705, acc: 0.507812] [A loss: 0.729514, acc: 0.433594]\n",
"4383: [D loss: 0.724376, acc: 0.474609] [A loss: 0.874761, acc: 0.148438]\n",
"4384: [D loss: 0.702101, acc: 0.511719] [A loss: 0.779621, acc: 0.343750]\n",
"4385: [D loss: 0.707250, acc: 0.521484] [A loss: 0.883552, acc: 0.113281]\n",
"4386: [D loss: 0.701313, acc: 0.496094] [A loss: 0.708263, acc: 0.464844]\n",
"4387: [D loss: 0.705916, acc: 0.505859] [A loss: 0.885773, acc: 0.132812]\n",
"4388: [D loss: 0.700945, acc: 0.507812] [A loss: 0.759216, acc: 0.339844]\n",
"4389: [D loss: 0.688630, acc: 0.552734] [A loss: 0.878246, acc: 0.148438]\n",
"4390: [D loss: 0.709507, acc: 0.490234] [A loss: 0.767419, acc: 0.367188]\n",
"4391: [D loss: 0.693377, acc: 0.552734] [A loss: 0.849299, acc: 0.191406]\n",
"4392: [D loss: 0.684894, acc: 0.558594] [A loss: 0.799677, acc: 0.234375]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4393: [D loss: 0.700660, acc: 0.517578] [A loss: 0.837129, acc: 0.222656]\n",
"4394: [D loss: 0.694303, acc: 0.509766] [A loss: 0.789819, acc: 0.312500]\n",
"4395: [D loss: 0.696846, acc: 0.548828] [A loss: 0.849908, acc: 0.195312]\n",
"4396: [D loss: 0.702273, acc: 0.505859] [A loss: 0.837174, acc: 0.210938]\n",
"4397: [D loss: 0.709137, acc: 0.494141] [A loss: 0.797369, acc: 0.289062]\n",
"4398: [D loss: 0.688942, acc: 0.541016] [A loss: 0.822567, acc: 0.218750]\n",
"4399: [D loss: 0.703573, acc: 0.513672] [A loss: 0.860733, acc: 0.187500]\n",
"4400: [D loss: 0.698902, acc: 0.501953] [A loss: 0.839915, acc: 0.187500]\n",
"4401: [D loss: 0.700514, acc: 0.535156] [A loss: 0.826920, acc: 0.207031]\n",
"4402: [D loss: 0.699135, acc: 0.546875] [A loss: 0.844919, acc: 0.210938]\n",
"4403: [D loss: 0.702696, acc: 0.552734] [A loss: 0.890975, acc: 0.164062]\n",
"4404: [D loss: 0.688342, acc: 0.533203] [A loss: 0.763577, acc: 0.367188]\n",
"4405: [D loss: 0.715010, acc: 0.509766] [A loss: 1.058213, acc: 0.039062]\n",
"4406: [D loss: 0.685856, acc: 0.535156] [A loss: 0.619757, acc: 0.675781]\n",
"4407: [D loss: 0.758649, acc: 0.490234] [A loss: 1.004966, acc: 0.066406]\n",
"4408: [D loss: 0.696507, acc: 0.525391] [A loss: 0.732670, acc: 0.433594]\n",
"4409: [D loss: 0.730879, acc: 0.503906] [A loss: 0.943773, acc: 0.082031]\n",
"4410: [D loss: 0.699584, acc: 0.498047] [A loss: 0.697704, acc: 0.511719]\n",
"4411: [D loss: 0.709761, acc: 0.498047] [A loss: 0.869084, acc: 0.156250]\n",
"4412: [D loss: 0.704702, acc: 0.501953] [A loss: 0.775491, acc: 0.320312]\n",
"4413: [D loss: 0.714774, acc: 0.505859] [A loss: 0.861082, acc: 0.179688]\n",
"4414: [D loss: 0.693987, acc: 0.533203] [A loss: 0.718848, acc: 0.453125]\n",
"4415: [D loss: 0.709529, acc: 0.507812] [A loss: 0.927920, acc: 0.109375]\n",
"4416: [D loss: 0.702732, acc: 0.507812] [A loss: 0.678038, acc: 0.574219]\n",
"4417: [D loss: 0.695258, acc: 0.523438] [A loss: 0.870257, acc: 0.187500]\n",
"4418: [D loss: 0.694076, acc: 0.535156] [A loss: 0.718060, acc: 0.464844]\n",
"4419: [D loss: 0.707788, acc: 0.525391] [A loss: 0.833701, acc: 0.199219]\n",
"4420: [D loss: 0.701028, acc: 0.539062] [A loss: 0.755099, acc: 0.382812]\n",
"4421: [D loss: 0.700196, acc: 0.541016] [A loss: 0.822207, acc: 0.222656]\n",
"4422: [D loss: 0.705153, acc: 0.521484] [A loss: 0.870588, acc: 0.156250]\n",
"4423: [D loss: 0.695230, acc: 0.535156] [A loss: 0.833624, acc: 0.171875]\n",
"4424: [D loss: 0.701775, acc: 0.507812] [A loss: 0.849956, acc: 0.183594]\n",
"4425: [D loss: 0.710937, acc: 0.494141] [A loss: 0.822130, acc: 0.285156]\n",
"4426: [D loss: 0.701433, acc: 0.505859] [A loss: 0.826180, acc: 0.273438]\n",
"4427: [D loss: 0.710800, acc: 0.494141] [A loss: 0.783351, acc: 0.343750]\n",
"4428: [D loss: 0.701789, acc: 0.537109] [A loss: 0.863121, acc: 0.164062]\n",
"4429: [D loss: 0.689977, acc: 0.519531] [A loss: 0.782704, acc: 0.304688]\n",
"4430: [D loss: 0.699763, acc: 0.531250] [A loss: 0.925364, acc: 0.117188]\n",
"4431: [D loss: 0.691580, acc: 0.544922] [A loss: 0.758311, acc: 0.359375]\n",
"4432: [D loss: 0.715117, acc: 0.488281] [A loss: 0.978048, acc: 0.062500]\n",
"4433: [D loss: 0.698628, acc: 0.531250] [A loss: 0.689263, acc: 0.550781]\n",
"4434: [D loss: 0.711441, acc: 0.519531] [A loss: 0.951082, acc: 0.078125]\n",
"4435: [D loss: 0.688931, acc: 0.546875] [A loss: 0.725278, acc: 0.425781]\n",
"4436: [D loss: 0.710889, acc: 0.521484] [A loss: 0.890069, acc: 0.171875]\n",
"4437: [D loss: 0.677478, acc: 0.589844] [A loss: 0.729745, acc: 0.449219]\n",
"4438: [D loss: 0.718500, acc: 0.527344] [A loss: 0.982554, acc: 0.058594]\n",
"4439: [D loss: 0.699237, acc: 0.535156] [A loss: 0.716947, acc: 0.464844]\n",
"4440: [D loss: 0.704567, acc: 0.533203] [A loss: 0.912667, acc: 0.167969]\n",
"4441: [D loss: 0.688426, acc: 0.517578] [A loss: 0.702398, acc: 0.503906]\n",
"4442: [D loss: 0.714223, acc: 0.511719] [A loss: 0.846320, acc: 0.238281]\n",
"4443: [D loss: 0.689337, acc: 0.525391] [A loss: 0.828492, acc: 0.238281]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4444: [D loss: 0.698123, acc: 0.525391] [A loss: 0.773683, acc: 0.375000]\n",
"4445: [D loss: 0.707430, acc: 0.523438] [A loss: 1.005007, acc: 0.035156]\n",
"4446: [D loss: 0.697372, acc: 0.525391] [A loss: 0.622264, acc: 0.683594]\n",
"4447: [D loss: 0.734721, acc: 0.519531] [A loss: 1.063763, acc: 0.042969]\n",
"4448: [D loss: 0.708248, acc: 0.517578] [A loss: 0.647455, acc: 0.644531]\n",
"4449: [D loss: 0.721243, acc: 0.533203] [A loss: 0.837964, acc: 0.171875]\n",
"4450: [D loss: 0.686254, acc: 0.548828] [A loss: 0.734027, acc: 0.441406]\n",
"4451: [D loss: 0.700680, acc: 0.529297] [A loss: 0.784878, acc: 0.324219]\n",
"4452: [D loss: 0.703357, acc: 0.525391] [A loss: 0.821359, acc: 0.265625]\n",
"4453: [D loss: 0.698923, acc: 0.539062] [A loss: 0.745895, acc: 0.367188]\n",
"4454: [D loss: 0.711269, acc: 0.529297] [A loss: 0.887946, acc: 0.121094]\n",
"4455: [D loss: 0.693051, acc: 0.552734] [A loss: 0.719031, acc: 0.464844]\n",
"4456: [D loss: 0.692461, acc: 0.533203] [A loss: 0.873841, acc: 0.187500]\n",
"4457: [D loss: 0.702265, acc: 0.484375] [A loss: 0.769336, acc: 0.359375]\n",
"4458: [D loss: 0.713032, acc: 0.494141] [A loss: 0.901434, acc: 0.136719]\n",
"4459: [D loss: 0.696391, acc: 0.541016] [A loss: 0.712496, acc: 0.445312]\n",
"4460: [D loss: 0.717012, acc: 0.503906] [A loss: 0.938475, acc: 0.101562]\n",
"4461: [D loss: 0.693367, acc: 0.554688] [A loss: 0.738388, acc: 0.417969]\n",
"4462: [D loss: 0.711089, acc: 0.533203] [A loss: 0.850263, acc: 0.199219]\n",
"4463: [D loss: 0.699002, acc: 0.519531] [A loss: 0.784084, acc: 0.332031]\n",
"4464: [D loss: 0.707114, acc: 0.511719] [A loss: 0.888232, acc: 0.152344]\n",
"4465: [D loss: 0.713717, acc: 0.492188] [A loss: 0.738469, acc: 0.394531]\n",
"4466: [D loss: 0.710474, acc: 0.529297] [A loss: 0.873713, acc: 0.152344]\n",
"4467: [D loss: 0.692241, acc: 0.542969] [A loss: 0.758440, acc: 0.359375]\n",
"4468: [D loss: 0.708177, acc: 0.486328] [A loss: 0.853519, acc: 0.210938]\n",
"4469: [D loss: 0.706255, acc: 0.503906] [A loss: 0.770950, acc: 0.347656]\n",
"4470: [D loss: 0.707049, acc: 0.527344] [A loss: 0.873440, acc: 0.128906]\n",
"4471: [D loss: 0.702947, acc: 0.537109] [A loss: 0.794820, acc: 0.261719]\n",
"4472: [D loss: 0.691065, acc: 0.548828] [A loss: 0.888714, acc: 0.156250]\n",
"4473: [D loss: 0.697527, acc: 0.539062] [A loss: 0.729575, acc: 0.457031]\n",
"4474: [D loss: 0.704256, acc: 0.521484] [A loss: 0.879986, acc: 0.167969]\n",
"4475: [D loss: 0.693295, acc: 0.537109] [A loss: 0.795370, acc: 0.292969]\n",
"4476: [D loss: 0.704298, acc: 0.480469] [A loss: 0.858573, acc: 0.210938]\n",
"4477: [D loss: 0.696738, acc: 0.535156] [A loss: 0.780324, acc: 0.339844]\n",
"4478: [D loss: 0.703683, acc: 0.492188] [A loss: 0.852658, acc: 0.195312]\n",
"4479: [D loss: 0.705241, acc: 0.513672] [A loss: 0.794592, acc: 0.289062]\n",
"4480: [D loss: 0.702955, acc: 0.527344] [A loss: 0.911290, acc: 0.128906]\n",
"4481: [D loss: 0.702421, acc: 0.521484] [A loss: 0.788034, acc: 0.308594]\n",
"4482: [D loss: 0.697629, acc: 0.531250] [A loss: 0.903476, acc: 0.156250]\n",
"4483: [D loss: 0.708164, acc: 0.511719] [A loss: 0.766086, acc: 0.343750]\n",
"4484: [D loss: 0.716902, acc: 0.501953] [A loss: 0.994228, acc: 0.070312]\n",
"4485: [D loss: 0.711542, acc: 0.470703] [A loss: 0.671492, acc: 0.570312]\n",
"4486: [D loss: 0.724567, acc: 0.507812] [A loss: 0.998640, acc: 0.066406]\n",
"4487: [D loss: 0.697739, acc: 0.519531] [A loss: 0.647910, acc: 0.660156]\n",
"4488: [D loss: 0.736960, acc: 0.496094] [A loss: 0.947398, acc: 0.125000]\n",
"4489: [D loss: 0.702046, acc: 0.517578] [A loss: 0.716129, acc: 0.472656]\n",
"4490: [D loss: 0.727007, acc: 0.507812] [A loss: 0.830431, acc: 0.242188]\n",
"4491: [D loss: 0.700797, acc: 0.533203] [A loss: 0.838489, acc: 0.214844]\n",
"4492: [D loss: 0.711735, acc: 0.480469] [A loss: 0.794518, acc: 0.265625]\n",
"4493: [D loss: 0.688559, acc: 0.539062] [A loss: 0.826316, acc: 0.210938]\n",
"4494: [D loss: 0.701424, acc: 0.527344] [A loss: 0.781962, acc: 0.332031]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4495: [D loss: 0.719235, acc: 0.523438] [A loss: 0.862368, acc: 0.140625]\n",
"4496: [D loss: 0.696384, acc: 0.525391] [A loss: 0.802946, acc: 0.296875]\n",
"4497: [D loss: 0.710129, acc: 0.501953] [A loss: 0.894654, acc: 0.148438]\n",
"4498: [D loss: 0.697925, acc: 0.527344] [A loss: 0.719035, acc: 0.460938]\n",
"4499: [D loss: 0.709016, acc: 0.521484] [A loss: 0.982698, acc: 0.050781]\n",
"4500: [D loss: 0.705151, acc: 0.503906] [A loss: 0.661109, acc: 0.597656]\n",
"4501: [D loss: 0.721936, acc: 0.511719] [A loss: 0.973453, acc: 0.085938]\n",
"4502: [D loss: 0.693291, acc: 0.539062] [A loss: 0.679615, acc: 0.562500]\n",
"4503: [D loss: 0.710150, acc: 0.550781] [A loss: 0.838368, acc: 0.207031]\n",
"4504: [D loss: 0.694523, acc: 0.511719] [A loss: 0.777155, acc: 0.304688]\n",
"4505: [D loss: 0.685867, acc: 0.542969] [A loss: 0.813667, acc: 0.234375]\n",
"4506: [D loss: 0.696628, acc: 0.523438] [A loss: 0.762110, acc: 0.343750]\n",
"4507: [D loss: 0.718458, acc: 0.496094] [A loss: 0.882146, acc: 0.113281]\n",
"4508: [D loss: 0.707760, acc: 0.500000] [A loss: 0.748209, acc: 0.417969]\n",
"4509: [D loss: 0.721596, acc: 0.509766] [A loss: 0.895852, acc: 0.117188]\n",
"4510: [D loss: 0.694051, acc: 0.544922] [A loss: 0.787864, acc: 0.300781]\n",
"4511: [D loss: 0.707372, acc: 0.488281] [A loss: 0.812586, acc: 0.218750]\n",
"4512: [D loss: 0.703774, acc: 0.494141] [A loss: 0.774407, acc: 0.320312]\n",
"4513: [D loss: 0.701315, acc: 0.513672] [A loss: 0.874569, acc: 0.144531]\n",
"4514: [D loss: 0.696914, acc: 0.503906] [A loss: 0.769517, acc: 0.339844]\n",
"4515: [D loss: 0.734174, acc: 0.488281] [A loss: 1.027195, acc: 0.054688]\n",
"4516: [D loss: 0.719454, acc: 0.468750] [A loss: 0.742555, acc: 0.382812]\n",
"4517: [D loss: 0.709231, acc: 0.507812] [A loss: 0.894661, acc: 0.113281]\n",
"4518: [D loss: 0.695076, acc: 0.509766] [A loss: 0.722260, acc: 0.457031]\n",
"4519: [D loss: 0.717832, acc: 0.511719] [A loss: 0.956653, acc: 0.062500]\n",
"4520: [D loss: 0.693054, acc: 0.531250] [A loss: 0.700396, acc: 0.523438]\n",
"4521: [D loss: 0.719977, acc: 0.501953] [A loss: 0.955477, acc: 0.101562]\n",
"4522: [D loss: 0.710039, acc: 0.488281] [A loss: 0.744852, acc: 0.386719]\n",
"4523: [D loss: 0.707876, acc: 0.509766] [A loss: 0.889459, acc: 0.148438]\n",
"4524: [D loss: 0.691886, acc: 0.544922] [A loss: 0.791908, acc: 0.296875]\n",
"4525: [D loss: 0.699048, acc: 0.523438] [A loss: 0.822835, acc: 0.183594]\n",
"4526: [D loss: 0.700514, acc: 0.542969] [A loss: 0.845257, acc: 0.187500]\n",
"4527: [D loss: 0.701238, acc: 0.500000] [A loss: 0.793349, acc: 0.308594]\n",
"4528: [D loss: 0.708809, acc: 0.501953] [A loss: 0.812380, acc: 0.238281]\n",
"4529: [D loss: 0.701700, acc: 0.527344] [A loss: 0.838678, acc: 0.207031]\n",
"4530: [D loss: 0.700093, acc: 0.501953] [A loss: 0.807943, acc: 0.238281]\n",
"4531: [D loss: 0.698212, acc: 0.509766] [A loss: 0.883326, acc: 0.191406]\n",
"4532: [D loss: 0.696925, acc: 0.558594] [A loss: 0.769157, acc: 0.367188]\n",
"4533: [D loss: 0.708800, acc: 0.482422] [A loss: 0.929808, acc: 0.121094]\n",
"4534: [D loss: 0.689004, acc: 0.554688] [A loss: 0.704098, acc: 0.476562]\n",
"4535: [D loss: 0.709738, acc: 0.505859] [A loss: 0.931800, acc: 0.085938]\n",
"4536: [D loss: 0.702538, acc: 0.500000] [A loss: 0.679861, acc: 0.554688]\n",
"4537: [D loss: 0.714312, acc: 0.501953] [A loss: 0.904879, acc: 0.156250]\n",
"4538: [D loss: 0.703654, acc: 0.507812] [A loss: 0.726802, acc: 0.414062]\n",
"4539: [D loss: 0.718513, acc: 0.525391] [A loss: 0.942841, acc: 0.074219]\n",
"4540: [D loss: 0.689110, acc: 0.539062] [A loss: 0.725891, acc: 0.429688]\n",
"4541: [D loss: 0.711310, acc: 0.542969] [A loss: 0.884016, acc: 0.148438]\n",
"4542: [D loss: 0.701457, acc: 0.511719] [A loss: 0.761735, acc: 0.324219]\n",
"4543: [D loss: 0.700359, acc: 0.541016] [A loss: 0.874363, acc: 0.175781]\n",
"4544: [D loss: 0.704524, acc: 0.505859] [A loss: 0.713087, acc: 0.445312]\n",
"4545: [D loss: 0.714827, acc: 0.517578] [A loss: 0.883888, acc: 0.156250]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4546: [D loss: 0.695243, acc: 0.525391] [A loss: 0.819752, acc: 0.218750]\n",
"4547: [D loss: 0.707869, acc: 0.488281] [A loss: 0.884202, acc: 0.156250]\n",
"4548: [D loss: 0.684812, acc: 0.583984] [A loss: 0.753377, acc: 0.351562]\n",
"4549: [D loss: 0.716700, acc: 0.501953] [A loss: 0.982688, acc: 0.054688]\n",
"4550: [D loss: 0.711712, acc: 0.509766] [A loss: 0.690305, acc: 0.527344]\n",
"4551: [D loss: 0.718104, acc: 0.486328] [A loss: 0.959370, acc: 0.058594]\n",
"4552: [D loss: 0.695582, acc: 0.521484] [A loss: 0.733259, acc: 0.417969]\n",
"4553: [D loss: 0.715313, acc: 0.500000] [A loss: 0.851088, acc: 0.160156]\n",
"4554: [D loss: 0.706747, acc: 0.507812] [A loss: 0.823692, acc: 0.269531]\n",
"4555: [D loss: 0.693913, acc: 0.521484] [A loss: 0.790542, acc: 0.289062]\n",
"4556: [D loss: 0.705499, acc: 0.527344] [A loss: 0.811764, acc: 0.253906]\n",
"4557: [D loss: 0.698880, acc: 0.511719] [A loss: 0.784146, acc: 0.308594]\n",
"4558: [D loss: 0.699278, acc: 0.527344] [A loss: 0.792838, acc: 0.285156]\n",
"4559: [D loss: 0.695050, acc: 0.525391] [A loss: 0.906116, acc: 0.121094]\n",
"4560: [D loss: 0.697012, acc: 0.523438] [A loss: 0.828312, acc: 0.242188]\n",
"4561: [D loss: 0.691288, acc: 0.531250] [A loss: 0.896653, acc: 0.148438]\n",
"4562: [D loss: 0.696075, acc: 0.505859] [A loss: 0.752008, acc: 0.386719]\n",
"4563: [D loss: 0.704444, acc: 0.515625] [A loss: 0.922917, acc: 0.101562]\n",
"4564: [D loss: 0.698150, acc: 0.519531] [A loss: 0.707227, acc: 0.453125]\n",
"4565: [D loss: 0.706886, acc: 0.515625] [A loss: 0.863124, acc: 0.167969]\n",
"4566: [D loss: 0.697048, acc: 0.517578] [A loss: 0.741897, acc: 0.410156]\n",
"4567: [D loss: 0.709599, acc: 0.511719] [A loss: 0.999561, acc: 0.035156]\n",
"4568: [D loss: 0.701177, acc: 0.527344] [A loss: 0.695710, acc: 0.519531]\n",
"4569: [D loss: 0.721677, acc: 0.486328] [A loss: 0.974984, acc: 0.062500]\n",
"4570: [D loss: 0.697801, acc: 0.529297] [A loss: 0.678545, acc: 0.539062]\n",
"4571: [D loss: 0.720062, acc: 0.503906] [A loss: 0.898228, acc: 0.093750]\n",
"4572: [D loss: 0.685067, acc: 0.572266] [A loss: 0.734030, acc: 0.398438]\n",
"4573: [D loss: 0.701272, acc: 0.535156] [A loss: 0.890031, acc: 0.121094]\n",
"4574: [D loss: 0.695385, acc: 0.519531] [A loss: 0.743701, acc: 0.394531]\n",
"4575: [D loss: 0.699265, acc: 0.523438] [A loss: 0.892342, acc: 0.101562]\n",
"4576: [D loss: 0.703657, acc: 0.501953] [A loss: 0.773196, acc: 0.320312]\n",
"4577: [D loss: 0.717593, acc: 0.503906] [A loss: 0.822145, acc: 0.218750]\n",
"4578: [D loss: 0.704258, acc: 0.519531] [A loss: 0.829715, acc: 0.234375]\n",
"4579: [D loss: 0.692589, acc: 0.529297] [A loss: 0.881180, acc: 0.125000]\n",
"4580: [D loss: 0.683976, acc: 0.554688] [A loss: 0.718100, acc: 0.453125]\n",
"4581: [D loss: 0.709888, acc: 0.517578] [A loss: 0.997453, acc: 0.050781]\n",
"4582: [D loss: 0.696296, acc: 0.525391] [A loss: 0.636287, acc: 0.656250]\n",
"4583: [D loss: 0.739952, acc: 0.482422] [A loss: 0.925250, acc: 0.101562]\n",
"4584: [D loss: 0.703017, acc: 0.500000] [A loss: 0.807937, acc: 0.242188]\n",
"4585: [D loss: 0.705356, acc: 0.486328] [A loss: 0.851405, acc: 0.214844]\n",
"4586: [D loss: 0.692256, acc: 0.513672] [A loss: 0.744443, acc: 0.382812]\n",
"4587: [D loss: 0.714961, acc: 0.496094] [A loss: 0.879961, acc: 0.167969]\n",
"4588: [D loss: 0.706805, acc: 0.527344] [A loss: 0.707506, acc: 0.472656]\n",
"4589: [D loss: 0.701527, acc: 0.519531] [A loss: 0.885892, acc: 0.156250]\n",
"4590: [D loss: 0.690969, acc: 0.533203] [A loss: 0.736604, acc: 0.398438]\n",
"4591: [D loss: 0.714597, acc: 0.503906] [A loss: 0.894884, acc: 0.105469]\n",
"4592: [D loss: 0.705736, acc: 0.511719] [A loss: 0.733311, acc: 0.441406]\n",
"4593: [D loss: 0.720895, acc: 0.490234] [A loss: 0.881563, acc: 0.179688]\n",
"4594: [D loss: 0.691723, acc: 0.507812] [A loss: 0.807510, acc: 0.257812]\n",
"4595: [D loss: 0.696199, acc: 0.523438] [A loss: 0.786099, acc: 0.285156]\n",
"4596: [D loss: 0.709603, acc: 0.507812] [A loss: 0.814802, acc: 0.261719]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4597: [D loss: 0.697447, acc: 0.542969] [A loss: 0.792473, acc: 0.265625]\n",
"4598: [D loss: 0.701785, acc: 0.533203] [A loss: 0.872646, acc: 0.125000]\n",
"4599: [D loss: 0.689995, acc: 0.546875] [A loss: 0.733355, acc: 0.410156]\n",
"4600: [D loss: 0.699808, acc: 0.507812] [A loss: 0.860451, acc: 0.171875]\n",
"4601: [D loss: 0.705631, acc: 0.486328] [A loss: 0.727563, acc: 0.441406]\n",
"4602: [D loss: 0.703526, acc: 0.525391] [A loss: 0.906029, acc: 0.128906]\n",
"4603: [D loss: 0.709587, acc: 0.500000] [A loss: 0.683945, acc: 0.562500]\n",
"4604: [D loss: 0.731654, acc: 0.490234] [A loss: 0.957352, acc: 0.066406]\n",
"4605: [D loss: 0.716680, acc: 0.453125] [A loss: 0.699122, acc: 0.464844]\n",
"4606: [D loss: 0.719111, acc: 0.509766] [A loss: 0.886896, acc: 0.156250]\n",
"4607: [D loss: 0.698692, acc: 0.521484] [A loss: 0.752395, acc: 0.355469]\n",
"4608: [D loss: 0.716926, acc: 0.490234] [A loss: 0.913911, acc: 0.167969]\n",
"4609: [D loss: 0.712407, acc: 0.488281] [A loss: 0.719256, acc: 0.460938]\n",
"4610: [D loss: 0.709336, acc: 0.519531] [A loss: 0.900733, acc: 0.128906]\n",
"4611: [D loss: 0.696447, acc: 0.513672] [A loss: 0.722436, acc: 0.453125]\n",
"4612: [D loss: 0.721220, acc: 0.513672] [A loss: 0.896771, acc: 0.121094]\n",
"4613: [D loss: 0.699094, acc: 0.503906] [A loss: 0.772090, acc: 0.343750]\n",
"4614: [D loss: 0.706400, acc: 0.517578] [A loss: 0.931959, acc: 0.128906]\n",
"4615: [D loss: 0.703776, acc: 0.494141] [A loss: 0.785536, acc: 0.367188]\n",
"4616: [D loss: 0.713715, acc: 0.517578] [A loss: 0.900727, acc: 0.136719]\n",
"4617: [D loss: 0.694453, acc: 0.531250] [A loss: 0.717835, acc: 0.464844]\n",
"4618: [D loss: 0.718842, acc: 0.496094] [A loss: 0.896896, acc: 0.144531]\n",
"4619: [D loss: 0.692165, acc: 0.503906] [A loss: 0.710916, acc: 0.472656]\n",
"4620: [D loss: 0.711539, acc: 0.498047] [A loss: 0.887979, acc: 0.148438]\n",
"4621: [D loss: 0.692935, acc: 0.546875] [A loss: 0.788763, acc: 0.335938]\n",
"4622: [D loss: 0.715539, acc: 0.474609] [A loss: 0.837849, acc: 0.234375]\n",
"4623: [D loss: 0.699693, acc: 0.511719] [A loss: 0.911635, acc: 0.101562]\n",
"4624: [D loss: 0.688735, acc: 0.542969] [A loss: 0.718251, acc: 0.484375]\n",
"4625: [D loss: 0.702527, acc: 0.515625] [A loss: 0.924407, acc: 0.089844]\n",
"4626: [D loss: 0.684354, acc: 0.564453] [A loss: 0.692979, acc: 0.515625]\n",
"4627: [D loss: 0.714665, acc: 0.515625] [A loss: 0.922675, acc: 0.085938]\n",
"4628: [D loss: 0.705756, acc: 0.507812] [A loss: 0.738291, acc: 0.414062]\n",
"4629: [D loss: 0.716188, acc: 0.503906] [A loss: 0.877330, acc: 0.160156]\n",
"4630: [D loss: 0.703212, acc: 0.498047] [A loss: 0.722961, acc: 0.460938]\n",
"4631: [D loss: 0.689984, acc: 0.550781] [A loss: 0.906840, acc: 0.152344]\n",
"4632: [D loss: 0.692468, acc: 0.527344] [A loss: 0.715892, acc: 0.484375]\n",
"4633: [D loss: 0.701450, acc: 0.519531] [A loss: 0.825984, acc: 0.218750]\n",
"4634: [D loss: 0.707530, acc: 0.511719] [A loss: 0.757344, acc: 0.375000]\n",
"4635: [D loss: 0.701900, acc: 0.509766] [A loss: 0.849305, acc: 0.203125]\n",
"4636: [D loss: 0.706738, acc: 0.507812] [A loss: 0.794882, acc: 0.300781]\n",
"4637: [D loss: 0.699825, acc: 0.525391] [A loss: 0.873633, acc: 0.140625]\n",
"4638: [D loss: 0.697992, acc: 0.515625] [A loss: 0.722770, acc: 0.453125]\n",
"4639: [D loss: 0.711403, acc: 0.500000] [A loss: 0.973876, acc: 0.101562]\n",
"4640: [D loss: 0.693464, acc: 0.531250] [A loss: 0.717697, acc: 0.476562]\n",
"4641: [D loss: 0.713179, acc: 0.525391] [A loss: 0.879288, acc: 0.125000]\n",
"4642: [D loss: 0.687647, acc: 0.558594] [A loss: 0.741134, acc: 0.429688]\n",
"4643: [D loss: 0.714201, acc: 0.494141] [A loss: 0.848171, acc: 0.207031]\n",
"4644: [D loss: 0.706076, acc: 0.501953] [A loss: 0.797681, acc: 0.277344]\n",
"4645: [D loss: 0.707447, acc: 0.525391] [A loss: 0.807680, acc: 0.281250]\n",
"4646: [D loss: 0.693782, acc: 0.542969] [A loss: 0.767523, acc: 0.351562]\n",
"4647: [D loss: 0.707462, acc: 0.515625] [A loss: 0.890939, acc: 0.128906]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4648: [D loss: 0.697759, acc: 0.529297] [A loss: 0.685734, acc: 0.527344]\n",
"4649: [D loss: 0.733571, acc: 0.498047] [A loss: 0.917277, acc: 0.140625]\n",
"4650: [D loss: 0.705796, acc: 0.515625] [A loss: 0.734166, acc: 0.414062]\n",
"4651: [D loss: 0.703238, acc: 0.531250] [A loss: 0.874410, acc: 0.140625]\n",
"4652: [D loss: 0.696852, acc: 0.537109] [A loss: 0.758094, acc: 0.382812]\n",
"4653: [D loss: 0.705549, acc: 0.500000] [A loss: 0.976840, acc: 0.070312]\n",
"4654: [D loss: 0.693269, acc: 0.560547] [A loss: 0.689896, acc: 0.574219]\n",
"4655: [D loss: 0.724462, acc: 0.517578] [A loss: 1.003573, acc: 0.042969]\n",
"4656: [D loss: 0.704807, acc: 0.492188] [A loss: 0.642873, acc: 0.621094]\n",
"4657: [D loss: 0.724618, acc: 0.505859] [A loss: 0.896573, acc: 0.156250]\n",
"4658: [D loss: 0.700781, acc: 0.501953] [A loss: 0.807022, acc: 0.261719]\n",
"4659: [D loss: 0.707061, acc: 0.496094] [A loss: 0.788807, acc: 0.285156]\n",
"4660: [D loss: 0.694726, acc: 0.541016] [A loss: 0.836952, acc: 0.183594]\n",
"4661: [D loss: 0.702420, acc: 0.519531] [A loss: 0.785350, acc: 0.250000]\n",
"4662: [D loss: 0.706712, acc: 0.535156] [A loss: 0.828661, acc: 0.210938]\n",
"4663: [D loss: 0.705027, acc: 0.496094] [A loss: 0.852093, acc: 0.207031]\n",
"4664: [D loss: 0.697953, acc: 0.523438] [A loss: 0.792447, acc: 0.289062]\n",
"4665: [D loss: 0.705782, acc: 0.531250] [A loss: 0.912086, acc: 0.074219]\n",
"4666: [D loss: 0.703985, acc: 0.521484] [A loss: 0.753514, acc: 0.359375]\n",
"4667: [D loss: 0.706409, acc: 0.513672] [A loss: 0.895585, acc: 0.156250]\n",
"4668: [D loss: 0.705710, acc: 0.503906] [A loss: 0.736403, acc: 0.382812]\n",
"4669: [D loss: 0.711490, acc: 0.513672] [A loss: 0.899183, acc: 0.160156]\n",
"4670: [D loss: 0.701302, acc: 0.515625] [A loss: 0.733608, acc: 0.410156]\n",
"4671: [D loss: 0.698034, acc: 0.546875] [A loss: 0.915197, acc: 0.144531]\n",
"4672: [D loss: 0.692183, acc: 0.544922] [A loss: 0.755215, acc: 0.378906]\n",
"4673: [D loss: 0.696433, acc: 0.523438] [A loss: 0.872836, acc: 0.171875]\n",
"4674: [D loss: 0.691703, acc: 0.533203] [A loss: 0.804729, acc: 0.285156]\n",
"4675: [D loss: 0.711486, acc: 0.501953] [A loss: 0.791559, acc: 0.308594]\n",
"4676: [D loss: 0.706415, acc: 0.521484] [A loss: 0.908570, acc: 0.105469]\n",
"4677: [D loss: 0.688317, acc: 0.550781] [A loss: 0.662242, acc: 0.605469]\n",
"4678: [D loss: 0.712141, acc: 0.517578] [A loss: 0.985691, acc: 0.074219]\n",
"4679: [D loss: 0.699152, acc: 0.525391] [A loss: 0.710596, acc: 0.445312]\n",
"4680: [D loss: 0.732252, acc: 0.494141] [A loss: 0.900380, acc: 0.132812]\n",
"4681: [D loss: 0.681352, acc: 0.544922] [A loss: 0.689683, acc: 0.519531]\n",
"4682: [D loss: 0.706150, acc: 0.541016] [A loss: 0.934123, acc: 0.109375]\n",
"4683: [D loss: 0.691347, acc: 0.525391] [A loss: 0.743879, acc: 0.402344]\n",
"4684: [D loss: 0.716657, acc: 0.480469] [A loss: 0.893635, acc: 0.148438]\n",
"4685: [D loss: 0.703077, acc: 0.511719] [A loss: 0.804638, acc: 0.261719]\n",
"4686: [D loss: 0.692499, acc: 0.546875] [A loss: 0.861452, acc: 0.218750]\n",
"4687: [D loss: 0.701070, acc: 0.503906] [A loss: 0.768697, acc: 0.375000]\n",
"4688: [D loss: 0.701455, acc: 0.519531] [A loss: 0.937027, acc: 0.105469]\n",
"4689: [D loss: 0.711510, acc: 0.500000] [A loss: 0.771534, acc: 0.335938]\n",
"4690: [D loss: 0.714855, acc: 0.519531] [A loss: 0.967265, acc: 0.101562]\n",
"4691: [D loss: 0.704690, acc: 0.507812] [A loss: 0.679190, acc: 0.566406]\n",
"4692: [D loss: 0.706175, acc: 0.525391] [A loss: 0.823255, acc: 0.214844]\n",
"4693: [D loss: 0.682001, acc: 0.566406] [A loss: 0.791425, acc: 0.316406]\n",
"4694: [D loss: 0.694440, acc: 0.552734] [A loss: 0.834080, acc: 0.250000]\n",
"4695: [D loss: 0.712890, acc: 0.498047] [A loss: 0.803559, acc: 0.292969]\n",
"4696: [D loss: 0.702601, acc: 0.513672] [A loss: 0.819616, acc: 0.207031]\n",
"4697: [D loss: 0.696547, acc: 0.550781] [A loss: 0.838896, acc: 0.210938]\n",
"4698: [D loss: 0.694141, acc: 0.548828] [A loss: 0.776985, acc: 0.328125]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4699: [D loss: 0.700188, acc: 0.535156] [A loss: 0.881693, acc: 0.144531]\n",
"4700: [D loss: 0.702760, acc: 0.498047] [A loss: 0.727239, acc: 0.433594]\n",
"4701: [D loss: 0.695093, acc: 0.521484] [A loss: 1.010470, acc: 0.058594]\n",
"4702: [D loss: 0.703263, acc: 0.498047] [A loss: 0.714819, acc: 0.468750]\n",
"4703: [D loss: 0.712505, acc: 0.519531] [A loss: 0.888067, acc: 0.156250]\n",
"4704: [D loss: 0.715471, acc: 0.458984] [A loss: 0.760039, acc: 0.398438]\n",
"4705: [D loss: 0.704750, acc: 0.521484] [A loss: 0.837476, acc: 0.222656]\n",
"4706: [D loss: 0.695688, acc: 0.529297] [A loss: 0.903359, acc: 0.148438]\n",
"4707: [D loss: 0.692774, acc: 0.529297] [A loss: 0.785594, acc: 0.328125]\n",
"4708: [D loss: 0.702425, acc: 0.517578] [A loss: 0.847964, acc: 0.164062]\n",
"4709: [D loss: 0.695913, acc: 0.550781] [A loss: 0.839477, acc: 0.195312]\n",
"4710: [D loss: 0.688289, acc: 0.539062] [A loss: 0.850044, acc: 0.171875]\n",
"4711: [D loss: 0.699033, acc: 0.531250] [A loss: 0.723318, acc: 0.414062]\n",
"4712: [D loss: 0.699726, acc: 0.529297] [A loss: 0.906346, acc: 0.128906]\n",
"4713: [D loss: 0.688227, acc: 0.517578] [A loss: 0.696913, acc: 0.515625]\n",
"4714: [D loss: 0.719760, acc: 0.503906] [A loss: 0.976701, acc: 0.089844]\n",
"4715: [D loss: 0.698257, acc: 0.521484] [A loss: 0.676309, acc: 0.617188]\n",
"4716: [D loss: 0.710139, acc: 0.498047] [A loss: 0.944855, acc: 0.093750]\n",
"4717: [D loss: 0.703970, acc: 0.490234] [A loss: 0.720684, acc: 0.425781]\n",
"4718: [D loss: 0.701547, acc: 0.539062] [A loss: 0.905675, acc: 0.109375]\n",
"4719: [D loss: 0.701130, acc: 0.531250] [A loss: 0.748976, acc: 0.417969]\n",
"4720: [D loss: 0.689623, acc: 0.566406] [A loss: 0.837415, acc: 0.175781]\n",
"4721: [D loss: 0.703036, acc: 0.490234] [A loss: 0.779830, acc: 0.347656]\n",
"4722: [D loss: 0.709161, acc: 0.505859] [A loss: 0.880894, acc: 0.171875]\n",
"4723: [D loss: 0.694147, acc: 0.556641] [A loss: 0.760935, acc: 0.347656]\n",
"4724: [D loss: 0.706969, acc: 0.529297] [A loss: 0.887932, acc: 0.136719]\n",
"4725: [D loss: 0.709109, acc: 0.492188] [A loss: 0.763511, acc: 0.347656]\n",
"4726: [D loss: 0.708765, acc: 0.523438] [A loss: 0.870272, acc: 0.136719]\n",
"4727: [D loss: 0.708608, acc: 0.494141] [A loss: 0.771935, acc: 0.335938]\n",
"4728: [D loss: 0.712990, acc: 0.492188] [A loss: 0.967117, acc: 0.070312]\n",
"4729: [D loss: 0.697433, acc: 0.519531] [A loss: 0.652587, acc: 0.601562]\n",
"4730: [D loss: 0.725611, acc: 0.509766] [A loss: 1.045779, acc: 0.035156]\n",
"4731: [D loss: 0.706189, acc: 0.501953] [A loss: 0.701299, acc: 0.507812]\n",
"4732: [D loss: 0.719312, acc: 0.507812] [A loss: 0.874033, acc: 0.144531]\n",
"4733: [D loss: 0.693273, acc: 0.550781] [A loss: 0.766143, acc: 0.343750]\n",
"4734: [D loss: 0.694556, acc: 0.529297] [A loss: 0.927972, acc: 0.089844]\n",
"4735: [D loss: 0.686289, acc: 0.533203] [A loss: 0.738338, acc: 0.406250]\n",
"4736: [D loss: 0.722016, acc: 0.462891] [A loss: 0.918933, acc: 0.109375]\n",
"4737: [D loss: 0.699049, acc: 0.544922] [A loss: 0.696586, acc: 0.488281]\n",
"4738: [D loss: 0.708008, acc: 0.533203] [A loss: 0.933448, acc: 0.089844]\n",
"4739: [D loss: 0.700843, acc: 0.513672] [A loss: 0.692811, acc: 0.550781]\n",
"4740: [D loss: 0.706066, acc: 0.527344] [A loss: 0.834510, acc: 0.199219]\n",
"4741: [D loss: 0.684692, acc: 0.576172] [A loss: 0.790615, acc: 0.300781]\n",
"4742: [D loss: 0.690574, acc: 0.554688] [A loss: 0.812975, acc: 0.226562]\n",
"4743: [D loss: 0.702244, acc: 0.527344] [A loss: 0.825129, acc: 0.218750]\n",
"4744: [D loss: 0.683778, acc: 0.574219] [A loss: 0.803378, acc: 0.277344]\n",
"4745: [D loss: 0.708533, acc: 0.498047] [A loss: 0.869781, acc: 0.195312]\n",
"4746: [D loss: 0.700633, acc: 0.519531] [A loss: 0.770284, acc: 0.285156]\n",
"4747: [D loss: 0.705628, acc: 0.533203] [A loss: 0.888317, acc: 0.140625]\n",
"4748: [D loss: 0.684597, acc: 0.548828] [A loss: 0.728388, acc: 0.414062]\n",
"4749: [D loss: 0.707150, acc: 0.509766] [A loss: 0.867325, acc: 0.167969]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4750: [D loss: 0.691051, acc: 0.541016] [A loss: 0.790501, acc: 0.312500]\n",
"4751: [D loss: 0.700934, acc: 0.505859] [A loss: 0.807352, acc: 0.281250]\n",
"4752: [D loss: 0.705205, acc: 0.519531] [A loss: 0.817336, acc: 0.222656]\n",
"4753: [D loss: 0.687736, acc: 0.535156] [A loss: 0.804256, acc: 0.238281]\n",
"4754: [D loss: 0.695052, acc: 0.539062] [A loss: 0.803981, acc: 0.261719]\n",
"4755: [D loss: 0.695196, acc: 0.515625] [A loss: 0.849313, acc: 0.195312]\n",
"4756: [D loss: 0.690805, acc: 0.515625] [A loss: 0.893158, acc: 0.175781]\n",
"4757: [D loss: 0.693707, acc: 0.525391] [A loss: 0.744496, acc: 0.441406]\n",
"4758: [D loss: 0.712001, acc: 0.521484] [A loss: 0.924070, acc: 0.121094]\n",
"4759: [D loss: 0.679276, acc: 0.582031] [A loss: 0.714695, acc: 0.460938]\n",
"4760: [D loss: 0.708141, acc: 0.519531] [A loss: 0.966056, acc: 0.101562]\n",
"4761: [D loss: 0.697100, acc: 0.556641] [A loss: 0.661672, acc: 0.609375]\n",
"4762: [D loss: 0.715091, acc: 0.496094] [A loss: 0.931816, acc: 0.089844]\n",
"4763: [D loss: 0.703716, acc: 0.494141] [A loss: 0.742083, acc: 0.378906]\n",
"4764: [D loss: 0.698889, acc: 0.519531] [A loss: 0.878747, acc: 0.164062]\n",
"4765: [D loss: 0.702471, acc: 0.486328] [A loss: 0.701765, acc: 0.480469]\n",
"4766: [D loss: 0.718436, acc: 0.505859] [A loss: 1.002862, acc: 0.050781]\n",
"4767: [D loss: 0.695561, acc: 0.525391] [A loss: 0.675580, acc: 0.562500]\n",
"4768: [D loss: 0.712032, acc: 0.513672] [A loss: 0.939957, acc: 0.132812]\n",
"4769: [D loss: 0.707836, acc: 0.476562] [A loss: 0.726077, acc: 0.429688]\n",
"4770: [D loss: 0.712261, acc: 0.503906] [A loss: 0.880709, acc: 0.164062]\n",
"4771: [D loss: 0.698647, acc: 0.513672] [A loss: 0.770102, acc: 0.304688]\n",
"4772: [D loss: 0.686561, acc: 0.552734] [A loss: 0.844465, acc: 0.199219]\n",
"4773: [D loss: 0.693782, acc: 0.564453] [A loss: 0.867950, acc: 0.195312]\n",
"4774: [D loss: 0.699116, acc: 0.517578] [A loss: 0.817098, acc: 0.238281]\n",
"4775: [D loss: 0.702028, acc: 0.533203] [A loss: 0.828088, acc: 0.226562]\n",
"4776: [D loss: 0.694403, acc: 0.556641] [A loss: 0.754661, acc: 0.367188]\n",
"4777: [D loss: 0.707502, acc: 0.527344] [A loss: 0.909991, acc: 0.113281]\n",
"4778: [D loss: 0.709055, acc: 0.503906] [A loss: 0.719035, acc: 0.507812]\n",
"4779: [D loss: 0.702813, acc: 0.525391] [A loss: 0.876256, acc: 0.183594]\n",
"4780: [D loss: 0.699214, acc: 0.527344] [A loss: 0.805693, acc: 0.308594]\n",
"4781: [D loss: 0.689427, acc: 0.550781] [A loss: 0.760009, acc: 0.371094]\n",
"4782: [D loss: 0.701279, acc: 0.523438] [A loss: 0.943739, acc: 0.089844]\n",
"4783: [D loss: 0.689475, acc: 0.542969] [A loss: 0.719195, acc: 0.468750]\n",
"4784: [D loss: 0.722640, acc: 0.492188] [A loss: 0.910328, acc: 0.128906]\n",
"4785: [D loss: 0.705488, acc: 0.503906] [A loss: 0.684495, acc: 0.574219]\n",
"4786: [D loss: 0.730770, acc: 0.500000] [A loss: 0.944582, acc: 0.078125]\n",
"4787: [D loss: 0.696273, acc: 0.527344] [A loss: 0.713869, acc: 0.480469]\n",
"4788: [D loss: 0.720058, acc: 0.500000] [A loss: 0.864753, acc: 0.160156]\n",
"4789: [D loss: 0.690603, acc: 0.539062] [A loss: 0.762947, acc: 0.417969]\n",
"4790: [D loss: 0.701398, acc: 0.541016] [A loss: 0.799218, acc: 0.277344]\n",
"4791: [D loss: 0.705186, acc: 0.525391] [A loss: 0.838904, acc: 0.203125]\n",
"4792: [D loss: 0.700730, acc: 0.496094] [A loss: 0.794683, acc: 0.281250]\n",
"4793: [D loss: 0.704321, acc: 0.509766] [A loss: 0.889124, acc: 0.132812]\n",
"4794: [D loss: 0.693510, acc: 0.525391] [A loss: 0.704405, acc: 0.460938]\n",
"4795: [D loss: 0.710577, acc: 0.529297] [A loss: 0.969946, acc: 0.070312]\n",
"4796: [D loss: 0.710645, acc: 0.492188] [A loss: 0.710962, acc: 0.453125]\n",
"4797: [D loss: 0.704508, acc: 0.537109] [A loss: 0.896156, acc: 0.128906]\n",
"4798: [D loss: 0.692292, acc: 0.517578] [A loss: 0.747604, acc: 0.410156]\n",
"4799: [D loss: 0.721940, acc: 0.464844] [A loss: 0.827067, acc: 0.207031]\n",
"4800: [D loss: 0.711615, acc: 0.505859] [A loss: 0.892576, acc: 0.121094]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4801: [D loss: 0.693981, acc: 0.539062] [A loss: 0.739976, acc: 0.375000]\n",
"4802: [D loss: 0.702113, acc: 0.535156] [A loss: 0.944809, acc: 0.097656]\n",
"4803: [D loss: 0.687842, acc: 0.552734] [A loss: 0.762174, acc: 0.378906]\n",
"4804: [D loss: 0.702042, acc: 0.548828] [A loss: 0.882159, acc: 0.152344]\n",
"4805: [D loss: 0.703426, acc: 0.492188] [A loss: 0.787470, acc: 0.320312]\n",
"4806: [D loss: 0.705635, acc: 0.509766] [A loss: 0.847081, acc: 0.195312]\n",
"4807: [D loss: 0.699975, acc: 0.537109] [A loss: 0.729537, acc: 0.457031]\n",
"4808: [D loss: 0.711000, acc: 0.490234] [A loss: 0.894197, acc: 0.121094]\n",
"4809: [D loss: 0.697065, acc: 0.531250] [A loss: 0.748856, acc: 0.375000]\n",
"4810: [D loss: 0.701001, acc: 0.492188] [A loss: 0.867373, acc: 0.203125]\n",
"4811: [D loss: 0.686420, acc: 0.570312] [A loss: 0.765991, acc: 0.355469]\n",
"4812: [D loss: 0.697026, acc: 0.541016] [A loss: 0.788584, acc: 0.320312]\n",
"4813: [D loss: 0.699064, acc: 0.541016] [A loss: 0.865269, acc: 0.179688]\n",
"4814: [D loss: 0.686009, acc: 0.568359] [A loss: 0.782037, acc: 0.304688]\n",
"4815: [D loss: 0.697027, acc: 0.554688] [A loss: 0.813165, acc: 0.257812]\n",
"4816: [D loss: 0.697541, acc: 0.513672] [A loss: 0.822731, acc: 0.273438]\n",
"4817: [D loss: 0.698965, acc: 0.501953] [A loss: 0.907300, acc: 0.117188]\n",
"4818: [D loss: 0.689743, acc: 0.539062] [A loss: 0.706341, acc: 0.449219]\n",
"4819: [D loss: 0.719233, acc: 0.498047] [A loss: 1.071440, acc: 0.062500]\n",
"4820: [D loss: 0.701180, acc: 0.529297] [A loss: 0.622427, acc: 0.671875]\n",
"4821: [D loss: 0.743103, acc: 0.501953] [A loss: 0.901002, acc: 0.136719]\n",
"4822: [D loss: 0.703101, acc: 0.537109] [A loss: 0.771895, acc: 0.332031]\n",
"4823: [D loss: 0.703067, acc: 0.515625] [A loss: 0.834478, acc: 0.234375]\n",
"4824: [D loss: 0.697574, acc: 0.513672] [A loss: 0.829832, acc: 0.218750]\n",
"4825: [D loss: 0.697728, acc: 0.498047] [A loss: 0.799397, acc: 0.300781]\n",
"4826: [D loss: 0.702062, acc: 0.525391] [A loss: 0.851344, acc: 0.210938]\n",
"4827: [D loss: 0.710487, acc: 0.511719] [A loss: 0.798761, acc: 0.304688]\n",
"4828: [D loss: 0.700091, acc: 0.511719] [A loss: 0.911958, acc: 0.093750]\n",
"4829: [D loss: 0.699483, acc: 0.523438] [A loss: 0.717014, acc: 0.488281]\n",
"4830: [D loss: 0.726035, acc: 0.509766] [A loss: 0.921626, acc: 0.117188]\n",
"4831: [D loss: 0.700634, acc: 0.498047] [A loss: 0.745122, acc: 0.425781]\n",
"4832: [D loss: 0.719758, acc: 0.507812] [A loss: 0.893743, acc: 0.140625]\n",
"4833: [D loss: 0.691578, acc: 0.525391] [A loss: 0.733911, acc: 0.414062]\n",
"4834: [D loss: 0.705470, acc: 0.498047] [A loss: 0.906430, acc: 0.132812]\n",
"4835: [D loss: 0.695498, acc: 0.511719] [A loss: 0.691732, acc: 0.511719]\n",
"4836: [D loss: 0.728427, acc: 0.484375] [A loss: 0.967054, acc: 0.058594]\n",
"4837: [D loss: 0.703028, acc: 0.505859] [A loss: 0.735423, acc: 0.433594]\n",
"4838: [D loss: 0.704247, acc: 0.509766] [A loss: 0.833433, acc: 0.230469]\n",
"4839: [D loss: 0.705513, acc: 0.501953] [A loss: 0.832374, acc: 0.238281]\n",
"4840: [D loss: 0.693791, acc: 0.535156] [A loss: 0.772083, acc: 0.339844]\n",
"4841: [D loss: 0.698617, acc: 0.558594] [A loss: 0.817305, acc: 0.281250]\n",
"4842: [D loss: 0.716709, acc: 0.488281] [A loss: 0.825813, acc: 0.234375]\n",
"4843: [D loss: 0.690245, acc: 0.527344] [A loss: 0.709100, acc: 0.460938]\n",
"4844: [D loss: 0.717754, acc: 0.498047] [A loss: 0.892197, acc: 0.121094]\n",
"4845: [D loss: 0.693273, acc: 0.537109] [A loss: 0.712520, acc: 0.496094]\n",
"4846: [D loss: 0.708183, acc: 0.511719] [A loss: 0.876520, acc: 0.167969]\n",
"4847: [D loss: 0.700249, acc: 0.513672] [A loss: 0.725044, acc: 0.445312]\n",
"4848: [D loss: 0.697228, acc: 0.542969] [A loss: 0.799995, acc: 0.265625]\n",
"4849: [D loss: 0.705054, acc: 0.486328] [A loss: 0.787435, acc: 0.304688]\n",
"4850: [D loss: 0.709243, acc: 0.494141] [A loss: 0.807932, acc: 0.277344]\n",
"4851: [D loss: 0.706127, acc: 0.507812] [A loss: 0.835002, acc: 0.210938]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4852: [D loss: 0.699961, acc: 0.533203] [A loss: 0.802822, acc: 0.292969]\n",
"4853: [D loss: 0.694610, acc: 0.501953] [A loss: 0.890875, acc: 0.140625]\n",
"4854: [D loss: 0.701385, acc: 0.511719] [A loss: 0.778578, acc: 0.316406]\n",
"4855: [D loss: 0.712284, acc: 0.484375] [A loss: 0.900909, acc: 0.167969]\n",
"4856: [D loss: 0.689923, acc: 0.542969] [A loss: 0.805380, acc: 0.265625]\n",
"4857: [D loss: 0.716969, acc: 0.517578] [A loss: 0.884939, acc: 0.167969]\n",
"4858: [D loss: 0.717125, acc: 0.457031] [A loss: 0.807776, acc: 0.296875]\n",
"4859: [D loss: 0.700731, acc: 0.525391] [A loss: 0.783553, acc: 0.312500]\n",
"4860: [D loss: 0.707524, acc: 0.515625] [A loss: 0.897940, acc: 0.132812]\n",
"4861: [D loss: 0.704279, acc: 0.509766] [A loss: 0.722224, acc: 0.437500]\n",
"4862: [D loss: 0.719969, acc: 0.496094] [A loss: 0.911713, acc: 0.148438]\n",
"4863: [D loss: 0.703196, acc: 0.480469] [A loss: 0.756867, acc: 0.367188]\n",
"4864: [D loss: 0.709326, acc: 0.480469] [A loss: 0.836126, acc: 0.191406]\n",
"4865: [D loss: 0.697574, acc: 0.507812] [A loss: 0.787546, acc: 0.320312]\n",
"4866: [D loss: 0.720885, acc: 0.498047] [A loss: 0.992583, acc: 0.085938]\n",
"4867: [D loss: 0.705170, acc: 0.525391] [A loss: 0.706901, acc: 0.460938]\n",
"4868: [D loss: 0.729860, acc: 0.492188] [A loss: 0.832047, acc: 0.203125]\n",
"4869: [D loss: 0.693683, acc: 0.535156] [A loss: 0.780832, acc: 0.312500]\n",
"4870: [D loss: 0.707492, acc: 0.503906] [A loss: 0.865022, acc: 0.152344]\n",
"4871: [D loss: 0.702179, acc: 0.537109] [A loss: 0.822369, acc: 0.246094]\n",
"4872: [D loss: 0.696743, acc: 0.531250] [A loss: 0.809872, acc: 0.261719]\n",
"4873: [D loss: 0.697617, acc: 0.511719] [A loss: 0.813966, acc: 0.226562]\n",
"4874: [D loss: 0.712820, acc: 0.509766] [A loss: 0.794020, acc: 0.320312]\n",
"4875: [D loss: 0.703458, acc: 0.496094] [A loss: 0.868914, acc: 0.152344]\n",
"4876: [D loss: 0.690631, acc: 0.539062] [A loss: 0.756996, acc: 0.386719]\n",
"4877: [D loss: 0.711285, acc: 0.513672] [A loss: 0.903819, acc: 0.144531]\n",
"4878: [D loss: 0.703459, acc: 0.519531] [A loss: 0.774327, acc: 0.335938]\n",
"4879: [D loss: 0.713689, acc: 0.521484] [A loss: 0.909391, acc: 0.097656]\n",
"4880: [D loss: 0.687607, acc: 0.570312] [A loss: 0.694132, acc: 0.507812]\n",
"4881: [D loss: 0.721452, acc: 0.515625] [A loss: 0.942438, acc: 0.093750]\n",
"4882: [D loss: 0.687431, acc: 0.542969] [A loss: 0.685695, acc: 0.562500]\n",
"4883: [D loss: 0.710319, acc: 0.511719] [A loss: 0.944113, acc: 0.109375]\n",
"4884: [D loss: 0.695850, acc: 0.527344] [A loss: 0.757620, acc: 0.375000]\n",
"4885: [D loss: 0.704333, acc: 0.525391] [A loss: 0.857479, acc: 0.164062]\n",
"4886: [D loss: 0.685246, acc: 0.568359] [A loss: 0.763172, acc: 0.375000]\n",
"4887: [D loss: 0.701734, acc: 0.533203] [A loss: 0.878792, acc: 0.144531]\n",
"4888: [D loss: 0.698950, acc: 0.519531] [A loss: 0.780007, acc: 0.316406]\n",
"4889: [D loss: 0.710057, acc: 0.509766] [A loss: 0.921862, acc: 0.125000]\n",
"4890: [D loss: 0.704019, acc: 0.500000] [A loss: 0.717314, acc: 0.445312]\n",
"4891: [D loss: 0.714208, acc: 0.519531] [A loss: 0.930990, acc: 0.085938]\n",
"4892: [D loss: 0.689340, acc: 0.517578] [A loss: 0.714957, acc: 0.507812]\n",
"4893: [D loss: 0.709470, acc: 0.515625] [A loss: 0.923866, acc: 0.109375]\n",
"4894: [D loss: 0.683768, acc: 0.525391] [A loss: 0.692520, acc: 0.550781]\n",
"4895: [D loss: 0.701786, acc: 0.554688] [A loss: 0.840201, acc: 0.242188]\n",
"4896: [D loss: 0.693597, acc: 0.548828] [A loss: 0.737899, acc: 0.445312]\n",
"4897: [D loss: 0.707788, acc: 0.525391] [A loss: 0.925435, acc: 0.132812]\n",
"4898: [D loss: 0.689905, acc: 0.533203] [A loss: 0.752293, acc: 0.406250]\n",
"4899: [D loss: 0.702711, acc: 0.507812] [A loss: 0.853379, acc: 0.199219]\n",
"4900: [D loss: 0.703651, acc: 0.494141] [A loss: 0.740167, acc: 0.417969]\n",
"4901: [D loss: 0.723282, acc: 0.490234] [A loss: 0.910521, acc: 0.136719]\n",
"4902: [D loss: 0.687991, acc: 0.535156] [A loss: 0.707936, acc: 0.500000]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4903: [D loss: 0.709667, acc: 0.535156] [A loss: 0.875787, acc: 0.187500]\n",
"4904: [D loss: 0.707629, acc: 0.509766] [A loss: 0.778569, acc: 0.308594]\n",
"4905: [D loss: 0.714051, acc: 0.488281] [A loss: 0.818615, acc: 0.265625]\n",
"4906: [D loss: 0.684958, acc: 0.560547] [A loss: 0.770417, acc: 0.355469]\n",
"4907: [D loss: 0.711736, acc: 0.496094] [A loss: 0.926449, acc: 0.148438]\n",
"4908: [D loss: 0.714175, acc: 0.464844] [A loss: 0.769168, acc: 0.398438]\n",
"4909: [D loss: 0.702973, acc: 0.515625] [A loss: 0.865584, acc: 0.183594]\n",
"4910: [D loss: 0.695207, acc: 0.523438] [A loss: 0.724741, acc: 0.441406]\n",
"4911: [D loss: 0.708551, acc: 0.519531] [A loss: 0.936977, acc: 0.121094]\n",
"4912: [D loss: 0.694770, acc: 0.531250] [A loss: 0.705554, acc: 0.503906]\n",
"4913: [D loss: 0.714346, acc: 0.541016] [A loss: 0.901804, acc: 0.125000]\n",
"4914: [D loss: 0.704852, acc: 0.507812] [A loss: 0.764793, acc: 0.343750]\n",
"4915: [D loss: 0.708488, acc: 0.503906] [A loss: 0.906445, acc: 0.148438]\n",
"4916: [D loss: 0.714352, acc: 0.476562] [A loss: 0.716237, acc: 0.500000]\n",
"4917: [D loss: 0.716017, acc: 0.513672] [A loss: 0.942728, acc: 0.093750]\n",
"4918: [D loss: 0.709325, acc: 0.501953] [A loss: 0.736975, acc: 0.414062]\n",
"4919: [D loss: 0.720698, acc: 0.496094] [A loss: 0.879303, acc: 0.144531]\n",
"4920: [D loss: 0.703980, acc: 0.505859] [A loss: 0.740348, acc: 0.375000]\n",
"4921: [D loss: 0.699596, acc: 0.523438] [A loss: 0.884592, acc: 0.101562]\n",
"4922: [D loss: 0.708046, acc: 0.470703] [A loss: 0.725415, acc: 0.421875]\n",
"4923: [D loss: 0.719184, acc: 0.486328] [A loss: 0.914156, acc: 0.152344]\n",
"4924: [D loss: 0.688768, acc: 0.539062] [A loss: 0.708857, acc: 0.527344]\n",
"4925: [D loss: 0.720827, acc: 0.486328] [A loss: 0.863088, acc: 0.160156]\n",
"4926: [D loss: 0.705845, acc: 0.476562] [A loss: 0.766942, acc: 0.351562]\n",
"4927: [D loss: 0.709651, acc: 0.517578] [A loss: 0.816520, acc: 0.214844]\n",
"4928: [D loss: 0.690378, acc: 0.535156] [A loss: 0.820352, acc: 0.265625]\n",
"4929: [D loss: 0.699971, acc: 0.542969] [A loss: 0.839325, acc: 0.226562]\n",
"4930: [D loss: 0.701104, acc: 0.515625] [A loss: 0.811903, acc: 0.253906]\n",
"4931: [D loss: 0.702537, acc: 0.541016] [A loss: 0.785052, acc: 0.300781]\n",
"4932: [D loss: 0.692751, acc: 0.529297] [A loss: 0.864569, acc: 0.210938]\n",
"4933: [D loss: 0.677603, acc: 0.568359] [A loss: 0.762426, acc: 0.414062]\n",
"4934: [D loss: 0.706108, acc: 0.500000] [A loss: 0.808741, acc: 0.257812]\n",
"4935: [D loss: 0.712217, acc: 0.492188] [A loss: 0.819531, acc: 0.226562]\n",
"4936: [D loss: 0.699511, acc: 0.521484] [A loss: 0.845710, acc: 0.222656]\n",
"4937: [D loss: 0.692472, acc: 0.505859] [A loss: 0.770547, acc: 0.320312]\n",
"4938: [D loss: 0.720535, acc: 0.507812] [A loss: 0.985472, acc: 0.074219]\n",
"4939: [D loss: 0.710296, acc: 0.494141] [A loss: 0.739210, acc: 0.414062]\n",
"4940: [D loss: 0.721095, acc: 0.496094] [A loss: 0.932838, acc: 0.089844]\n",
"4941: [D loss: 0.696834, acc: 0.519531] [A loss: 0.694736, acc: 0.496094]\n",
"4942: [D loss: 0.730429, acc: 0.492188] [A loss: 0.873722, acc: 0.152344]\n",
"4943: [D loss: 0.697053, acc: 0.498047] [A loss: 0.705225, acc: 0.507812]\n",
"4944: [D loss: 0.710760, acc: 0.531250] [A loss: 0.873272, acc: 0.160156]\n",
"4945: [D loss: 0.694056, acc: 0.537109] [A loss: 0.735165, acc: 0.406250]\n",
"4946: [D loss: 0.726886, acc: 0.511719] [A loss: 0.920617, acc: 0.105469]\n",
"4947: [D loss: 0.692702, acc: 0.535156] [A loss: 0.722040, acc: 0.476562]\n",
"4948: [D loss: 0.706681, acc: 0.531250] [A loss: 0.956461, acc: 0.097656]\n",
"4949: [D loss: 0.690144, acc: 0.550781] [A loss: 0.766588, acc: 0.355469]\n",
"4950: [D loss: 0.714050, acc: 0.501953] [A loss: 0.892217, acc: 0.140625]\n",
"4951: [D loss: 0.685501, acc: 0.562500] [A loss: 0.759438, acc: 0.355469]\n",
"4952: [D loss: 0.707513, acc: 0.519531] [A loss: 0.836076, acc: 0.175781]\n",
"4953: [D loss: 0.706982, acc: 0.496094] [A loss: 0.849311, acc: 0.167969]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"4954: [D loss: 0.703460, acc: 0.501953] [A loss: 0.853721, acc: 0.187500]\n",
"4955: [D loss: 0.699710, acc: 0.541016] [A loss: 0.757810, acc: 0.351562]\n",
"4956: [D loss: 0.714287, acc: 0.500000] [A loss: 0.957575, acc: 0.085938]\n",
"4957: [D loss: 0.707143, acc: 0.505859] [A loss: 0.703404, acc: 0.484375]\n",
"4958: [D loss: 0.719650, acc: 0.517578] [A loss: 0.963315, acc: 0.085938]\n",
"4959: [D loss: 0.696784, acc: 0.529297] [A loss: 0.683396, acc: 0.566406]\n",
"4960: [D loss: 0.719477, acc: 0.507812] [A loss: 0.854798, acc: 0.207031]\n",
"4961: [D loss: 0.699560, acc: 0.513672] [A loss: 0.736789, acc: 0.394531]\n",
"4962: [D loss: 0.710535, acc: 0.503906] [A loss: 0.807742, acc: 0.246094]\n",
"4963: [D loss: 0.699241, acc: 0.533203] [A loss: 0.805790, acc: 0.242188]\n",
"4964: [D loss: 0.690278, acc: 0.562500] [A loss: 0.811395, acc: 0.265625]\n",
"4965: [D loss: 0.707624, acc: 0.500000] [A loss: 0.838552, acc: 0.207031]\n",
"4966: [D loss: 0.687329, acc: 0.552734] [A loss: 0.768015, acc: 0.359375]\n",
"4967: [D loss: 0.706643, acc: 0.501953] [A loss: 0.824262, acc: 0.226562]\n",
"4968: [D loss: 0.701554, acc: 0.490234] [A loss: 0.718338, acc: 0.445312]\n",
"4969: [D loss: 0.709259, acc: 0.525391] [A loss: 0.993899, acc: 0.097656]\n",
"4970: [D loss: 0.705971, acc: 0.513672] [A loss: 0.690442, acc: 0.527344]\n",
"4971: [D loss: 0.709660, acc: 0.517578] [A loss: 0.869411, acc: 0.164062]\n",
"4972: [D loss: 0.697661, acc: 0.513672] [A loss: 0.760672, acc: 0.371094]\n",
"4973: [D loss: 0.703686, acc: 0.523438] [A loss: 0.815051, acc: 0.285156]\n",
"4974: [D loss: 0.684441, acc: 0.585938] [A loss: 0.821461, acc: 0.257812]\n",
"4975: [D loss: 0.694002, acc: 0.539062] [A loss: 0.799280, acc: 0.292969]\n",
"4976: [D loss: 0.703884, acc: 0.503906] [A loss: 0.775770, acc: 0.339844]\n",
"4977: [D loss: 0.713226, acc: 0.519531] [A loss: 0.888715, acc: 0.136719]\n",
"4978: [D loss: 0.712005, acc: 0.494141] [A loss: 0.717091, acc: 0.472656]\n",
"4979: [D loss: 0.716167, acc: 0.505859] [A loss: 0.914903, acc: 0.128906]\n",
"4980: [D loss: 0.705392, acc: 0.484375] [A loss: 0.781575, acc: 0.347656]\n",
"4981: [D loss: 0.703467, acc: 0.515625] [A loss: 0.858167, acc: 0.207031]\n",
"4982: [D loss: 0.688582, acc: 0.560547] [A loss: 0.805261, acc: 0.289062]\n",
"4983: [D loss: 0.702614, acc: 0.517578] [A loss: 0.828935, acc: 0.234375]\n",
"4984: [D loss: 0.704426, acc: 0.525391] [A loss: 0.890329, acc: 0.179688]\n",
"4985: [D loss: 0.696840, acc: 0.527344] [A loss: 0.676097, acc: 0.539062]\n",
"4986: [D loss: 0.707104, acc: 0.505859] [A loss: 0.932184, acc: 0.136719]\n",
"4987: [D loss: 0.705834, acc: 0.507812] [A loss: 0.752062, acc: 0.402344]\n",
"4988: [D loss: 0.723094, acc: 0.498047] [A loss: 0.898602, acc: 0.136719]\n",
"4989: [D loss: 0.696049, acc: 0.513672] [A loss: 0.716404, acc: 0.472656]\n",
"4990: [D loss: 0.720860, acc: 0.533203] [A loss: 0.945633, acc: 0.105469]\n",
"4991: [D loss: 0.684000, acc: 0.523438] [A loss: 0.687825, acc: 0.542969]\n",
"4992: [D loss: 0.718426, acc: 0.503906] [A loss: 0.945462, acc: 0.070312]\n",
"4993: [D loss: 0.695447, acc: 0.505859] [A loss: 0.692476, acc: 0.523438]\n",
"4994: [D loss: 0.709447, acc: 0.515625] [A loss: 0.921996, acc: 0.125000]\n",
"4995: [D loss: 0.689727, acc: 0.525391] [A loss: 0.798443, acc: 0.281250]\n",
"4996: [D loss: 0.712547, acc: 0.503906] [A loss: 0.817061, acc: 0.261719]\n",
"4997: [D loss: 0.696006, acc: 0.537109] [A loss: 0.809064, acc: 0.253906]\n",
"4998: [D loss: 0.712422, acc: 0.500000] [A loss: 0.861767, acc: 0.195312]\n",
"4999: [D loss: 0.690071, acc: 0.533203] [A loss: 0.751076, acc: 0.375000]\n",
"5000: [D loss: 0.684511, acc: 0.544922] [A loss: 0.828000, acc: 0.207031]\n",
"5001: [D loss: 0.689653, acc: 0.537109] [A loss: 0.845297, acc: 0.179688]\n",
"5002: [D loss: 0.702504, acc: 0.515625] [A loss: 0.846079, acc: 0.195312]\n",
"5003: [D loss: 0.695720, acc: 0.531250] [A loss: 0.825117, acc: 0.250000]\n",
"5004: [D loss: 0.705081, acc: 0.498047] [A loss: 0.801113, acc: 0.281250]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5005: [D loss: 0.706438, acc: 0.505859] [A loss: 0.796379, acc: 0.273438]\n",
"5006: [D loss: 0.708827, acc: 0.492188] [A loss: 0.808281, acc: 0.246094]\n",
"5007: [D loss: 0.711248, acc: 0.511719] [A loss: 0.935747, acc: 0.085938]\n",
"5008: [D loss: 0.700793, acc: 0.519531] [A loss: 0.684443, acc: 0.554688]\n",
"5009: [D loss: 0.711761, acc: 0.503906] [A loss: 0.948732, acc: 0.113281]\n",
"5010: [D loss: 0.701719, acc: 0.515625] [A loss: 0.716529, acc: 0.460938]\n",
"5011: [D loss: 0.719470, acc: 0.501953] [A loss: 0.823651, acc: 0.234375]\n",
"5012: [D loss: 0.689846, acc: 0.531250] [A loss: 0.845100, acc: 0.199219]\n",
"5013: [D loss: 0.700112, acc: 0.521484] [A loss: 0.884834, acc: 0.156250]\n",
"5014: [D loss: 0.719176, acc: 0.466797] [A loss: 0.864416, acc: 0.171875]\n",
"5015: [D loss: 0.701092, acc: 0.494141] [A loss: 0.740580, acc: 0.359375]\n",
"5016: [D loss: 0.702634, acc: 0.542969] [A loss: 0.901384, acc: 0.144531]\n",
"5017: [D loss: 0.704009, acc: 0.492188] [A loss: 0.728519, acc: 0.457031]\n",
"5018: [D loss: 0.716221, acc: 0.523438] [A loss: 0.900707, acc: 0.093750]\n",
"5019: [D loss: 0.696415, acc: 0.544922] [A loss: 0.722551, acc: 0.464844]\n",
"5020: [D loss: 0.719663, acc: 0.494141] [A loss: 0.900326, acc: 0.136719]\n",
"5021: [D loss: 0.702320, acc: 0.498047] [A loss: 0.748672, acc: 0.378906]\n",
"5022: [D loss: 0.706377, acc: 0.525391] [A loss: 0.901544, acc: 0.109375]\n",
"5023: [D loss: 0.708892, acc: 0.484375] [A loss: 0.777291, acc: 0.343750]\n",
"5024: [D loss: 0.710655, acc: 0.507812] [A loss: 0.857399, acc: 0.195312]\n",
"5025: [D loss: 0.688397, acc: 0.531250] [A loss: 0.816948, acc: 0.238281]\n",
"5026: [D loss: 0.697572, acc: 0.533203] [A loss: 0.763011, acc: 0.328125]\n",
"5027: [D loss: 0.696180, acc: 0.525391] [A loss: 0.853623, acc: 0.187500]\n",
"5028: [D loss: 0.694938, acc: 0.505859] [A loss: 0.802623, acc: 0.312500]\n",
"5029: [D loss: 0.702850, acc: 0.509766] [A loss: 0.826335, acc: 0.214844]\n",
"5030: [D loss: 0.697518, acc: 0.537109] [A loss: 0.824334, acc: 0.300781]\n",
"5031: [D loss: 0.710951, acc: 0.486328] [A loss: 0.875851, acc: 0.144531]\n",
"5032: [D loss: 0.698975, acc: 0.519531] [A loss: 0.926544, acc: 0.066406]\n",
"5033: [D loss: 0.688620, acc: 0.550781] [A loss: 0.685073, acc: 0.531250]\n",
"5034: [D loss: 0.738672, acc: 0.515625] [A loss: 1.065620, acc: 0.027344]\n",
"5035: [D loss: 0.708033, acc: 0.513672] [A loss: 0.667634, acc: 0.574219]\n",
"5036: [D loss: 0.732337, acc: 0.517578] [A loss: 0.853413, acc: 0.160156]\n",
"5037: [D loss: 0.695900, acc: 0.533203] [A loss: 0.752271, acc: 0.367188]\n",
"5038: [D loss: 0.709487, acc: 0.525391] [A loss: 0.831199, acc: 0.234375]\n",
"5039: [D loss: 0.693995, acc: 0.546875] [A loss: 0.772071, acc: 0.359375]\n",
"5040: [D loss: 0.710363, acc: 0.525391] [A loss: 0.806925, acc: 0.214844]\n",
"5041: [D loss: 0.698439, acc: 0.525391] [A loss: 0.876653, acc: 0.175781]\n",
"5042: [D loss: 0.701631, acc: 0.482422] [A loss: 0.832151, acc: 0.281250]\n",
"5043: [D loss: 0.699043, acc: 0.539062] [A loss: 0.813452, acc: 0.230469]\n",
"5044: [D loss: 0.715506, acc: 0.513672] [A loss: 0.915073, acc: 0.132812]\n",
"5045: [D loss: 0.698883, acc: 0.529297] [A loss: 0.788905, acc: 0.343750]\n",
"5046: [D loss: 0.708339, acc: 0.519531] [A loss: 0.900194, acc: 0.164062]\n",
"5047: [D loss: 0.682754, acc: 0.560547] [A loss: 0.713106, acc: 0.492188]\n",
"5048: [D loss: 0.701476, acc: 0.537109] [A loss: 0.872828, acc: 0.207031]\n",
"5049: [D loss: 0.692715, acc: 0.546875] [A loss: 0.742158, acc: 0.382812]\n",
"5050: [D loss: 0.697201, acc: 0.513672] [A loss: 0.866353, acc: 0.191406]\n",
"5051: [D loss: 0.704165, acc: 0.507812] [A loss: 0.800152, acc: 0.285156]\n",
"5052: [D loss: 0.686288, acc: 0.572266] [A loss: 0.837313, acc: 0.246094]\n",
"5053: [D loss: 0.702373, acc: 0.527344] [A loss: 0.812739, acc: 0.246094]\n",
"5054: [D loss: 0.707546, acc: 0.513672] [A loss: 0.863712, acc: 0.242188]\n",
"5055: [D loss: 0.694592, acc: 0.525391] [A loss: 0.751390, acc: 0.386719]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5056: [D loss: 0.712166, acc: 0.503906] [A loss: 0.934374, acc: 0.117188]\n",
"5057: [D loss: 0.691377, acc: 0.556641] [A loss: 0.720201, acc: 0.460938]\n",
"5058: [D loss: 0.725978, acc: 0.500000] [A loss: 0.933793, acc: 0.156250]\n",
"5059: [D loss: 0.700595, acc: 0.519531] [A loss: 0.712110, acc: 0.535156]\n",
"5060: [D loss: 0.709384, acc: 0.529297] [A loss: 0.852660, acc: 0.238281]\n",
"5061: [D loss: 0.701118, acc: 0.511719] [A loss: 0.768053, acc: 0.375000]\n",
"5062: [D loss: 0.717778, acc: 0.503906] [A loss: 0.883214, acc: 0.164062]\n",
"5063: [D loss: 0.693977, acc: 0.525391] [A loss: 0.760872, acc: 0.375000]\n",
"5064: [D loss: 0.701201, acc: 0.511719] [A loss: 0.940740, acc: 0.136719]\n",
"5065: [D loss: 0.695240, acc: 0.544922] [A loss: 0.636808, acc: 0.656250]\n",
"5066: [D loss: 0.732778, acc: 0.503906] [A loss: 1.006535, acc: 0.066406]\n",
"5067: [D loss: 0.710722, acc: 0.503906] [A loss: 0.739372, acc: 0.414062]\n",
"5068: [D loss: 0.716390, acc: 0.507812] [A loss: 0.899855, acc: 0.128906]\n",
"5069: [D loss: 0.689337, acc: 0.535156] [A loss: 0.695590, acc: 0.484375]\n",
"5070: [D loss: 0.710684, acc: 0.527344] [A loss: 0.856136, acc: 0.191406]\n",
"5071: [D loss: 0.691921, acc: 0.558594] [A loss: 0.773595, acc: 0.320312]\n",
"5072: [D loss: 0.700503, acc: 0.529297] [A loss: 0.862529, acc: 0.179688]\n",
"5073: [D loss: 0.699529, acc: 0.519531] [A loss: 0.795276, acc: 0.328125]\n",
"5074: [D loss: 0.704621, acc: 0.494141] [A loss: 0.869359, acc: 0.179688]\n",
"5075: [D loss: 0.690473, acc: 0.544922] [A loss: 0.691655, acc: 0.554688]\n",
"5076: [D loss: 0.706222, acc: 0.513672] [A loss: 0.867621, acc: 0.218750]\n",
"5077: [D loss: 0.687770, acc: 0.537109] [A loss: 0.755640, acc: 0.324219]\n",
"5078: [D loss: 0.708940, acc: 0.496094] [A loss: 0.859310, acc: 0.199219]\n",
"5079: [D loss: 0.695919, acc: 0.531250] [A loss: 0.787954, acc: 0.316406]\n",
"5080: [D loss: 0.699976, acc: 0.542969] [A loss: 0.861537, acc: 0.175781]\n",
"5081: [D loss: 0.689807, acc: 0.558594] [A loss: 0.730431, acc: 0.437500]\n",
"5082: [D loss: 0.722710, acc: 0.503906] [A loss: 0.942116, acc: 0.101562]\n",
"5083: [D loss: 0.690646, acc: 0.552734] [A loss: 0.679051, acc: 0.578125]\n",
"5084: [D loss: 0.710364, acc: 0.519531] [A loss: 0.910599, acc: 0.160156]\n",
"5085: [D loss: 0.703350, acc: 0.515625] [A loss: 0.754449, acc: 0.406250]\n",
"5086: [D loss: 0.716689, acc: 0.501953] [A loss: 0.825721, acc: 0.230469]\n",
"5087: [D loss: 0.695140, acc: 0.523438] [A loss: 0.828153, acc: 0.218750]\n",
"5088: [D loss: 0.698160, acc: 0.537109] [A loss: 0.805616, acc: 0.253906]\n",
"5089: [D loss: 0.698396, acc: 0.539062] [A loss: 0.833697, acc: 0.257812]\n",
"5090: [D loss: 0.700532, acc: 0.521484] [A loss: 0.844354, acc: 0.203125]\n",
"5091: [D loss: 0.702281, acc: 0.521484] [A loss: 0.787809, acc: 0.316406]\n",
"5092: [D loss: 0.708698, acc: 0.525391] [A loss: 0.879738, acc: 0.152344]\n",
"5093: [D loss: 0.699396, acc: 0.509766] [A loss: 0.778045, acc: 0.371094]\n",
"5094: [D loss: 0.708460, acc: 0.515625] [A loss: 0.834316, acc: 0.222656]\n",
"5095: [D loss: 0.692049, acc: 0.533203] [A loss: 0.795281, acc: 0.304688]\n",
"5096: [D loss: 0.706604, acc: 0.531250] [A loss: 0.870556, acc: 0.199219]\n",
"5097: [D loss: 0.686659, acc: 0.556641] [A loss: 0.773373, acc: 0.351562]\n",
"5098: [D loss: 0.692122, acc: 0.537109] [A loss: 0.830538, acc: 0.277344]\n",
"5099: [D loss: 0.692866, acc: 0.515625] [A loss: 0.807989, acc: 0.242188]\n",
"5100: [D loss: 0.705225, acc: 0.509766] [A loss: 0.818145, acc: 0.250000]\n",
"5101: [D loss: 0.703249, acc: 0.513672] [A loss: 0.880044, acc: 0.160156]\n",
"5102: [D loss: 0.693642, acc: 0.548828] [A loss: 0.695157, acc: 0.527344]\n",
"5103: [D loss: 0.717138, acc: 0.503906] [A loss: 1.004237, acc: 0.066406]\n",
"5104: [D loss: 0.688901, acc: 0.568359] [A loss: 0.760040, acc: 0.367188]\n",
"5105: [D loss: 0.705549, acc: 0.519531] [A loss: 0.838314, acc: 0.250000]\n",
"5106: [D loss: 0.703539, acc: 0.511719] [A loss: 0.845132, acc: 0.222656]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5107: [D loss: 0.684272, acc: 0.572266] [A loss: 0.750845, acc: 0.382812]\n",
"5108: [D loss: 0.695918, acc: 0.562500] [A loss: 0.897727, acc: 0.148438]\n",
"5109: [D loss: 0.713400, acc: 0.496094] [A loss: 0.776486, acc: 0.355469]\n",
"5110: [D loss: 0.696272, acc: 0.515625] [A loss: 0.906708, acc: 0.167969]\n",
"5111: [D loss: 0.717788, acc: 0.492188] [A loss: 0.733083, acc: 0.445312]\n",
"5112: [D loss: 0.706207, acc: 0.529297] [A loss: 0.966626, acc: 0.109375]\n",
"5113: [D loss: 0.715002, acc: 0.509766] [A loss: 0.760640, acc: 0.363281]\n",
"5114: [D loss: 0.700650, acc: 0.513672] [A loss: 0.979679, acc: 0.054688]\n",
"5115: [D loss: 0.692905, acc: 0.541016] [A loss: 0.678689, acc: 0.562500]\n",
"5116: [D loss: 0.717834, acc: 0.500000] [A loss: 0.906325, acc: 0.101562]\n",
"5117: [D loss: 0.683722, acc: 0.546875] [A loss: 0.752154, acc: 0.375000]\n",
"5118: [D loss: 0.685457, acc: 0.550781] [A loss: 0.834640, acc: 0.203125]\n",
"5119: [D loss: 0.699648, acc: 0.521484] [A loss: 0.753749, acc: 0.363281]\n",
"5120: [D loss: 0.700005, acc: 0.537109] [A loss: 0.829285, acc: 0.214844]\n",
"5121: [D loss: 0.681695, acc: 0.546875] [A loss: 0.769541, acc: 0.363281]\n",
"5122: [D loss: 0.706199, acc: 0.521484] [A loss: 0.827856, acc: 0.250000]\n",
"5123: [D loss: 0.691460, acc: 0.552734] [A loss: 0.779224, acc: 0.308594]\n",
"5124: [D loss: 0.701774, acc: 0.529297] [A loss: 0.889173, acc: 0.175781]\n",
"5125: [D loss: 0.681745, acc: 0.572266] [A loss: 0.759130, acc: 0.371094]\n",
"5126: [D loss: 0.719993, acc: 0.525391] [A loss: 0.917888, acc: 0.121094]\n",
"5127: [D loss: 0.690598, acc: 0.527344] [A loss: 0.755407, acc: 0.402344]\n",
"5128: [D loss: 0.713780, acc: 0.503906] [A loss: 0.961002, acc: 0.097656]\n",
"5129: [D loss: 0.698702, acc: 0.523438] [A loss: 0.707638, acc: 0.507812]\n",
"5130: [D loss: 0.738713, acc: 0.488281] [A loss: 0.870789, acc: 0.175781]\n",
"5131: [D loss: 0.704448, acc: 0.517578] [A loss: 0.753412, acc: 0.363281]\n",
"5132: [D loss: 0.709593, acc: 0.503906] [A loss: 0.972361, acc: 0.097656]\n",
"5133: [D loss: 0.704594, acc: 0.501953] [A loss: 0.695829, acc: 0.492188]\n",
"5134: [D loss: 0.720971, acc: 0.498047] [A loss: 0.867135, acc: 0.148438]\n",
"5135: [D loss: 0.696191, acc: 0.533203] [A loss: 0.783751, acc: 0.312500]\n",
"5136: [D loss: 0.704840, acc: 0.517578] [A loss: 0.837930, acc: 0.234375]\n",
"5137: [D loss: 0.684261, acc: 0.558594] [A loss: 0.755521, acc: 0.398438]\n",
"5138: [D loss: 0.710579, acc: 0.519531] [A loss: 0.910694, acc: 0.148438]\n",
"5139: [D loss: 0.704146, acc: 0.511719] [A loss: 0.761179, acc: 0.394531]\n",
"5140: [D loss: 0.723380, acc: 0.505859] [A loss: 0.908831, acc: 0.140625]\n",
"5141: [D loss: 0.716851, acc: 0.476562] [A loss: 0.718678, acc: 0.480469]\n",
"5142: [D loss: 0.696450, acc: 0.531250] [A loss: 0.867303, acc: 0.187500]\n",
"5143: [D loss: 0.687478, acc: 0.537109] [A loss: 0.752597, acc: 0.367188]\n",
"5144: [D loss: 0.712852, acc: 0.500000] [A loss: 0.860169, acc: 0.203125]\n",
"5145: [D loss: 0.691945, acc: 0.511719] [A loss: 0.787047, acc: 0.328125]\n",
"5146: [D loss: 0.709331, acc: 0.505859] [A loss: 0.894226, acc: 0.144531]\n",
"5147: [D loss: 0.687499, acc: 0.558594] [A loss: 0.775481, acc: 0.308594]\n",
"5148: [D loss: 0.705799, acc: 0.535156] [A loss: 0.914137, acc: 0.113281]\n",
"5149: [D loss: 0.706092, acc: 0.501953] [A loss: 0.745099, acc: 0.437500]\n",
"5150: [D loss: 0.712650, acc: 0.515625] [A loss: 0.979025, acc: 0.066406]\n",
"5151: [D loss: 0.679922, acc: 0.564453] [A loss: 0.734458, acc: 0.449219]\n",
"5152: [D loss: 0.717373, acc: 0.482422] [A loss: 1.037832, acc: 0.042969]\n",
"5153: [D loss: 0.695434, acc: 0.531250] [A loss: 0.661026, acc: 0.621094]\n",
"5154: [D loss: 0.725406, acc: 0.505859] [A loss: 0.971773, acc: 0.105469]\n",
"5155: [D loss: 0.704147, acc: 0.537109] [A loss: 0.713845, acc: 0.421875]\n",
"5156: [D loss: 0.713949, acc: 0.523438] [A loss: 0.837106, acc: 0.261719]\n",
"5157: [D loss: 0.701335, acc: 0.505859] [A loss: 0.746640, acc: 0.402344]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5158: [D loss: 0.708719, acc: 0.523438] [A loss: 0.919583, acc: 0.121094]\n",
"5159: [D loss: 0.694828, acc: 0.542969] [A loss: 0.759521, acc: 0.347656]\n",
"5160: [D loss: 0.723220, acc: 0.496094] [A loss: 1.024642, acc: 0.058594]\n",
"5161: [D loss: 0.701866, acc: 0.539062] [A loss: 0.732356, acc: 0.445312]\n",
"5162: [D loss: 0.717346, acc: 0.515625] [A loss: 0.890691, acc: 0.152344]\n",
"5163: [D loss: 0.686839, acc: 0.554688] [A loss: 0.762183, acc: 0.386719]\n",
"5164: [D loss: 0.707642, acc: 0.503906] [A loss: 0.789031, acc: 0.320312]\n",
"5165: [D loss: 0.710617, acc: 0.527344] [A loss: 0.854859, acc: 0.214844]\n",
"5166: [D loss: 0.695835, acc: 0.539062] [A loss: 0.749047, acc: 0.378906]\n",
"5167: [D loss: 0.708197, acc: 0.533203] [A loss: 0.918307, acc: 0.148438]\n",
"5168: [D loss: 0.697785, acc: 0.513672] [A loss: 0.718626, acc: 0.460938]\n",
"5169: [D loss: 0.698240, acc: 0.511719] [A loss: 0.852674, acc: 0.203125]\n",
"5170: [D loss: 0.705985, acc: 0.523438] [A loss: 0.805876, acc: 0.261719]\n",
"5171: [D loss: 0.700009, acc: 0.509766] [A loss: 0.870928, acc: 0.171875]\n",
"5172: [D loss: 0.692868, acc: 0.533203] [A loss: 0.789494, acc: 0.296875]\n",
"5173: [D loss: 0.711828, acc: 0.503906] [A loss: 0.909736, acc: 0.140625]\n",
"5174: [D loss: 0.684806, acc: 0.548828] [A loss: 0.785714, acc: 0.328125]\n",
"5175: [D loss: 0.708935, acc: 0.554688] [A loss: 0.938253, acc: 0.117188]\n",
"5176: [D loss: 0.711209, acc: 0.507812] [A loss: 0.752936, acc: 0.363281]\n",
"5177: [D loss: 0.717952, acc: 0.488281] [A loss: 0.826418, acc: 0.246094]\n",
"5178: [D loss: 0.689113, acc: 0.542969] [A loss: 0.840451, acc: 0.167969]\n",
"5179: [D loss: 0.697813, acc: 0.542969] [A loss: 0.795685, acc: 0.296875]\n",
"5180: [D loss: 0.705977, acc: 0.501953] [A loss: 0.897328, acc: 0.167969]\n",
"5181: [D loss: 0.704612, acc: 0.505859] [A loss: 0.813137, acc: 0.253906]\n",
"5182: [D loss: 0.699005, acc: 0.517578] [A loss: 0.871466, acc: 0.187500]\n",
"5183: [D loss: 0.692232, acc: 0.552734] [A loss: 0.781557, acc: 0.335938]\n",
"5184: [D loss: 0.693124, acc: 0.527344] [A loss: 0.832431, acc: 0.261719]\n",
"5185: [D loss: 0.687760, acc: 0.537109] [A loss: 0.881269, acc: 0.171875]\n",
"5186: [D loss: 0.693663, acc: 0.519531] [A loss: 0.825400, acc: 0.226562]\n",
"5187: [D loss: 0.696110, acc: 0.539062] [A loss: 0.905570, acc: 0.132812]\n",
"5188: [D loss: 0.684732, acc: 0.546875] [A loss: 0.686382, acc: 0.546875]\n",
"5189: [D loss: 0.711110, acc: 0.523438] [A loss: 0.978085, acc: 0.078125]\n",
"5190: [D loss: 0.705379, acc: 0.519531] [A loss: 0.809341, acc: 0.261719]\n",
"5191: [D loss: 0.719571, acc: 0.490234] [A loss: 0.871861, acc: 0.179688]\n",
"5192: [D loss: 0.688652, acc: 0.544922] [A loss: 0.806629, acc: 0.292969]\n",
"5193: [D loss: 0.715663, acc: 0.521484] [A loss: 0.852305, acc: 0.207031]\n",
"5194: [D loss: 0.705051, acc: 0.535156] [A loss: 0.926204, acc: 0.140625]\n",
"5195: [D loss: 0.697708, acc: 0.519531] [A loss: 0.789793, acc: 0.320312]\n",
"5196: [D loss: 0.713831, acc: 0.505859] [A loss: 0.821793, acc: 0.246094]\n",
"5197: [D loss: 0.700926, acc: 0.537109] [A loss: 0.816949, acc: 0.265625]\n",
"5198: [D loss: 0.693955, acc: 0.552734] [A loss: 0.877579, acc: 0.191406]\n",
"5199: [D loss: 0.705530, acc: 0.523438] [A loss: 0.763812, acc: 0.359375]\n",
"5200: [D loss: 0.716726, acc: 0.498047] [A loss: 0.946698, acc: 0.101562]\n",
"5201: [D loss: 0.712145, acc: 0.498047] [A loss: 0.672505, acc: 0.562500]\n",
"5202: [D loss: 0.723872, acc: 0.523438] [A loss: 0.986505, acc: 0.097656]\n",
"5203: [D loss: 0.709295, acc: 0.517578] [A loss: 0.777016, acc: 0.335938]\n",
"5204: [D loss: 0.710375, acc: 0.542969] [A loss: 0.897107, acc: 0.125000]\n",
"5205: [D loss: 0.701808, acc: 0.525391] [A loss: 0.762514, acc: 0.382812]\n",
"5206: [D loss: 0.696759, acc: 0.546875] [A loss: 0.880292, acc: 0.195312]\n",
"5207: [D loss: 0.703815, acc: 0.517578] [A loss: 0.722742, acc: 0.445312]\n",
"5208: [D loss: 0.710153, acc: 0.517578] [A loss: 0.886000, acc: 0.195312]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5209: [D loss: 0.696149, acc: 0.542969] [A loss: 0.824303, acc: 0.246094]\n",
"5210: [D loss: 0.706393, acc: 0.533203] [A loss: 0.790211, acc: 0.332031]\n",
"5211: [D loss: 0.721063, acc: 0.488281] [A loss: 0.856487, acc: 0.214844]\n",
"5212: [D loss: 0.695490, acc: 0.529297] [A loss: 0.800677, acc: 0.304688]\n",
"5213: [D loss: 0.699240, acc: 0.519531] [A loss: 0.871858, acc: 0.152344]\n",
"5214: [D loss: 0.704128, acc: 0.519531] [A loss: 0.947604, acc: 0.105469]\n",
"5215: [D loss: 0.704190, acc: 0.494141] [A loss: 0.714009, acc: 0.488281]\n",
"5216: [D loss: 0.718956, acc: 0.498047] [A loss: 0.936726, acc: 0.117188]\n",
"5217: [D loss: 0.695078, acc: 0.558594] [A loss: 0.724835, acc: 0.390625]\n",
"5218: [D loss: 0.706715, acc: 0.507812] [A loss: 0.926459, acc: 0.140625]\n",
"5219: [D loss: 0.709058, acc: 0.537109] [A loss: 0.739811, acc: 0.414062]\n",
"5220: [D loss: 0.706082, acc: 0.500000] [A loss: 0.868135, acc: 0.195312]\n",
"5221: [D loss: 0.704928, acc: 0.500000] [A loss: 0.797992, acc: 0.304688]\n",
"5222: [D loss: 0.706850, acc: 0.531250] [A loss: 0.883017, acc: 0.156250]\n",
"5223: [D loss: 0.690604, acc: 0.542969] [A loss: 0.784231, acc: 0.332031]\n",
"5224: [D loss: 0.709696, acc: 0.519531] [A loss: 0.883540, acc: 0.179688]\n",
"5225: [D loss: 0.694320, acc: 0.529297] [A loss: 0.794735, acc: 0.308594]\n",
"5226: [D loss: 0.687033, acc: 0.568359] [A loss: 0.858189, acc: 0.156250]\n",
"5227: [D loss: 0.690764, acc: 0.554688] [A loss: 0.801906, acc: 0.265625]\n",
"5228: [D loss: 0.696691, acc: 0.517578] [A loss: 0.855490, acc: 0.250000]\n",
"5229: [D loss: 0.698263, acc: 0.527344] [A loss: 0.892451, acc: 0.203125]\n",
"5230: [D loss: 0.691841, acc: 0.539062] [A loss: 0.807603, acc: 0.300781]\n",
"5231: [D loss: 0.706166, acc: 0.505859] [A loss: 0.885408, acc: 0.156250]\n",
"5232: [D loss: 0.709295, acc: 0.503906] [A loss: 0.745367, acc: 0.429688]\n",
"5233: [D loss: 0.711543, acc: 0.521484] [A loss: 0.983382, acc: 0.066406]\n",
"5234: [D loss: 0.702315, acc: 0.511719] [A loss: 0.705811, acc: 0.500000]\n",
"5235: [D loss: 0.707369, acc: 0.533203] [A loss: 0.966836, acc: 0.093750]\n",
"5236: [D loss: 0.703660, acc: 0.513672] [A loss: 0.686765, acc: 0.515625]\n",
"5237: [D loss: 0.737573, acc: 0.503906] [A loss: 0.940920, acc: 0.167969]\n",
"5238: [D loss: 0.683867, acc: 0.558594] [A loss: 0.711026, acc: 0.484375]\n",
"5239: [D loss: 0.754852, acc: 0.490234] [A loss: 0.898439, acc: 0.167969]\n",
"5240: [D loss: 0.704000, acc: 0.535156] [A loss: 0.761585, acc: 0.359375]\n",
"5241: [D loss: 0.716505, acc: 0.513672] [A loss: 0.913815, acc: 0.140625]\n",
"5242: [D loss: 0.697230, acc: 0.529297] [A loss: 0.712217, acc: 0.445312]\n",
"5243: [D loss: 0.721834, acc: 0.501953] [A loss: 0.876179, acc: 0.160156]\n",
"5244: [D loss: 0.703590, acc: 0.496094] [A loss: 0.765309, acc: 0.343750]\n",
"5245: [D loss: 0.700381, acc: 0.558594] [A loss: 0.889672, acc: 0.160156]\n",
"5246: [D loss: 0.693259, acc: 0.517578] [A loss: 0.755374, acc: 0.355469]\n",
"5247: [D loss: 0.713885, acc: 0.527344] [A loss: 0.942048, acc: 0.121094]\n",
"5248: [D loss: 0.693949, acc: 0.527344] [A loss: 0.741070, acc: 0.402344]\n",
"5249: [D loss: 0.709605, acc: 0.486328] [A loss: 0.938073, acc: 0.152344]\n",
"5250: [D loss: 0.697187, acc: 0.546875] [A loss: 0.730161, acc: 0.417969]\n",
"5251: [D loss: 0.711505, acc: 0.505859] [A loss: 0.860618, acc: 0.222656]\n",
"5252: [D loss: 0.699379, acc: 0.531250] [A loss: 0.775761, acc: 0.359375]\n",
"5253: [D loss: 0.708998, acc: 0.531250] [A loss: 0.876402, acc: 0.191406]\n",
"5254: [D loss: 0.697642, acc: 0.537109] [A loss: 0.733626, acc: 0.417969]\n",
"5255: [D loss: 0.698704, acc: 0.519531] [A loss: 0.913311, acc: 0.136719]\n",
"5256: [D loss: 0.683580, acc: 0.542969] [A loss: 0.678822, acc: 0.562500]\n",
"5257: [D loss: 0.728984, acc: 0.509766] [A loss: 1.093224, acc: 0.054688]\n",
"5258: [D loss: 0.707513, acc: 0.517578] [A loss: 0.718730, acc: 0.457031]\n",
"5259: [D loss: 0.720327, acc: 0.513672] [A loss: 0.864222, acc: 0.175781]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5260: [D loss: 0.689165, acc: 0.535156] [A loss: 0.755400, acc: 0.390625]\n",
"5261: [D loss: 0.721076, acc: 0.507812] [A loss: 0.844478, acc: 0.222656]\n",
"5262: [D loss: 0.696814, acc: 0.511719] [A loss: 0.735275, acc: 0.437500]\n",
"5263: [D loss: 0.710296, acc: 0.546875] [A loss: 0.907878, acc: 0.148438]\n",
"5264: [D loss: 0.684854, acc: 0.572266] [A loss: 0.745592, acc: 0.375000]\n",
"5265: [D loss: 0.715455, acc: 0.529297] [A loss: 0.876798, acc: 0.214844]\n",
"5266: [D loss: 0.700384, acc: 0.527344] [A loss: 0.780226, acc: 0.355469]\n",
"5267: [D loss: 0.700351, acc: 0.539062] [A loss: 0.857868, acc: 0.203125]\n",
"5268: [D loss: 0.691045, acc: 0.537109] [A loss: 0.752199, acc: 0.398438]\n",
"5269: [D loss: 0.703464, acc: 0.517578] [A loss: 0.938049, acc: 0.125000]\n",
"5270: [D loss: 0.701445, acc: 0.531250] [A loss: 0.736078, acc: 0.433594]\n",
"5271: [D loss: 0.705870, acc: 0.525391] [A loss: 0.935566, acc: 0.148438]\n",
"5272: [D loss: 0.699434, acc: 0.498047] [A loss: 0.763970, acc: 0.363281]\n",
"5273: [D loss: 0.718502, acc: 0.511719] [A loss: 0.883528, acc: 0.167969]\n",
"5274: [D loss: 0.698566, acc: 0.511719] [A loss: 0.726639, acc: 0.464844]\n",
"5275: [D loss: 0.728276, acc: 0.531250] [A loss: 1.007082, acc: 0.085938]\n",
"5276: [D loss: 0.692167, acc: 0.523438] [A loss: 0.701006, acc: 0.480469]\n",
"5277: [D loss: 0.713200, acc: 0.552734] [A loss: 0.838917, acc: 0.281250]\n",
"5278: [D loss: 0.706723, acc: 0.511719] [A loss: 0.764365, acc: 0.390625]\n",
"5279: [D loss: 0.714859, acc: 0.486328] [A loss: 0.834014, acc: 0.289062]\n",
"5280: [D loss: 0.692024, acc: 0.537109] [A loss: 0.812491, acc: 0.300781]\n",
"5281: [D loss: 0.708905, acc: 0.537109] [A loss: 0.834755, acc: 0.234375]\n",
"5282: [D loss: 0.700194, acc: 0.500000] [A loss: 0.784155, acc: 0.285156]\n",
"5283: [D loss: 0.696996, acc: 0.550781] [A loss: 0.798196, acc: 0.332031]\n",
"5284: [D loss: 0.710433, acc: 0.515625] [A loss: 0.848104, acc: 0.222656]\n",
"5285: [D loss: 0.688978, acc: 0.550781] [A loss: 0.758948, acc: 0.382812]\n",
"5286: [D loss: 0.721291, acc: 0.523438] [A loss: 0.965347, acc: 0.105469]\n",
"5287: [D loss: 0.703654, acc: 0.496094] [A loss: 0.724270, acc: 0.441406]\n",
"5288: [D loss: 0.714249, acc: 0.517578] [A loss: 0.945317, acc: 0.128906]\n",
"5289: [D loss: 0.685288, acc: 0.566406] [A loss: 0.763205, acc: 0.406250]\n",
"5290: [D loss: 0.703022, acc: 0.542969] [A loss: 0.872769, acc: 0.207031]\n",
"5291: [D loss: 0.696152, acc: 0.548828] [A loss: 0.761045, acc: 0.382812]\n",
"5292: [D loss: 0.700206, acc: 0.548828] [A loss: 0.914743, acc: 0.171875]\n",
"5293: [D loss: 0.695897, acc: 0.548828] [A loss: 0.749327, acc: 0.398438]\n",
"5294: [D loss: 0.707247, acc: 0.501953] [A loss: 0.878386, acc: 0.171875]\n",
"5295: [D loss: 0.716393, acc: 0.490234] [A loss: 0.764779, acc: 0.378906]\n",
"5296: [D loss: 0.719259, acc: 0.494141] [A loss: 0.921786, acc: 0.156250]\n",
"5297: [D loss: 0.693491, acc: 0.523438] [A loss: 0.737031, acc: 0.421875]\n",
"5298: [D loss: 0.713341, acc: 0.535156] [A loss: 0.868234, acc: 0.187500]\n",
"5299: [D loss: 0.690957, acc: 0.533203] [A loss: 0.833534, acc: 0.214844]\n",
"5300: [D loss: 0.714885, acc: 0.488281] [A loss: 0.807515, acc: 0.273438]\n",
"5301: [D loss: 0.692091, acc: 0.523438] [A loss: 0.837736, acc: 0.230469]\n",
"5302: [D loss: 0.690747, acc: 0.550781] [A loss: 0.764470, acc: 0.375000]\n",
"5303: [D loss: 0.702138, acc: 0.515625] [A loss: 0.950646, acc: 0.085938]\n",
"5304: [D loss: 0.706470, acc: 0.503906] [A loss: 0.650422, acc: 0.628906]\n",
"5305: [D loss: 0.724877, acc: 0.511719] [A loss: 0.976375, acc: 0.093750]\n",
"5306: [D loss: 0.705134, acc: 0.501953] [A loss: 0.708919, acc: 0.492188]\n",
"5307: [D loss: 0.705656, acc: 0.529297] [A loss: 0.968237, acc: 0.101562]\n",
"5308: [D loss: 0.698409, acc: 0.500000] [A loss: 0.750811, acc: 0.386719]\n",
"5309: [D loss: 0.719728, acc: 0.525391] [A loss: 0.935883, acc: 0.109375]\n",
"5310: [D loss: 0.704369, acc: 0.494141] [A loss: 0.734016, acc: 0.390625]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5311: [D loss: 0.714423, acc: 0.507812] [A loss: 0.862040, acc: 0.152344]\n",
"5312: [D loss: 0.704217, acc: 0.523438] [A loss: 0.762023, acc: 0.324219]\n",
"5313: [D loss: 0.706867, acc: 0.517578] [A loss: 0.842518, acc: 0.171875]\n",
"5314: [D loss: 0.690897, acc: 0.525391] [A loss: 0.795588, acc: 0.300781]\n",
"5315: [D loss: 0.699096, acc: 0.541016] [A loss: 0.807524, acc: 0.261719]\n",
"5316: [D loss: 0.711771, acc: 0.531250] [A loss: 0.867073, acc: 0.179688]\n",
"5317: [D loss: 0.698500, acc: 0.519531] [A loss: 0.883327, acc: 0.128906]\n",
"5318: [D loss: 0.686622, acc: 0.560547] [A loss: 0.779626, acc: 0.308594]\n",
"5319: [D loss: 0.694586, acc: 0.503906] [A loss: 0.884804, acc: 0.167969]\n",
"5320: [D loss: 0.694253, acc: 0.535156] [A loss: 0.764277, acc: 0.375000]\n",
"5321: [D loss: 0.704152, acc: 0.517578] [A loss: 0.896830, acc: 0.164062]\n",
"5322: [D loss: 0.689998, acc: 0.544922] [A loss: 0.728901, acc: 0.453125]\n",
"5323: [D loss: 0.709803, acc: 0.525391] [A loss: 0.914588, acc: 0.160156]\n",
"5324: [D loss: 0.678503, acc: 0.550781] [A loss: 0.719218, acc: 0.468750]\n",
"5325: [D loss: 0.719726, acc: 0.509766] [A loss: 1.012421, acc: 0.074219]\n",
"5326: [D loss: 0.702336, acc: 0.535156] [A loss: 0.706824, acc: 0.492188]\n",
"5327: [D loss: 0.726104, acc: 0.494141] [A loss: 0.914563, acc: 0.148438]\n",
"5328: [D loss: 0.705723, acc: 0.500000] [A loss: 0.742710, acc: 0.410156]\n",
"5329: [D loss: 0.706722, acc: 0.496094] [A loss: 0.818293, acc: 0.281250]\n",
"5330: [D loss: 0.694766, acc: 0.539062] [A loss: 0.789310, acc: 0.300781]\n",
"5331: [D loss: 0.705818, acc: 0.533203] [A loss: 0.886050, acc: 0.187500]\n",
"5332: [D loss: 0.687069, acc: 0.548828] [A loss: 0.764473, acc: 0.335938]\n",
"5333: [D loss: 0.702233, acc: 0.509766] [A loss: 0.900264, acc: 0.128906]\n",
"5334: [D loss: 0.712610, acc: 0.498047] [A loss: 0.801341, acc: 0.304688]\n",
"5335: [D loss: 0.695423, acc: 0.539062] [A loss: 0.803529, acc: 0.304688]\n",
"5336: [D loss: 0.695390, acc: 0.554688] [A loss: 0.825222, acc: 0.230469]\n",
"5337: [D loss: 0.696713, acc: 0.507812] [A loss: 0.823679, acc: 0.214844]\n",
"5338: [D loss: 0.703273, acc: 0.529297] [A loss: 0.852336, acc: 0.218750]\n",
"5339: [D loss: 0.706130, acc: 0.511719] [A loss: 0.798377, acc: 0.312500]\n",
"5340: [D loss: 0.706932, acc: 0.521484] [A loss: 0.818947, acc: 0.292969]\n",
"5341: [D loss: 0.698407, acc: 0.533203] [A loss: 0.808257, acc: 0.292969]\n",
"5342: [D loss: 0.696889, acc: 0.517578] [A loss: 0.837675, acc: 0.238281]\n",
"5343: [D loss: 0.719014, acc: 0.498047] [A loss: 0.907321, acc: 0.156250]\n",
"5344: [D loss: 0.689140, acc: 0.537109] [A loss: 0.709397, acc: 0.464844]\n",
"5345: [D loss: 0.705500, acc: 0.541016] [A loss: 0.922836, acc: 0.152344]\n",
"5346: [D loss: 0.711841, acc: 0.484375] [A loss: 0.684481, acc: 0.531250]\n",
"5347: [D loss: 0.705436, acc: 0.531250] [A loss: 0.950302, acc: 0.132812]\n",
"5348: [D loss: 0.702406, acc: 0.515625] [A loss: 0.711376, acc: 0.468750]\n",
"5349: [D loss: 0.738667, acc: 0.482422] [A loss: 0.942644, acc: 0.242188]\n",
"5350: [D loss: 0.691463, acc: 0.552734] [A loss: 0.954223, acc: 0.132812]\n",
"5351: [D loss: 0.714522, acc: 0.482422] [A loss: 0.800761, acc: 0.296875]\n",
"5352: [D loss: 0.716822, acc: 0.513672] [A loss: 1.045325, acc: 0.062500]\n",
"5353: [D loss: 0.708768, acc: 0.515625] [A loss: 0.707800, acc: 0.492188]\n",
"5354: [D loss: 0.709217, acc: 0.505859] [A loss: 0.873877, acc: 0.152344]\n",
"5355: [D loss: 0.701441, acc: 0.537109] [A loss: 0.717869, acc: 0.492188]\n",
"5356: [D loss: 0.708822, acc: 0.523438] [A loss: 0.963422, acc: 0.097656]\n",
"5357: [D loss: 0.695911, acc: 0.517578] [A loss: 0.726925, acc: 0.457031]\n",
"5358: [D loss: 0.714750, acc: 0.525391] [A loss: 0.789988, acc: 0.347656]\n",
"5359: [D loss: 0.704695, acc: 0.525391] [A loss: 0.853978, acc: 0.214844]\n",
"5360: [D loss: 0.707544, acc: 0.486328] [A loss: 0.758047, acc: 0.390625]\n",
"5361: [D loss: 0.719290, acc: 0.513672] [A loss: 0.861863, acc: 0.246094]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5362: [D loss: 0.708157, acc: 0.466797] [A loss: 0.751007, acc: 0.390625]\n",
"5363: [D loss: 0.697196, acc: 0.535156] [A loss: 0.805561, acc: 0.292969]\n",
"5364: [D loss: 0.702217, acc: 0.523438] [A loss: 0.790639, acc: 0.300781]\n",
"5365: [D loss: 0.705091, acc: 0.488281] [A loss: 0.847317, acc: 0.218750]\n",
"5366: [D loss: 0.687770, acc: 0.537109] [A loss: 0.808008, acc: 0.265625]\n",
"5367: [D loss: 0.697677, acc: 0.537109] [A loss: 0.796985, acc: 0.281250]\n",
"5368: [D loss: 0.718615, acc: 0.505859] [A loss: 0.938386, acc: 0.121094]\n",
"5369: [D loss: 0.687910, acc: 0.542969] [A loss: 0.741264, acc: 0.425781]\n",
"5370: [D loss: 0.722456, acc: 0.500000] [A loss: 0.887450, acc: 0.140625]\n",
"5371: [D loss: 0.700922, acc: 0.513672] [A loss: 0.727814, acc: 0.457031]\n",
"5372: [D loss: 0.698222, acc: 0.525391] [A loss: 0.933264, acc: 0.093750]\n",
"5373: [D loss: 0.679795, acc: 0.562500] [A loss: 0.711249, acc: 0.468750]\n",
"5374: [D loss: 0.712707, acc: 0.535156] [A loss: 0.879866, acc: 0.171875]\n",
"5375: [D loss: 0.698293, acc: 0.509766] [A loss: 0.756076, acc: 0.414062]\n",
"5376: [D loss: 0.723948, acc: 0.488281] [A loss: 0.845304, acc: 0.222656]\n",
"5377: [D loss: 0.688419, acc: 0.539062] [A loss: 0.794378, acc: 0.265625]\n",
"5378: [D loss: 0.712131, acc: 0.517578] [A loss: 0.839546, acc: 0.238281]\n",
"5379: [D loss: 0.692208, acc: 0.523438] [A loss: 0.782427, acc: 0.320312]\n",
"5380: [D loss: 0.707802, acc: 0.513672] [A loss: 0.888480, acc: 0.175781]\n",
"5381: [D loss: 0.707894, acc: 0.500000] [A loss: 0.778849, acc: 0.363281]\n",
"5382: [D loss: 0.719605, acc: 0.470703] [A loss: 0.883425, acc: 0.140625]\n",
"5383: [D loss: 0.691216, acc: 0.552734] [A loss: 0.694820, acc: 0.554688]\n",
"5384: [D loss: 0.722082, acc: 0.511719] [A loss: 0.883875, acc: 0.171875]\n",
"5385: [D loss: 0.698098, acc: 0.539062] [A loss: 0.772612, acc: 0.335938]\n",
"5386: [D loss: 0.701991, acc: 0.529297] [A loss: 0.927270, acc: 0.144531]\n",
"5387: [D loss: 0.705980, acc: 0.480469] [A loss: 0.708572, acc: 0.503906]\n",
"5388: [D loss: 0.717940, acc: 0.500000] [A loss: 0.952155, acc: 0.113281]\n",
"5389: [D loss: 0.698622, acc: 0.511719] [A loss: 0.717387, acc: 0.488281]\n",
"5390: [D loss: 0.709778, acc: 0.500000] [A loss: 0.807564, acc: 0.250000]\n",
"5391: [D loss: 0.700436, acc: 0.523438] [A loss: 0.864711, acc: 0.191406]\n",
"5392: [D loss: 0.698586, acc: 0.505859] [A loss: 0.750871, acc: 0.378906]\n",
"5393: [D loss: 0.706874, acc: 0.544922] [A loss: 0.920107, acc: 0.144531]\n",
"5394: [D loss: 0.685461, acc: 0.552734] [A loss: 0.718768, acc: 0.464844]\n",
"5395: [D loss: 0.721675, acc: 0.517578] [A loss: 0.907591, acc: 0.117188]\n",
"5396: [D loss: 0.715374, acc: 0.500000] [A loss: 0.731416, acc: 0.449219]\n",
"5397: [D loss: 0.740474, acc: 0.462891] [A loss: 0.854081, acc: 0.195312]\n",
"5398: [D loss: 0.697904, acc: 0.523438] [A loss: 0.716771, acc: 0.488281]\n",
"5399: [D loss: 0.727676, acc: 0.511719] [A loss: 1.044580, acc: 0.054688]\n",
"5400: [D loss: 0.718266, acc: 0.498047] [A loss: 0.705472, acc: 0.472656]\n",
"5401: [D loss: 0.708151, acc: 0.515625] [A loss: 0.821708, acc: 0.234375]\n",
"5402: [D loss: 0.706755, acc: 0.525391] [A loss: 0.743482, acc: 0.402344]\n",
"5403: [D loss: 0.708700, acc: 0.498047] [A loss: 0.774172, acc: 0.328125]\n",
"5404: [D loss: 0.693831, acc: 0.531250] [A loss: 0.822192, acc: 0.253906]\n",
"5405: [D loss: 0.680981, acc: 0.556641] [A loss: 0.764605, acc: 0.359375]\n",
"5406: [D loss: 0.712234, acc: 0.517578] [A loss: 0.857862, acc: 0.195312]\n",
"5407: [D loss: 0.701406, acc: 0.537109] [A loss: 0.760240, acc: 0.382812]\n",
"5408: [D loss: 0.706270, acc: 0.535156] [A loss: 0.893041, acc: 0.167969]\n",
"5409: [D loss: 0.698675, acc: 0.533203] [A loss: 0.739824, acc: 0.437500]\n",
"5410: [D loss: 0.705912, acc: 0.537109] [A loss: 0.945111, acc: 0.125000]\n",
"5411: [D loss: 0.702353, acc: 0.511719] [A loss: 0.726940, acc: 0.449219]\n",
"5412: [D loss: 0.709016, acc: 0.523438] [A loss: 0.869764, acc: 0.171875]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5413: [D loss: 0.701672, acc: 0.523438] [A loss: 0.750288, acc: 0.367188]\n",
"5414: [D loss: 0.708656, acc: 0.515625] [A loss: 0.890746, acc: 0.164062]\n",
"5415: [D loss: 0.701361, acc: 0.498047] [A loss: 0.745473, acc: 0.375000]\n",
"5416: [D loss: 0.711467, acc: 0.523438] [A loss: 0.829826, acc: 0.269531]\n",
"5417: [D loss: 0.702351, acc: 0.521484] [A loss: 0.797981, acc: 0.296875]\n",
"5418: [D loss: 0.702477, acc: 0.507812] [A loss: 0.798304, acc: 0.289062]\n",
"5419: [D loss: 0.712363, acc: 0.523438] [A loss: 0.850030, acc: 0.207031]\n",
"5420: [D loss: 0.703960, acc: 0.498047] [A loss: 0.877345, acc: 0.214844]\n",
"5421: [D loss: 0.694074, acc: 0.548828] [A loss: 0.737678, acc: 0.445312]\n",
"5422: [D loss: 0.708131, acc: 0.517578] [A loss: 0.858919, acc: 0.175781]\n",
"5423: [D loss: 0.689845, acc: 0.533203] [A loss: 0.820784, acc: 0.281250]\n",
"5424: [D loss: 0.696442, acc: 0.529297] [A loss: 0.810151, acc: 0.277344]\n",
"5425: [D loss: 0.702118, acc: 0.498047] [A loss: 0.722805, acc: 0.472656]\n",
"5426: [D loss: 0.733994, acc: 0.492188] [A loss: 0.920001, acc: 0.113281]\n",
"5427: [D loss: 0.694202, acc: 0.544922] [A loss: 0.784612, acc: 0.312500]\n",
"5428: [D loss: 0.714371, acc: 0.513672] [A loss: 0.850436, acc: 0.218750]\n",
"5429: [D loss: 0.687849, acc: 0.548828] [A loss: 0.773212, acc: 0.347656]\n",
"5430: [D loss: 0.715156, acc: 0.494141] [A loss: 0.926359, acc: 0.128906]\n",
"5431: [D loss: 0.710458, acc: 0.507812] [A loss: 0.737109, acc: 0.410156]\n",
"5432: [D loss: 0.707469, acc: 0.527344] [A loss: 0.807940, acc: 0.308594]\n",
"5433: [D loss: 0.698466, acc: 0.529297] [A loss: 0.772580, acc: 0.343750]\n",
"5434: [D loss: 0.710238, acc: 0.517578] [A loss: 0.842899, acc: 0.167969]\n",
"5435: [D loss: 0.697512, acc: 0.535156] [A loss: 0.828845, acc: 0.234375]\n",
"5436: [D loss: 0.690400, acc: 0.539062] [A loss: 0.942348, acc: 0.101562]\n",
"5437: [D loss: 0.701100, acc: 0.498047] [A loss: 0.701758, acc: 0.500000]\n",
"5438: [D loss: 0.720774, acc: 0.503906] [A loss: 0.919841, acc: 0.144531]\n",
"5439: [D loss: 0.705030, acc: 0.501953] [A loss: 0.737853, acc: 0.398438]\n",
"5440: [D loss: 0.710848, acc: 0.517578] [A loss: 0.846383, acc: 0.187500]\n",
"5441: [D loss: 0.693243, acc: 0.527344] [A loss: 0.770222, acc: 0.316406]\n",
"5442: [D loss: 0.704027, acc: 0.529297] [A loss: 0.840609, acc: 0.226562]\n",
"5443: [D loss: 0.704355, acc: 0.515625] [A loss: 0.848346, acc: 0.183594]\n",
"5444: [D loss: 0.705823, acc: 0.496094] [A loss: 0.782514, acc: 0.332031]\n",
"5445: [D loss: 0.711053, acc: 0.511719] [A loss: 0.884207, acc: 0.136719]\n",
"5446: [D loss: 0.697476, acc: 0.525391] [A loss: 0.739460, acc: 0.414062]\n",
"5447: [D loss: 0.710629, acc: 0.500000] [A loss: 0.897317, acc: 0.195312]\n",
"5448: [D loss: 0.700893, acc: 0.519531] [A loss: 0.741112, acc: 0.425781]\n",
"5449: [D loss: 0.706561, acc: 0.515625] [A loss: 0.938826, acc: 0.125000]\n",
"5450: [D loss: 0.690272, acc: 0.539062] [A loss: 0.701128, acc: 0.503906]\n",
"5451: [D loss: 0.720450, acc: 0.505859] [A loss: 0.882921, acc: 0.191406]\n",
"5452: [D loss: 0.703905, acc: 0.523438] [A loss: 0.739507, acc: 0.453125]\n",
"5453: [D loss: 0.713388, acc: 0.521484] [A loss: 0.974881, acc: 0.105469]\n",
"5454: [D loss: 0.688445, acc: 0.544922] [A loss: 0.740890, acc: 0.402344]\n",
"5455: [D loss: 0.711492, acc: 0.513672] [A loss: 0.843860, acc: 0.246094]\n",
"5456: [D loss: 0.701665, acc: 0.511719] [A loss: 0.806528, acc: 0.265625]\n",
"5457: [D loss: 0.703021, acc: 0.517578] [A loss: 0.932358, acc: 0.128906]\n",
"5458: [D loss: 0.693990, acc: 0.517578] [A loss: 0.682320, acc: 0.550781]\n",
"5459: [D loss: 0.720270, acc: 0.517578] [A loss: 0.944548, acc: 0.093750]\n",
"5460: [D loss: 0.686437, acc: 0.513672] [A loss: 0.739777, acc: 0.437500]\n",
"5461: [D loss: 0.729305, acc: 0.500000] [A loss: 1.088428, acc: 0.078125]\n",
"5462: [D loss: 0.690734, acc: 0.539062] [A loss: 0.676710, acc: 0.550781]\n",
"5463: [D loss: 0.739522, acc: 0.482422] [A loss: 0.783272, acc: 0.320312]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5464: [D loss: 0.711437, acc: 0.490234] [A loss: 0.853388, acc: 0.207031]\n",
"5465: [D loss: 0.706587, acc: 0.517578] [A loss: 0.787397, acc: 0.292969]\n",
"5466: [D loss: 0.713860, acc: 0.519531] [A loss: 0.842129, acc: 0.218750]\n",
"5467: [D loss: 0.705294, acc: 0.496094] [A loss: 0.726037, acc: 0.445312]\n",
"5468: [D loss: 0.706707, acc: 0.519531] [A loss: 0.861568, acc: 0.187500]\n",
"5469: [D loss: 0.720133, acc: 0.457031] [A loss: 0.770531, acc: 0.328125]\n",
"5470: [D loss: 0.712806, acc: 0.496094] [A loss: 0.783765, acc: 0.296875]\n",
"5471: [D loss: 0.698479, acc: 0.525391] [A loss: 0.859656, acc: 0.199219]\n",
"5472: [D loss: 0.704839, acc: 0.503906] [A loss: 0.884554, acc: 0.183594]\n",
"5473: [D loss: 0.696388, acc: 0.548828] [A loss: 0.710480, acc: 0.453125]\n",
"5474: [D loss: 0.726697, acc: 0.513672] [A loss: 0.994488, acc: 0.082031]\n",
"5475: [D loss: 0.697784, acc: 0.511719] [A loss: 0.693110, acc: 0.562500]\n",
"5476: [D loss: 0.707754, acc: 0.517578] [A loss: 0.802979, acc: 0.289062]\n",
"5477: [D loss: 0.703597, acc: 0.496094] [A loss: 0.817387, acc: 0.265625]\n",
"5478: [D loss: 0.697142, acc: 0.550781] [A loss: 0.824709, acc: 0.253906]\n",
"5479: [D loss: 0.713327, acc: 0.466797] [A loss: 0.732335, acc: 0.445312]\n",
"5480: [D loss: 0.704305, acc: 0.533203] [A loss: 0.889817, acc: 0.175781]\n",
"5481: [D loss: 0.690268, acc: 0.560547] [A loss: 0.688907, acc: 0.515625]\n",
"5482: [D loss: 0.713288, acc: 0.511719] [A loss: 0.872677, acc: 0.183594]\n",
"5483: [D loss: 0.711730, acc: 0.501953] [A loss: 0.733696, acc: 0.433594]\n",
"5484: [D loss: 0.725581, acc: 0.451172] [A loss: 0.821533, acc: 0.214844]\n",
"5485: [D loss: 0.704797, acc: 0.535156] [A loss: 0.840719, acc: 0.218750]\n",
"5486: [D loss: 0.676956, acc: 0.576172] [A loss: 0.785022, acc: 0.343750]\n",
"5487: [D loss: 0.705027, acc: 0.500000] [A loss: 0.792351, acc: 0.285156]\n",
"5488: [D loss: 0.694135, acc: 0.533203] [A loss: 0.846039, acc: 0.203125]\n",
"5489: [D loss: 0.700696, acc: 0.515625] [A loss: 0.770187, acc: 0.339844]\n",
"5490: [D loss: 0.709033, acc: 0.509766] [A loss: 0.955019, acc: 0.078125]\n",
"5491: [D loss: 0.711158, acc: 0.494141] [A loss: 0.669643, acc: 0.582031]\n",
"5492: [D loss: 0.729705, acc: 0.490234] [A loss: 0.902478, acc: 0.167969]\n",
"5493: [D loss: 0.701807, acc: 0.535156] [A loss: 0.691024, acc: 0.527344]\n",
"5494: [D loss: 0.722317, acc: 0.503906] [A loss: 0.829810, acc: 0.289062]\n",
"5495: [D loss: 0.709171, acc: 0.503906] [A loss: 0.830915, acc: 0.218750]\n",
"5496: [D loss: 0.698105, acc: 0.525391] [A loss: 0.940081, acc: 0.164062]\n",
"5497: [D loss: 0.693480, acc: 0.542969] [A loss: 0.693848, acc: 0.511719]\n",
"5498: [D loss: 0.715564, acc: 0.513672] [A loss: 0.929913, acc: 0.121094]\n",
"5499: [D loss: 0.698623, acc: 0.503906] [A loss: 0.739452, acc: 0.410156]\n",
"5500: [D loss: 0.710245, acc: 0.521484] [A loss: 0.823378, acc: 0.269531]\n",
"5501: [D loss: 0.699267, acc: 0.525391] [A loss: 0.767085, acc: 0.363281]\n",
"5502: [D loss: 0.706364, acc: 0.513672] [A loss: 0.837883, acc: 0.238281]\n",
"5503: [D loss: 0.695634, acc: 0.529297] [A loss: 0.828731, acc: 0.226562]\n",
"5504: [D loss: 0.724091, acc: 0.476562] [A loss: 0.798896, acc: 0.300781]\n",
"5505: [D loss: 0.700481, acc: 0.533203] [A loss: 0.897147, acc: 0.171875]\n",
"5506: [D loss: 0.693675, acc: 0.537109] [A loss: 0.747037, acc: 0.394531]\n",
"5507: [D loss: 0.703334, acc: 0.539062] [A loss: 0.791814, acc: 0.316406]\n",
"5508: [D loss: 0.700490, acc: 0.517578] [A loss: 0.801989, acc: 0.277344]\n",
"5509: [D loss: 0.699982, acc: 0.546875] [A loss: 0.808471, acc: 0.277344]\n",
"5510: [D loss: 0.705747, acc: 0.501953] [A loss: 0.830094, acc: 0.222656]\n",
"5511: [D loss: 0.703439, acc: 0.519531] [A loss: 0.823117, acc: 0.261719]\n",
"5512: [D loss: 0.703456, acc: 0.494141] [A loss: 0.774050, acc: 0.351562]\n",
"5513: [D loss: 0.713690, acc: 0.501953] [A loss: 0.893171, acc: 0.117188]\n",
"5514: [D loss: 0.704295, acc: 0.507812] [A loss: 0.794067, acc: 0.308594]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5515: [D loss: 0.701533, acc: 0.505859] [A loss: 0.775507, acc: 0.355469]\n",
"5516: [D loss: 0.708421, acc: 0.523438] [A loss: 0.854797, acc: 0.183594]\n",
"5517: [D loss: 0.706919, acc: 0.507812] [A loss: 0.791576, acc: 0.281250]\n",
"5518: [D loss: 0.693200, acc: 0.546875] [A loss: 0.809254, acc: 0.261719]\n",
"5519: [D loss: 0.702538, acc: 0.500000] [A loss: 0.832339, acc: 0.210938]\n",
"5520: [D loss: 0.684163, acc: 0.550781] [A loss: 0.849942, acc: 0.195312]\n",
"5521: [D loss: 0.686244, acc: 0.560547] [A loss: 0.798703, acc: 0.312500]\n",
"5522: [D loss: 0.700064, acc: 0.533203] [A loss: 0.863871, acc: 0.183594]\n",
"5523: [D loss: 0.704411, acc: 0.521484] [A loss: 0.862983, acc: 0.203125]\n",
"5524: [D loss: 0.704136, acc: 0.507812] [A loss: 0.838862, acc: 0.214844]\n",
"5525: [D loss: 0.687838, acc: 0.548828] [A loss: 0.801493, acc: 0.289062]\n",
"5526: [D loss: 0.697773, acc: 0.507812] [A loss: 0.846493, acc: 0.199219]\n",
"5527: [D loss: 0.694338, acc: 0.562500] [A loss: 0.740080, acc: 0.425781]\n",
"5528: [D loss: 0.720157, acc: 0.511719] [A loss: 0.983400, acc: 0.058594]\n",
"5529: [D loss: 0.709347, acc: 0.494141] [A loss: 0.722018, acc: 0.480469]\n",
"5530: [D loss: 0.713420, acc: 0.527344] [A loss: 0.889735, acc: 0.175781]\n",
"5531: [D loss: 0.694863, acc: 0.529297] [A loss: 0.710997, acc: 0.464844]\n",
"5532: [D loss: 0.732970, acc: 0.503906] [A loss: 1.046876, acc: 0.101562]\n",
"5533: [D loss: 0.709220, acc: 0.486328] [A loss: 0.701461, acc: 0.511719]\n",
"5534: [D loss: 0.726705, acc: 0.539062] [A loss: 0.921119, acc: 0.128906]\n",
"5535: [D loss: 0.697144, acc: 0.511719] [A loss: 0.697468, acc: 0.503906]\n",
"5536: [D loss: 0.717015, acc: 0.496094] [A loss: 0.896149, acc: 0.136719]\n",
"5537: [D loss: 0.707108, acc: 0.484375] [A loss: 0.737316, acc: 0.417969]\n",
"5538: [D loss: 0.720706, acc: 0.478516] [A loss: 0.862342, acc: 0.175781]\n",
"5539: [D loss: 0.693343, acc: 0.519531] [A loss: 0.771149, acc: 0.363281]\n",
"5540: [D loss: 0.705803, acc: 0.515625] [A loss: 0.842722, acc: 0.222656]\n",
"5541: [D loss: 0.716786, acc: 0.484375] [A loss: 0.798195, acc: 0.343750]\n",
"5542: [D loss: 0.712661, acc: 0.496094] [A loss: 0.914254, acc: 0.113281]\n",
"5543: [D loss: 0.715647, acc: 0.490234] [A loss: 0.741171, acc: 0.390625]\n",
"5544: [D loss: 0.702352, acc: 0.509766] [A loss: 0.833647, acc: 0.285156]\n",
"5545: [D loss: 0.692624, acc: 0.546875] [A loss: 0.758787, acc: 0.371094]\n",
"5546: [D loss: 0.715588, acc: 0.482422] [A loss: 0.929077, acc: 0.097656]\n",
"5547: [D loss: 0.697839, acc: 0.498047] [A loss: 0.732378, acc: 0.457031]\n",
"5548: [D loss: 0.698290, acc: 0.529297] [A loss: 0.817368, acc: 0.265625]\n",
"5549: [D loss: 0.696668, acc: 0.548828] [A loss: 0.756911, acc: 0.394531]\n",
"5550: [D loss: 0.718851, acc: 0.500000] [A loss: 1.030722, acc: 0.074219]\n",
"5551: [D loss: 0.693274, acc: 0.523438] [A loss: 0.711783, acc: 0.503906]\n",
"5552: [D loss: 0.710430, acc: 0.548828] [A loss: 0.876471, acc: 0.199219]\n",
"5553: [D loss: 0.692385, acc: 0.515625] [A loss: 0.736924, acc: 0.421875]\n",
"5554: [D loss: 0.713291, acc: 0.492188] [A loss: 0.850884, acc: 0.242188]\n",
"5555: [D loss: 0.703322, acc: 0.527344] [A loss: 0.820554, acc: 0.230469]\n",
"5556: [D loss: 0.689241, acc: 0.554688] [A loss: 0.803198, acc: 0.281250]\n",
"5557: [D loss: 0.711060, acc: 0.482422] [A loss: 0.804521, acc: 0.273438]\n",
"5558: [D loss: 0.715024, acc: 0.496094] [A loss: 0.933135, acc: 0.105469]\n",
"5559: [D loss: 0.694086, acc: 0.533203] [A loss: 0.709765, acc: 0.449219]\n",
"5560: [D loss: 0.702236, acc: 0.562500] [A loss: 0.968531, acc: 0.117188]\n",
"5561: [D loss: 0.697618, acc: 0.535156] [A loss: 0.775167, acc: 0.304688]\n",
"5562: [D loss: 0.704970, acc: 0.501953] [A loss: 0.883841, acc: 0.183594]\n",
"5563: [D loss: 0.697829, acc: 0.519531] [A loss: 0.755353, acc: 0.382812]\n",
"5564: [D loss: 0.713731, acc: 0.507812] [A loss: 0.883798, acc: 0.171875]\n",
"5565: [D loss: 0.706382, acc: 0.482422] [A loss: 0.727795, acc: 0.421875]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5566: [D loss: 0.702347, acc: 0.505859] [A loss: 0.919409, acc: 0.136719]\n",
"5567: [D loss: 0.693239, acc: 0.533203] [A loss: 0.759266, acc: 0.359375]\n",
"5568: [D loss: 0.717136, acc: 0.496094] [A loss: 0.972493, acc: 0.113281]\n",
"5569: [D loss: 0.708045, acc: 0.519531] [A loss: 0.763259, acc: 0.347656]\n",
"5570: [D loss: 0.700603, acc: 0.529297] [A loss: 0.839772, acc: 0.218750]\n",
"5571: [D loss: 0.698929, acc: 0.523438] [A loss: 0.806005, acc: 0.277344]\n",
"5572: [D loss: 0.692752, acc: 0.523438] [A loss: 0.782414, acc: 0.316406]\n",
"5573: [D loss: 0.718475, acc: 0.509766] [A loss: 0.898912, acc: 0.167969]\n",
"5574: [D loss: 0.708414, acc: 0.501953] [A loss: 0.766888, acc: 0.339844]\n",
"5575: [D loss: 0.703025, acc: 0.511719] [A loss: 0.808101, acc: 0.250000]\n",
"5576: [D loss: 0.698577, acc: 0.537109] [A loss: 0.796344, acc: 0.304688]\n",
"5577: [D loss: 0.706242, acc: 0.529297] [A loss: 0.869848, acc: 0.175781]\n",
"5578: [D loss: 0.702392, acc: 0.505859] [A loss: 0.685000, acc: 0.531250]\n",
"5579: [D loss: 0.732490, acc: 0.496094] [A loss: 0.906596, acc: 0.128906]\n",
"5580: [D loss: 0.715487, acc: 0.472656] [A loss: 0.761199, acc: 0.382812]\n",
"5581: [D loss: 0.700025, acc: 0.539062] [A loss: 0.942715, acc: 0.136719]\n",
"5582: [D loss: 0.707589, acc: 0.500000] [A loss: 0.749897, acc: 0.375000]\n",
"5583: [D loss: 0.717239, acc: 0.480469] [A loss: 0.862356, acc: 0.214844]\n",
"5584: [D loss: 0.706812, acc: 0.505859] [A loss: 0.781043, acc: 0.320312]\n",
"5585: [D loss: 0.707792, acc: 0.500000] [A loss: 0.835433, acc: 0.253906]\n",
"5586: [D loss: 0.701592, acc: 0.515625] [A loss: 0.769358, acc: 0.394531]\n",
"5587: [D loss: 0.706966, acc: 0.498047] [A loss: 0.919356, acc: 0.140625]\n",
"5588: [D loss: 0.690942, acc: 0.519531] [A loss: 0.739563, acc: 0.394531]\n",
"5589: [D loss: 0.701306, acc: 0.527344] [A loss: 0.840351, acc: 0.222656]\n",
"5590: [D loss: 0.697258, acc: 0.517578] [A loss: 0.737163, acc: 0.429688]\n",
"5591: [D loss: 0.699909, acc: 0.533203] [A loss: 0.872642, acc: 0.203125]\n",
"5592: [D loss: 0.711841, acc: 0.492188] [A loss: 0.776914, acc: 0.300781]\n",
"5593: [D loss: 0.716838, acc: 0.507812] [A loss: 0.926775, acc: 0.085938]\n",
"5594: [D loss: 0.703894, acc: 0.492188] [A loss: 0.772239, acc: 0.355469]\n",
"5595: [D loss: 0.702953, acc: 0.531250] [A loss: 0.907421, acc: 0.093750]\n",
"5596: [D loss: 0.696724, acc: 0.492188] [A loss: 0.695249, acc: 0.539062]\n",
"5597: [D loss: 0.730435, acc: 0.492188] [A loss: 0.924912, acc: 0.105469]\n",
"5598: [D loss: 0.696428, acc: 0.523438] [A loss: 0.729926, acc: 0.460938]\n",
"5599: [D loss: 0.708812, acc: 0.507812] [A loss: 0.810185, acc: 0.273438]\n",
"5600: [D loss: 0.706514, acc: 0.500000] [A loss: 0.813470, acc: 0.261719]\n",
"5601: [D loss: 0.702647, acc: 0.498047] [A loss: 0.831346, acc: 0.214844]\n",
"5602: [D loss: 0.699646, acc: 0.507812] [A loss: 0.756164, acc: 0.332031]\n",
"5603: [D loss: 0.707940, acc: 0.513672] [A loss: 0.826226, acc: 0.238281]\n",
"5604: [D loss: 0.692049, acc: 0.544922] [A loss: 0.880192, acc: 0.156250]\n",
"5605: [D loss: 0.690831, acc: 0.531250] [A loss: 0.728769, acc: 0.468750]\n",
"5606: [D loss: 0.710649, acc: 0.498047] [A loss: 0.890817, acc: 0.125000]\n",
"5607: [D loss: 0.713648, acc: 0.507812] [A loss: 0.760503, acc: 0.355469]\n",
"5608: [D loss: 0.709686, acc: 0.503906] [A loss: 0.850108, acc: 0.156250]\n",
"5609: [D loss: 0.703655, acc: 0.496094] [A loss: 0.746273, acc: 0.417969]\n",
"5610: [D loss: 0.708948, acc: 0.507812] [A loss: 0.954671, acc: 0.121094]\n",
"5611: [D loss: 0.701095, acc: 0.523438] [A loss: 0.661255, acc: 0.578125]\n",
"5612: [D loss: 0.716421, acc: 0.515625] [A loss: 0.895230, acc: 0.179688]\n",
"5613: [D loss: 0.704452, acc: 0.505859] [A loss: 0.723376, acc: 0.476562]\n",
"5614: [D loss: 0.703228, acc: 0.517578] [A loss: 0.779245, acc: 0.265625]\n",
"5615: [D loss: 0.693214, acc: 0.539062] [A loss: 0.815657, acc: 0.277344]\n",
"5616: [D loss: 0.709008, acc: 0.511719] [A loss: 0.897975, acc: 0.156250]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5617: [D loss: 0.708819, acc: 0.478516] [A loss: 0.738415, acc: 0.414062]\n",
"5618: [D loss: 0.714334, acc: 0.515625] [A loss: 0.944726, acc: 0.093750]\n",
"5619: [D loss: 0.697417, acc: 0.509766] [A loss: 0.717677, acc: 0.480469]\n",
"5620: [D loss: 0.694490, acc: 0.541016] [A loss: 0.860187, acc: 0.222656]\n",
"5621: [D loss: 0.700399, acc: 0.511719] [A loss: 0.802373, acc: 0.289062]\n",
"5622: [D loss: 0.716024, acc: 0.507812] [A loss: 0.792364, acc: 0.273438]\n",
"5623: [D loss: 0.704152, acc: 0.517578] [A loss: 0.787447, acc: 0.335938]\n",
"5624: [D loss: 0.692477, acc: 0.525391] [A loss: 0.837220, acc: 0.218750]\n",
"5625: [D loss: 0.697102, acc: 0.523438] [A loss: 0.889315, acc: 0.132812]\n",
"5626: [D loss: 0.700253, acc: 0.513672] [A loss: 0.734231, acc: 0.437500]\n",
"5627: [D loss: 0.695219, acc: 0.523438] [A loss: 0.910654, acc: 0.121094]\n",
"5628: [D loss: 0.698075, acc: 0.492188] [A loss: 0.719948, acc: 0.472656]\n",
"5629: [D loss: 0.700307, acc: 0.544922] [A loss: 0.825856, acc: 0.218750]\n",
"5630: [D loss: 0.696846, acc: 0.517578] [A loss: 0.774004, acc: 0.359375]\n",
"5631: [D loss: 0.719289, acc: 0.500000] [A loss: 0.885859, acc: 0.183594]\n",
"5632: [D loss: 0.689975, acc: 0.562500] [A loss: 0.796395, acc: 0.269531]\n",
"5633: [D loss: 0.712460, acc: 0.492188] [A loss: 0.850032, acc: 0.222656]\n",
"5634: [D loss: 0.698893, acc: 0.535156] [A loss: 0.746419, acc: 0.378906]\n",
"5635: [D loss: 0.710636, acc: 0.492188] [A loss: 0.921742, acc: 0.109375]\n",
"5636: [D loss: 0.722842, acc: 0.462891] [A loss: 0.762043, acc: 0.351562]\n",
"5637: [D loss: 0.721953, acc: 0.498047] [A loss: 0.865043, acc: 0.214844]\n",
"5638: [D loss: 0.699725, acc: 0.525391] [A loss: 0.744463, acc: 0.406250]\n",
"5639: [D loss: 0.703152, acc: 0.509766] [A loss: 0.828488, acc: 0.261719]\n",
"5640: [D loss: 0.696202, acc: 0.531250] [A loss: 0.730681, acc: 0.460938]\n",
"5641: [D loss: 0.715512, acc: 0.505859] [A loss: 0.876903, acc: 0.207031]\n",
"5642: [D loss: 0.711589, acc: 0.490234] [A loss: 0.742703, acc: 0.449219]\n",
"5643: [D loss: 0.706800, acc: 0.533203] [A loss: 0.872741, acc: 0.164062]\n",
"5644: [D loss: 0.720705, acc: 0.486328] [A loss: 0.771433, acc: 0.332031]\n",
"5645: [D loss: 0.707011, acc: 0.490234] [A loss: 0.835267, acc: 0.230469]\n",
"5646: [D loss: 0.710097, acc: 0.498047] [A loss: 0.839482, acc: 0.210938]\n",
"5647: [D loss: 0.695539, acc: 0.542969] [A loss: 0.852915, acc: 0.207031]\n",
"5648: [D loss: 0.712053, acc: 0.480469] [A loss: 0.917672, acc: 0.113281]\n",
"5649: [D loss: 0.691848, acc: 0.515625] [A loss: 0.748560, acc: 0.371094]\n",
"5650: [D loss: 0.711948, acc: 0.490234] [A loss: 0.878988, acc: 0.156250]\n",
"5651: [D loss: 0.696822, acc: 0.533203] [A loss: 0.750731, acc: 0.378906]\n",
"5652: [D loss: 0.692394, acc: 0.525391] [A loss: 0.903019, acc: 0.152344]\n",
"5653: [D loss: 0.710912, acc: 0.492188] [A loss: 0.769454, acc: 0.359375]\n",
"5654: [D loss: 0.714426, acc: 0.460938] [A loss: 0.864360, acc: 0.191406]\n",
"5655: [D loss: 0.712056, acc: 0.498047] [A loss: 0.825693, acc: 0.234375]\n",
"5656: [D loss: 0.705448, acc: 0.517578] [A loss: 0.882689, acc: 0.136719]\n",
"5657: [D loss: 0.697773, acc: 0.527344] [A loss: 0.860876, acc: 0.152344]\n",
"5658: [D loss: 0.705024, acc: 0.519531] [A loss: 0.965815, acc: 0.101562]\n",
"5659: [D loss: 0.694803, acc: 0.527344] [A loss: 0.683489, acc: 0.550781]\n",
"5660: [D loss: 0.711696, acc: 0.531250] [A loss: 1.019735, acc: 0.054688]\n",
"5661: [D loss: 0.717513, acc: 0.496094] [A loss: 0.732033, acc: 0.433594]\n",
"5662: [D loss: 0.721817, acc: 0.480469] [A loss: 0.872779, acc: 0.175781]\n",
"5663: [D loss: 0.704165, acc: 0.515625] [A loss: 0.736160, acc: 0.406250]\n",
"5664: [D loss: 0.715990, acc: 0.509766] [A loss: 0.914095, acc: 0.125000]\n",
"5665: [D loss: 0.709256, acc: 0.494141] [A loss: 0.688583, acc: 0.527344]\n",
"5666: [D loss: 0.709440, acc: 0.539062] [A loss: 0.931882, acc: 0.101562]\n",
"5667: [D loss: 0.705856, acc: 0.500000] [A loss: 0.754750, acc: 0.390625]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5668: [D loss: 0.711927, acc: 0.548828] [A loss: 0.929691, acc: 0.125000]\n",
"5669: [D loss: 0.705043, acc: 0.525391] [A loss: 0.701474, acc: 0.523438]\n",
"5670: [D loss: 0.700644, acc: 0.527344] [A loss: 0.863074, acc: 0.214844]\n",
"5671: [D loss: 0.693511, acc: 0.560547] [A loss: 0.747268, acc: 0.390625]\n",
"5672: [D loss: 0.718976, acc: 0.509766] [A loss: 0.912586, acc: 0.109375]\n",
"5673: [D loss: 0.706423, acc: 0.482422] [A loss: 0.719662, acc: 0.468750]\n",
"5674: [D loss: 0.714102, acc: 0.505859] [A loss: 0.865177, acc: 0.191406]\n",
"5675: [D loss: 0.698906, acc: 0.517578] [A loss: 0.725745, acc: 0.468750]\n",
"5676: [D loss: 0.694082, acc: 0.554688] [A loss: 0.866562, acc: 0.167969]\n",
"5677: [D loss: 0.702181, acc: 0.505859] [A loss: 0.721962, acc: 0.441406]\n",
"5678: [D loss: 0.697360, acc: 0.537109] [A loss: 0.875579, acc: 0.199219]\n",
"5679: [D loss: 0.685583, acc: 0.576172] [A loss: 0.716447, acc: 0.472656]\n",
"5680: [D loss: 0.708664, acc: 0.527344] [A loss: 0.882150, acc: 0.136719]\n",
"5681: [D loss: 0.691265, acc: 0.550781] [A loss: 0.713659, acc: 0.496094]\n",
"5682: [D loss: 0.718690, acc: 0.509766] [A loss: 0.895935, acc: 0.136719]\n",
"5683: [D loss: 0.702810, acc: 0.505859] [A loss: 0.729748, acc: 0.429688]\n",
"5684: [D loss: 0.709605, acc: 0.517578] [A loss: 0.839522, acc: 0.179688]\n",
"5685: [D loss: 0.703357, acc: 0.519531] [A loss: 0.750894, acc: 0.390625]\n",
"5686: [D loss: 0.723770, acc: 0.505859] [A loss: 0.966820, acc: 0.089844]\n",
"5687: [D loss: 0.694184, acc: 0.529297] [A loss: 0.723657, acc: 0.468750]\n",
"5688: [D loss: 0.722069, acc: 0.515625] [A loss: 0.861395, acc: 0.242188]\n",
"5689: [D loss: 0.708723, acc: 0.517578] [A loss: 0.884822, acc: 0.132812]\n",
"5690: [D loss: 0.693068, acc: 0.542969] [A loss: 0.693732, acc: 0.523438]\n",
"5691: [D loss: 0.718324, acc: 0.517578] [A loss: 0.884955, acc: 0.140625]\n",
"5692: [D loss: 0.693685, acc: 0.525391] [A loss: 0.739765, acc: 0.425781]\n",
"5693: [D loss: 0.714837, acc: 0.533203] [A loss: 0.874939, acc: 0.164062]\n",
"5694: [D loss: 0.715129, acc: 0.488281] [A loss: 0.727122, acc: 0.437500]\n",
"5695: [D loss: 0.717475, acc: 0.503906] [A loss: 0.878547, acc: 0.175781]\n",
"5696: [D loss: 0.689639, acc: 0.544922] [A loss: 0.786101, acc: 0.312500]\n",
"5697: [D loss: 0.705394, acc: 0.525391] [A loss: 0.878473, acc: 0.156250]\n",
"5698: [D loss: 0.712878, acc: 0.505859] [A loss: 0.766427, acc: 0.351562]\n",
"5699: [D loss: 0.715814, acc: 0.490234] [A loss: 0.924058, acc: 0.132812]\n",
"5700: [D loss: 0.683004, acc: 0.556641] [A loss: 0.722299, acc: 0.484375]\n",
"5701: [D loss: 0.707309, acc: 0.513672] [A loss: 0.885130, acc: 0.183594]\n",
"5702: [D loss: 0.695166, acc: 0.544922] [A loss: 0.773274, acc: 0.320312]\n",
"5703: [D loss: 0.712700, acc: 0.529297] [A loss: 0.817006, acc: 0.257812]\n",
"5704: [D loss: 0.699977, acc: 0.550781] [A loss: 0.814748, acc: 0.292969]\n",
"5705: [D loss: 0.695733, acc: 0.519531] [A loss: 0.892194, acc: 0.144531]\n",
"5706: [D loss: 0.687792, acc: 0.550781] [A loss: 0.746833, acc: 0.414062]\n",
"5707: [D loss: 0.692238, acc: 0.537109] [A loss: 0.925140, acc: 0.105469]\n",
"5708: [D loss: 0.710313, acc: 0.488281] [A loss: 0.699465, acc: 0.511719]\n",
"5709: [D loss: 0.717772, acc: 0.527344] [A loss: 0.898033, acc: 0.160156]\n",
"5710: [D loss: 0.707114, acc: 0.496094] [A loss: 0.737160, acc: 0.449219]\n",
"5711: [D loss: 0.712236, acc: 0.478516] [A loss: 0.846796, acc: 0.210938]\n",
"5712: [D loss: 0.705966, acc: 0.519531] [A loss: 0.771198, acc: 0.378906]\n",
"5713: [D loss: 0.698831, acc: 0.539062] [A loss: 0.919325, acc: 0.136719]\n",
"5714: [D loss: 0.700490, acc: 0.521484] [A loss: 0.733922, acc: 0.433594]\n",
"5715: [D loss: 0.720216, acc: 0.486328] [A loss: 0.884327, acc: 0.207031]\n",
"5716: [D loss: 0.701982, acc: 0.529297] [A loss: 0.786565, acc: 0.328125]\n",
"5717: [D loss: 0.708477, acc: 0.500000] [A loss: 0.822137, acc: 0.257812]\n",
"5718: [D loss: 0.704941, acc: 0.539062] [A loss: 0.770533, acc: 0.324219]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5719: [D loss: 0.699072, acc: 0.529297] [A loss: 0.841477, acc: 0.175781]\n",
"5720: [D loss: 0.703124, acc: 0.537109] [A loss: 0.861131, acc: 0.175781]\n",
"5721: [D loss: 0.699849, acc: 0.490234] [A loss: 0.827210, acc: 0.242188]\n",
"5722: [D loss: 0.704593, acc: 0.523438] [A loss: 0.782056, acc: 0.324219]\n",
"5723: [D loss: 0.707316, acc: 0.511719] [A loss: 0.882722, acc: 0.152344]\n",
"5724: [D loss: 0.704976, acc: 0.488281] [A loss: 0.712283, acc: 0.480469]\n",
"5725: [D loss: 0.712903, acc: 0.509766] [A loss: 0.895536, acc: 0.148438]\n",
"5726: [D loss: 0.707596, acc: 0.486328] [A loss: 0.753151, acc: 0.437500]\n",
"5727: [D loss: 0.698584, acc: 0.533203] [A loss: 0.876052, acc: 0.164062]\n",
"5728: [D loss: 0.692136, acc: 0.552734] [A loss: 0.916215, acc: 0.113281]\n",
"5729: [D loss: 0.713787, acc: 0.466797] [A loss: 0.780706, acc: 0.292969]\n",
"5730: [D loss: 0.706041, acc: 0.515625] [A loss: 1.067375, acc: 0.039062]\n",
"5731: [D loss: 0.706746, acc: 0.498047] [A loss: 0.671517, acc: 0.589844]\n",
"5732: [D loss: 0.722625, acc: 0.496094] [A loss: 0.900577, acc: 0.121094]\n",
"5733: [D loss: 0.693750, acc: 0.511719] [A loss: 0.774936, acc: 0.300781]\n",
"5734: [D loss: 0.713096, acc: 0.507812] [A loss: 0.801010, acc: 0.273438]\n",
"5735: [D loss: 0.697493, acc: 0.527344] [A loss: 0.859353, acc: 0.183594]\n",
"5736: [D loss: 0.693128, acc: 0.550781] [A loss: 0.725877, acc: 0.476562]\n",
"5737: [D loss: 0.698057, acc: 0.556641] [A loss: 0.912722, acc: 0.140625]\n",
"5738: [D loss: 0.705784, acc: 0.517578] [A loss: 0.758974, acc: 0.339844]\n",
"5739: [D loss: 0.700872, acc: 0.525391] [A loss: 0.923876, acc: 0.113281]\n",
"5740: [D loss: 0.711073, acc: 0.484375] [A loss: 0.721179, acc: 0.472656]\n",
"5741: [D loss: 0.698066, acc: 0.535156] [A loss: 0.944513, acc: 0.097656]\n",
"5742: [D loss: 0.699317, acc: 0.525391] [A loss: 0.699787, acc: 0.500000]\n",
"5743: [D loss: 0.708905, acc: 0.525391] [A loss: 0.903291, acc: 0.125000]\n",
"5744: [D loss: 0.701487, acc: 0.507812] [A loss: 0.711973, acc: 0.472656]\n",
"5745: [D loss: 0.695209, acc: 0.517578] [A loss: 0.895549, acc: 0.140625]\n",
"5746: [D loss: 0.699151, acc: 0.517578] [A loss: 0.712784, acc: 0.480469]\n",
"5747: [D loss: 0.704962, acc: 0.529297] [A loss: 0.958877, acc: 0.078125]\n",
"5748: [D loss: 0.688852, acc: 0.539062] [A loss: 0.702786, acc: 0.460938]\n",
"5749: [D loss: 0.707913, acc: 0.539062] [A loss: 0.842593, acc: 0.238281]\n",
"5750: [D loss: 0.697467, acc: 0.529297] [A loss: 0.834954, acc: 0.234375]\n",
"5751: [D loss: 0.701052, acc: 0.515625] [A loss: 0.764236, acc: 0.359375]\n",
"5752: [D loss: 0.704265, acc: 0.531250] [A loss: 0.849499, acc: 0.199219]\n",
"5753: [D loss: 0.709333, acc: 0.501953] [A loss: 0.857181, acc: 0.195312]\n",
"5754: [D loss: 0.712431, acc: 0.492188] [A loss: 0.824169, acc: 0.210938]\n",
"5755: [D loss: 0.693290, acc: 0.548828] [A loss: 0.852719, acc: 0.214844]\n",
"5756: [D loss: 0.701940, acc: 0.503906] [A loss: 0.749612, acc: 0.398438]\n",
"5757: [D loss: 0.714561, acc: 0.507812] [A loss: 0.835424, acc: 0.250000]\n",
"5758: [D loss: 0.694382, acc: 0.496094] [A loss: 0.785932, acc: 0.281250]\n",
"5759: [D loss: 0.721840, acc: 0.468750] [A loss: 0.898044, acc: 0.128906]\n",
"5760: [D loss: 0.711371, acc: 0.478516] [A loss: 0.753244, acc: 0.355469]\n",
"5761: [D loss: 0.708206, acc: 0.501953] [A loss: 0.903060, acc: 0.167969]\n",
"5762: [D loss: 0.706142, acc: 0.505859] [A loss: 0.754698, acc: 0.355469]\n",
"5763: [D loss: 0.700203, acc: 0.527344] [A loss: 0.883333, acc: 0.164062]\n",
"5764: [D loss: 0.687528, acc: 0.531250] [A loss: 0.703092, acc: 0.464844]\n",
"5765: [D loss: 0.715486, acc: 0.507812] [A loss: 0.854128, acc: 0.183594]\n",
"5766: [D loss: 0.705382, acc: 0.517578] [A loss: 1.001776, acc: 0.050781]\n",
"5767: [D loss: 0.694581, acc: 0.517578] [A loss: 0.769582, acc: 0.351562]\n",
"5768: [D loss: 0.695287, acc: 0.550781] [A loss: 0.844328, acc: 0.218750]\n",
"5769: [D loss: 0.685454, acc: 0.562500] [A loss: 0.768752, acc: 0.324219]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5770: [D loss: 0.716333, acc: 0.494141] [A loss: 0.936936, acc: 0.121094]\n",
"5771: [D loss: 0.716781, acc: 0.490234] [A loss: 0.745342, acc: 0.429688]\n",
"5772: [D loss: 0.706628, acc: 0.542969] [A loss: 0.946638, acc: 0.109375]\n",
"5773: [D loss: 0.723472, acc: 0.462891] [A loss: 0.753362, acc: 0.378906]\n",
"5774: [D loss: 0.698270, acc: 0.529297] [A loss: 0.934753, acc: 0.125000]\n",
"5775: [D loss: 0.714356, acc: 0.486328] [A loss: 0.700494, acc: 0.511719]\n",
"5776: [D loss: 0.712837, acc: 0.498047] [A loss: 0.897492, acc: 0.128906]\n",
"5777: [D loss: 0.701765, acc: 0.494141] [A loss: 0.764525, acc: 0.363281]\n",
"5778: [D loss: 0.696991, acc: 0.537109] [A loss: 0.757089, acc: 0.351562]\n",
"5779: [D loss: 0.706482, acc: 0.511719] [A loss: 0.895046, acc: 0.132812]\n",
"5780: [D loss: 0.691606, acc: 0.562500] [A loss: 0.735408, acc: 0.453125]\n",
"5781: [D loss: 0.701427, acc: 0.519531] [A loss: 0.909738, acc: 0.156250]\n",
"5782: [D loss: 0.696698, acc: 0.541016] [A loss: 0.779068, acc: 0.324219]\n",
"5783: [D loss: 0.694717, acc: 0.537109] [A loss: 0.914346, acc: 0.140625]\n",
"5784: [D loss: 0.695464, acc: 0.525391] [A loss: 0.687492, acc: 0.550781]\n",
"5785: [D loss: 0.714606, acc: 0.507812] [A loss: 0.894964, acc: 0.144531]\n",
"5786: [D loss: 0.702459, acc: 0.511719] [A loss: 0.790762, acc: 0.382812]\n",
"5787: [D loss: 0.700866, acc: 0.515625] [A loss: 0.879927, acc: 0.156250]\n",
"5788: [D loss: 0.694488, acc: 0.539062] [A loss: 0.839523, acc: 0.250000]\n",
"5789: [D loss: 0.710017, acc: 0.505859] [A loss: 0.947844, acc: 0.101562]\n",
"5790: [D loss: 0.706725, acc: 0.507812] [A loss: 0.705981, acc: 0.492188]\n",
"5791: [D loss: 0.726431, acc: 0.500000] [A loss: 1.030108, acc: 0.035156]\n",
"5792: [D loss: 0.706145, acc: 0.509766] [A loss: 0.708817, acc: 0.492188]\n",
"5793: [D loss: 0.715489, acc: 0.507812] [A loss: 0.912927, acc: 0.125000]\n",
"5794: [D loss: 0.693714, acc: 0.507812] [A loss: 0.726375, acc: 0.457031]\n",
"5795: [D loss: 0.721983, acc: 0.501953] [A loss: 0.896921, acc: 0.148438]\n",
"5796: [D loss: 0.703062, acc: 0.496094] [A loss: 0.727872, acc: 0.425781]\n",
"5797: [D loss: 0.715473, acc: 0.500000] [A loss: 0.939076, acc: 0.109375]\n",
"5798: [D loss: 0.685875, acc: 0.541016] [A loss: 0.723094, acc: 0.496094]\n",
"5799: [D loss: 0.702098, acc: 0.519531] [A loss: 0.893447, acc: 0.140625]\n",
"5800: [D loss: 0.694957, acc: 0.544922] [A loss: 0.720276, acc: 0.453125]\n",
"5801: [D loss: 0.705686, acc: 0.525391] [A loss: 1.005823, acc: 0.105469]\n",
"5802: [D loss: 0.703930, acc: 0.511719] [A loss: 0.683500, acc: 0.519531]\n",
"5803: [D loss: 0.721150, acc: 0.505859] [A loss: 0.880175, acc: 0.191406]\n",
"5804: [D loss: 0.720893, acc: 0.505859] [A loss: 0.803180, acc: 0.292969]\n",
"5805: [D loss: 0.705957, acc: 0.505859] [A loss: 0.803040, acc: 0.265625]\n",
"5806: [D loss: 0.686552, acc: 0.552734] [A loss: 0.764970, acc: 0.355469]\n",
"5807: [D loss: 0.703112, acc: 0.505859] [A loss: 0.821127, acc: 0.238281]\n",
"5808: [D loss: 0.704434, acc: 0.501953] [A loss: 0.794074, acc: 0.355469]\n",
"5809: [D loss: 0.712257, acc: 0.519531] [A loss: 0.823506, acc: 0.261719]\n",
"5810: [D loss: 0.700340, acc: 0.515625] [A loss: 0.748461, acc: 0.414062]\n",
"5811: [D loss: 0.690772, acc: 0.542969] [A loss: 0.832329, acc: 0.210938]\n",
"5812: [D loss: 0.700050, acc: 0.513672] [A loss: 0.813545, acc: 0.253906]\n",
"5813: [D loss: 0.681339, acc: 0.544922] [A loss: 0.827023, acc: 0.257812]\n",
"5814: [D loss: 0.694744, acc: 0.548828] [A loss: 0.837148, acc: 0.246094]\n",
"5815: [D loss: 0.689524, acc: 0.550781] [A loss: 0.805262, acc: 0.281250]\n",
"5816: [D loss: 0.693136, acc: 0.527344] [A loss: 0.825859, acc: 0.246094]\n",
"5817: [D loss: 0.693054, acc: 0.535156] [A loss: 0.816500, acc: 0.238281]\n",
"5818: [D loss: 0.702826, acc: 0.521484] [A loss: 0.851159, acc: 0.199219]\n",
"5819: [D loss: 0.694824, acc: 0.542969] [A loss: 0.755079, acc: 0.359375]\n",
"5820: [D loss: 0.696390, acc: 0.546875] [A loss: 0.884228, acc: 0.167969]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5821: [D loss: 0.693014, acc: 0.537109] [A loss: 0.764992, acc: 0.359375]\n",
"5822: [D loss: 0.712384, acc: 0.523438] [A loss: 0.962277, acc: 0.089844]\n",
"5823: [D loss: 0.694931, acc: 0.519531] [A loss: 0.699224, acc: 0.511719]\n",
"5824: [D loss: 0.722519, acc: 0.492188] [A loss: 0.877082, acc: 0.187500]\n",
"5825: [D loss: 0.698221, acc: 0.523438] [A loss: 0.712672, acc: 0.472656]\n",
"5826: [D loss: 0.717818, acc: 0.529297] [A loss: 1.007142, acc: 0.035156]\n",
"5827: [D loss: 0.693599, acc: 0.523438] [A loss: 0.671621, acc: 0.554688]\n",
"5828: [D loss: 0.719954, acc: 0.521484] [A loss: 0.908584, acc: 0.105469]\n",
"5829: [D loss: 0.690159, acc: 0.566406] [A loss: 0.707674, acc: 0.515625]\n",
"5830: [D loss: 0.712417, acc: 0.523438] [A loss: 0.957949, acc: 0.125000]\n",
"5831: [D loss: 0.698416, acc: 0.519531] [A loss: 0.753122, acc: 0.351562]\n",
"5832: [D loss: 0.705086, acc: 0.525391] [A loss: 0.881297, acc: 0.183594]\n",
"5833: [D loss: 0.711330, acc: 0.503906] [A loss: 0.774098, acc: 0.316406]\n",
"5834: [D loss: 0.702337, acc: 0.560547] [A loss: 0.863539, acc: 0.171875]\n",
"5835: [D loss: 0.705280, acc: 0.527344] [A loss: 0.804352, acc: 0.289062]\n",
"5836: [D loss: 0.694635, acc: 0.535156] [A loss: 0.847682, acc: 0.218750]\n",
"5837: [D loss: 0.705839, acc: 0.542969] [A loss: 0.814209, acc: 0.312500]\n",
"5838: [D loss: 0.704172, acc: 0.523438] [A loss: 0.790116, acc: 0.304688]\n",
"5839: [D loss: 0.715608, acc: 0.490234] [A loss: 0.910024, acc: 0.187500]\n",
"5840: [D loss: 0.690825, acc: 0.537109] [A loss: 0.746168, acc: 0.398438]\n",
"5841: [D loss: 0.709869, acc: 0.474609] [A loss: 0.902594, acc: 0.171875]\n",
"5842: [D loss: 0.701727, acc: 0.519531] [A loss: 0.711514, acc: 0.480469]\n",
"5843: [D loss: 0.714827, acc: 0.503906] [A loss: 0.873021, acc: 0.203125]\n",
"5844: [D loss: 0.691059, acc: 0.535156] [A loss: 0.756346, acc: 0.406250]\n",
"5845: [D loss: 0.706780, acc: 0.507812] [A loss: 0.857010, acc: 0.187500]\n",
"5846: [D loss: 0.694314, acc: 0.535156] [A loss: 0.810276, acc: 0.261719]\n",
"5847: [D loss: 0.710195, acc: 0.513672] [A loss: 0.886216, acc: 0.152344]\n",
"5848: [D loss: 0.701948, acc: 0.511719] [A loss: 0.722705, acc: 0.492188]\n",
"5849: [D loss: 0.706052, acc: 0.505859] [A loss: 0.923635, acc: 0.113281]\n",
"5850: [D loss: 0.698046, acc: 0.525391] [A loss: 0.690560, acc: 0.531250]\n",
"5851: [D loss: 0.718777, acc: 0.527344] [A loss: 0.951619, acc: 0.093750]\n",
"5852: [D loss: 0.701227, acc: 0.525391] [A loss: 0.715052, acc: 0.464844]\n",
"5853: [D loss: 0.710845, acc: 0.496094] [A loss: 0.863295, acc: 0.218750]\n",
"5854: [D loss: 0.700208, acc: 0.496094] [A loss: 0.784984, acc: 0.324219]\n",
"5855: [D loss: 0.692999, acc: 0.531250] [A loss: 0.838273, acc: 0.226562]\n",
"5856: [D loss: 0.695614, acc: 0.539062] [A loss: 0.731925, acc: 0.421875]\n",
"5857: [D loss: 0.706804, acc: 0.503906] [A loss: 0.889036, acc: 0.175781]\n",
"5858: [D loss: 0.700600, acc: 0.527344] [A loss: 0.745536, acc: 0.375000]\n",
"5859: [D loss: 0.695786, acc: 0.542969] [A loss: 0.895017, acc: 0.136719]\n",
"5860: [D loss: 0.703729, acc: 0.531250] [A loss: 0.749170, acc: 0.367188]\n",
"5861: [D loss: 0.712436, acc: 0.496094] [A loss: 0.828152, acc: 0.214844]\n",
"5862: [D loss: 0.706364, acc: 0.517578] [A loss: 0.825473, acc: 0.234375]\n",
"5863: [D loss: 0.705566, acc: 0.515625] [A loss: 0.783625, acc: 0.355469]\n",
"5864: [D loss: 0.704189, acc: 0.517578] [A loss: 0.864752, acc: 0.203125]\n",
"5865: [D loss: 0.691042, acc: 0.523438] [A loss: 0.829863, acc: 0.269531]\n",
"5866: [D loss: 0.702166, acc: 0.517578] [A loss: 0.894608, acc: 0.132812]\n",
"5867: [D loss: 0.706210, acc: 0.500000] [A loss: 0.812268, acc: 0.257812]\n",
"5868: [D loss: 0.716584, acc: 0.478516] [A loss: 0.871181, acc: 0.183594]\n",
"5869: [D loss: 0.710931, acc: 0.503906] [A loss: 0.954024, acc: 0.097656]\n",
"5870: [D loss: 0.691443, acc: 0.544922] [A loss: 0.732623, acc: 0.402344]\n",
"5871: [D loss: 0.696829, acc: 0.525391] [A loss: 0.876536, acc: 0.199219]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5872: [D loss: 0.689846, acc: 0.529297] [A loss: 0.784685, acc: 0.316406]\n",
"5873: [D loss: 0.723411, acc: 0.488281] [A loss: 0.886095, acc: 0.179688]\n",
"5874: [D loss: 0.693256, acc: 0.539062] [A loss: 0.745475, acc: 0.371094]\n",
"5875: [D loss: 0.698350, acc: 0.527344] [A loss: 0.900846, acc: 0.152344]\n",
"5876: [D loss: 0.694181, acc: 0.517578] [A loss: 0.729004, acc: 0.437500]\n",
"5877: [D loss: 0.699536, acc: 0.542969] [A loss: 0.823007, acc: 0.250000]\n",
"5878: [D loss: 0.695603, acc: 0.529297] [A loss: 0.771883, acc: 0.335938]\n",
"5879: [D loss: 0.714910, acc: 0.523438] [A loss: 1.056836, acc: 0.066406]\n",
"5880: [D loss: 0.715639, acc: 0.478516] [A loss: 0.776350, acc: 0.378906]\n",
"5881: [D loss: 0.712558, acc: 0.500000] [A loss: 0.921755, acc: 0.125000]\n",
"5882: [D loss: 0.688104, acc: 0.554688] [A loss: 0.662066, acc: 0.601562]\n",
"5883: [D loss: 0.708138, acc: 0.539062] [A loss: 0.964166, acc: 0.074219]\n",
"5884: [D loss: 0.707481, acc: 0.505859] [A loss: 0.725121, acc: 0.453125]\n",
"5885: [D loss: 0.703687, acc: 0.527344] [A loss: 0.902100, acc: 0.156250]\n",
"5886: [D loss: 0.694333, acc: 0.537109] [A loss: 0.738335, acc: 0.433594]\n",
"5887: [D loss: 0.727681, acc: 0.505859] [A loss: 0.935111, acc: 0.132812]\n",
"5888: [D loss: 0.689444, acc: 0.542969] [A loss: 0.707840, acc: 0.484375]\n",
"5889: [D loss: 0.708272, acc: 0.529297] [A loss: 0.922373, acc: 0.082031]\n",
"5890: [D loss: 0.697052, acc: 0.539062] [A loss: 0.692872, acc: 0.519531]\n",
"5891: [D loss: 0.712574, acc: 0.492188] [A loss: 0.855395, acc: 0.183594]\n",
"5892: [D loss: 0.698922, acc: 0.515625] [A loss: 0.769633, acc: 0.343750]\n",
"5893: [D loss: 0.710809, acc: 0.517578] [A loss: 0.967398, acc: 0.101562]\n",
"5894: [D loss: 0.699617, acc: 0.517578] [A loss: 0.702345, acc: 0.507812]\n",
"5895: [D loss: 0.706989, acc: 0.521484] [A loss: 0.880462, acc: 0.140625]\n",
"5896: [D loss: 0.695789, acc: 0.517578] [A loss: 0.730949, acc: 0.441406]\n",
"5897: [D loss: 0.704780, acc: 0.523438] [A loss: 0.887287, acc: 0.179688]\n",
"5898: [D loss: 0.701456, acc: 0.521484] [A loss: 0.718577, acc: 0.492188]\n",
"5899: [D loss: 0.719435, acc: 0.517578] [A loss: 0.944961, acc: 0.085938]\n",
"5900: [D loss: 0.701778, acc: 0.527344] [A loss: 0.669731, acc: 0.570312]\n",
"5901: [D loss: 0.712432, acc: 0.509766] [A loss: 0.838984, acc: 0.207031]\n",
"5902: [D loss: 0.700344, acc: 0.517578] [A loss: 0.766567, acc: 0.332031]\n",
"5903: [D loss: 0.703696, acc: 0.531250] [A loss: 0.815806, acc: 0.265625]\n",
"5904: [D loss: 0.709630, acc: 0.503906] [A loss: 0.832933, acc: 0.281250]\n",
"5905: [D loss: 0.712747, acc: 0.482422] [A loss: 0.765964, acc: 0.324219]\n",
"5906: [D loss: 0.699158, acc: 0.531250] [A loss: 0.760894, acc: 0.355469]\n",
"5907: [D loss: 0.705570, acc: 0.501953] [A loss: 0.864474, acc: 0.210938]\n",
"5908: [D loss: 0.701209, acc: 0.527344] [A loss: 0.790236, acc: 0.296875]\n",
"5909: [D loss: 0.702297, acc: 0.515625] [A loss: 0.825676, acc: 0.222656]\n",
"5910: [D loss: 0.699082, acc: 0.542969] [A loss: 0.708180, acc: 0.500000]\n",
"5911: [D loss: 0.704631, acc: 0.531250] [A loss: 0.917084, acc: 0.113281]\n",
"5912: [D loss: 0.697668, acc: 0.544922] [A loss: 0.689431, acc: 0.558594]\n",
"5913: [D loss: 0.706758, acc: 0.515625] [A loss: 0.870327, acc: 0.152344]\n",
"5914: [D loss: 0.696946, acc: 0.539062] [A loss: 0.732431, acc: 0.468750]\n",
"5915: [D loss: 0.729447, acc: 0.482422] [A loss: 0.900303, acc: 0.148438]\n",
"5916: [D loss: 0.695830, acc: 0.537109] [A loss: 0.725024, acc: 0.425781]\n",
"5917: [D loss: 0.718736, acc: 0.494141] [A loss: 0.887083, acc: 0.167969]\n",
"5918: [D loss: 0.699354, acc: 0.517578] [A loss: 0.742982, acc: 0.421875]\n",
"5919: [D loss: 0.710354, acc: 0.519531] [A loss: 0.912580, acc: 0.156250]\n",
"5920: [D loss: 0.696887, acc: 0.517578] [A loss: 0.735458, acc: 0.445312]\n",
"5921: [D loss: 0.697092, acc: 0.525391] [A loss: 0.876151, acc: 0.160156]\n",
"5922: [D loss: 0.695540, acc: 0.546875] [A loss: 0.739179, acc: 0.414062]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5923: [D loss: 0.714302, acc: 0.519531] [A loss: 0.959409, acc: 0.101562]\n",
"5924: [D loss: 0.698276, acc: 0.531250] [A loss: 0.696240, acc: 0.519531]\n",
"5925: [D loss: 0.713316, acc: 0.505859] [A loss: 0.859992, acc: 0.167969]\n",
"5926: [D loss: 0.692369, acc: 0.527344] [A loss: 0.719594, acc: 0.449219]\n",
"5927: [D loss: 0.713913, acc: 0.533203] [A loss: 0.897037, acc: 0.183594]\n",
"5928: [D loss: 0.693123, acc: 0.531250] [A loss: 0.695340, acc: 0.535156]\n",
"5929: [D loss: 0.707847, acc: 0.529297] [A loss: 0.835875, acc: 0.195312]\n",
"5930: [D loss: 0.697059, acc: 0.511719] [A loss: 0.943731, acc: 0.164062]\n",
"5931: [D loss: 0.690642, acc: 0.541016] [A loss: 0.718932, acc: 0.445312]\n",
"5932: [D loss: 0.714338, acc: 0.533203] [A loss: 0.840681, acc: 0.226562]\n",
"5933: [D loss: 0.698542, acc: 0.539062] [A loss: 0.769636, acc: 0.328125]\n",
"5934: [D loss: 0.710325, acc: 0.464844] [A loss: 0.796904, acc: 0.351562]\n",
"5935: [D loss: 0.700573, acc: 0.525391] [A loss: 0.801900, acc: 0.308594]\n",
"5936: [D loss: 0.711311, acc: 0.503906] [A loss: 0.916897, acc: 0.132812]\n",
"5937: [D loss: 0.698403, acc: 0.507812] [A loss: 0.746300, acc: 0.425781]\n",
"5938: [D loss: 0.705477, acc: 0.521484] [A loss: 0.903408, acc: 0.179688]\n",
"5939: [D loss: 0.694876, acc: 0.527344] [A loss: 0.737837, acc: 0.410156]\n",
"5940: [D loss: 0.724811, acc: 0.511719] [A loss: 0.945586, acc: 0.097656]\n",
"5941: [D loss: 0.708325, acc: 0.527344] [A loss: 0.701031, acc: 0.507812]\n",
"5942: [D loss: 0.705221, acc: 0.521484] [A loss: 0.878305, acc: 0.183594]\n",
"5943: [D loss: 0.690861, acc: 0.542969] [A loss: 0.733725, acc: 0.460938]\n",
"5944: [D loss: 0.715464, acc: 0.511719] [A loss: 0.923853, acc: 0.121094]\n",
"5945: [D loss: 0.691683, acc: 0.548828] [A loss: 0.751125, acc: 0.386719]\n",
"5946: [D loss: 0.690795, acc: 0.556641] [A loss: 0.889595, acc: 0.148438]\n",
"5947: [D loss: 0.692233, acc: 0.552734] [A loss: 0.768905, acc: 0.320312]\n",
"5948: [D loss: 0.711182, acc: 0.507812] [A loss: 0.899204, acc: 0.140625]\n",
"5949: [D loss: 0.695882, acc: 0.542969] [A loss: 0.735152, acc: 0.390625]\n",
"5950: [D loss: 0.707936, acc: 0.515625] [A loss: 0.831278, acc: 0.207031]\n",
"5951: [D loss: 0.694636, acc: 0.533203] [A loss: 0.736816, acc: 0.441406]\n",
"5952: [D loss: 0.704241, acc: 0.539062] [A loss: 0.858963, acc: 0.199219]\n",
"5953: [D loss: 0.693454, acc: 0.539062] [A loss: 0.787110, acc: 0.304688]\n",
"5954: [D loss: 0.702789, acc: 0.554688] [A loss: 0.936606, acc: 0.113281]\n",
"5955: [D loss: 0.718741, acc: 0.460938] [A loss: 0.692442, acc: 0.492188]\n",
"5956: [D loss: 0.744210, acc: 0.457031] [A loss: 0.924523, acc: 0.132812]\n",
"5957: [D loss: 0.702002, acc: 0.529297] [A loss: 0.762261, acc: 0.355469]\n",
"5958: [D loss: 0.721878, acc: 0.486328] [A loss: 0.883073, acc: 0.179688]\n",
"5959: [D loss: 0.700230, acc: 0.521484] [A loss: 0.785866, acc: 0.332031]\n",
"5960: [D loss: 0.699015, acc: 0.521484] [A loss: 0.824729, acc: 0.242188]\n",
"5961: [D loss: 0.700816, acc: 0.527344] [A loss: 0.867626, acc: 0.191406]\n",
"5962: [D loss: 0.687120, acc: 0.558594] [A loss: 0.760609, acc: 0.378906]\n",
"5963: [D loss: 0.701802, acc: 0.515625] [A loss: 0.895370, acc: 0.136719]\n",
"5964: [D loss: 0.707870, acc: 0.488281] [A loss: 0.729121, acc: 0.460938]\n",
"5965: [D loss: 0.702467, acc: 0.531250] [A loss: 1.017059, acc: 0.105469]\n",
"5966: [D loss: 0.698555, acc: 0.531250] [A loss: 0.676273, acc: 0.531250]\n",
"5967: [D loss: 0.718311, acc: 0.513672] [A loss: 0.897957, acc: 0.097656]\n",
"5968: [D loss: 0.703327, acc: 0.500000] [A loss: 0.771668, acc: 0.328125]\n",
"5969: [D loss: 0.707085, acc: 0.500000] [A loss: 0.821347, acc: 0.242188]\n",
"5970: [D loss: 0.701184, acc: 0.509766] [A loss: 0.873905, acc: 0.144531]\n",
"5971: [D loss: 0.702829, acc: 0.521484] [A loss: 0.773801, acc: 0.343750]\n",
"5972: [D loss: 0.707848, acc: 0.517578] [A loss: 0.896529, acc: 0.171875]\n",
"5973: [D loss: 0.700706, acc: 0.521484] [A loss: 0.798640, acc: 0.312500]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"5974: [D loss: 0.706118, acc: 0.519531] [A loss: 0.900577, acc: 0.125000]\n",
"5975: [D loss: 0.692702, acc: 0.531250] [A loss: 0.734771, acc: 0.417969]\n",
"5976: [D loss: 0.699984, acc: 0.541016] [A loss: 1.046803, acc: 0.023438]\n",
"5977: [D loss: 0.714015, acc: 0.507812] [A loss: 0.649496, acc: 0.597656]\n",
"5978: [D loss: 0.707883, acc: 0.542969] [A loss: 0.927180, acc: 0.179688]\n",
"5979: [D loss: 0.696117, acc: 0.525391] [A loss: 0.769563, acc: 0.351562]\n",
"5980: [D loss: 0.706370, acc: 0.517578] [A loss: 0.892428, acc: 0.144531]\n",
"5981: [D loss: 0.678045, acc: 0.572266] [A loss: 0.748148, acc: 0.406250]\n",
"5982: [D loss: 0.699514, acc: 0.525391] [A loss: 0.824047, acc: 0.226562]\n",
"5983: [D loss: 0.696712, acc: 0.500000] [A loss: 0.833408, acc: 0.222656]\n",
"5984: [D loss: 0.696445, acc: 0.558594] [A loss: 0.956103, acc: 0.105469]\n",
"5985: [D loss: 0.692688, acc: 0.548828] [A loss: 0.703642, acc: 0.542969]\n",
"5986: [D loss: 0.718443, acc: 0.486328] [A loss: 0.893564, acc: 0.152344]\n",
"5987: [D loss: 0.696195, acc: 0.529297] [A loss: 0.689392, acc: 0.554688]\n",
"5988: [D loss: 0.724120, acc: 0.509766] [A loss: 0.912558, acc: 0.156250]\n",
"5989: [D loss: 0.700693, acc: 0.531250] [A loss: 0.688721, acc: 0.542969]\n",
"5990: [D loss: 0.696388, acc: 0.539062] [A loss: 0.912214, acc: 0.128906]\n",
"5991: [D loss: 0.711978, acc: 0.494141] [A loss: 0.717137, acc: 0.472656]\n",
"5992: [D loss: 0.707787, acc: 0.521484] [A loss: 0.868269, acc: 0.203125]\n",
"5993: [D loss: 0.702516, acc: 0.498047] [A loss: 0.785873, acc: 0.316406]\n",
"5994: [D loss: 0.694240, acc: 0.519531] [A loss: 0.864734, acc: 0.179688]\n",
"5995: [D loss: 0.695651, acc: 0.527344] [A loss: 0.715199, acc: 0.492188]\n",
"5996: [D loss: 0.706618, acc: 0.492188] [A loss: 0.813070, acc: 0.304688]\n",
"5997: [D loss: 0.686543, acc: 0.560547] [A loss: 0.805917, acc: 0.300781]\n",
"5998: [D loss: 0.694544, acc: 0.529297] [A loss: 0.831686, acc: 0.238281]\n",
"5999: [D loss: 0.699695, acc: 0.523438] [A loss: 0.752868, acc: 0.406250]\n",
"6000: [D loss: 0.709634, acc: 0.519531] [A loss: 0.913994, acc: 0.148438]\n",
"6001: [D loss: 0.691570, acc: 0.523438] [A loss: 0.735720, acc: 0.425781]\n",
"6002: [D loss: 0.706813, acc: 0.535156] [A loss: 0.943320, acc: 0.117188]\n",
"6003: [D loss: 0.697756, acc: 0.546875] [A loss: 0.717019, acc: 0.468750]\n",
"6004: [D loss: 0.712511, acc: 0.496094] [A loss: 0.963557, acc: 0.101562]\n",
"6005: [D loss: 0.699323, acc: 0.531250] [A loss: 0.745986, acc: 0.386719]\n",
"6006: [D loss: 0.730724, acc: 0.478516] [A loss: 0.904355, acc: 0.125000]\n",
"6007: [D loss: 0.703222, acc: 0.507812] [A loss: 0.709428, acc: 0.492188]\n",
"6008: [D loss: 0.707035, acc: 0.527344] [A loss: 0.843844, acc: 0.187500]\n",
"6009: [D loss: 0.702153, acc: 0.533203] [A loss: 0.786599, acc: 0.304688]\n",
"6010: [D loss: 0.705763, acc: 0.503906] [A loss: 0.915300, acc: 0.140625]\n",
"6011: [D loss: 0.697941, acc: 0.523438] [A loss: 0.719433, acc: 0.441406]\n",
"6012: [D loss: 0.715135, acc: 0.500000] [A loss: 0.919849, acc: 0.132812]\n",
"6013: [D loss: 0.696604, acc: 0.517578] [A loss: 0.713308, acc: 0.484375]\n",
"6014: [D loss: 0.712226, acc: 0.529297] [A loss: 0.915303, acc: 0.136719]\n",
"6015: [D loss: 0.705093, acc: 0.501953] [A loss: 0.730666, acc: 0.472656]\n",
"6016: [D loss: 0.694380, acc: 0.552734] [A loss: 0.902430, acc: 0.113281]\n",
"6017: [D loss: 0.693992, acc: 0.523438] [A loss: 0.765637, acc: 0.351562]\n",
"6018: [D loss: 0.710814, acc: 0.498047] [A loss: 0.872947, acc: 0.136719]\n",
"6019: [D loss: 0.695774, acc: 0.552734] [A loss: 0.787829, acc: 0.382812]\n",
"6020: [D loss: 0.704790, acc: 0.544922] [A loss: 0.970887, acc: 0.085938]\n",
"6021: [D loss: 0.700149, acc: 0.513672] [A loss: 0.723202, acc: 0.414062]\n",
"6022: [D loss: 0.705432, acc: 0.525391] [A loss: 0.890867, acc: 0.160156]\n",
"6023: [D loss: 0.701137, acc: 0.533203] [A loss: 0.832841, acc: 0.238281]\n",
"6024: [D loss: 0.706917, acc: 0.519531] [A loss: 0.737513, acc: 0.414062]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6025: [D loss: 0.698132, acc: 0.546875] [A loss: 0.972199, acc: 0.093750]\n",
"6026: [D loss: 0.707980, acc: 0.531250] [A loss: 0.728555, acc: 0.472656]\n",
"6027: [D loss: 0.708194, acc: 0.492188] [A loss: 0.930247, acc: 0.082031]\n",
"6028: [D loss: 0.690692, acc: 0.527344] [A loss: 0.709291, acc: 0.445312]\n",
"6029: [D loss: 0.709545, acc: 0.515625] [A loss: 0.839055, acc: 0.253906]\n",
"6030: [D loss: 0.688006, acc: 0.546875] [A loss: 0.746281, acc: 0.417969]\n",
"6031: [D loss: 0.717839, acc: 0.488281] [A loss: 0.824535, acc: 0.265625]\n",
"6032: [D loss: 0.692515, acc: 0.535156] [A loss: 0.883166, acc: 0.152344]\n",
"6033: [D loss: 0.698969, acc: 0.537109] [A loss: 0.785390, acc: 0.343750]\n",
"6034: [D loss: 0.691883, acc: 0.548828] [A loss: 0.902726, acc: 0.132812]\n",
"6035: [D loss: 0.687059, acc: 0.554688] [A loss: 0.760130, acc: 0.402344]\n",
"6036: [D loss: 0.709615, acc: 0.525391] [A loss: 0.928492, acc: 0.128906]\n",
"6037: [D loss: 0.697137, acc: 0.525391] [A loss: 0.760895, acc: 0.367188]\n",
"6038: [D loss: 0.702923, acc: 0.523438] [A loss: 0.939068, acc: 0.074219]\n",
"6039: [D loss: 0.695126, acc: 0.517578] [A loss: 0.661134, acc: 0.613281]\n",
"6040: [D loss: 0.714108, acc: 0.505859] [A loss: 0.948344, acc: 0.128906]\n",
"6041: [D loss: 0.707369, acc: 0.515625] [A loss: 0.688315, acc: 0.515625]\n",
"6042: [D loss: 0.712135, acc: 0.519531] [A loss: 0.870910, acc: 0.171875]\n",
"6043: [D loss: 0.710708, acc: 0.513672] [A loss: 0.744205, acc: 0.414062]\n",
"6044: [D loss: 0.704491, acc: 0.525391] [A loss: 0.896163, acc: 0.175781]\n",
"6045: [D loss: 0.697373, acc: 0.535156] [A loss: 0.744611, acc: 0.390625]\n",
"6046: [D loss: 0.716876, acc: 0.509766] [A loss: 0.892886, acc: 0.187500]\n",
"6047: [D loss: 0.693404, acc: 0.523438] [A loss: 0.759402, acc: 0.367188]\n",
"6048: [D loss: 0.698306, acc: 0.548828] [A loss: 0.856835, acc: 0.183594]\n",
"6049: [D loss: 0.690160, acc: 0.568359] [A loss: 0.778521, acc: 0.308594]\n",
"6050: [D loss: 0.710950, acc: 0.511719] [A loss: 0.866616, acc: 0.167969]\n",
"6051: [D loss: 0.696207, acc: 0.525391] [A loss: 0.730801, acc: 0.433594]\n",
"6052: [D loss: 0.697090, acc: 0.533203] [A loss: 0.867613, acc: 0.156250]\n",
"6053: [D loss: 0.691405, acc: 0.542969] [A loss: 0.766384, acc: 0.332031]\n",
"6054: [D loss: 0.720734, acc: 0.464844] [A loss: 0.913893, acc: 0.144531]\n",
"6055: [D loss: 0.696877, acc: 0.511719] [A loss: 0.741665, acc: 0.437500]\n",
"6056: [D loss: 0.707287, acc: 0.500000] [A loss: 0.864916, acc: 0.183594]\n",
"6057: [D loss: 0.696637, acc: 0.531250] [A loss: 0.759404, acc: 0.355469]\n",
"6058: [D loss: 0.701184, acc: 0.542969] [A loss: 0.958484, acc: 0.105469]\n",
"6059: [D loss: 0.697344, acc: 0.517578] [A loss: 0.803768, acc: 0.289062]\n",
"6060: [D loss: 0.698579, acc: 0.558594] [A loss: 0.807593, acc: 0.253906]\n",
"6061: [D loss: 0.695982, acc: 0.529297] [A loss: 0.765187, acc: 0.382812]\n",
"6062: [D loss: 0.706401, acc: 0.521484] [A loss: 0.883447, acc: 0.171875]\n",
"6063: [D loss: 0.691626, acc: 0.558594] [A loss: 0.839752, acc: 0.246094]\n",
"6064: [D loss: 0.692668, acc: 0.531250] [A loss: 0.783900, acc: 0.304688]\n",
"6065: [D loss: 0.710174, acc: 0.500000] [A loss: 0.851001, acc: 0.187500]\n",
"6066: [D loss: 0.699353, acc: 0.531250] [A loss: 0.913810, acc: 0.164062]\n",
"6067: [D loss: 0.690031, acc: 0.542969] [A loss: 0.780143, acc: 0.320312]\n",
"6068: [D loss: 0.713392, acc: 0.523438] [A loss: 0.898162, acc: 0.148438]\n",
"6069: [D loss: 0.699445, acc: 0.515625] [A loss: 0.771163, acc: 0.351562]\n",
"6070: [D loss: 0.698028, acc: 0.548828] [A loss: 0.959718, acc: 0.132812]\n",
"6071: [D loss: 0.681591, acc: 0.556641] [A loss: 0.702427, acc: 0.507812]\n",
"6072: [D loss: 0.713951, acc: 0.509766] [A loss: 0.902641, acc: 0.128906]\n",
"6073: [D loss: 0.711200, acc: 0.500000] [A loss: 0.683132, acc: 0.582031]\n",
"6074: [D loss: 0.726310, acc: 0.503906] [A loss: 1.010492, acc: 0.074219]\n",
"6075: [D loss: 0.697161, acc: 0.505859] [A loss: 0.717152, acc: 0.464844]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6076: [D loss: 0.718048, acc: 0.503906] [A loss: 0.889065, acc: 0.167969]\n",
"6077: [D loss: 0.695062, acc: 0.511719] [A loss: 0.742938, acc: 0.414062]\n",
"6078: [D loss: 0.699066, acc: 0.533203] [A loss: 0.935194, acc: 0.156250]\n",
"6079: [D loss: 0.691052, acc: 0.541016] [A loss: 0.767364, acc: 0.375000]\n",
"6080: [D loss: 0.706795, acc: 0.509766] [A loss: 0.897472, acc: 0.148438]\n",
"6081: [D loss: 0.697903, acc: 0.533203] [A loss: 0.713709, acc: 0.488281]\n",
"6082: [D loss: 0.704126, acc: 0.513672] [A loss: 0.821905, acc: 0.261719]\n",
"6083: [D loss: 0.701259, acc: 0.519531] [A loss: 0.810083, acc: 0.316406]\n",
"6084: [D loss: 0.714752, acc: 0.486328] [A loss: 0.774111, acc: 0.304688]\n",
"6085: [D loss: 0.714856, acc: 0.500000] [A loss: 0.822657, acc: 0.242188]\n",
"6086: [D loss: 0.715840, acc: 0.498047] [A loss: 0.905344, acc: 0.144531]\n",
"6087: [D loss: 0.693470, acc: 0.529297] [A loss: 0.754155, acc: 0.382812]\n",
"6088: [D loss: 0.697671, acc: 0.527344] [A loss: 0.866792, acc: 0.152344]\n",
"6089: [D loss: 0.682324, acc: 0.554688] [A loss: 0.738129, acc: 0.390625]\n",
"6090: [D loss: 0.700652, acc: 0.523438] [A loss: 0.895516, acc: 0.187500]\n",
"6091: [D loss: 0.699411, acc: 0.519531] [A loss: 0.749356, acc: 0.390625]\n",
"6092: [D loss: 0.692859, acc: 0.531250] [A loss: 0.822334, acc: 0.273438]\n",
"6093: [D loss: 0.687484, acc: 0.529297] [A loss: 0.787798, acc: 0.285156]\n",
"6094: [D loss: 0.698004, acc: 0.529297] [A loss: 0.820274, acc: 0.285156]\n",
"6095: [D loss: 0.706800, acc: 0.525391] [A loss: 0.788401, acc: 0.300781]\n",
"6096: [D loss: 0.707134, acc: 0.525391] [A loss: 0.849997, acc: 0.164062]\n",
"6097: [D loss: 0.695435, acc: 0.523438] [A loss: 0.817216, acc: 0.289062]\n",
"6098: [D loss: 0.708394, acc: 0.500000] [A loss: 0.869022, acc: 0.152344]\n",
"6099: [D loss: 0.686548, acc: 0.529297] [A loss: 0.824357, acc: 0.269531]\n",
"6100: [D loss: 0.706775, acc: 0.513672] [A loss: 0.972570, acc: 0.074219]\n",
"6101: [D loss: 0.719931, acc: 0.494141] [A loss: 0.700298, acc: 0.578125]\n",
"6102: [D loss: 0.718922, acc: 0.525391] [A loss: 1.031158, acc: 0.058594]\n",
"6103: [D loss: 0.690474, acc: 0.539062] [A loss: 0.675484, acc: 0.531250]\n",
"6104: [D loss: 0.753533, acc: 0.490234] [A loss: 0.885830, acc: 0.136719]\n",
"6105: [D loss: 0.687506, acc: 0.539062] [A loss: 0.799279, acc: 0.277344]\n",
"6106: [D loss: 0.698462, acc: 0.537109] [A loss: 0.884570, acc: 0.179688]\n",
"6107: [D loss: 0.693843, acc: 0.529297] [A loss: 0.728712, acc: 0.429688]\n",
"6108: [D loss: 0.711755, acc: 0.525391] [A loss: 0.977511, acc: 0.136719]\n",
"6109: [D loss: 0.697967, acc: 0.521484] [A loss: 0.700579, acc: 0.507812]\n",
"6110: [D loss: 0.718614, acc: 0.511719] [A loss: 0.766996, acc: 0.351562]\n",
"6111: [D loss: 0.696661, acc: 0.550781] [A loss: 0.873722, acc: 0.187500]\n",
"6112: [D loss: 0.702310, acc: 0.521484] [A loss: 0.740499, acc: 0.433594]\n",
"6113: [D loss: 0.693241, acc: 0.527344] [A loss: 0.963549, acc: 0.109375]\n",
"6114: [D loss: 0.697124, acc: 0.529297] [A loss: 0.640207, acc: 0.648438]\n",
"6115: [D loss: 0.730268, acc: 0.513672] [A loss: 0.950388, acc: 0.085938]\n",
"6116: [D loss: 0.701152, acc: 0.501953] [A loss: 0.741996, acc: 0.406250]\n",
"6117: [D loss: 0.718118, acc: 0.541016] [A loss: 0.863271, acc: 0.199219]\n",
"6118: [D loss: 0.686301, acc: 0.560547] [A loss: 0.696867, acc: 0.476562]\n",
"6119: [D loss: 0.707151, acc: 0.548828] [A loss: 0.839550, acc: 0.238281]\n",
"6120: [D loss: 0.701728, acc: 0.517578] [A loss: 0.762736, acc: 0.367188]\n",
"6121: [D loss: 0.713514, acc: 0.496094] [A loss: 0.785933, acc: 0.324219]\n",
"6122: [D loss: 0.697325, acc: 0.496094] [A loss: 0.818997, acc: 0.238281]\n",
"6123: [D loss: 0.707745, acc: 0.505859] [A loss: 0.788962, acc: 0.289062]\n",
"6124: [D loss: 0.700131, acc: 0.525391] [A loss: 0.821923, acc: 0.253906]\n",
"6125: [D loss: 0.693827, acc: 0.505859] [A loss: 0.780625, acc: 0.292969]\n",
"6126: [D loss: 0.710790, acc: 0.500000] [A loss: 0.820615, acc: 0.238281]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6127: [D loss: 0.694073, acc: 0.558594] [A loss: 0.739531, acc: 0.417969]\n",
"6128: [D loss: 0.703888, acc: 0.521484] [A loss: 0.870948, acc: 0.230469]\n",
"6129: [D loss: 0.701851, acc: 0.515625] [A loss: 0.731916, acc: 0.410156]\n",
"6130: [D loss: 0.703911, acc: 0.525391] [A loss: 0.813036, acc: 0.285156]\n",
"6131: [D loss: 0.704546, acc: 0.501953] [A loss: 0.834652, acc: 0.238281]\n",
"6132: [D loss: 0.702992, acc: 0.511719] [A loss: 0.813338, acc: 0.253906]\n",
"6133: [D loss: 0.700549, acc: 0.523438] [A loss: 0.795796, acc: 0.304688]\n",
"6134: [D loss: 0.710397, acc: 0.501953] [A loss: 0.871477, acc: 0.164062]\n",
"6135: [D loss: 0.687104, acc: 0.546875] [A loss: 0.744703, acc: 0.398438]\n",
"6136: [D loss: 0.698415, acc: 0.535156] [A loss: 0.903305, acc: 0.105469]\n",
"6137: [D loss: 0.688872, acc: 0.554688] [A loss: 0.758363, acc: 0.414062]\n",
"6138: [D loss: 0.710917, acc: 0.523438] [A loss: 0.963302, acc: 0.097656]\n",
"6139: [D loss: 0.710448, acc: 0.492188] [A loss: 0.803068, acc: 0.394531]\n",
"6140: [D loss: 0.701670, acc: 0.500000] [A loss: 0.791325, acc: 0.324219]\n",
"6141: [D loss: 0.704315, acc: 0.511719] [A loss: 0.823333, acc: 0.230469]\n",
"6142: [D loss: 0.691147, acc: 0.541016] [A loss: 0.809548, acc: 0.304688]\n",
"6143: [D loss: 0.696730, acc: 0.537109] [A loss: 0.824880, acc: 0.246094]\n",
"6144: [D loss: 0.699441, acc: 0.511719] [A loss: 0.744473, acc: 0.468750]\n",
"6145: [D loss: 0.709080, acc: 0.507812] [A loss: 0.894592, acc: 0.203125]\n",
"6146: [D loss: 0.698617, acc: 0.511719] [A loss: 0.738856, acc: 0.421875]\n",
"6147: [D loss: 0.717093, acc: 0.515625] [A loss: 0.960585, acc: 0.070312]\n",
"6148: [D loss: 0.700122, acc: 0.498047] [A loss: 0.687642, acc: 0.515625]\n",
"6149: [D loss: 0.742689, acc: 0.519531] [A loss: 0.989856, acc: 0.082031]\n",
"6150: [D loss: 0.704787, acc: 0.486328] [A loss: 0.702536, acc: 0.531250]\n",
"6151: [D loss: 0.733139, acc: 0.474609] [A loss: 0.925626, acc: 0.171875]\n",
"6152: [D loss: 0.689510, acc: 0.544922] [A loss: 0.813125, acc: 0.265625]\n",
"6153: [D loss: 0.688756, acc: 0.552734] [A loss: 0.876053, acc: 0.195312]\n",
"6154: [D loss: 0.700700, acc: 0.519531] [A loss: 0.740495, acc: 0.429688]\n",
"6155: [D loss: 0.723132, acc: 0.472656] [A loss: 0.850624, acc: 0.214844]\n",
"6156: [D loss: 0.710217, acc: 0.527344] [A loss: 0.733553, acc: 0.394531]\n",
"6157: [D loss: 0.708072, acc: 0.515625] [A loss: 0.854579, acc: 0.179688]\n",
"6158: [D loss: 0.700694, acc: 0.496094] [A loss: 0.773071, acc: 0.347656]\n",
"6159: [D loss: 0.704463, acc: 0.509766] [A loss: 0.964840, acc: 0.101562]\n",
"6160: [D loss: 0.700614, acc: 0.529297] [A loss: 0.757459, acc: 0.382812]\n",
"6161: [D loss: 0.710377, acc: 0.511719] [A loss: 0.783103, acc: 0.312500]\n",
"6162: [D loss: 0.690071, acc: 0.533203] [A loss: 0.871300, acc: 0.195312]\n",
"6163: [D loss: 0.688162, acc: 0.556641] [A loss: 0.756020, acc: 0.402344]\n",
"6164: [D loss: 0.711944, acc: 0.515625] [A loss: 0.894421, acc: 0.164062]\n",
"6165: [D loss: 0.704687, acc: 0.523438] [A loss: 0.769932, acc: 0.402344]\n",
"6166: [D loss: 0.704154, acc: 0.509766] [A loss: 0.896865, acc: 0.136719]\n",
"6167: [D loss: 0.709904, acc: 0.500000] [A loss: 0.760324, acc: 0.390625]\n",
"6168: [D loss: 0.707189, acc: 0.544922] [A loss: 0.852519, acc: 0.222656]\n",
"6169: [D loss: 0.693639, acc: 0.546875] [A loss: 0.827541, acc: 0.234375]\n",
"6170: [D loss: 0.703923, acc: 0.541016] [A loss: 0.894438, acc: 0.152344]\n",
"6171: [D loss: 0.698230, acc: 0.513672] [A loss: 0.746834, acc: 0.371094]\n",
"6172: [D loss: 0.715532, acc: 0.494141] [A loss: 0.999289, acc: 0.066406]\n",
"6173: [D loss: 0.710702, acc: 0.478516] [A loss: 0.661555, acc: 0.644531]\n",
"6174: [D loss: 0.716100, acc: 0.492188] [A loss: 0.889836, acc: 0.125000]\n",
"6175: [D loss: 0.693027, acc: 0.550781] [A loss: 0.724354, acc: 0.421875]\n",
"6176: [D loss: 0.716583, acc: 0.523438] [A loss: 0.934246, acc: 0.109375]\n",
"6177: [D loss: 0.692965, acc: 0.537109] [A loss: 0.689957, acc: 0.550781]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6178: [D loss: 0.715042, acc: 0.507812] [A loss: 0.881565, acc: 0.132812]\n",
"6179: [D loss: 0.693896, acc: 0.546875] [A loss: 0.714742, acc: 0.468750]\n",
"6180: [D loss: 0.712248, acc: 0.515625] [A loss: 0.874752, acc: 0.187500]\n",
"6181: [D loss: 0.684628, acc: 0.568359] [A loss: 0.751124, acc: 0.390625]\n",
"6182: [D loss: 0.701607, acc: 0.511719] [A loss: 0.933822, acc: 0.097656]\n",
"6183: [D loss: 0.704425, acc: 0.492188] [A loss: 0.753433, acc: 0.363281]\n",
"6184: [D loss: 0.706406, acc: 0.517578] [A loss: 0.856888, acc: 0.207031]\n",
"6185: [D loss: 0.698404, acc: 0.513672] [A loss: 0.737211, acc: 0.410156]\n",
"6186: [D loss: 0.711728, acc: 0.505859] [A loss: 0.979681, acc: 0.097656]\n",
"6187: [D loss: 0.700480, acc: 0.523438] [A loss: 0.688982, acc: 0.558594]\n",
"6188: [D loss: 0.712498, acc: 0.529297] [A loss: 0.810983, acc: 0.257812]\n",
"6189: [D loss: 0.704558, acc: 0.482422] [A loss: 0.842703, acc: 0.195312]\n",
"6190: [D loss: 0.696103, acc: 0.529297] [A loss: 0.796633, acc: 0.363281]\n",
"6191: [D loss: 0.693251, acc: 0.544922] [A loss: 0.860227, acc: 0.218750]\n",
"6192: [D loss: 0.703129, acc: 0.519531] [A loss: 0.760851, acc: 0.308594]\n",
"6193: [D loss: 0.705581, acc: 0.519531] [A loss: 0.823985, acc: 0.238281]\n",
"6194: [D loss: 0.700801, acc: 0.523438] [A loss: 0.716618, acc: 0.484375]\n",
"6195: [D loss: 0.712112, acc: 0.503906] [A loss: 0.858178, acc: 0.195312]\n",
"6196: [D loss: 0.691842, acc: 0.552734] [A loss: 0.743066, acc: 0.386719]\n",
"6197: [D loss: 0.702458, acc: 0.544922] [A loss: 0.800817, acc: 0.343750]\n",
"6198: [D loss: 0.702729, acc: 0.517578] [A loss: 0.881115, acc: 0.207031]\n",
"6199: [D loss: 0.693620, acc: 0.531250] [A loss: 0.778385, acc: 0.347656]\n",
"6200: [D loss: 0.709481, acc: 0.511719] [A loss: 1.039980, acc: 0.046875]\n",
"6201: [D loss: 0.681403, acc: 0.572266] [A loss: 0.667355, acc: 0.605469]\n",
"6202: [D loss: 0.717525, acc: 0.513672] [A loss: 0.915209, acc: 0.140625]\n",
"6203: [D loss: 0.704462, acc: 0.501953] [A loss: 0.741719, acc: 0.414062]\n",
"6204: [D loss: 0.692932, acc: 0.554688] [A loss: 0.860215, acc: 0.210938]\n",
"6205: [D loss: 0.697262, acc: 0.525391] [A loss: 0.672314, acc: 0.566406]\n",
"6206: [D loss: 0.705712, acc: 0.519531] [A loss: 0.836366, acc: 0.218750]\n",
"6207: [D loss: 0.700941, acc: 0.511719] [A loss: 0.745023, acc: 0.414062]\n",
"6208: [D loss: 0.697338, acc: 0.542969] [A loss: 0.835600, acc: 0.257812]\n",
"6209: [D loss: 0.690711, acc: 0.560547] [A loss: 0.876726, acc: 0.226562]\n",
"6210: [D loss: 0.712084, acc: 0.490234] [A loss: 0.777034, acc: 0.304688]\n",
"6211: [D loss: 0.708552, acc: 0.501953] [A loss: 0.850559, acc: 0.187500]\n",
"6212: [D loss: 0.691172, acc: 0.546875] [A loss: 0.765530, acc: 0.324219]\n",
"6213: [D loss: 0.700373, acc: 0.513672] [A loss: 0.826768, acc: 0.222656]\n",
"6214: [D loss: 0.702960, acc: 0.503906] [A loss: 0.805885, acc: 0.281250]\n",
"6215: [D loss: 0.684003, acc: 0.558594] [A loss: 0.823335, acc: 0.289062]\n",
"6216: [D loss: 0.700976, acc: 0.533203] [A loss: 0.800508, acc: 0.308594]\n",
"6217: [D loss: 0.715143, acc: 0.503906] [A loss: 0.954451, acc: 0.113281]\n",
"6218: [D loss: 0.698721, acc: 0.519531] [A loss: 0.684608, acc: 0.519531]\n",
"6219: [D loss: 0.696496, acc: 0.519531] [A loss: 0.953570, acc: 0.136719]\n",
"6220: [D loss: 0.705950, acc: 0.513672] [A loss: 0.759406, acc: 0.347656]\n",
"6221: [D loss: 0.697592, acc: 0.531250] [A loss: 0.918569, acc: 0.144531]\n",
"6222: [D loss: 0.700760, acc: 0.537109] [A loss: 0.783025, acc: 0.328125]\n",
"6223: [D loss: 0.701081, acc: 0.537109] [A loss: 0.846047, acc: 0.234375]\n",
"6224: [D loss: 0.710468, acc: 0.484375] [A loss: 0.729867, acc: 0.394531]\n",
"6225: [D loss: 0.685943, acc: 0.552734] [A loss: 0.893977, acc: 0.175781]\n",
"6226: [D loss: 0.693858, acc: 0.529297] [A loss: 0.760994, acc: 0.371094]\n",
"6227: [D loss: 0.695371, acc: 0.546875] [A loss: 0.782022, acc: 0.316406]\n",
"6228: [D loss: 0.708173, acc: 0.503906] [A loss: 0.962685, acc: 0.117188]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6229: [D loss: 0.704064, acc: 0.513672] [A loss: 0.684796, acc: 0.574219]\n",
"6230: [D loss: 0.720999, acc: 0.517578] [A loss: 0.923535, acc: 0.109375]\n",
"6231: [D loss: 0.718237, acc: 0.480469] [A loss: 0.756474, acc: 0.421875]\n",
"6232: [D loss: 0.709763, acc: 0.478516] [A loss: 0.828841, acc: 0.250000]\n",
"6233: [D loss: 0.689749, acc: 0.539062] [A loss: 0.746763, acc: 0.398438]\n",
"6234: [D loss: 0.697061, acc: 0.533203] [A loss: 0.897623, acc: 0.144531]\n",
"6235: [D loss: 0.709600, acc: 0.488281] [A loss: 0.694594, acc: 0.500000]\n",
"6236: [D loss: 0.718826, acc: 0.503906] [A loss: 0.905272, acc: 0.148438]\n",
"6237: [D loss: 0.709716, acc: 0.490234] [A loss: 0.722168, acc: 0.441406]\n",
"6238: [D loss: 0.712285, acc: 0.513672] [A loss: 0.850856, acc: 0.187500]\n",
"6239: [D loss: 0.687832, acc: 0.529297] [A loss: 0.762709, acc: 0.355469]\n",
"6240: [D loss: 0.697626, acc: 0.542969] [A loss: 0.828419, acc: 0.242188]\n",
"6241: [D loss: 0.681350, acc: 0.568359] [A loss: 0.772254, acc: 0.320312]\n",
"6242: [D loss: 0.712364, acc: 0.511719] [A loss: 0.848560, acc: 0.210938]\n",
"6243: [D loss: 0.700195, acc: 0.503906] [A loss: 0.745018, acc: 0.386719]\n",
"6244: [D loss: 0.704424, acc: 0.513672] [A loss: 0.900886, acc: 0.171875]\n",
"6245: [D loss: 0.698800, acc: 0.507812] [A loss: 0.738364, acc: 0.445312]\n",
"6246: [D loss: 0.700354, acc: 0.527344] [A loss: 0.833131, acc: 0.261719]\n",
"6247: [D loss: 0.709827, acc: 0.507812] [A loss: 0.773481, acc: 0.351562]\n",
"6248: [D loss: 0.701495, acc: 0.544922] [A loss: 0.981388, acc: 0.101562]\n",
"6249: [D loss: 0.701991, acc: 0.501953] [A loss: 0.674889, acc: 0.566406]\n",
"6250: [D loss: 0.719755, acc: 0.535156] [A loss: 0.930231, acc: 0.117188]\n",
"6251: [D loss: 0.698169, acc: 0.525391] [A loss: 0.679903, acc: 0.550781]\n",
"6252: [D loss: 0.725741, acc: 0.507812] [A loss: 1.046218, acc: 0.066406]\n",
"6253: [D loss: 0.709996, acc: 0.533203] [A loss: 0.690451, acc: 0.558594]\n",
"6254: [D loss: 0.730932, acc: 0.494141] [A loss: 0.833742, acc: 0.230469]\n",
"6255: [D loss: 0.687557, acc: 0.542969] [A loss: 0.794906, acc: 0.320312]\n",
"6256: [D loss: 0.690487, acc: 0.562500] [A loss: 0.734136, acc: 0.417969]\n",
"6257: [D loss: 0.707515, acc: 0.531250] [A loss: 0.922629, acc: 0.113281]\n",
"6258: [D loss: 0.686674, acc: 0.558594] [A loss: 0.699540, acc: 0.519531]\n",
"6259: [D loss: 0.701397, acc: 0.552734] [A loss: 0.880731, acc: 0.187500]\n",
"6260: [D loss: 0.678724, acc: 0.576172] [A loss: 0.722761, acc: 0.425781]\n",
"6261: [D loss: 0.723807, acc: 0.468750] [A loss: 0.854968, acc: 0.222656]\n",
"6262: [D loss: 0.695187, acc: 0.531250] [A loss: 0.741341, acc: 0.425781]\n",
"6263: [D loss: 0.698012, acc: 0.539062] [A loss: 0.831920, acc: 0.246094]\n",
"6264: [D loss: 0.692328, acc: 0.541016] [A loss: 0.803008, acc: 0.300781]\n",
"6265: [D loss: 0.697624, acc: 0.550781] [A loss: 0.805649, acc: 0.273438]\n",
"6266: [D loss: 0.706644, acc: 0.494141] [A loss: 0.760633, acc: 0.386719]\n",
"6267: [D loss: 0.704596, acc: 0.523438] [A loss: 0.921061, acc: 0.152344]\n",
"6268: [D loss: 0.692814, acc: 0.523438] [A loss: 0.724665, acc: 0.433594]\n",
"6269: [D loss: 0.702289, acc: 0.507812] [A loss: 0.790324, acc: 0.308594]\n",
"6270: [D loss: 0.701565, acc: 0.517578] [A loss: 0.798039, acc: 0.328125]\n",
"6271: [D loss: 0.698631, acc: 0.539062] [A loss: 0.816505, acc: 0.242188]\n",
"6272: [D loss: 0.694766, acc: 0.533203] [A loss: 0.840132, acc: 0.261719]\n",
"6273: [D loss: 0.691808, acc: 0.552734] [A loss: 0.773458, acc: 0.347656]\n",
"6274: [D loss: 0.703674, acc: 0.500000] [A loss: 0.893737, acc: 0.164062]\n",
"6275: [D loss: 0.696945, acc: 0.511719] [A loss: 0.775193, acc: 0.343750]\n",
"6276: [D loss: 0.714611, acc: 0.492188] [A loss: 0.786009, acc: 0.320312]\n",
"6277: [D loss: 0.703378, acc: 0.525391] [A loss: 0.832719, acc: 0.214844]\n",
"6278: [D loss: 0.682294, acc: 0.560547] [A loss: 0.787562, acc: 0.328125]\n",
"6279: [D loss: 0.700404, acc: 0.531250] [A loss: 0.899797, acc: 0.148438]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6280: [D loss: 0.699996, acc: 0.533203] [A loss: 0.762454, acc: 0.441406]\n",
"6281: [D loss: 0.704656, acc: 0.521484] [A loss: 0.892382, acc: 0.187500]\n",
"6282: [D loss: 0.698969, acc: 0.513672] [A loss: 0.716287, acc: 0.441406]\n",
"6283: [D loss: 0.720662, acc: 0.484375] [A loss: 0.919583, acc: 0.128906]\n",
"6284: [D loss: 0.689289, acc: 0.533203] [A loss: 0.682922, acc: 0.546875]\n",
"6285: [D loss: 0.720047, acc: 0.501953] [A loss: 0.898376, acc: 0.183594]\n",
"6286: [D loss: 0.701980, acc: 0.527344] [A loss: 0.716658, acc: 0.484375]\n",
"6287: [D loss: 0.721396, acc: 0.488281] [A loss: 0.859568, acc: 0.230469]\n",
"6288: [D loss: 0.700901, acc: 0.492188] [A loss: 0.786201, acc: 0.324219]\n",
"6289: [D loss: 0.714692, acc: 0.500000] [A loss: 0.850616, acc: 0.218750]\n",
"6290: [D loss: 0.698408, acc: 0.519531] [A loss: 0.825010, acc: 0.253906]\n",
"6291: [D loss: 0.702502, acc: 0.521484] [A loss: 0.928835, acc: 0.121094]\n",
"6292: [D loss: 0.701684, acc: 0.541016] [A loss: 0.704885, acc: 0.496094]\n",
"6293: [D loss: 0.704136, acc: 0.529297] [A loss: 0.927012, acc: 0.101562]\n",
"6294: [D loss: 0.701638, acc: 0.523438] [A loss: 0.743142, acc: 0.445312]\n",
"6295: [D loss: 0.712991, acc: 0.511719] [A loss: 0.821800, acc: 0.257812]\n",
"6296: [D loss: 0.694022, acc: 0.544922] [A loss: 0.797582, acc: 0.304688]\n",
"6297: [D loss: 0.692827, acc: 0.544922] [A loss: 0.793935, acc: 0.339844]\n",
"6298: [D loss: 0.707857, acc: 0.503906] [A loss: 0.891949, acc: 0.191406]\n",
"6299: [D loss: 0.697746, acc: 0.525391] [A loss: 0.725541, acc: 0.417969]\n",
"6300: [D loss: 0.715863, acc: 0.496094] [A loss: 0.904316, acc: 0.125000]\n",
"6301: [D loss: 0.705542, acc: 0.488281] [A loss: 0.729354, acc: 0.429688]\n",
"6302: [D loss: 0.703854, acc: 0.527344] [A loss: 0.854691, acc: 0.191406]\n",
"6303: [D loss: 0.699806, acc: 0.505859] [A loss: 0.769923, acc: 0.343750]\n",
"6304: [D loss: 0.691118, acc: 0.542969] [A loss: 0.789930, acc: 0.339844]\n",
"6305: [D loss: 0.694644, acc: 0.529297] [A loss: 0.987894, acc: 0.105469]\n",
"6306: [D loss: 0.694749, acc: 0.537109] [A loss: 0.721224, acc: 0.425781]\n",
"6307: [D loss: 0.712045, acc: 0.501953] [A loss: 0.944860, acc: 0.121094]\n",
"6308: [D loss: 0.714323, acc: 0.494141] [A loss: 0.744761, acc: 0.347656]\n",
"6309: [D loss: 0.702369, acc: 0.511719] [A loss: 0.832081, acc: 0.250000]\n",
"6310: [D loss: 0.692270, acc: 0.519531] [A loss: 0.803082, acc: 0.277344]\n",
"6311: [D loss: 0.701229, acc: 0.513672] [A loss: 0.887262, acc: 0.207031]\n",
"6312: [D loss: 0.684509, acc: 0.548828] [A loss: 0.775358, acc: 0.335938]\n",
"6313: [D loss: 0.692327, acc: 0.523438] [A loss: 0.859450, acc: 0.222656]\n",
"6314: [D loss: 0.706090, acc: 0.525391] [A loss: 0.703646, acc: 0.468750]\n",
"6315: [D loss: 0.704648, acc: 0.505859] [A loss: 0.874821, acc: 0.167969]\n",
"6316: [D loss: 0.693892, acc: 0.544922] [A loss: 0.743600, acc: 0.378906]\n",
"6317: [D loss: 0.708457, acc: 0.519531] [A loss: 0.860453, acc: 0.207031]\n",
"6318: [D loss: 0.701113, acc: 0.515625] [A loss: 0.792877, acc: 0.339844]\n",
"6319: [D loss: 0.694082, acc: 0.527344] [A loss: 0.857731, acc: 0.238281]\n",
"6320: [D loss: 0.716671, acc: 0.496094] [A loss: 0.756958, acc: 0.386719]\n",
"6321: [D loss: 0.714690, acc: 0.492188] [A loss: 0.852724, acc: 0.191406]\n",
"6322: [D loss: 0.703996, acc: 0.537109] [A loss: 0.833500, acc: 0.246094]\n",
"6323: [D loss: 0.708233, acc: 0.503906] [A loss: 0.800617, acc: 0.328125]\n",
"6324: [D loss: 0.703120, acc: 0.517578] [A loss: 0.799558, acc: 0.285156]\n",
"6325: [D loss: 0.693617, acc: 0.537109] [A loss: 0.753548, acc: 0.386719]\n",
"6326: [D loss: 0.712201, acc: 0.498047] [A loss: 0.987120, acc: 0.171875]\n",
"6327: [D loss: 0.692502, acc: 0.541016] [A loss: 0.721413, acc: 0.480469]\n",
"6328: [D loss: 0.741144, acc: 0.468750] [A loss: 0.991551, acc: 0.101562]\n",
"6329: [D loss: 0.697736, acc: 0.529297] [A loss: 0.697293, acc: 0.531250]\n",
"6330: [D loss: 0.726294, acc: 0.484375] [A loss: 0.879756, acc: 0.164062]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6331: [D loss: 0.686387, acc: 0.546875] [A loss: 0.733272, acc: 0.425781]\n",
"6332: [D loss: 0.728072, acc: 0.496094] [A loss: 0.937724, acc: 0.121094]\n",
"6333: [D loss: 0.707982, acc: 0.503906] [A loss: 0.715603, acc: 0.464844]\n",
"6334: [D loss: 0.719931, acc: 0.503906] [A loss: 0.860309, acc: 0.187500]\n",
"6335: [D loss: 0.695641, acc: 0.531250] [A loss: 0.799892, acc: 0.281250]\n",
"6336: [D loss: 0.702332, acc: 0.519531] [A loss: 0.797031, acc: 0.296875]\n",
"6337: [D loss: 0.695256, acc: 0.523438] [A loss: 0.795563, acc: 0.289062]\n",
"6338: [D loss: 0.700143, acc: 0.525391] [A loss: 0.812713, acc: 0.296875]\n",
"6339: [D loss: 0.701752, acc: 0.535156] [A loss: 0.746281, acc: 0.410156]\n",
"6340: [D loss: 0.716543, acc: 0.521484] [A loss: 0.965201, acc: 0.070312]\n",
"6341: [D loss: 0.708057, acc: 0.498047] [A loss: 0.631989, acc: 0.675781]\n",
"6342: [D loss: 0.743838, acc: 0.496094] [A loss: 0.958178, acc: 0.078125]\n",
"6343: [D loss: 0.696589, acc: 0.544922] [A loss: 0.721659, acc: 0.441406]\n",
"6344: [D loss: 0.713969, acc: 0.509766] [A loss: 0.849143, acc: 0.195312]\n",
"6345: [D loss: 0.709372, acc: 0.501953] [A loss: 0.845975, acc: 0.183594]\n",
"6346: [D loss: 0.723717, acc: 0.478516] [A loss: 0.821868, acc: 0.238281]\n",
"6347: [D loss: 0.688082, acc: 0.566406] [A loss: 0.733543, acc: 0.425781]\n",
"6348: [D loss: 0.735903, acc: 0.468750] [A loss: 0.859574, acc: 0.242188]\n",
"6349: [D loss: 0.704374, acc: 0.488281] [A loss: 0.858400, acc: 0.226562]\n",
"6350: [D loss: 0.695439, acc: 0.558594] [A loss: 0.722520, acc: 0.457031]\n",
"6351: [D loss: 0.709096, acc: 0.529297] [A loss: 0.837251, acc: 0.230469]\n",
"6352: [D loss: 0.691173, acc: 0.544922] [A loss: 0.720015, acc: 0.472656]\n",
"6353: [D loss: 0.704515, acc: 0.529297] [A loss: 0.841803, acc: 0.167969]\n",
"6354: [D loss: 0.682120, acc: 0.576172] [A loss: 0.766813, acc: 0.390625]\n",
"6355: [D loss: 0.717932, acc: 0.500000] [A loss: 0.894050, acc: 0.171875]\n",
"6356: [D loss: 0.698894, acc: 0.501953] [A loss: 0.823907, acc: 0.242188]\n",
"6357: [D loss: 0.697089, acc: 0.558594] [A loss: 0.825973, acc: 0.238281]\n",
"6358: [D loss: 0.696650, acc: 0.541016] [A loss: 0.879090, acc: 0.171875]\n",
"6359: [D loss: 0.707738, acc: 0.511719] [A loss: 0.789578, acc: 0.351562]\n",
"6360: [D loss: 0.706503, acc: 0.501953] [A loss: 0.828473, acc: 0.277344]\n",
"6361: [D loss: 0.697946, acc: 0.521484] [A loss: 0.783086, acc: 0.285156]\n",
"6362: [D loss: 0.709675, acc: 0.505859] [A loss: 0.796236, acc: 0.277344]\n",
"6363: [D loss: 0.700243, acc: 0.513672] [A loss: 0.769343, acc: 0.351562]\n",
"6364: [D loss: 0.718165, acc: 0.517578] [A loss: 1.062001, acc: 0.062500]\n",
"6365: [D loss: 0.698067, acc: 0.552734] [A loss: 0.695358, acc: 0.523438]\n",
"6366: [D loss: 0.706522, acc: 0.527344] [A loss: 0.877618, acc: 0.187500]\n",
"6367: [D loss: 0.703199, acc: 0.527344] [A loss: 0.717248, acc: 0.460938]\n",
"6368: [D loss: 0.714942, acc: 0.519531] [A loss: 0.886869, acc: 0.156250]\n",
"6369: [D loss: 0.694437, acc: 0.537109] [A loss: 0.720692, acc: 0.468750]\n",
"6370: [D loss: 0.716839, acc: 0.496094] [A loss: 0.854008, acc: 0.187500]\n",
"6371: [D loss: 0.694247, acc: 0.527344] [A loss: 0.773580, acc: 0.339844]\n",
"6372: [D loss: 0.706585, acc: 0.480469] [A loss: 0.843468, acc: 0.218750]\n",
"6373: [D loss: 0.695245, acc: 0.531250] [A loss: 0.763924, acc: 0.339844]\n",
"6374: [D loss: 0.705929, acc: 0.517578] [A loss: 1.011829, acc: 0.105469]\n",
"6375: [D loss: 0.704081, acc: 0.525391] [A loss: 0.702631, acc: 0.492188]\n",
"6376: [D loss: 0.737911, acc: 0.500000] [A loss: 0.908113, acc: 0.144531]\n",
"6377: [D loss: 0.708666, acc: 0.494141] [A loss: 0.715836, acc: 0.453125]\n",
"6378: [D loss: 0.714104, acc: 0.515625] [A loss: 0.860484, acc: 0.171875]\n",
"6379: [D loss: 0.690636, acc: 0.513672] [A loss: 0.718778, acc: 0.449219]\n",
"6380: [D loss: 0.727075, acc: 0.498047] [A loss: 0.887581, acc: 0.152344]\n",
"6381: [D loss: 0.707070, acc: 0.511719] [A loss: 0.725548, acc: 0.437500]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6382: [D loss: 0.720190, acc: 0.513672] [A loss: 0.831496, acc: 0.226562]\n",
"6383: [D loss: 0.708131, acc: 0.484375] [A loss: 0.868543, acc: 0.234375]\n",
"6384: [D loss: 0.718321, acc: 0.474609] [A loss: 0.812031, acc: 0.281250]\n",
"6385: [D loss: 0.700149, acc: 0.515625] [A loss: 0.776751, acc: 0.316406]\n",
"6386: [D loss: 0.701451, acc: 0.523438] [A loss: 0.874871, acc: 0.179688]\n",
"6387: [D loss: 0.688870, acc: 0.535156] [A loss: 0.796045, acc: 0.308594]\n",
"6388: [D loss: 0.691694, acc: 0.521484] [A loss: 0.779015, acc: 0.320312]\n",
"6389: [D loss: 0.715413, acc: 0.482422] [A loss: 0.822088, acc: 0.281250]\n",
"6390: [D loss: 0.701298, acc: 0.509766] [A loss: 0.753965, acc: 0.410156]\n",
"6391: [D loss: 0.718092, acc: 0.515625] [A loss: 0.857252, acc: 0.160156]\n",
"6392: [D loss: 0.701136, acc: 0.527344] [A loss: 0.844965, acc: 0.195312]\n",
"6393: [D loss: 0.685144, acc: 0.548828] [A loss: 0.794065, acc: 0.269531]\n",
"6394: [D loss: 0.714957, acc: 0.494141] [A loss: 0.895696, acc: 0.128906]\n",
"6395: [D loss: 0.686245, acc: 0.542969] [A loss: 0.770235, acc: 0.324219]\n",
"6396: [D loss: 0.698009, acc: 0.537109] [A loss: 0.855479, acc: 0.214844]\n",
"6397: [D loss: 0.713135, acc: 0.490234] [A loss: 0.788332, acc: 0.300781]\n",
"6398: [D loss: 0.711443, acc: 0.533203] [A loss: 0.800759, acc: 0.289062]\n",
"6399: [D loss: 0.711109, acc: 0.519531] [A loss: 0.821593, acc: 0.292969]\n",
"6400: [D loss: 0.703353, acc: 0.515625] [A loss: 0.849698, acc: 0.191406]\n",
"6401: [D loss: 0.685889, acc: 0.562500] [A loss: 0.766386, acc: 0.335938]\n",
"6402: [D loss: 0.708173, acc: 0.515625] [A loss: 0.828961, acc: 0.273438]\n",
"6403: [D loss: 0.706629, acc: 0.513672] [A loss: 0.853470, acc: 0.195312]\n",
"6404: [D loss: 0.687896, acc: 0.564453] [A loss: 0.881040, acc: 0.183594]\n",
"6405: [D loss: 0.705257, acc: 0.507812] [A loss: 0.745351, acc: 0.398438]\n",
"6406: [D loss: 0.713534, acc: 0.500000] [A loss: 0.946575, acc: 0.093750]\n",
"6407: [D loss: 0.692250, acc: 0.544922] [A loss: 0.762388, acc: 0.382812]\n",
"6408: [D loss: 0.704838, acc: 0.527344] [A loss: 0.873011, acc: 0.148438]\n",
"6409: [D loss: 0.701007, acc: 0.494141] [A loss: 0.689887, acc: 0.511719]\n",
"6410: [D loss: 0.726876, acc: 0.523438] [A loss: 0.879760, acc: 0.175781]\n",
"6411: [D loss: 0.687836, acc: 0.535156] [A loss: 0.772184, acc: 0.347656]\n",
"6412: [D loss: 0.707902, acc: 0.484375] [A loss: 0.814214, acc: 0.199219]\n",
"6413: [D loss: 0.710303, acc: 0.501953] [A loss: 0.877867, acc: 0.207031]\n",
"6414: [D loss: 0.701113, acc: 0.503906] [A loss: 0.833704, acc: 0.257812]\n",
"6415: [D loss: 0.706328, acc: 0.500000] [A loss: 0.820862, acc: 0.226562]\n",
"6416: [D loss: 0.700268, acc: 0.542969] [A loss: 0.785962, acc: 0.304688]\n",
"6417: [D loss: 0.695374, acc: 0.548828] [A loss: 0.837738, acc: 0.234375]\n",
"6418: [D loss: 0.696172, acc: 0.521484] [A loss: 0.729313, acc: 0.453125]\n",
"6419: [D loss: 0.708523, acc: 0.517578] [A loss: 1.027176, acc: 0.121094]\n",
"6420: [D loss: 0.699674, acc: 0.525391] [A loss: 0.715742, acc: 0.484375]\n",
"6421: [D loss: 0.730881, acc: 0.480469] [A loss: 0.859993, acc: 0.218750]\n",
"6422: [D loss: 0.692178, acc: 0.535156] [A loss: 0.702170, acc: 0.570312]\n",
"6423: [D loss: 0.697408, acc: 0.513672] [A loss: 0.909469, acc: 0.195312]\n",
"6424: [D loss: 0.709908, acc: 0.513672] [A loss: 0.743668, acc: 0.410156]\n",
"6425: [D loss: 0.700357, acc: 0.542969] [A loss: 0.873620, acc: 0.156250]\n",
"6426: [D loss: 0.696686, acc: 0.546875] [A loss: 0.787868, acc: 0.328125]\n",
"6427: [D loss: 0.692765, acc: 0.515625] [A loss: 0.789525, acc: 0.324219]\n",
"6428: [D loss: 0.703718, acc: 0.523438] [A loss: 0.841080, acc: 0.246094]\n",
"6429: [D loss: 0.692339, acc: 0.523438] [A loss: 0.692232, acc: 0.523438]\n",
"6430: [D loss: 0.709779, acc: 0.531250] [A loss: 0.977249, acc: 0.113281]\n",
"6431: [D loss: 0.699990, acc: 0.501953] [A loss: 0.703444, acc: 0.519531]\n",
"6432: [D loss: 0.723787, acc: 0.509766] [A loss: 0.847062, acc: 0.250000]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6433: [D loss: 0.690065, acc: 0.537109] [A loss: 0.898151, acc: 0.132812]\n",
"6434: [D loss: 0.685972, acc: 0.566406] [A loss: 0.714077, acc: 0.484375]\n",
"6435: [D loss: 0.716884, acc: 0.527344] [A loss: 0.877138, acc: 0.179688]\n",
"6436: [D loss: 0.693817, acc: 0.542969] [A loss: 0.701198, acc: 0.546875]\n",
"6437: [D loss: 0.710186, acc: 0.529297] [A loss: 0.858565, acc: 0.226562]\n",
"6438: [D loss: 0.686912, acc: 0.542969] [A loss: 0.743236, acc: 0.367188]\n",
"6439: [D loss: 0.707527, acc: 0.505859] [A loss: 0.843737, acc: 0.226562]\n",
"6440: [D loss: 0.691488, acc: 0.533203] [A loss: 0.814264, acc: 0.250000]\n",
"6441: [D loss: 0.701851, acc: 0.531250] [A loss: 0.736458, acc: 0.441406]\n",
"6442: [D loss: 0.709095, acc: 0.511719] [A loss: 1.067324, acc: 0.062500]\n",
"6443: [D loss: 0.708574, acc: 0.500000] [A loss: 0.745743, acc: 0.386719]\n",
"6444: [D loss: 0.708161, acc: 0.498047] [A loss: 0.821587, acc: 0.226562]\n",
"6445: [D loss: 0.719410, acc: 0.478516] [A loss: 0.797929, acc: 0.300781]\n",
"6446: [D loss: 0.708161, acc: 0.501953] [A loss: 0.743973, acc: 0.445312]\n",
"6447: [D loss: 0.712450, acc: 0.515625] [A loss: 0.885240, acc: 0.125000]\n",
"6448: [D loss: 0.709053, acc: 0.521484] [A loss: 0.795201, acc: 0.304688]\n",
"6449: [D loss: 0.709781, acc: 0.492188] [A loss: 0.858044, acc: 0.214844]\n",
"6450: [D loss: 0.683302, acc: 0.554688] [A loss: 0.728159, acc: 0.460938]\n",
"6451: [D loss: 0.699960, acc: 0.531250] [A loss: 0.927420, acc: 0.136719]\n",
"6452: [D loss: 0.695534, acc: 0.544922] [A loss: 0.762261, acc: 0.378906]\n",
"6453: [D loss: 0.707372, acc: 0.492188] [A loss: 0.845155, acc: 0.230469]\n",
"6454: [D loss: 0.690687, acc: 0.552734] [A loss: 0.729406, acc: 0.433594]\n",
"6455: [D loss: 0.695145, acc: 0.558594] [A loss: 0.849816, acc: 0.167969]\n",
"6456: [D loss: 0.684003, acc: 0.576172] [A loss: 0.762873, acc: 0.355469]\n",
"6457: [D loss: 0.714074, acc: 0.542969] [A loss: 0.964398, acc: 0.117188]\n",
"6458: [D loss: 0.694722, acc: 0.535156] [A loss: 0.711251, acc: 0.464844]\n",
"6459: [D loss: 0.709282, acc: 0.519531] [A loss: 0.856241, acc: 0.214844]\n",
"6460: [D loss: 0.706881, acc: 0.519531] [A loss: 0.779999, acc: 0.316406]\n",
"6461: [D loss: 0.708206, acc: 0.500000] [A loss: 0.872254, acc: 0.199219]\n",
"6462: [D loss: 0.691493, acc: 0.539062] [A loss: 0.757506, acc: 0.378906]\n",
"6463: [D loss: 0.703685, acc: 0.517578] [A loss: 0.896650, acc: 0.195312]\n",
"6464: [D loss: 0.698379, acc: 0.509766] [A loss: 0.854731, acc: 0.253906]\n",
"6465: [D loss: 0.698430, acc: 0.501953] [A loss: 0.713930, acc: 0.472656]\n",
"6466: [D loss: 0.699608, acc: 0.527344] [A loss: 0.960393, acc: 0.097656]\n",
"6467: [D loss: 0.708886, acc: 0.525391] [A loss: 0.857033, acc: 0.179688]\n",
"6468: [D loss: 0.697060, acc: 0.537109] [A loss: 0.773164, acc: 0.332031]\n",
"6469: [D loss: 0.700407, acc: 0.541016] [A loss: 0.820402, acc: 0.253906]\n",
"6470: [D loss: 0.690931, acc: 0.554688] [A loss: 0.831466, acc: 0.214844]\n",
"6471: [D loss: 0.702817, acc: 0.507812] [A loss: 0.826179, acc: 0.308594]\n",
"6472: [D loss: 0.709339, acc: 0.482422] [A loss: 0.806820, acc: 0.250000]\n",
"6473: [D loss: 0.709910, acc: 0.505859] [A loss: 0.839605, acc: 0.242188]\n",
"6474: [D loss: 0.706698, acc: 0.519531] [A loss: 0.811856, acc: 0.265625]\n",
"6475: [D loss: 0.707644, acc: 0.509766] [A loss: 0.909588, acc: 0.140625]\n",
"6476: [D loss: 0.701720, acc: 0.517578] [A loss: 0.730747, acc: 0.460938]\n",
"6477: [D loss: 0.710420, acc: 0.531250] [A loss: 0.867583, acc: 0.183594]\n",
"6478: [D loss: 0.696231, acc: 0.527344] [A loss: 0.770340, acc: 0.363281]\n",
"6479: [D loss: 0.718643, acc: 0.500000] [A loss: 0.963001, acc: 0.152344]\n",
"6480: [D loss: 0.693422, acc: 0.550781] [A loss: 0.742077, acc: 0.429688]\n",
"6481: [D loss: 0.726457, acc: 0.519531] [A loss: 0.839328, acc: 0.226562]\n",
"6482: [D loss: 0.710297, acc: 0.484375] [A loss: 0.779429, acc: 0.320312]\n",
"6483: [D loss: 0.701200, acc: 0.505859] [A loss: 0.835758, acc: 0.285156]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6484: [D loss: 0.702228, acc: 0.501953] [A loss: 0.800125, acc: 0.281250]\n",
"6485: [D loss: 0.695587, acc: 0.517578] [A loss: 0.867059, acc: 0.171875]\n",
"6486: [D loss: 0.707692, acc: 0.519531] [A loss: 0.763790, acc: 0.355469]\n",
"6487: [D loss: 0.704612, acc: 0.513672] [A loss: 0.863817, acc: 0.214844]\n",
"6488: [D loss: 0.698572, acc: 0.531250] [A loss: 0.709097, acc: 0.476562]\n",
"6489: [D loss: 0.724164, acc: 0.505859] [A loss: 0.952089, acc: 0.156250]\n",
"6490: [D loss: 0.693463, acc: 0.550781] [A loss: 0.768591, acc: 0.355469]\n",
"6491: [D loss: 0.718525, acc: 0.482422] [A loss: 0.824559, acc: 0.234375]\n",
"6492: [D loss: 0.693164, acc: 0.548828] [A loss: 0.741301, acc: 0.394531]\n",
"6493: [D loss: 0.705014, acc: 0.562500] [A loss: 0.872844, acc: 0.160156]\n",
"6494: [D loss: 0.704543, acc: 0.515625] [A loss: 0.721498, acc: 0.519531]\n",
"6495: [D loss: 0.710545, acc: 0.533203] [A loss: 1.077239, acc: 0.078125]\n",
"6496: [D loss: 0.706381, acc: 0.523438] [A loss: 0.731317, acc: 0.460938]\n",
"6497: [D loss: 0.693092, acc: 0.523438] [A loss: 0.843338, acc: 0.210938]\n",
"6498: [D loss: 0.714879, acc: 0.505859] [A loss: 0.830370, acc: 0.246094]\n",
"6499: [D loss: 0.696694, acc: 0.529297] [A loss: 0.781085, acc: 0.375000]\n",
"6500: [D loss: 0.689271, acc: 0.580078] [A loss: 0.847642, acc: 0.253906]\n",
"6501: [D loss: 0.706594, acc: 0.509766] [A loss: 0.784896, acc: 0.355469]\n",
"6502: [D loss: 0.695520, acc: 0.546875] [A loss: 0.897595, acc: 0.167969]\n",
"6503: [D loss: 0.691486, acc: 0.541016] [A loss: 0.736409, acc: 0.429688]\n",
"6504: [D loss: 0.698792, acc: 0.509766] [A loss: 0.852688, acc: 0.222656]\n",
"6505: [D loss: 0.711765, acc: 0.490234] [A loss: 0.790372, acc: 0.328125]\n",
"6506: [D loss: 0.715201, acc: 0.501953] [A loss: 0.840665, acc: 0.230469]\n",
"6507: [D loss: 0.715966, acc: 0.494141] [A loss: 0.747666, acc: 0.437500]\n",
"6508: [D loss: 0.693515, acc: 0.544922] [A loss: 0.924022, acc: 0.109375]\n",
"6509: [D loss: 0.699567, acc: 0.507812] [A loss: 0.751972, acc: 0.355469]\n",
"6510: [D loss: 0.690839, acc: 0.541016] [A loss: 0.855096, acc: 0.175781]\n",
"6511: [D loss: 0.702291, acc: 0.531250] [A loss: 0.765417, acc: 0.371094]\n",
"6512: [D loss: 0.710720, acc: 0.513672] [A loss: 0.842699, acc: 0.226562]\n",
"6513: [D loss: 0.697005, acc: 0.535156] [A loss: 0.740998, acc: 0.433594]\n",
"6514: [D loss: 0.703438, acc: 0.541016] [A loss: 0.846945, acc: 0.210938]\n",
"6515: [D loss: 0.684661, acc: 0.554688] [A loss: 0.745109, acc: 0.402344]\n",
"6516: [D loss: 0.706706, acc: 0.527344] [A loss: 0.932566, acc: 0.093750]\n",
"6517: [D loss: 0.689689, acc: 0.558594] [A loss: 0.753072, acc: 0.339844]\n",
"6518: [D loss: 0.694127, acc: 0.541016] [A loss: 1.025244, acc: 0.078125]\n",
"6519: [D loss: 0.710988, acc: 0.500000] [A loss: 0.699040, acc: 0.480469]\n",
"6520: [D loss: 0.721264, acc: 0.511719] [A loss: 0.908047, acc: 0.113281]\n",
"6521: [D loss: 0.711424, acc: 0.496094] [A loss: 0.718503, acc: 0.507812]\n",
"6522: [D loss: 0.737851, acc: 0.509766] [A loss: 1.041186, acc: 0.109375]\n",
"6523: [D loss: 0.694533, acc: 0.527344] [A loss: 0.677959, acc: 0.542969]\n",
"6524: [D loss: 0.720385, acc: 0.505859] [A loss: 0.912273, acc: 0.136719]\n",
"6525: [D loss: 0.693676, acc: 0.525391] [A loss: 0.729912, acc: 0.414062]\n",
"6526: [D loss: 0.697010, acc: 0.517578] [A loss: 0.853802, acc: 0.207031]\n",
"6527: [D loss: 0.691803, acc: 0.542969] [A loss: 0.708733, acc: 0.511719]\n",
"6528: [D loss: 0.714462, acc: 0.501953] [A loss: 0.893647, acc: 0.156250]\n",
"6529: [D loss: 0.699936, acc: 0.517578] [A loss: 0.716290, acc: 0.496094]\n",
"6530: [D loss: 0.710905, acc: 0.507812] [A loss: 0.843866, acc: 0.214844]\n",
"6531: [D loss: 0.694853, acc: 0.556641] [A loss: 0.762433, acc: 0.347656]\n",
"6532: [D loss: 0.685142, acc: 0.544922] [A loss: 0.830187, acc: 0.246094]\n",
"6533: [D loss: 0.706414, acc: 0.541016] [A loss: 0.874774, acc: 0.152344]\n",
"6534: [D loss: 0.682556, acc: 0.529297] [A loss: 0.736438, acc: 0.406250]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6535: [D loss: 0.697456, acc: 0.541016] [A loss: 0.943456, acc: 0.109375]\n",
"6536: [D loss: 0.699741, acc: 0.533203] [A loss: 0.723074, acc: 0.437500]\n",
"6537: [D loss: 0.720879, acc: 0.509766] [A loss: 0.822687, acc: 0.250000]\n",
"6538: [D loss: 0.700998, acc: 0.517578] [A loss: 0.787790, acc: 0.300781]\n",
"6539: [D loss: 0.713373, acc: 0.470703] [A loss: 0.731795, acc: 0.433594]\n",
"6540: [D loss: 0.712773, acc: 0.501953] [A loss: 0.891580, acc: 0.156250]\n",
"6541: [D loss: 0.694889, acc: 0.513672] [A loss: 0.715501, acc: 0.488281]\n",
"6542: [D loss: 0.716444, acc: 0.507812] [A loss: 0.904187, acc: 0.152344]\n",
"6543: [D loss: 0.696908, acc: 0.511719] [A loss: 0.760143, acc: 0.363281]\n",
"6544: [D loss: 0.699786, acc: 0.541016] [A loss: 0.855565, acc: 0.191406]\n",
"6545: [D loss: 0.680940, acc: 0.560547] [A loss: 0.707232, acc: 0.488281]\n",
"6546: [D loss: 0.726867, acc: 0.505859] [A loss: 0.920633, acc: 0.125000]\n",
"6547: [D loss: 0.707994, acc: 0.507812] [A loss: 0.690410, acc: 0.546875]\n",
"6548: [D loss: 0.696345, acc: 0.550781] [A loss: 0.852229, acc: 0.191406]\n",
"6549: [D loss: 0.698526, acc: 0.515625] [A loss: 0.780451, acc: 0.312500]\n",
"6550: [D loss: 0.711614, acc: 0.511719] [A loss: 0.910915, acc: 0.175781]\n",
"6551: [D loss: 0.698048, acc: 0.556641] [A loss: 0.856901, acc: 0.207031]\n",
"6552: [D loss: 0.705621, acc: 0.498047] [A loss: 0.815874, acc: 0.253906]\n",
"6553: [D loss: 0.692053, acc: 0.529297] [A loss: 0.734047, acc: 0.425781]\n",
"6554: [D loss: 0.706280, acc: 0.503906] [A loss: 0.849406, acc: 0.222656]\n",
"6555: [D loss: 0.699760, acc: 0.523438] [A loss: 0.821143, acc: 0.261719]\n",
"6556: [D loss: 0.701816, acc: 0.523438] [A loss: 0.814995, acc: 0.246094]\n",
"6557: [D loss: 0.697658, acc: 0.527344] [A loss: 0.839285, acc: 0.265625]\n",
"6558: [D loss: 0.701132, acc: 0.515625] [A loss: 0.799165, acc: 0.332031]\n",
"6559: [D loss: 0.715493, acc: 0.519531] [A loss: 0.976447, acc: 0.144531]\n",
"6560: [D loss: 0.704039, acc: 0.503906] [A loss: 0.729578, acc: 0.382812]\n",
"6561: [D loss: 0.714611, acc: 0.537109] [A loss: 0.872097, acc: 0.152344]\n",
"6562: [D loss: 0.707887, acc: 0.498047] [A loss: 0.731461, acc: 0.378906]\n",
"6563: [D loss: 0.707659, acc: 0.511719] [A loss: 0.789198, acc: 0.289062]\n",
"6564: [D loss: 0.713453, acc: 0.507812] [A loss: 0.800632, acc: 0.277344]\n",
"6565: [D loss: 0.705658, acc: 0.519531] [A loss: 0.802619, acc: 0.304688]\n",
"6566: [D loss: 0.713841, acc: 0.470703] [A loss: 0.823142, acc: 0.242188]\n",
"6567: [D loss: 0.696607, acc: 0.533203] [A loss: 0.784526, acc: 0.300781]\n",
"6568: [D loss: 0.716080, acc: 0.472656] [A loss: 0.907531, acc: 0.113281]\n",
"6569: [D loss: 0.702996, acc: 0.498047] [A loss: 0.803659, acc: 0.304688]\n",
"6570: [D loss: 0.708292, acc: 0.511719] [A loss: 0.874016, acc: 0.167969]\n",
"6571: [D loss: 0.709466, acc: 0.533203] [A loss: 0.731809, acc: 0.394531]\n",
"6572: [D loss: 0.712049, acc: 0.509766] [A loss: 0.896735, acc: 0.140625]\n",
"6573: [D loss: 0.698120, acc: 0.542969] [A loss: 0.727155, acc: 0.492188]\n",
"6574: [D loss: 0.720057, acc: 0.511719] [A loss: 0.854891, acc: 0.191406]\n",
"6575: [D loss: 0.696731, acc: 0.529297] [A loss: 0.725303, acc: 0.453125]\n",
"6576: [D loss: 0.700914, acc: 0.511719] [A loss: 0.885322, acc: 0.207031]\n",
"6577: [D loss: 0.692882, acc: 0.544922] [A loss: 0.849888, acc: 0.242188]\n",
"6578: [D loss: 0.703584, acc: 0.529297] [A loss: 0.853378, acc: 0.183594]\n",
"6579: [D loss: 0.696260, acc: 0.531250] [A loss: 0.749356, acc: 0.402344]\n",
"6580: [D loss: 0.700247, acc: 0.505859] [A loss: 0.949745, acc: 0.070312]\n",
"6581: [D loss: 0.706539, acc: 0.505859] [A loss: 0.748970, acc: 0.402344]\n",
"6582: [D loss: 0.708655, acc: 0.513672] [A loss: 0.880887, acc: 0.152344]\n",
"6583: [D loss: 0.696236, acc: 0.521484] [A loss: 0.697827, acc: 0.484375]\n",
"6584: [D loss: 0.709132, acc: 0.523438] [A loss: 0.943453, acc: 0.097656]\n",
"6585: [D loss: 0.685066, acc: 0.541016] [A loss: 0.702931, acc: 0.492188]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6586: [D loss: 0.725896, acc: 0.490234] [A loss: 0.863144, acc: 0.164062]\n",
"6587: [D loss: 0.695833, acc: 0.529297] [A loss: 0.744339, acc: 0.410156]\n",
"6588: [D loss: 0.713566, acc: 0.527344] [A loss: 0.857031, acc: 0.191406]\n",
"6589: [D loss: 0.695612, acc: 0.548828] [A loss: 0.744406, acc: 0.375000]\n",
"6590: [D loss: 0.703613, acc: 0.509766] [A loss: 0.783750, acc: 0.328125]\n",
"6591: [D loss: 0.716561, acc: 0.460938] [A loss: 0.827606, acc: 0.207031]\n",
"6592: [D loss: 0.707250, acc: 0.496094] [A loss: 0.796556, acc: 0.261719]\n",
"6593: [D loss: 0.697353, acc: 0.509766] [A loss: 0.812775, acc: 0.238281]\n",
"6594: [D loss: 0.691309, acc: 0.537109] [A loss: 0.765914, acc: 0.363281]\n",
"6595: [D loss: 0.696678, acc: 0.505859] [A loss: 0.813708, acc: 0.265625]\n",
"6596: [D loss: 0.702176, acc: 0.515625] [A loss: 0.753278, acc: 0.398438]\n",
"6597: [D loss: 0.704661, acc: 0.529297] [A loss: 0.782969, acc: 0.308594]\n",
"6598: [D loss: 0.698134, acc: 0.533203] [A loss: 0.868476, acc: 0.179688]\n",
"6599: [D loss: 0.701600, acc: 0.492188] [A loss: 0.801699, acc: 0.273438]\n",
"6600: [D loss: 0.717772, acc: 0.494141] [A loss: 0.785086, acc: 0.343750]\n",
"6601: [D loss: 0.700126, acc: 0.541016] [A loss: 0.975922, acc: 0.171875]\n",
"6602: [D loss: 0.703809, acc: 0.519531] [A loss: 0.736285, acc: 0.441406]\n",
"6603: [D loss: 0.704211, acc: 0.513672] [A loss: 0.830059, acc: 0.253906]\n",
"6604: [D loss: 0.692123, acc: 0.533203] [A loss: 0.734819, acc: 0.414062]\n",
"6605: [D loss: 0.711895, acc: 0.509766] [A loss: 0.889521, acc: 0.152344]\n",
"6606: [D loss: 0.697279, acc: 0.523438] [A loss: 0.678637, acc: 0.585938]\n",
"6607: [D loss: 0.710376, acc: 0.515625] [A loss: 0.843504, acc: 0.199219]\n",
"6608: [D loss: 0.706799, acc: 0.511719] [A loss: 0.773636, acc: 0.402344]\n",
"6609: [D loss: 0.707392, acc: 0.509766] [A loss: 0.869454, acc: 0.179688]\n",
"6610: [D loss: 0.699875, acc: 0.501953] [A loss: 0.749479, acc: 0.425781]\n",
"6611: [D loss: 0.705779, acc: 0.501953] [A loss: 0.765187, acc: 0.351562]\n",
"6612: [D loss: 0.715216, acc: 0.503906] [A loss: 0.887699, acc: 0.156250]\n",
"6613: [D loss: 0.706215, acc: 0.517578] [A loss: 0.749466, acc: 0.386719]\n",
"6614: [D loss: 0.711317, acc: 0.509766] [A loss: 0.864866, acc: 0.199219]\n",
"6615: [D loss: 0.703239, acc: 0.513672] [A loss: 0.703562, acc: 0.492188]\n",
"6616: [D loss: 0.707377, acc: 0.527344] [A loss: 0.874781, acc: 0.156250]\n",
"6617: [D loss: 0.708036, acc: 0.500000] [A loss: 0.744522, acc: 0.417969]\n",
"6618: [D loss: 0.701305, acc: 0.525391] [A loss: 0.813350, acc: 0.281250]\n",
"6619: [D loss: 0.694757, acc: 0.535156] [A loss: 0.808181, acc: 0.281250]\n",
"6620: [D loss: 0.691391, acc: 0.542969] [A loss: 0.738716, acc: 0.406250]\n",
"6621: [D loss: 0.709402, acc: 0.525391] [A loss: 0.967301, acc: 0.070312]\n",
"6622: [D loss: 0.702811, acc: 0.533203] [A loss: 0.755195, acc: 0.390625]\n",
"6623: [D loss: 0.710375, acc: 0.531250] [A loss: 0.805235, acc: 0.277344]\n",
"6624: [D loss: 0.691966, acc: 0.542969] [A loss: 0.814884, acc: 0.242188]\n",
"6625: [D loss: 0.696019, acc: 0.513672] [A loss: 0.836913, acc: 0.250000]\n",
"6626: [D loss: 0.687558, acc: 0.560547] [A loss: 0.789989, acc: 0.296875]\n",
"6627: [D loss: 0.686701, acc: 0.544922] [A loss: 0.806680, acc: 0.246094]\n",
"6628: [D loss: 0.704477, acc: 0.503906] [A loss: 0.799657, acc: 0.269531]\n",
"6629: [D loss: 0.697732, acc: 0.517578] [A loss: 0.873897, acc: 0.183594]\n",
"6630: [D loss: 0.693158, acc: 0.527344] [A loss: 0.757664, acc: 0.359375]\n",
"6631: [D loss: 0.687878, acc: 0.560547] [A loss: 0.864219, acc: 0.187500]\n",
"6632: [D loss: 0.712842, acc: 0.498047] [A loss: 0.815571, acc: 0.316406]\n",
"6633: [D loss: 0.710024, acc: 0.513672] [A loss: 0.923047, acc: 0.160156]\n",
"6634: [D loss: 0.693192, acc: 0.554688] [A loss: 0.705786, acc: 0.500000]\n",
"6635: [D loss: 0.713233, acc: 0.507812] [A loss: 0.863280, acc: 0.187500]\n",
"6636: [D loss: 0.706429, acc: 0.494141] [A loss: 0.720717, acc: 0.437500]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6637: [D loss: 0.716246, acc: 0.484375] [A loss: 0.821142, acc: 0.281250]\n",
"6638: [D loss: 0.707562, acc: 0.492188] [A loss: 0.762468, acc: 0.367188]\n",
"6639: [D loss: 0.712319, acc: 0.488281] [A loss: 0.772291, acc: 0.316406]\n",
"6640: [D loss: 0.692990, acc: 0.519531] [A loss: 0.827516, acc: 0.179688]\n",
"6641: [D loss: 0.703055, acc: 0.525391] [A loss: 0.748179, acc: 0.406250]\n",
"6642: [D loss: 0.709565, acc: 0.521484] [A loss: 0.984460, acc: 0.101562]\n",
"6643: [D loss: 0.694674, acc: 0.548828] [A loss: 0.723435, acc: 0.429688]\n",
"6644: [D loss: 0.722527, acc: 0.513672] [A loss: 0.819504, acc: 0.226562]\n",
"6645: [D loss: 0.687584, acc: 0.558594] [A loss: 0.711337, acc: 0.539062]\n",
"6646: [D loss: 0.705159, acc: 0.519531] [A loss: 0.790130, acc: 0.355469]\n",
"6647: [D loss: 0.692352, acc: 0.539062] [A loss: 0.789608, acc: 0.300781]\n",
"6648: [D loss: 0.691910, acc: 0.548828] [A loss: 0.830142, acc: 0.296875]\n",
"6649: [D loss: 0.698760, acc: 0.505859] [A loss: 0.937846, acc: 0.109375]\n",
"6650: [D loss: 0.695918, acc: 0.533203] [A loss: 0.745592, acc: 0.410156]\n",
"6651: [D loss: 0.717071, acc: 0.500000] [A loss: 0.919978, acc: 0.074219]\n",
"6652: [D loss: 0.695779, acc: 0.546875] [A loss: 0.701151, acc: 0.480469]\n",
"6653: [D loss: 0.709580, acc: 0.544922] [A loss: 0.849530, acc: 0.203125]\n",
"6654: [D loss: 0.696018, acc: 0.494141] [A loss: 0.745706, acc: 0.371094]\n",
"6655: [D loss: 0.716593, acc: 0.515625] [A loss: 0.913697, acc: 0.136719]\n",
"6656: [D loss: 0.698062, acc: 0.533203] [A loss: 0.717118, acc: 0.453125]\n",
"6657: [D loss: 0.712175, acc: 0.505859] [A loss: 0.940146, acc: 0.089844]\n",
"6658: [D loss: 0.687204, acc: 0.539062] [A loss: 0.674413, acc: 0.582031]\n",
"6659: [D loss: 0.719724, acc: 0.486328] [A loss: 0.803089, acc: 0.277344]\n",
"6660: [D loss: 0.699708, acc: 0.503906] [A loss: 0.759132, acc: 0.394531]\n",
"6661: [D loss: 0.711341, acc: 0.501953] [A loss: 0.839088, acc: 0.199219]\n",
"6662: [D loss: 0.683470, acc: 0.568359] [A loss: 0.781562, acc: 0.320312]\n",
"6663: [D loss: 0.693103, acc: 0.544922] [A loss: 0.881160, acc: 0.156250]\n",
"6664: [D loss: 0.692348, acc: 0.542969] [A loss: 0.726021, acc: 0.460938]\n",
"6665: [D loss: 0.720053, acc: 0.490234] [A loss: 0.922696, acc: 0.109375]\n",
"6666: [D loss: 0.700574, acc: 0.517578] [A loss: 0.768794, acc: 0.343750]\n",
"6667: [D loss: 0.715308, acc: 0.486328] [A loss: 0.827268, acc: 0.230469]\n",
"6668: [D loss: 0.704934, acc: 0.492188] [A loss: 0.747492, acc: 0.375000]\n",
"6669: [D loss: 0.702492, acc: 0.517578] [A loss: 0.771012, acc: 0.351562]\n",
"6670: [D loss: 0.710517, acc: 0.507812] [A loss: 0.772618, acc: 0.316406]\n",
"6671: [D loss: 0.699941, acc: 0.552734] [A loss: 0.828921, acc: 0.265625]\n",
"6672: [D loss: 0.693709, acc: 0.566406] [A loss: 0.773110, acc: 0.359375]\n",
"6673: [D loss: 0.707295, acc: 0.460938] [A loss: 0.882028, acc: 0.152344]\n",
"6674: [D loss: 0.702835, acc: 0.515625] [A loss: 0.743424, acc: 0.421875]\n",
"6675: [D loss: 0.701287, acc: 0.525391] [A loss: 0.849631, acc: 0.214844]\n",
"6676: [D loss: 0.708911, acc: 0.507812] [A loss: 0.784044, acc: 0.332031]\n",
"6677: [D loss: 0.707296, acc: 0.535156] [A loss: 0.876160, acc: 0.234375]\n",
"6678: [D loss: 0.687268, acc: 0.541016] [A loss: 0.808237, acc: 0.257812]\n",
"6679: [D loss: 0.704313, acc: 0.492188] [A loss: 0.811096, acc: 0.234375]\n",
"6680: [D loss: 0.691503, acc: 0.525391] [A loss: 0.775009, acc: 0.320312]\n",
"6681: [D loss: 0.697834, acc: 0.496094] [A loss: 0.903618, acc: 0.156250]\n",
"6682: [D loss: 0.694230, acc: 0.529297] [A loss: 0.763583, acc: 0.351562]\n",
"6683: [D loss: 0.698803, acc: 0.507812] [A loss: 0.840146, acc: 0.199219]\n",
"6684: [D loss: 0.703868, acc: 0.486328] [A loss: 0.725576, acc: 0.425781]\n",
"6685: [D loss: 0.728565, acc: 0.490234] [A loss: 1.029445, acc: 0.085938]\n",
"6686: [D loss: 0.700336, acc: 0.539062] [A loss: 0.708978, acc: 0.457031]\n",
"6687: [D loss: 0.714622, acc: 0.500000] [A loss: 0.883250, acc: 0.152344]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6688: [D loss: 0.714005, acc: 0.496094] [A loss: 0.767768, acc: 0.332031]\n",
"6689: [D loss: 0.696815, acc: 0.542969] [A loss: 0.754473, acc: 0.375000]\n",
"6690: [D loss: 0.709403, acc: 0.509766] [A loss: 0.859367, acc: 0.199219]\n",
"6691: [D loss: 0.693061, acc: 0.539062] [A loss: 0.735944, acc: 0.449219]\n",
"6692: [D loss: 0.705152, acc: 0.529297] [A loss: 0.889344, acc: 0.156250]\n",
"6693: [D loss: 0.713008, acc: 0.484375] [A loss: 0.719326, acc: 0.429688]\n",
"6694: [D loss: 0.709373, acc: 0.513672] [A loss: 0.920577, acc: 0.121094]\n",
"6695: [D loss: 0.691915, acc: 0.535156] [A loss: 0.742931, acc: 0.390625]\n",
"6696: [D loss: 0.715140, acc: 0.541016] [A loss: 0.848431, acc: 0.179688]\n",
"6697: [D loss: 0.699957, acc: 0.533203] [A loss: 0.758245, acc: 0.367188]\n",
"6698: [D loss: 0.704897, acc: 0.513672] [A loss: 0.860769, acc: 0.199219]\n",
"6699: [D loss: 0.696108, acc: 0.521484] [A loss: 0.743824, acc: 0.402344]\n",
"6700: [D loss: 0.701287, acc: 0.535156] [A loss: 0.817089, acc: 0.242188]\n",
"6701: [D loss: 0.701111, acc: 0.527344] [A loss: 0.765312, acc: 0.375000]\n",
"6702: [D loss: 0.706341, acc: 0.505859] [A loss: 0.880428, acc: 0.183594]\n",
"6703: [D loss: 0.711249, acc: 0.468750] [A loss: 0.767109, acc: 0.355469]\n",
"6704: [D loss: 0.726337, acc: 0.498047] [A loss: 0.929384, acc: 0.121094]\n",
"6705: [D loss: 0.707466, acc: 0.498047] [A loss: 0.760115, acc: 0.339844]\n",
"6706: [D loss: 0.723380, acc: 0.498047] [A loss: 0.819126, acc: 0.238281]\n",
"6707: [D loss: 0.695781, acc: 0.539062] [A loss: 0.785089, acc: 0.320312]\n",
"6708: [D loss: 0.700160, acc: 0.537109] [A loss: 0.852974, acc: 0.195312]\n",
"6709: [D loss: 0.691578, acc: 0.533203] [A loss: 0.761128, acc: 0.367188]\n",
"6710: [D loss: 0.701093, acc: 0.527344] [A loss: 0.824392, acc: 0.265625]\n",
"6711: [D loss: 0.696797, acc: 0.531250] [A loss: 0.750554, acc: 0.371094]\n",
"6712: [D loss: 0.711058, acc: 0.505859] [A loss: 0.834906, acc: 0.234375]\n",
"6713: [D loss: 0.710154, acc: 0.496094] [A loss: 0.737486, acc: 0.437500]\n",
"6714: [D loss: 0.702436, acc: 0.523438] [A loss: 0.879159, acc: 0.164062]\n",
"6715: [D loss: 0.694283, acc: 0.515625] [A loss: 0.721111, acc: 0.433594]\n",
"6716: [D loss: 0.707514, acc: 0.519531] [A loss: 0.878726, acc: 0.160156]\n",
"6717: [D loss: 0.721367, acc: 0.472656] [A loss: 0.746423, acc: 0.421875]\n",
"6718: [D loss: 0.716474, acc: 0.466797] [A loss: 0.972819, acc: 0.109375]\n",
"6719: [D loss: 0.709561, acc: 0.480469] [A loss: 0.772396, acc: 0.324219]\n",
"6720: [D loss: 0.695576, acc: 0.515625] [A loss: 0.900709, acc: 0.183594]\n",
"6721: [D loss: 0.685266, acc: 0.539062] [A loss: 0.766819, acc: 0.304688]\n",
"6722: [D loss: 0.723721, acc: 0.490234] [A loss: 0.825066, acc: 0.218750]\n",
"6723: [D loss: 0.694547, acc: 0.505859] [A loss: 0.802112, acc: 0.296875]\n",
"6724: [D loss: 0.695580, acc: 0.513672] [A loss: 0.779006, acc: 0.328125]\n",
"6725: [D loss: 0.704797, acc: 0.501953] [A loss: 0.934190, acc: 0.070312]\n",
"6726: [D loss: 0.689027, acc: 0.535156] [A loss: 0.686860, acc: 0.550781]\n",
"6727: [D loss: 0.721115, acc: 0.507812] [A loss: 0.932365, acc: 0.082031]\n",
"6728: [D loss: 0.704629, acc: 0.513672] [A loss: 0.754754, acc: 0.355469]\n",
"6729: [D loss: 0.701040, acc: 0.515625] [A loss: 0.883408, acc: 0.140625]\n",
"6730: [D loss: 0.686213, acc: 0.544922] [A loss: 0.725384, acc: 0.445312]\n",
"6731: [D loss: 0.714844, acc: 0.511719] [A loss: 0.799045, acc: 0.292969]\n",
"6732: [D loss: 0.691515, acc: 0.531250] [A loss: 0.809702, acc: 0.265625]\n",
"6733: [D loss: 0.691942, acc: 0.556641] [A loss: 0.692445, acc: 0.527344]\n",
"6734: [D loss: 0.720773, acc: 0.503906] [A loss: 0.934457, acc: 0.113281]\n",
"6735: [D loss: 0.683962, acc: 0.554688] [A loss: 0.705089, acc: 0.496094]\n",
"6736: [D loss: 0.719758, acc: 0.501953] [A loss: 0.829646, acc: 0.242188]\n",
"6737: [D loss: 0.685289, acc: 0.580078] [A loss: 0.799719, acc: 0.246094]\n",
"6738: [D loss: 0.699971, acc: 0.531250] [A loss: 0.846909, acc: 0.195312]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6739: [D loss: 0.701616, acc: 0.517578] [A loss: 0.863921, acc: 0.210938]\n",
"6740: [D loss: 0.686711, acc: 0.558594] [A loss: 0.720774, acc: 0.476562]\n",
"6741: [D loss: 0.700091, acc: 0.552734] [A loss: 0.910437, acc: 0.148438]\n",
"6742: [D loss: 0.700370, acc: 0.523438] [A loss: 0.757711, acc: 0.382812]\n",
"6743: [D loss: 0.699975, acc: 0.525391] [A loss: 0.800621, acc: 0.300781]\n",
"6744: [D loss: 0.703215, acc: 0.509766] [A loss: 0.792130, acc: 0.289062]\n",
"6745: [D loss: 0.703401, acc: 0.531250] [A loss: 0.731705, acc: 0.433594]\n",
"6746: [D loss: 0.704559, acc: 0.511719] [A loss: 0.934086, acc: 0.085938]\n",
"6747: [D loss: 0.702005, acc: 0.535156] [A loss: 0.682310, acc: 0.539062]\n",
"6748: [D loss: 0.711385, acc: 0.529297] [A loss: 0.841239, acc: 0.222656]\n",
"6749: [D loss: 0.698914, acc: 0.513672] [A loss: 0.726869, acc: 0.429688]\n",
"6750: [D loss: 0.705473, acc: 0.509766] [A loss: 0.836501, acc: 0.226562]\n",
"6751: [D loss: 0.700516, acc: 0.505859] [A loss: 0.769169, acc: 0.332031]\n",
"6752: [D loss: 0.707449, acc: 0.498047] [A loss: 0.828262, acc: 0.222656]\n",
"6753: [D loss: 0.711294, acc: 0.482422] [A loss: 0.797232, acc: 0.285156]\n",
"6754: [D loss: 0.711924, acc: 0.523438] [A loss: 0.816902, acc: 0.246094]\n",
"6755: [D loss: 0.699688, acc: 0.527344] [A loss: 0.881009, acc: 0.183594]\n",
"6756: [D loss: 0.699475, acc: 0.509766] [A loss: 0.765732, acc: 0.300781]\n",
"6757: [D loss: 0.698686, acc: 0.544922] [A loss: 0.881213, acc: 0.152344]\n",
"6758: [D loss: 0.697310, acc: 0.525391] [A loss: 0.805985, acc: 0.253906]\n",
"6759: [D loss: 0.699042, acc: 0.533203] [A loss: 0.810635, acc: 0.253906]\n",
"6760: [D loss: 0.688895, acc: 0.525391] [A loss: 0.813119, acc: 0.250000]\n",
"6761: [D loss: 0.709337, acc: 0.496094] [A loss: 0.823333, acc: 0.222656]\n",
"6762: [D loss: 0.714133, acc: 0.492188] [A loss: 0.838750, acc: 0.214844]\n",
"6763: [D loss: 0.716426, acc: 0.468750] [A loss: 0.858497, acc: 0.164062]\n",
"6764: [D loss: 0.717091, acc: 0.488281] [A loss: 0.816582, acc: 0.261719]\n",
"6765: [D loss: 0.692309, acc: 0.544922] [A loss: 0.793441, acc: 0.308594]\n",
"6766: [D loss: 0.714109, acc: 0.531250] [A loss: 1.034069, acc: 0.035156]\n",
"6767: [D loss: 0.702962, acc: 0.517578] [A loss: 0.688704, acc: 0.515625]\n",
"6768: [D loss: 0.712926, acc: 0.498047] [A loss: 0.899695, acc: 0.148438]\n",
"6769: [D loss: 0.703969, acc: 0.509766] [A loss: 0.731474, acc: 0.429688]\n",
"6770: [D loss: 0.699550, acc: 0.515625] [A loss: 0.816936, acc: 0.289062]\n",
"6771: [D loss: 0.708750, acc: 0.492188] [A loss: 0.909307, acc: 0.097656]\n",
"6772: [D loss: 0.702279, acc: 0.500000] [A loss: 0.727531, acc: 0.437500]\n",
"6773: [D loss: 0.695531, acc: 0.527344] [A loss: 0.931046, acc: 0.113281]\n",
"6774: [D loss: 0.704923, acc: 0.519531] [A loss: 0.742320, acc: 0.382812]\n",
"6775: [D loss: 0.716004, acc: 0.501953] [A loss: 0.852747, acc: 0.203125]\n",
"6776: [D loss: 0.700760, acc: 0.501953] [A loss: 0.761475, acc: 0.386719]\n",
"6777: [D loss: 0.712416, acc: 0.523438] [A loss: 0.892089, acc: 0.117188]\n",
"6778: [D loss: 0.689228, acc: 0.548828] [A loss: 0.713858, acc: 0.464844]\n",
"6779: [D loss: 0.701700, acc: 0.523438] [A loss: 0.820143, acc: 0.253906]\n",
"6780: [D loss: 0.699251, acc: 0.503906] [A loss: 0.772867, acc: 0.332031]\n",
"6781: [D loss: 0.699444, acc: 0.533203] [A loss: 0.788116, acc: 0.261719]\n",
"6782: [D loss: 0.694040, acc: 0.527344] [A loss: 0.810098, acc: 0.285156]\n",
"6783: [D loss: 0.711096, acc: 0.500000] [A loss: 0.748356, acc: 0.390625]\n",
"6784: [D loss: 0.699762, acc: 0.527344] [A loss: 0.790832, acc: 0.277344]\n",
"6785: [D loss: 0.701291, acc: 0.511719] [A loss: 0.824347, acc: 0.273438]\n",
"6786: [D loss: 0.712902, acc: 0.527344] [A loss: 0.872332, acc: 0.152344]\n",
"6787: [D loss: 0.697792, acc: 0.523438] [A loss: 0.733023, acc: 0.394531]\n",
"6788: [D loss: 0.710623, acc: 0.515625] [A loss: 0.833128, acc: 0.226562]\n",
"6789: [D loss: 0.708389, acc: 0.515625] [A loss: 0.771934, acc: 0.339844]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6790: [D loss: 0.722953, acc: 0.464844] [A loss: 0.898542, acc: 0.156250]\n",
"6791: [D loss: 0.695200, acc: 0.515625] [A loss: 0.706856, acc: 0.515625]\n",
"6792: [D loss: 0.743536, acc: 0.496094] [A loss: 1.064004, acc: 0.054688]\n",
"6793: [D loss: 0.684306, acc: 0.539062] [A loss: 0.700578, acc: 0.523438]\n",
"6794: [D loss: 0.749306, acc: 0.496094] [A loss: 0.927184, acc: 0.125000]\n",
"6795: [D loss: 0.712408, acc: 0.482422] [A loss: 0.742521, acc: 0.378906]\n",
"6796: [D loss: 0.716175, acc: 0.498047] [A loss: 0.901299, acc: 0.132812]\n",
"6797: [D loss: 0.717854, acc: 0.474609] [A loss: 0.720904, acc: 0.460938]\n",
"6798: [D loss: 0.697575, acc: 0.535156] [A loss: 0.888513, acc: 0.183594]\n",
"6799: [D loss: 0.704873, acc: 0.488281] [A loss: 0.723191, acc: 0.460938]\n",
"6800: [D loss: 0.700782, acc: 0.507812] [A loss: 0.803259, acc: 0.269531]\n",
"6801: [D loss: 0.690102, acc: 0.546875] [A loss: 0.777731, acc: 0.343750]\n",
"6802: [D loss: 0.717014, acc: 0.501953] [A loss: 0.769805, acc: 0.312500]\n",
"6803: [D loss: 0.700850, acc: 0.513672] [A loss: 0.796997, acc: 0.320312]\n",
"6804: [D loss: 0.702767, acc: 0.480469] [A loss: 0.771738, acc: 0.308594]\n",
"6805: [D loss: 0.714059, acc: 0.509766] [A loss: 0.799677, acc: 0.285156]\n",
"6806: [D loss: 0.698889, acc: 0.525391] [A loss: 0.734403, acc: 0.414062]\n",
"6807: [D loss: 0.684950, acc: 0.541016] [A loss: 0.835531, acc: 0.226562]\n",
"6808: [D loss: 0.699082, acc: 0.521484] [A loss: 0.745731, acc: 0.367188]\n",
"6809: [D loss: 0.710607, acc: 0.503906] [A loss: 0.819684, acc: 0.257812]\n",
"6810: [D loss: 0.711578, acc: 0.498047] [A loss: 0.764802, acc: 0.347656]\n",
"6811: [D loss: 0.698078, acc: 0.535156] [A loss: 0.803666, acc: 0.273438]\n",
"6812: [D loss: 0.712235, acc: 0.488281] [A loss: 0.796301, acc: 0.300781]\n",
"6813: [D loss: 0.690901, acc: 0.533203] [A loss: 0.792032, acc: 0.289062]\n",
"6814: [D loss: 0.709827, acc: 0.494141] [A loss: 0.870595, acc: 0.191406]\n",
"6815: [D loss: 0.708518, acc: 0.501953] [A loss: 0.762708, acc: 0.339844]\n",
"6816: [D loss: 0.713132, acc: 0.500000] [A loss: 0.865545, acc: 0.214844]\n",
"6817: [D loss: 0.703776, acc: 0.498047] [A loss: 0.720776, acc: 0.441406]\n",
"6818: [D loss: 0.699954, acc: 0.509766] [A loss: 0.852388, acc: 0.160156]\n",
"6819: [D loss: 0.706016, acc: 0.482422] [A loss: 0.768102, acc: 0.339844]\n",
"6820: [D loss: 0.697064, acc: 0.511719] [A loss: 0.833370, acc: 0.203125]\n",
"6821: [D loss: 0.702022, acc: 0.503906] [A loss: 0.703769, acc: 0.484375]\n",
"6822: [D loss: 0.715901, acc: 0.515625] [A loss: 0.927034, acc: 0.105469]\n",
"6823: [D loss: 0.694731, acc: 0.544922] [A loss: 0.704512, acc: 0.496094]\n",
"6824: [D loss: 0.713061, acc: 0.498047] [A loss: 0.855696, acc: 0.156250]\n",
"6825: [D loss: 0.686798, acc: 0.568359] [A loss: 0.737392, acc: 0.390625]\n",
"6826: [D loss: 0.701065, acc: 0.531250] [A loss: 0.800317, acc: 0.265625]\n",
"6827: [D loss: 0.693869, acc: 0.511719] [A loss: 0.770092, acc: 0.367188]\n",
"6828: [D loss: 0.697922, acc: 0.513672] [A loss: 0.850277, acc: 0.230469]\n",
"6829: [D loss: 0.700934, acc: 0.498047] [A loss: 0.788556, acc: 0.335938]\n",
"6830: [D loss: 0.703684, acc: 0.492188] [A loss: 0.837572, acc: 0.183594]\n",
"6831: [D loss: 0.695324, acc: 0.529297] [A loss: 0.785176, acc: 0.289062]\n",
"6832: [D loss: 0.703753, acc: 0.517578] [A loss: 0.817450, acc: 0.210938]\n",
"6833: [D loss: 0.699749, acc: 0.523438] [A loss: 0.769420, acc: 0.355469]\n",
"6834: [D loss: 0.696123, acc: 0.529297] [A loss: 0.772487, acc: 0.332031]\n",
"6835: [D loss: 0.694813, acc: 0.537109] [A loss: 0.810040, acc: 0.230469]\n",
"6836: [D loss: 0.698646, acc: 0.525391] [A loss: 0.827493, acc: 0.242188]\n",
"6837: [D loss: 0.695988, acc: 0.511719] [A loss: 0.886818, acc: 0.128906]\n",
"6838: [D loss: 0.696241, acc: 0.537109] [A loss: 0.709306, acc: 0.484375]\n",
"6839: [D loss: 0.712890, acc: 0.515625] [A loss: 0.895126, acc: 0.164062]\n",
"6840: [D loss: 0.690442, acc: 0.562500] [A loss: 0.751862, acc: 0.347656]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6841: [D loss: 0.708083, acc: 0.498047] [A loss: 0.811349, acc: 0.281250]\n",
"6842: [D loss: 0.697861, acc: 0.525391] [A loss: 0.968782, acc: 0.121094]\n",
"6843: [D loss: 0.684845, acc: 0.562500] [A loss: 0.703425, acc: 0.492188]\n",
"6844: [D loss: 0.703890, acc: 0.509766] [A loss: 0.855776, acc: 0.187500]\n",
"6845: [D loss: 0.709255, acc: 0.505859] [A loss: 0.788468, acc: 0.273438]\n",
"6846: [D loss: 0.702021, acc: 0.513672] [A loss: 0.783253, acc: 0.296875]\n",
"6847: [D loss: 0.690473, acc: 0.507812] [A loss: 0.766819, acc: 0.347656]\n",
"6848: [D loss: 0.703868, acc: 0.523438] [A loss: 0.846905, acc: 0.207031]\n",
"6849: [D loss: 0.690668, acc: 0.535156] [A loss: 0.716664, acc: 0.476562]\n",
"6850: [D loss: 0.704006, acc: 0.525391] [A loss: 0.886315, acc: 0.140625]\n",
"6851: [D loss: 0.706050, acc: 0.490234] [A loss: 0.666359, acc: 0.585938]\n",
"6852: [D loss: 0.724412, acc: 0.500000] [A loss: 1.052691, acc: 0.074219]\n",
"6853: [D loss: 0.687443, acc: 0.560547] [A loss: 0.675000, acc: 0.554688]\n",
"6854: [D loss: 0.717141, acc: 0.525391] [A loss: 0.846087, acc: 0.214844]\n",
"6855: [D loss: 0.695249, acc: 0.525391] [A loss: 0.722913, acc: 0.457031]\n",
"6856: [D loss: 0.706798, acc: 0.535156] [A loss: 0.818838, acc: 0.214844]\n",
"6857: [D loss: 0.693133, acc: 0.507812] [A loss: 0.745713, acc: 0.417969]\n",
"6858: [D loss: 0.702096, acc: 0.501953] [A loss: 0.847937, acc: 0.207031]\n",
"6859: [D loss: 0.703089, acc: 0.505859] [A loss: 0.714177, acc: 0.488281]\n",
"6860: [D loss: 0.723008, acc: 0.494141] [A loss: 0.804520, acc: 0.281250]\n",
"6861: [D loss: 0.705755, acc: 0.496094] [A loss: 0.745176, acc: 0.375000]\n",
"6862: [D loss: 0.704098, acc: 0.523438] [A loss: 0.844811, acc: 0.199219]\n",
"6863: [D loss: 0.697117, acc: 0.525391] [A loss: 0.707141, acc: 0.464844]\n",
"6864: [D loss: 0.695085, acc: 0.533203] [A loss: 0.841151, acc: 0.226562]\n",
"6865: [D loss: 0.698104, acc: 0.517578] [A loss: 0.790517, acc: 0.324219]\n",
"6866: [D loss: 0.698209, acc: 0.501953] [A loss: 0.805637, acc: 0.250000]\n",
"6867: [D loss: 0.698611, acc: 0.511719] [A loss: 0.800946, acc: 0.253906]\n",
"6868: [D loss: 0.715909, acc: 0.509766] [A loss: 0.858656, acc: 0.183594]\n",
"6869: [D loss: 0.687573, acc: 0.548828] [A loss: 0.742700, acc: 0.390625]\n",
"6870: [D loss: 0.712065, acc: 0.498047] [A loss: 0.807107, acc: 0.285156]\n",
"6871: [D loss: 0.702839, acc: 0.509766] [A loss: 0.836194, acc: 0.218750]\n",
"6872: [D loss: 0.690512, acc: 0.544922] [A loss: 0.818311, acc: 0.230469]\n",
"6873: [D loss: 0.711606, acc: 0.462891] [A loss: 0.799319, acc: 0.273438]\n",
"6874: [D loss: 0.703131, acc: 0.521484] [A loss: 0.853668, acc: 0.175781]\n",
"6875: [D loss: 0.681670, acc: 0.529297] [A loss: 0.702229, acc: 0.500000]\n",
"6876: [D loss: 0.722094, acc: 0.492188] [A loss: 0.960010, acc: 0.089844]\n",
"6877: [D loss: 0.703084, acc: 0.500000] [A loss: 0.730958, acc: 0.453125]\n",
"6878: [D loss: 0.709797, acc: 0.527344] [A loss: 0.841852, acc: 0.187500]\n",
"6879: [D loss: 0.692586, acc: 0.525391] [A loss: 0.719856, acc: 0.457031]\n",
"6880: [D loss: 0.706379, acc: 0.511719] [A loss: 0.885809, acc: 0.152344]\n",
"6881: [D loss: 0.705447, acc: 0.482422] [A loss: 0.778487, acc: 0.355469]\n",
"6882: [D loss: 0.706504, acc: 0.521484] [A loss: 0.783619, acc: 0.308594]\n",
"6883: [D loss: 0.703906, acc: 0.503906] [A loss: 0.795536, acc: 0.285156]\n",
"6884: [D loss: 0.720420, acc: 0.455078] [A loss: 0.783358, acc: 0.300781]\n",
"6885: [D loss: 0.696826, acc: 0.539062] [A loss: 0.790683, acc: 0.296875]\n",
"6886: [D loss: 0.712498, acc: 0.486328] [A loss: 0.775565, acc: 0.328125]\n",
"6887: [D loss: 0.709733, acc: 0.531250] [A loss: 0.917231, acc: 0.132812]\n",
"6888: [D loss: 0.699417, acc: 0.515625] [A loss: 0.730219, acc: 0.425781]\n",
"6889: [D loss: 0.703641, acc: 0.503906] [A loss: 0.833248, acc: 0.218750]\n",
"6890: [D loss: 0.696261, acc: 0.550781] [A loss: 0.786115, acc: 0.292969]\n",
"6891: [D loss: 0.708648, acc: 0.490234] [A loss: 0.761505, acc: 0.363281]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6892: [D loss: 0.707622, acc: 0.500000] [A loss: 0.823643, acc: 0.226562]\n",
"6893: [D loss: 0.695038, acc: 0.509766] [A loss: 0.760358, acc: 0.378906]\n",
"6894: [D loss: 0.696247, acc: 0.527344] [A loss: 0.776470, acc: 0.296875]\n",
"6895: [D loss: 0.697416, acc: 0.515625] [A loss: 0.797957, acc: 0.250000]\n",
"6896: [D loss: 0.699827, acc: 0.501953] [A loss: 0.795816, acc: 0.316406]\n",
"6897: [D loss: 0.699674, acc: 0.515625] [A loss: 0.793522, acc: 0.328125]\n",
"6898: [D loss: 0.708076, acc: 0.503906] [A loss: 1.131134, acc: 0.035156]\n",
"6899: [D loss: 0.706101, acc: 0.511719] [A loss: 0.655011, acc: 0.617188]\n",
"6900: [D loss: 0.732595, acc: 0.503906] [A loss: 0.775924, acc: 0.328125]\n",
"6901: [D loss: 0.695458, acc: 0.521484] [A loss: 0.811689, acc: 0.253906]\n",
"6902: [D loss: 0.688593, acc: 0.548828] [A loss: 0.733742, acc: 0.421875]\n",
"6903: [D loss: 0.703421, acc: 0.527344] [A loss: 0.811839, acc: 0.261719]\n",
"6904: [D loss: 0.697597, acc: 0.548828] [A loss: 0.778727, acc: 0.335938]\n",
"6905: [D loss: 0.700059, acc: 0.507812] [A loss: 0.826461, acc: 0.210938]\n",
"6906: [D loss: 0.696201, acc: 0.513672] [A loss: 0.792733, acc: 0.292969]\n",
"6907: [D loss: 0.700336, acc: 0.544922] [A loss: 0.841312, acc: 0.199219]\n",
"6908: [D loss: 0.698222, acc: 0.519531] [A loss: 0.743011, acc: 0.421875]\n",
"6909: [D loss: 0.700543, acc: 0.515625] [A loss: 0.827693, acc: 0.277344]\n",
"6910: [D loss: 0.699847, acc: 0.537109] [A loss: 0.850045, acc: 0.179688]\n",
"6911: [D loss: 0.700701, acc: 0.498047] [A loss: 0.748659, acc: 0.414062]\n",
"6912: [D loss: 0.710068, acc: 0.509766] [A loss: 0.872664, acc: 0.152344]\n",
"6913: [D loss: 0.683921, acc: 0.539062] [A loss: 0.758688, acc: 0.328125]\n",
"6914: [D loss: 0.708687, acc: 0.505859] [A loss: 0.794916, acc: 0.281250]\n",
"6915: [D loss: 0.709958, acc: 0.482422] [A loss: 0.815911, acc: 0.234375]\n",
"6916: [D loss: 0.690753, acc: 0.562500] [A loss: 0.704021, acc: 0.468750]\n",
"6917: [D loss: 0.705090, acc: 0.494141] [A loss: 0.804218, acc: 0.230469]\n",
"6918: [D loss: 0.703746, acc: 0.525391] [A loss: 0.779922, acc: 0.328125]\n",
"6919: [D loss: 0.691494, acc: 0.529297] [A loss: 0.877583, acc: 0.125000]\n",
"6920: [D loss: 0.704115, acc: 0.501953] [A loss: 0.800823, acc: 0.242188]\n",
"6921: [D loss: 0.695963, acc: 0.509766] [A loss: 0.857137, acc: 0.171875]\n",
"6922: [D loss: 0.698761, acc: 0.511719] [A loss: 0.801818, acc: 0.285156]\n",
"6923: [D loss: 0.700170, acc: 0.525391] [A loss: 0.816182, acc: 0.242188]\n",
"6924: [D loss: 0.688976, acc: 0.521484] [A loss: 0.803687, acc: 0.261719]\n",
"6925: [D loss: 0.696541, acc: 0.515625] [A loss: 0.864654, acc: 0.183594]\n",
"6926: [D loss: 0.697516, acc: 0.550781] [A loss: 0.707305, acc: 0.492188]\n",
"6927: [D loss: 0.723023, acc: 0.466797] [A loss: 0.995409, acc: 0.082031]\n",
"6928: [D loss: 0.701319, acc: 0.523438] [A loss: 0.857683, acc: 0.214844]\n",
"6929: [D loss: 0.707425, acc: 0.482422] [A loss: 0.800407, acc: 0.289062]\n",
"6930: [D loss: 0.701717, acc: 0.542969] [A loss: 0.879609, acc: 0.175781]\n",
"6931: [D loss: 0.711577, acc: 0.519531] [A loss: 0.765909, acc: 0.332031]\n",
"6932: [D loss: 0.702226, acc: 0.554688] [A loss: 0.745797, acc: 0.363281]\n",
"6933: [D loss: 0.697959, acc: 0.546875] [A loss: 0.933500, acc: 0.078125]\n",
"6934: [D loss: 0.706145, acc: 0.509766] [A loss: 0.705988, acc: 0.500000]\n",
"6935: [D loss: 0.720381, acc: 0.474609] [A loss: 0.837177, acc: 0.191406]\n",
"6936: [D loss: 0.701392, acc: 0.523438] [A loss: 0.759535, acc: 0.324219]\n",
"6937: [D loss: 0.689075, acc: 0.533203] [A loss: 0.767121, acc: 0.304688]\n",
"6938: [D loss: 0.699956, acc: 0.525391] [A loss: 0.868263, acc: 0.179688]\n",
"6939: [D loss: 0.700014, acc: 0.511719] [A loss: 0.750417, acc: 0.351562]\n",
"6940: [D loss: 0.697379, acc: 0.531250] [A loss: 0.854526, acc: 0.156250]\n",
"6941: [D loss: 0.707366, acc: 0.507812] [A loss: 0.746065, acc: 0.449219]\n",
"6942: [D loss: 0.697240, acc: 0.531250] [A loss: 0.835784, acc: 0.218750]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6943: [D loss: 0.694813, acc: 0.519531] [A loss: 0.770356, acc: 0.335938]\n",
"6944: [D loss: 0.695827, acc: 0.542969] [A loss: 0.819136, acc: 0.246094]\n",
"6945: [D loss: 0.703287, acc: 0.529297] [A loss: 0.808378, acc: 0.312500]\n",
"6946: [D loss: 0.691055, acc: 0.541016] [A loss: 0.772964, acc: 0.312500]\n",
"6947: [D loss: 0.705780, acc: 0.523438] [A loss: 0.756182, acc: 0.406250]\n",
"6948: [D loss: 0.701084, acc: 0.531250] [A loss: 0.816812, acc: 0.257812]\n",
"6949: [D loss: 0.712004, acc: 0.505859] [A loss: 0.866321, acc: 0.207031]\n",
"6950: [D loss: 0.689516, acc: 0.556641] [A loss: 0.768536, acc: 0.351562]\n",
"6951: [D loss: 0.706777, acc: 0.513672] [A loss: 0.887001, acc: 0.121094]\n",
"6952: [D loss: 0.699446, acc: 0.503906] [A loss: 0.698819, acc: 0.503906]\n",
"6953: [D loss: 0.708469, acc: 0.539062] [A loss: 0.958787, acc: 0.082031]\n",
"6954: [D loss: 0.694601, acc: 0.548828] [A loss: 0.703514, acc: 0.503906]\n",
"6955: [D loss: 0.721526, acc: 0.517578] [A loss: 0.858867, acc: 0.171875]\n",
"6956: [D loss: 0.701501, acc: 0.517578] [A loss: 0.740563, acc: 0.410156]\n",
"6957: [D loss: 0.706722, acc: 0.490234] [A loss: 0.816400, acc: 0.257812]\n",
"6958: [D loss: 0.699803, acc: 0.503906] [A loss: 0.839075, acc: 0.214844]\n",
"6959: [D loss: 0.707998, acc: 0.500000] [A loss: 0.813848, acc: 0.250000]\n",
"6960: [D loss: 0.703294, acc: 0.533203] [A loss: 0.791185, acc: 0.292969]\n",
"6961: [D loss: 0.687283, acc: 0.566406] [A loss: 0.788139, acc: 0.328125]\n",
"6962: [D loss: 0.704368, acc: 0.488281] [A loss: 0.787047, acc: 0.316406]\n",
"6963: [D loss: 0.702554, acc: 0.488281] [A loss: 0.850437, acc: 0.210938]\n",
"6964: [D loss: 0.679491, acc: 0.548828] [A loss: 0.757853, acc: 0.371094]\n",
"6965: [D loss: 0.706590, acc: 0.507812] [A loss: 0.851661, acc: 0.183594]\n",
"6966: [D loss: 0.706993, acc: 0.494141] [A loss: 0.785957, acc: 0.347656]\n",
"6967: [D loss: 0.695868, acc: 0.541016] [A loss: 0.830581, acc: 0.238281]\n",
"6968: [D loss: 0.706777, acc: 0.511719] [A loss: 0.826927, acc: 0.199219]\n",
"6969: [D loss: 0.705980, acc: 0.501953] [A loss: 0.800720, acc: 0.238281]\n",
"6970: [D loss: 0.710510, acc: 0.480469] [A loss: 0.821099, acc: 0.246094]\n",
"6971: [D loss: 0.695907, acc: 0.521484] [A loss: 0.806057, acc: 0.285156]\n",
"6972: [D loss: 0.697271, acc: 0.507812] [A loss: 0.772249, acc: 0.324219]\n",
"6973: [D loss: 0.695482, acc: 0.548828] [A loss: 0.812696, acc: 0.242188]\n",
"6974: [D loss: 0.707082, acc: 0.498047] [A loss: 0.950100, acc: 0.164062]\n",
"6975: [D loss: 0.693012, acc: 0.529297] [A loss: 0.725459, acc: 0.453125]\n",
"6976: [D loss: 0.704247, acc: 0.539062] [A loss: 0.905057, acc: 0.128906]\n",
"6977: [D loss: 0.699591, acc: 0.513672] [A loss: 0.736501, acc: 0.390625]\n",
"6978: [D loss: 0.697046, acc: 0.523438] [A loss: 0.856942, acc: 0.230469]\n",
"6979: [D loss: 0.714450, acc: 0.476562] [A loss: 0.761411, acc: 0.351562]\n",
"6980: [D loss: 0.718545, acc: 0.494141] [A loss: 0.884780, acc: 0.105469]\n",
"6981: [D loss: 0.701157, acc: 0.494141] [A loss: 0.658082, acc: 0.597656]\n",
"6982: [D loss: 0.707727, acc: 0.505859] [A loss: 0.870927, acc: 0.140625]\n",
"6983: [D loss: 0.693643, acc: 0.544922] [A loss: 0.760692, acc: 0.382812]\n",
"6984: [D loss: 0.717780, acc: 0.496094] [A loss: 0.810193, acc: 0.261719]\n",
"6985: [D loss: 0.696260, acc: 0.537109] [A loss: 0.840759, acc: 0.214844]\n",
"6986: [D loss: 0.701269, acc: 0.511719] [A loss: 0.812603, acc: 0.253906]\n",
"6987: [D loss: 0.697465, acc: 0.521484] [A loss: 0.744133, acc: 0.410156]\n",
"6988: [D loss: 0.707257, acc: 0.513672] [A loss: 0.895870, acc: 0.140625]\n",
"6989: [D loss: 0.698702, acc: 0.521484] [A loss: 0.692594, acc: 0.515625]\n",
"6990: [D loss: 0.718075, acc: 0.519531] [A loss: 0.906618, acc: 0.117188]\n",
"6991: [D loss: 0.700475, acc: 0.515625] [A loss: 0.696765, acc: 0.492188]\n",
"6992: [D loss: 0.708243, acc: 0.511719] [A loss: 0.839782, acc: 0.230469]\n",
"6993: [D loss: 0.690192, acc: 0.544922] [A loss: 0.787845, acc: 0.328125]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"6994: [D loss: 0.718872, acc: 0.505859] [A loss: 0.867435, acc: 0.156250]\n",
"6995: [D loss: 0.689502, acc: 0.539062] [A loss: 0.774246, acc: 0.304688]\n",
"6996: [D loss: 0.710852, acc: 0.519531] [A loss: 0.819229, acc: 0.203125]\n",
"6997: [D loss: 0.704726, acc: 0.505859] [A loss: 0.763251, acc: 0.351562]\n",
"6998: [D loss: 0.689104, acc: 0.562500] [A loss: 0.791003, acc: 0.289062]\n",
"6999: [D loss: 0.704526, acc: 0.507812] [A loss: 0.773820, acc: 0.312500]\n",
"7000: [D loss: 0.702122, acc: 0.501953] [A loss: 0.808814, acc: 0.253906]\n",
"7001: [D loss: 0.697383, acc: 0.531250] [A loss: 0.741711, acc: 0.402344]\n",
"7002: [D loss: 0.698079, acc: 0.546875] [A loss: 0.808597, acc: 0.253906]\n",
"7003: [D loss: 0.697751, acc: 0.519531] [A loss: 0.847206, acc: 0.164062]\n",
"7004: [D loss: 0.694333, acc: 0.554688] [A loss: 0.795301, acc: 0.250000]\n",
"7005: [D loss: 0.691597, acc: 0.500000] [A loss: 0.777967, acc: 0.312500]\n",
"7006: [D loss: 0.708013, acc: 0.519531] [A loss: 0.967229, acc: 0.101562]\n",
"7007: [D loss: 0.688272, acc: 0.541016] [A loss: 0.722076, acc: 0.445312]\n",
"7008: [D loss: 0.700095, acc: 0.533203] [A loss: 0.930940, acc: 0.117188]\n",
"7009: [D loss: 0.697538, acc: 0.529297] [A loss: 0.750426, acc: 0.375000]\n",
"7010: [D loss: 0.705624, acc: 0.503906] [A loss: 0.784112, acc: 0.300781]\n",
"7011: [D loss: 0.692450, acc: 0.544922] [A loss: 0.820980, acc: 0.253906]\n",
"7012: [D loss: 0.707342, acc: 0.484375] [A loss: 0.782391, acc: 0.277344]\n",
"7013: [D loss: 0.708366, acc: 0.464844] [A loss: 0.861205, acc: 0.175781]\n",
"7014: [D loss: 0.711825, acc: 0.500000] [A loss: 0.781582, acc: 0.320312]\n",
"7015: [D loss: 0.697906, acc: 0.525391] [A loss: 0.817320, acc: 0.242188]\n",
"7016: [D loss: 0.692424, acc: 0.519531] [A loss: 0.760191, acc: 0.335938]\n",
"7017: [D loss: 0.704722, acc: 0.509766] [A loss: 0.769816, acc: 0.320312]\n",
"7018: [D loss: 0.702635, acc: 0.511719] [A loss: 0.839996, acc: 0.207031]\n",
"7019: [D loss: 0.714949, acc: 0.464844] [A loss: 0.758599, acc: 0.347656]\n",
"7020: [D loss: 0.692377, acc: 0.533203] [A loss: 1.013331, acc: 0.054688]\n",
"7021: [D loss: 0.701702, acc: 0.513672] [A loss: 0.699171, acc: 0.507812]\n",
"7022: [D loss: 0.703221, acc: 0.542969] [A loss: 0.891726, acc: 0.152344]\n",
"7023: [D loss: 0.697207, acc: 0.533203] [A loss: 0.722112, acc: 0.484375]\n",
"7024: [D loss: 0.707502, acc: 0.537109] [A loss: 0.899350, acc: 0.136719]\n",
"7025: [D loss: 0.692162, acc: 0.529297] [A loss: 0.672980, acc: 0.585938]\n",
"7026: [D loss: 0.707329, acc: 0.533203] [A loss: 0.910224, acc: 0.089844]\n",
"7027: [D loss: 0.710708, acc: 0.509766] [A loss: 0.685465, acc: 0.558594]\n",
"7028: [D loss: 0.704387, acc: 0.517578] [A loss: 0.807467, acc: 0.277344]\n",
"7029: [D loss: 0.700453, acc: 0.523438] [A loss: 0.780547, acc: 0.324219]\n",
"7030: [D loss: 0.700788, acc: 0.513672] [A loss: 0.792542, acc: 0.265625]\n",
"7031: [D loss: 0.705775, acc: 0.509766] [A loss: 0.852831, acc: 0.167969]\n",
"7032: [D loss: 0.692701, acc: 0.517578] [A loss: 0.765109, acc: 0.347656]\n",
"7033: [D loss: 0.692016, acc: 0.544922] [A loss: 0.796085, acc: 0.289062]\n",
"7034: [D loss: 0.700065, acc: 0.523438] [A loss: 0.829272, acc: 0.246094]\n",
"7035: [D loss: 0.701185, acc: 0.462891] [A loss: 0.775154, acc: 0.289062]\n",
"7036: [D loss: 0.693575, acc: 0.541016] [A loss: 0.879184, acc: 0.214844]\n",
"7037: [D loss: 0.696022, acc: 0.513672] [A loss: 0.759943, acc: 0.375000]\n",
"7038: [D loss: 0.699781, acc: 0.527344] [A loss: 0.898096, acc: 0.128906]\n",
"7039: [D loss: 0.684858, acc: 0.546875] [A loss: 0.653003, acc: 0.625000]\n",
"7040: [D loss: 0.706956, acc: 0.529297] [A loss: 1.013354, acc: 0.027344]\n",
"7041: [D loss: 0.688867, acc: 0.546875] [A loss: 0.682279, acc: 0.539062]\n",
"7042: [D loss: 0.717466, acc: 0.498047] [A loss: 0.900143, acc: 0.113281]\n",
"7043: [D loss: 0.687545, acc: 0.554688] [A loss: 0.741752, acc: 0.394531]\n",
"7044: [D loss: 0.706899, acc: 0.496094] [A loss: 0.803623, acc: 0.226562]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7045: [D loss: 0.693839, acc: 0.515625] [A loss: 0.799717, acc: 0.269531]\n",
"7046: [D loss: 0.695736, acc: 0.513672] [A loss: 0.789746, acc: 0.261719]\n",
"7047: [D loss: 0.706271, acc: 0.488281] [A loss: 0.771668, acc: 0.339844]\n",
"7048: [D loss: 0.705338, acc: 0.503906] [A loss: 0.760767, acc: 0.335938]\n",
"7049: [D loss: 0.682331, acc: 0.562500] [A loss: 0.792696, acc: 0.371094]\n",
"7050: [D loss: 0.703937, acc: 0.513672] [A loss: 0.926056, acc: 0.140625]\n",
"7051: [D loss: 0.693109, acc: 0.542969] [A loss: 0.785940, acc: 0.304688]\n",
"7052: [D loss: 0.714015, acc: 0.496094] [A loss: 0.874499, acc: 0.183594]\n",
"7053: [D loss: 0.694410, acc: 0.535156] [A loss: 0.698144, acc: 0.500000]\n",
"7054: [D loss: 0.712624, acc: 0.503906] [A loss: 0.969128, acc: 0.078125]\n",
"7055: [D loss: 0.688005, acc: 0.548828] [A loss: 0.716536, acc: 0.468750]\n",
"7056: [D loss: 0.708037, acc: 0.525391] [A loss: 0.818629, acc: 0.199219]\n",
"7057: [D loss: 0.700443, acc: 0.535156] [A loss: 0.787139, acc: 0.355469]\n",
"7058: [D loss: 0.703226, acc: 0.507812] [A loss: 0.895476, acc: 0.187500]\n",
"7059: [D loss: 0.693823, acc: 0.529297] [A loss: 0.779404, acc: 0.304688]\n",
"7060: [D loss: 0.688717, acc: 0.541016] [A loss: 0.812673, acc: 0.226562]\n",
"7061: [D loss: 0.707453, acc: 0.529297] [A loss: 0.798107, acc: 0.269531]\n",
"7062: [D loss: 0.696553, acc: 0.556641] [A loss: 0.762475, acc: 0.355469]\n",
"7063: [D loss: 0.699631, acc: 0.519531] [A loss: 0.797363, acc: 0.246094]\n",
"7064: [D loss: 0.705817, acc: 0.511719] [A loss: 0.766033, acc: 0.339844]\n",
"7065: [D loss: 0.704607, acc: 0.509766] [A loss: 0.846120, acc: 0.179688]\n",
"7066: [D loss: 0.706171, acc: 0.513672] [A loss: 0.843826, acc: 0.230469]\n",
"7067: [D loss: 0.699819, acc: 0.503906] [A loss: 0.781636, acc: 0.316406]\n",
"7068: [D loss: 0.693710, acc: 0.550781] [A loss: 0.829052, acc: 0.210938]\n",
"7069: [D loss: 0.689770, acc: 0.552734] [A loss: 0.752165, acc: 0.332031]\n",
"7070: [D loss: 0.702740, acc: 0.505859] [A loss: 0.882912, acc: 0.179688]\n",
"7071: [D loss: 0.685813, acc: 0.533203] [A loss: 0.725546, acc: 0.453125]\n",
"7072: [D loss: 0.717624, acc: 0.509766] [A loss: 1.205479, acc: 0.042969]\n",
"7073: [D loss: 0.685392, acc: 0.568359] [A loss: 0.702233, acc: 0.503906]\n",
"7074: [D loss: 0.714867, acc: 0.521484] [A loss: 0.894020, acc: 0.121094]\n",
"7075: [D loss: 0.693453, acc: 0.527344] [A loss: 0.670753, acc: 0.570312]\n",
"7076: [D loss: 0.708314, acc: 0.511719] [A loss: 0.863362, acc: 0.171875]\n",
"7077: [D loss: 0.704681, acc: 0.494141] [A loss: 0.760693, acc: 0.363281]\n",
"7078: [D loss: 0.707838, acc: 0.482422] [A loss: 0.796290, acc: 0.296875]\n",
"7079: [D loss: 0.698104, acc: 0.500000] [A loss: 0.891544, acc: 0.207031]\n",
"7080: [D loss: 0.704347, acc: 0.492188] [A loss: 0.791083, acc: 0.277344]\n",
"7081: [D loss: 0.695753, acc: 0.519531] [A loss: 0.771831, acc: 0.308594]\n",
"7082: [D loss: 0.701886, acc: 0.554688] [A loss: 0.844243, acc: 0.218750]\n",
"7083: [D loss: 0.693304, acc: 0.552734] [A loss: 0.748535, acc: 0.371094]\n",
"7084: [D loss: 0.712440, acc: 0.505859] [A loss: 0.823496, acc: 0.246094]\n",
"7085: [D loss: 0.701630, acc: 0.503906] [A loss: 0.736368, acc: 0.402344]\n",
"7086: [D loss: 0.709037, acc: 0.531250] [A loss: 0.880533, acc: 0.132812]\n",
"7087: [D loss: 0.695648, acc: 0.558594] [A loss: 0.777466, acc: 0.312500]\n",
"7088: [D loss: 0.696476, acc: 0.533203] [A loss: 0.879006, acc: 0.156250]\n",
"7089: [D loss: 0.691729, acc: 0.544922] [A loss: 0.776625, acc: 0.347656]\n",
"7090: [D loss: 0.702290, acc: 0.519531] [A loss: 0.833005, acc: 0.257812]\n",
"7091: [D loss: 0.707338, acc: 0.521484] [A loss: 0.803214, acc: 0.257812]\n",
"7092: [D loss: 0.695156, acc: 0.525391] [A loss: 0.787955, acc: 0.289062]\n",
"7093: [D loss: 0.705044, acc: 0.531250] [A loss: 0.803204, acc: 0.332031]\n",
"7094: [D loss: 0.693517, acc: 0.539062] [A loss: 0.856299, acc: 0.191406]\n",
"7095: [D loss: 0.717145, acc: 0.455078] [A loss: 0.769724, acc: 0.382812]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7096: [D loss: 0.707442, acc: 0.503906] [A loss: 0.901451, acc: 0.113281]\n",
"7097: [D loss: 0.692838, acc: 0.541016] [A loss: 0.703464, acc: 0.503906]\n",
"7098: [D loss: 0.700646, acc: 0.503906] [A loss: 0.863858, acc: 0.164062]\n",
"7099: [D loss: 0.693656, acc: 0.558594] [A loss: 0.673239, acc: 0.554688]\n",
"7100: [D loss: 0.733516, acc: 0.490234] [A loss: 1.041027, acc: 0.054688]\n",
"7101: [D loss: 0.697842, acc: 0.517578] [A loss: 0.654251, acc: 0.585938]\n",
"7102: [D loss: 0.712986, acc: 0.500000] [A loss: 0.825401, acc: 0.222656]\n",
"7103: [D loss: 0.696042, acc: 0.539062] [A loss: 0.710342, acc: 0.468750]\n",
"7104: [D loss: 0.696353, acc: 0.546875] [A loss: 0.851285, acc: 0.214844]\n",
"7105: [D loss: 0.703018, acc: 0.519531] [A loss: 0.758786, acc: 0.359375]\n",
"7106: [D loss: 0.695577, acc: 0.529297] [A loss: 0.771730, acc: 0.335938]\n",
"7107: [D loss: 0.705116, acc: 0.505859] [A loss: 0.798779, acc: 0.296875]\n",
"7108: [D loss: 0.694371, acc: 0.527344] [A loss: 0.775171, acc: 0.332031]\n",
"7109: [D loss: 0.698765, acc: 0.531250] [A loss: 0.855159, acc: 0.207031]\n",
"7110: [D loss: 0.703591, acc: 0.515625] [A loss: 0.754126, acc: 0.382812]\n",
"7111: [D loss: 0.701109, acc: 0.519531] [A loss: 0.888796, acc: 0.195312]\n",
"7112: [D loss: 0.681188, acc: 0.562500] [A loss: 0.738678, acc: 0.425781]\n",
"7113: [D loss: 0.702402, acc: 0.511719] [A loss: 0.832385, acc: 0.234375]\n",
"7114: [D loss: 0.711512, acc: 0.527344] [A loss: 0.884962, acc: 0.136719]\n",
"7115: [D loss: 0.708747, acc: 0.496094] [A loss: 0.748797, acc: 0.402344]\n",
"7116: [D loss: 0.698310, acc: 0.527344] [A loss: 0.864206, acc: 0.160156]\n",
"7117: [D loss: 0.694718, acc: 0.511719] [A loss: 0.737201, acc: 0.453125]\n",
"7118: [D loss: 0.698856, acc: 0.523438] [A loss: 0.889850, acc: 0.132812]\n",
"7119: [D loss: 0.694633, acc: 0.533203] [A loss: 0.781797, acc: 0.316406]\n",
"7120: [D loss: 0.706549, acc: 0.521484] [A loss: 0.805793, acc: 0.261719]\n",
"7121: [D loss: 0.699142, acc: 0.521484] [A loss: 0.869496, acc: 0.148438]\n",
"7122: [D loss: 0.691887, acc: 0.537109] [A loss: 0.769960, acc: 0.328125]\n",
"7123: [D loss: 0.709320, acc: 0.500000] [A loss: 0.825971, acc: 0.210938]\n",
"7124: [D loss: 0.699105, acc: 0.525391] [A loss: 0.756185, acc: 0.378906]\n",
"7125: [D loss: 0.726268, acc: 0.500000] [A loss: 0.957984, acc: 0.082031]\n",
"7126: [D loss: 0.696905, acc: 0.542969] [A loss: 0.702431, acc: 0.488281]\n",
"7127: [D loss: 0.703547, acc: 0.539062] [A loss: 0.836996, acc: 0.222656]\n",
"7128: [D loss: 0.700109, acc: 0.529297] [A loss: 0.737666, acc: 0.414062]\n",
"7129: [D loss: 0.726354, acc: 0.498047] [A loss: 0.891858, acc: 0.117188]\n",
"7130: [D loss: 0.697591, acc: 0.501953] [A loss: 0.717066, acc: 0.460938]\n",
"7131: [D loss: 0.713938, acc: 0.523438] [A loss: 0.850062, acc: 0.207031]\n",
"7132: [D loss: 0.695133, acc: 0.531250] [A loss: 0.743042, acc: 0.410156]\n",
"7133: [D loss: 0.703335, acc: 0.523438] [A loss: 1.056687, acc: 0.117188]\n",
"7134: [D loss: 0.694872, acc: 0.535156] [A loss: 0.779503, acc: 0.316406]\n",
"7135: [D loss: 0.711389, acc: 0.507812] [A loss: 0.751145, acc: 0.367188]\n",
"7136: [D loss: 0.703269, acc: 0.505859] [A loss: 0.801620, acc: 0.328125]\n",
"7137: [D loss: 0.716110, acc: 0.503906] [A loss: 0.856663, acc: 0.156250]\n",
"7138: [D loss: 0.700355, acc: 0.513672] [A loss: 0.729001, acc: 0.441406]\n",
"7139: [D loss: 0.696980, acc: 0.515625] [A loss: 0.921286, acc: 0.101562]\n",
"7140: [D loss: 0.686713, acc: 0.539062] [A loss: 0.715085, acc: 0.464844]\n",
"7141: [D loss: 0.711925, acc: 0.505859] [A loss: 0.830844, acc: 0.199219]\n",
"7142: [D loss: 0.692598, acc: 0.539062] [A loss: 0.721328, acc: 0.476562]\n",
"7143: [D loss: 0.709098, acc: 0.517578] [A loss: 0.830350, acc: 0.238281]\n",
"7144: [D loss: 0.701174, acc: 0.505859] [A loss: 0.754007, acc: 0.382812]\n",
"7145: [D loss: 0.697114, acc: 0.542969] [A loss: 0.808579, acc: 0.261719]\n",
"7146: [D loss: 0.701866, acc: 0.505859] [A loss: 0.810622, acc: 0.246094]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7147: [D loss: 0.706956, acc: 0.488281] [A loss: 0.757069, acc: 0.367188]\n",
"7148: [D loss: 0.702318, acc: 0.507812] [A loss: 0.787967, acc: 0.300781]\n",
"7149: [D loss: 0.709621, acc: 0.498047] [A loss: 0.819331, acc: 0.234375]\n",
"7150: [D loss: 0.690063, acc: 0.550781] [A loss: 0.813989, acc: 0.273438]\n",
"7151: [D loss: 0.693847, acc: 0.542969] [A loss: 0.778332, acc: 0.324219]\n",
"7152: [D loss: 0.704427, acc: 0.527344] [A loss: 0.774612, acc: 0.332031]\n",
"7153: [D loss: 0.702204, acc: 0.535156] [A loss: 0.906010, acc: 0.113281]\n",
"7154: [D loss: 0.706661, acc: 0.525391] [A loss: 0.775493, acc: 0.316406]\n",
"7155: [D loss: 0.699262, acc: 0.529297] [A loss: 0.864295, acc: 0.175781]\n",
"7156: [D loss: 0.720120, acc: 0.458984] [A loss: 0.733813, acc: 0.460938]\n",
"7157: [D loss: 0.700772, acc: 0.503906] [A loss: 0.873139, acc: 0.164062]\n",
"7158: [D loss: 0.694648, acc: 0.537109] [A loss: 0.771917, acc: 0.375000]\n",
"7159: [D loss: 0.720326, acc: 0.460938] [A loss: 0.862568, acc: 0.175781]\n",
"7160: [D loss: 0.711402, acc: 0.488281] [A loss: 0.813305, acc: 0.269531]\n",
"7161: [D loss: 0.700909, acc: 0.539062] [A loss: 0.799109, acc: 0.253906]\n",
"7162: [D loss: 0.702500, acc: 0.507812] [A loss: 0.714215, acc: 0.441406]\n",
"7163: [D loss: 0.702520, acc: 0.527344] [A loss: 0.887499, acc: 0.136719]\n",
"7164: [D loss: 0.697903, acc: 0.535156] [A loss: 0.675437, acc: 0.578125]\n",
"7165: [D loss: 0.708534, acc: 0.525391] [A loss: 0.869713, acc: 0.156250]\n",
"7166: [D loss: 0.692184, acc: 0.531250] [A loss: 0.763752, acc: 0.332031]\n",
"7167: [D loss: 0.705419, acc: 0.509766] [A loss: 0.820329, acc: 0.222656]\n",
"7168: [D loss: 0.705734, acc: 0.490234] [A loss: 0.736709, acc: 0.398438]\n",
"7169: [D loss: 0.734075, acc: 0.503906] [A loss: 0.869012, acc: 0.175781]\n",
"7170: [D loss: 0.708391, acc: 0.503906] [A loss: 0.810902, acc: 0.265625]\n",
"7171: [D loss: 0.697770, acc: 0.527344] [A loss: 0.783196, acc: 0.304688]\n",
"7172: [D loss: 0.717307, acc: 0.464844] [A loss: 0.821365, acc: 0.218750]\n",
"7173: [D loss: 0.690475, acc: 0.542969] [A loss: 0.780402, acc: 0.324219]\n",
"7174: [D loss: 0.703912, acc: 0.498047] [A loss: 0.829472, acc: 0.199219]\n",
"7175: [D loss: 0.703531, acc: 0.517578] [A loss: 0.878172, acc: 0.167969]\n",
"7176: [D loss: 0.687572, acc: 0.556641] [A loss: 0.748228, acc: 0.417969]\n",
"7177: [D loss: 0.689008, acc: 0.542969] [A loss: 0.893060, acc: 0.152344]\n",
"7178: [D loss: 0.700988, acc: 0.517578] [A loss: 0.799977, acc: 0.265625]\n",
"7179: [D loss: 0.706579, acc: 0.500000] [A loss: 0.831367, acc: 0.230469]\n",
"7180: [D loss: 0.689056, acc: 0.542969] [A loss: 0.840182, acc: 0.230469]\n",
"7181: [D loss: 0.705425, acc: 0.507812] [A loss: 0.827227, acc: 0.210938]\n",
"7182: [D loss: 0.709398, acc: 0.503906] [A loss: 0.821016, acc: 0.281250]\n",
"7183: [D loss: 0.695095, acc: 0.541016] [A loss: 0.818475, acc: 0.296875]\n",
"7184: [D loss: 0.707872, acc: 0.519531] [A loss: 0.904582, acc: 0.144531]\n",
"7185: [D loss: 0.710264, acc: 0.503906] [A loss: 0.744700, acc: 0.378906]\n",
"7186: [D loss: 0.702315, acc: 0.537109] [A loss: 0.913153, acc: 0.085938]\n",
"7187: [D loss: 0.701370, acc: 0.501953] [A loss: 0.667141, acc: 0.574219]\n",
"7188: [D loss: 0.716589, acc: 0.525391] [A loss: 0.946797, acc: 0.085938]\n",
"7189: [D loss: 0.712415, acc: 0.503906] [A loss: 0.791629, acc: 0.371094]\n",
"7190: [D loss: 0.695490, acc: 0.507812] [A loss: 0.858494, acc: 0.175781]\n",
"7191: [D loss: 0.703567, acc: 0.500000] [A loss: 0.756303, acc: 0.367188]\n",
"7192: [D loss: 0.701772, acc: 0.517578] [A loss: 0.891030, acc: 0.117188]\n",
"7193: [D loss: 0.706648, acc: 0.480469] [A loss: 0.693351, acc: 0.570312]\n",
"7194: [D loss: 0.710583, acc: 0.519531] [A loss: 0.852459, acc: 0.210938]\n",
"7195: [D loss: 0.710305, acc: 0.478516] [A loss: 0.746856, acc: 0.375000]\n",
"7196: [D loss: 0.707392, acc: 0.515625] [A loss: 0.891885, acc: 0.117188]\n",
"7197: [D loss: 0.692020, acc: 0.544922] [A loss: 0.765869, acc: 0.359375]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7198: [D loss: 0.709309, acc: 0.527344] [A loss: 0.813353, acc: 0.222656]\n",
"7199: [D loss: 0.712447, acc: 0.505859] [A loss: 0.846361, acc: 0.187500]\n",
"7200: [D loss: 0.701396, acc: 0.511719] [A loss: 0.814153, acc: 0.246094]\n",
"7201: [D loss: 0.700922, acc: 0.537109] [A loss: 0.822992, acc: 0.222656]\n",
"7202: [D loss: 0.696557, acc: 0.503906] [A loss: 0.777679, acc: 0.312500]\n",
"7203: [D loss: 0.708447, acc: 0.494141] [A loss: 0.831227, acc: 0.210938]\n",
"7204: [D loss: 0.693992, acc: 0.527344] [A loss: 0.834854, acc: 0.230469]\n",
"7205: [D loss: 0.702819, acc: 0.517578] [A loss: 0.862768, acc: 0.144531]\n",
"7206: [D loss: 0.705066, acc: 0.523438] [A loss: 0.829082, acc: 0.238281]\n",
"7207: [D loss: 0.705482, acc: 0.511719] [A loss: 0.803842, acc: 0.246094]\n",
"7208: [D loss: 0.700952, acc: 0.513672] [A loss: 0.843849, acc: 0.253906]\n",
"7209: [D loss: 0.707762, acc: 0.500000] [A loss: 0.845428, acc: 0.203125]\n",
"7210: [D loss: 0.696518, acc: 0.529297] [A loss: 0.829657, acc: 0.218750]\n",
"7211: [D loss: 0.682683, acc: 0.550781] [A loss: 0.727980, acc: 0.410156]\n",
"7212: [D loss: 0.719494, acc: 0.480469] [A loss: 1.035740, acc: 0.078125]\n",
"7213: [D loss: 0.708076, acc: 0.505859] [A loss: 0.736070, acc: 0.472656]\n",
"7214: [D loss: 0.700880, acc: 0.527344] [A loss: 0.897691, acc: 0.140625]\n",
"7215: [D loss: 0.690226, acc: 0.496094] [A loss: 0.746846, acc: 0.402344]\n",
"7216: [D loss: 0.721441, acc: 0.505859] [A loss: 0.918197, acc: 0.089844]\n",
"7217: [D loss: 0.691358, acc: 0.539062] [A loss: 0.704354, acc: 0.523438]\n",
"7218: [D loss: 0.722948, acc: 0.496094] [A loss: 1.006617, acc: 0.062500]\n",
"7219: [D loss: 0.711040, acc: 0.521484] [A loss: 0.720530, acc: 0.496094]\n",
"7220: [D loss: 0.710011, acc: 0.498047] [A loss: 0.846698, acc: 0.164062]\n",
"7221: [D loss: 0.682646, acc: 0.552734] [A loss: 0.752392, acc: 0.414062]\n",
"7222: [D loss: 0.698961, acc: 0.507812] [A loss: 0.820128, acc: 0.246094]\n",
"7223: [D loss: 0.704757, acc: 0.517578] [A loss: 0.881237, acc: 0.191406]\n",
"7224: [D loss: 0.714767, acc: 0.486328] [A loss: 0.756402, acc: 0.359375]\n",
"7225: [D loss: 0.694340, acc: 0.529297] [A loss: 0.870882, acc: 0.132812]\n",
"7226: [D loss: 0.700361, acc: 0.513672] [A loss: 0.759724, acc: 0.355469]\n",
"7227: [D loss: 0.705073, acc: 0.517578] [A loss: 0.838880, acc: 0.210938]\n",
"7228: [D loss: 0.698942, acc: 0.523438] [A loss: 0.755805, acc: 0.367188]\n",
"7229: [D loss: 0.698577, acc: 0.515625] [A loss: 0.837425, acc: 0.207031]\n",
"7230: [D loss: 0.695750, acc: 0.521484] [A loss: 0.724766, acc: 0.437500]\n",
"7231: [D loss: 0.717256, acc: 0.500000] [A loss: 0.889457, acc: 0.128906]\n",
"7232: [D loss: 0.688243, acc: 0.560547] [A loss: 0.768402, acc: 0.324219]\n",
"7233: [D loss: 0.710899, acc: 0.511719] [A loss: 0.835361, acc: 0.222656]\n",
"7234: [D loss: 0.682467, acc: 0.554688] [A loss: 0.794998, acc: 0.320312]\n",
"7235: [D loss: 0.690933, acc: 0.539062] [A loss: 0.824713, acc: 0.230469]\n",
"7236: [D loss: 0.697246, acc: 0.517578] [A loss: 0.771181, acc: 0.347656]\n",
"7237: [D loss: 0.685111, acc: 0.560547] [A loss: 0.825262, acc: 0.238281]\n",
"7238: [D loss: 0.714961, acc: 0.476562] [A loss: 0.849642, acc: 0.171875]\n",
"7239: [D loss: 0.688906, acc: 0.550781] [A loss: 0.737518, acc: 0.425781]\n",
"7240: [D loss: 0.704276, acc: 0.541016] [A loss: 0.930634, acc: 0.128906]\n",
"7241: [D loss: 0.700998, acc: 0.507812] [A loss: 0.706872, acc: 0.511719]\n",
"7242: [D loss: 0.718585, acc: 0.507812] [A loss: 0.917868, acc: 0.132812]\n",
"7243: [D loss: 0.687208, acc: 0.562500] [A loss: 0.664714, acc: 0.570312]\n",
"7244: [D loss: 0.743631, acc: 0.478516] [A loss: 0.932021, acc: 0.128906]\n",
"7245: [D loss: 0.677259, acc: 0.585938] [A loss: 0.740438, acc: 0.406250]\n",
"7246: [D loss: 0.700528, acc: 0.515625] [A loss: 0.850184, acc: 0.175781]\n",
"7247: [D loss: 0.705378, acc: 0.507812] [A loss: 0.774110, acc: 0.328125]\n",
"7248: [D loss: 0.719695, acc: 0.457031] [A loss: 0.847726, acc: 0.207031]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7249: [D loss: 0.706488, acc: 0.511719] [A loss: 0.756891, acc: 0.371094]\n",
"7250: [D loss: 0.705024, acc: 0.527344] [A loss: 0.843067, acc: 0.187500]\n",
"7251: [D loss: 0.706411, acc: 0.500000] [A loss: 0.788011, acc: 0.339844]\n",
"7252: [D loss: 0.718195, acc: 0.505859] [A loss: 0.873982, acc: 0.218750]\n",
"7253: [D loss: 0.711708, acc: 0.503906] [A loss: 0.860961, acc: 0.179688]\n",
"7254: [D loss: 0.696916, acc: 0.529297] [A loss: 0.751566, acc: 0.371094]\n",
"7255: [D loss: 0.700935, acc: 0.541016] [A loss: 0.927129, acc: 0.078125]\n",
"7256: [D loss: 0.699627, acc: 0.484375] [A loss: 0.681180, acc: 0.542969]\n",
"7257: [D loss: 0.717162, acc: 0.523438] [A loss: 0.930037, acc: 0.105469]\n",
"7258: [D loss: 0.709913, acc: 0.488281] [A loss: 0.725162, acc: 0.429688]\n",
"7259: [D loss: 0.709151, acc: 0.513672] [A loss: 0.858030, acc: 0.183594]\n",
"7260: [D loss: 0.716619, acc: 0.486328] [A loss: 0.757993, acc: 0.335938]\n",
"7261: [D loss: 0.705448, acc: 0.466797] [A loss: 0.834748, acc: 0.203125]\n",
"7262: [D loss: 0.692148, acc: 0.525391] [A loss: 0.761599, acc: 0.367188]\n",
"7263: [D loss: 0.707366, acc: 0.511719] [A loss: 0.811957, acc: 0.242188]\n",
"7264: [D loss: 0.710676, acc: 0.509766] [A loss: 0.833773, acc: 0.222656]\n",
"7265: [D loss: 0.693717, acc: 0.525391] [A loss: 0.783837, acc: 0.292969]\n",
"7266: [D loss: 0.701154, acc: 0.523438] [A loss: 0.802585, acc: 0.246094]\n",
"7267: [D loss: 0.693520, acc: 0.539062] [A loss: 0.885195, acc: 0.144531]\n",
"7268: [D loss: 0.697789, acc: 0.539062] [A loss: 0.757410, acc: 0.394531]\n",
"7269: [D loss: 0.696724, acc: 0.558594] [A loss: 0.932995, acc: 0.085938]\n",
"7270: [D loss: 0.693239, acc: 0.519531] [A loss: 0.695042, acc: 0.523438]\n",
"7271: [D loss: 0.717184, acc: 0.515625] [A loss: 0.916773, acc: 0.101562]\n",
"7272: [D loss: 0.690294, acc: 0.544922] [A loss: 0.675728, acc: 0.550781]\n",
"7273: [D loss: 0.718095, acc: 0.519531] [A loss: 0.944964, acc: 0.117188]\n",
"7274: [D loss: 0.696746, acc: 0.533203] [A loss: 0.720099, acc: 0.472656]\n",
"7275: [D loss: 0.706314, acc: 0.496094] [A loss: 0.804775, acc: 0.250000]\n",
"7276: [D loss: 0.705326, acc: 0.507812] [A loss: 0.754542, acc: 0.386719]\n",
"7277: [D loss: 0.710902, acc: 0.509766] [A loss: 0.785631, acc: 0.308594]\n",
"7278: [D loss: 0.704211, acc: 0.496094] [A loss: 0.788880, acc: 0.292969]\n",
"7279: [D loss: 0.694836, acc: 0.509766] [A loss: 0.779902, acc: 0.285156]\n",
"7280: [D loss: 0.702788, acc: 0.498047] [A loss: 0.836111, acc: 0.199219]\n",
"7281: [D loss: 0.710389, acc: 0.492188] [A loss: 0.841081, acc: 0.179688]\n",
"7282: [D loss: 0.696082, acc: 0.503906] [A loss: 0.755585, acc: 0.378906]\n",
"7283: [D loss: 0.705226, acc: 0.505859] [A loss: 0.957396, acc: 0.082031]\n",
"7284: [D loss: 0.698527, acc: 0.503906] [A loss: 0.718564, acc: 0.453125]\n",
"7285: [D loss: 0.696546, acc: 0.531250] [A loss: 0.891259, acc: 0.152344]\n",
"7286: [D loss: 0.701335, acc: 0.519531] [A loss: 0.709282, acc: 0.511719]\n",
"7287: [D loss: 0.693704, acc: 0.515625] [A loss: 0.833004, acc: 0.210938]\n",
"7288: [D loss: 0.699277, acc: 0.529297] [A loss: 0.762405, acc: 0.386719]\n",
"7289: [D loss: 0.710171, acc: 0.509766] [A loss: 0.877143, acc: 0.156250]\n",
"7290: [D loss: 0.697659, acc: 0.507812] [A loss: 0.756869, acc: 0.367188]\n",
"7291: [D loss: 0.718201, acc: 0.482422] [A loss: 0.982866, acc: 0.125000]\n",
"7292: [D loss: 0.712141, acc: 0.480469] [A loss: 0.859352, acc: 0.171875]\n",
"7293: [D loss: 0.700281, acc: 0.525391] [A loss: 0.760032, acc: 0.347656]\n",
"7294: [D loss: 0.719135, acc: 0.519531] [A loss: 0.949914, acc: 0.093750]\n",
"7295: [D loss: 0.704411, acc: 0.505859] [A loss: 0.675939, acc: 0.554688]\n",
"7296: [D loss: 0.702710, acc: 0.552734] [A loss: 0.937621, acc: 0.070312]\n",
"7297: [D loss: 0.702655, acc: 0.515625] [A loss: 0.661205, acc: 0.585938]\n",
"7298: [D loss: 0.718454, acc: 0.503906] [A loss: 0.853437, acc: 0.160156]\n",
"7299: [D loss: 0.692217, acc: 0.548828] [A loss: 0.744111, acc: 0.382812]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7300: [D loss: 0.709552, acc: 0.496094] [A loss: 0.815488, acc: 0.269531]\n",
"7301: [D loss: 0.710049, acc: 0.488281] [A loss: 0.792209, acc: 0.328125]\n",
"7302: [D loss: 0.696064, acc: 0.529297] [A loss: 0.787906, acc: 0.273438]\n",
"7303: [D loss: 0.704280, acc: 0.527344] [A loss: 0.790882, acc: 0.300781]\n",
"7304: [D loss: 0.704050, acc: 0.515625] [A loss: 0.764143, acc: 0.347656]\n",
"7305: [D loss: 0.699738, acc: 0.515625] [A loss: 0.799460, acc: 0.292969]\n",
"7306: [D loss: 0.704047, acc: 0.494141] [A loss: 0.767878, acc: 0.328125]\n",
"7307: [D loss: 0.705925, acc: 0.509766] [A loss: 0.803810, acc: 0.304688]\n",
"7308: [D loss: 0.686305, acc: 0.541016] [A loss: 0.731925, acc: 0.402344]\n",
"7309: [D loss: 0.716020, acc: 0.513672] [A loss: 0.915337, acc: 0.117188]\n",
"7310: [D loss: 0.691851, acc: 0.542969] [A loss: 0.710083, acc: 0.480469]\n",
"7311: [D loss: 0.707215, acc: 0.521484] [A loss: 0.854941, acc: 0.199219]\n",
"7312: [D loss: 0.707859, acc: 0.505859] [A loss: 0.714665, acc: 0.476562]\n",
"7313: [D loss: 0.701376, acc: 0.498047] [A loss: 0.836231, acc: 0.214844]\n",
"7314: [D loss: 0.707787, acc: 0.509766] [A loss: 0.737031, acc: 0.425781]\n",
"7315: [D loss: 0.691095, acc: 0.533203] [A loss: 0.824482, acc: 0.253906]\n",
"7316: [D loss: 0.692521, acc: 0.537109] [A loss: 0.711345, acc: 0.484375]\n",
"7317: [D loss: 0.698303, acc: 0.535156] [A loss: 0.856384, acc: 0.171875]\n",
"7318: [D loss: 0.705714, acc: 0.501953] [A loss: 0.784062, acc: 0.335938]\n",
"7319: [D loss: 0.700128, acc: 0.537109] [A loss: 0.839420, acc: 0.199219]\n",
"7320: [D loss: 0.698931, acc: 0.505859] [A loss: 0.800924, acc: 0.308594]\n",
"7321: [D loss: 0.705525, acc: 0.541016] [A loss: 0.876457, acc: 0.144531]\n",
"7322: [D loss: 0.696749, acc: 0.541016] [A loss: 0.776027, acc: 0.316406]\n",
"7323: [D loss: 0.704140, acc: 0.527344] [A loss: 0.821597, acc: 0.250000]\n",
"7324: [D loss: 0.704082, acc: 0.494141] [A loss: 0.866586, acc: 0.222656]\n",
"7325: [D loss: 0.702849, acc: 0.527344] [A loss: 0.757805, acc: 0.343750]\n",
"7326: [D loss: 0.702118, acc: 0.529297] [A loss: 0.832322, acc: 0.222656]\n",
"7327: [D loss: 0.689180, acc: 0.556641] [A loss: 0.784558, acc: 0.332031]\n",
"7328: [D loss: 0.701782, acc: 0.521484] [A loss: 0.891384, acc: 0.167969]\n",
"7329: [D loss: 0.704223, acc: 0.503906] [A loss: 0.749883, acc: 0.367188]\n",
"7330: [D loss: 0.715017, acc: 0.507812] [A loss: 0.861645, acc: 0.164062]\n",
"7331: [D loss: 0.705212, acc: 0.492188] [A loss: 0.776600, acc: 0.316406]\n",
"7332: [D loss: 0.697600, acc: 0.535156] [A loss: 0.863390, acc: 0.175781]\n",
"7333: [D loss: 0.705646, acc: 0.507812] [A loss: 0.741415, acc: 0.425781]\n",
"7334: [D loss: 0.707723, acc: 0.505859] [A loss: 0.844658, acc: 0.191406]\n",
"7335: [D loss: 0.691106, acc: 0.539062] [A loss: 0.784564, acc: 0.324219]\n",
"7336: [D loss: 0.709473, acc: 0.500000] [A loss: 0.856446, acc: 0.203125]\n",
"7337: [D loss: 0.693887, acc: 0.542969] [A loss: 0.723096, acc: 0.433594]\n",
"7338: [D loss: 0.714535, acc: 0.496094] [A loss: 0.926344, acc: 0.121094]\n",
"7339: [D loss: 0.707159, acc: 0.468750] [A loss: 0.742319, acc: 0.394531]\n",
"7340: [D loss: 0.702062, acc: 0.519531] [A loss: 0.922264, acc: 0.152344]\n",
"7341: [D loss: 0.697856, acc: 0.515625] [A loss: 0.754692, acc: 0.378906]\n",
"7342: [D loss: 0.694636, acc: 0.544922] [A loss: 0.820348, acc: 0.250000]\n",
"7343: [D loss: 0.697658, acc: 0.515625] [A loss: 0.729139, acc: 0.421875]\n",
"7344: [D loss: 0.713248, acc: 0.498047] [A loss: 0.864784, acc: 0.191406]\n",
"7345: [D loss: 0.703970, acc: 0.498047] [A loss: 0.738317, acc: 0.382812]\n",
"7346: [D loss: 0.713100, acc: 0.486328] [A loss: 0.925912, acc: 0.085938]\n",
"7347: [D loss: 0.706906, acc: 0.501953] [A loss: 0.712633, acc: 0.460938]\n",
"7348: [D loss: 0.707207, acc: 0.517578] [A loss: 0.872065, acc: 0.183594]\n",
"7349: [D loss: 0.715288, acc: 0.484375] [A loss: 0.738499, acc: 0.386719]\n",
"7350: [D loss: 0.704542, acc: 0.505859] [A loss: 0.891521, acc: 0.128906]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7351: [D loss: 0.696880, acc: 0.533203] [A loss: 0.745638, acc: 0.390625]\n",
"7352: [D loss: 0.704372, acc: 0.544922] [A loss: 0.831317, acc: 0.152344]\n",
"7353: [D loss: 0.705992, acc: 0.496094] [A loss: 0.774190, acc: 0.351562]\n",
"7354: [D loss: 0.701815, acc: 0.505859] [A loss: 0.849024, acc: 0.261719]\n",
"7355: [D loss: 0.701643, acc: 0.513672] [A loss: 0.792128, acc: 0.296875]\n",
"7356: [D loss: 0.701415, acc: 0.544922] [A loss: 0.850314, acc: 0.191406]\n",
"7357: [D loss: 0.710805, acc: 0.478516] [A loss: 0.859096, acc: 0.214844]\n",
"7358: [D loss: 0.703181, acc: 0.498047] [A loss: 0.770284, acc: 0.292969]\n",
"7359: [D loss: 0.703727, acc: 0.519531] [A loss: 0.852962, acc: 0.207031]\n",
"7360: [D loss: 0.692291, acc: 0.517578] [A loss: 0.759086, acc: 0.328125]\n",
"7361: [D loss: 0.710188, acc: 0.503906] [A loss: 0.835299, acc: 0.210938]\n",
"7362: [D loss: 0.702713, acc: 0.503906] [A loss: 0.838692, acc: 0.238281]\n",
"7363: [D loss: 0.698482, acc: 0.509766] [A loss: 0.797318, acc: 0.324219]\n",
"7364: [D loss: 0.709945, acc: 0.498047] [A loss: 0.893780, acc: 0.140625]\n",
"7365: [D loss: 0.700762, acc: 0.519531] [A loss: 0.793348, acc: 0.289062]\n",
"7366: [D loss: 0.697284, acc: 0.529297] [A loss: 0.826531, acc: 0.226562]\n",
"7367: [D loss: 0.690984, acc: 0.544922] [A loss: 0.780545, acc: 0.320312]\n",
"7368: [D loss: 0.703043, acc: 0.527344] [A loss: 0.873526, acc: 0.144531]\n",
"7369: [D loss: 0.704845, acc: 0.482422] [A loss: 0.708535, acc: 0.488281]\n",
"7370: [D loss: 0.721319, acc: 0.496094] [A loss: 0.933659, acc: 0.113281]\n",
"7371: [D loss: 0.697357, acc: 0.542969] [A loss: 0.727327, acc: 0.398438]\n",
"7372: [D loss: 0.719172, acc: 0.498047] [A loss: 0.893671, acc: 0.109375]\n",
"7373: [D loss: 0.711353, acc: 0.492188] [A loss: 0.725220, acc: 0.445312]\n",
"7374: [D loss: 0.706071, acc: 0.505859] [A loss: 0.789099, acc: 0.347656]\n",
"7375: [D loss: 0.694614, acc: 0.509766] [A loss: 0.788243, acc: 0.308594]\n",
"7376: [D loss: 0.703969, acc: 0.498047] [A loss: 0.789773, acc: 0.289062]\n",
"7377: [D loss: 0.699658, acc: 0.500000] [A loss: 0.789545, acc: 0.269531]\n",
"7378: [D loss: 0.696818, acc: 0.505859] [A loss: 0.781643, acc: 0.320312]\n",
"7379: [D loss: 0.698711, acc: 0.503906] [A loss: 0.807830, acc: 0.285156]\n",
"7380: [D loss: 0.697011, acc: 0.562500] [A loss: 0.844534, acc: 0.207031]\n",
"7381: [D loss: 0.693592, acc: 0.544922] [A loss: 0.758857, acc: 0.347656]\n",
"7382: [D loss: 0.724071, acc: 0.507812] [A loss: 1.034597, acc: 0.046875]\n",
"7383: [D loss: 0.690510, acc: 0.550781] [A loss: 0.692102, acc: 0.519531]\n",
"7384: [D loss: 0.730584, acc: 0.488281] [A loss: 0.818198, acc: 0.265625]\n",
"7385: [D loss: 0.697737, acc: 0.529297] [A loss: 0.721438, acc: 0.445312]\n",
"7386: [D loss: 0.706704, acc: 0.523438] [A loss: 0.842122, acc: 0.156250]\n",
"7387: [D loss: 0.710326, acc: 0.500000] [A loss: 0.753619, acc: 0.386719]\n",
"7388: [D loss: 0.705018, acc: 0.529297] [A loss: 0.836580, acc: 0.238281]\n",
"7389: [D loss: 0.704134, acc: 0.498047] [A loss: 0.776800, acc: 0.300781]\n",
"7390: [D loss: 0.703820, acc: 0.525391] [A loss: 0.852446, acc: 0.160156]\n",
"7391: [D loss: 0.699272, acc: 0.511719] [A loss: 0.754155, acc: 0.359375]\n",
"7392: [D loss: 0.698713, acc: 0.523438] [A loss: 0.821691, acc: 0.242188]\n",
"7393: [D loss: 0.706694, acc: 0.486328] [A loss: 0.823452, acc: 0.214844]\n",
"7394: [D loss: 0.705380, acc: 0.490234] [A loss: 0.810978, acc: 0.269531]\n",
"7395: [D loss: 0.689921, acc: 0.537109] [A loss: 0.843287, acc: 0.214844]\n",
"7396: [D loss: 0.710559, acc: 0.486328] [A loss: 0.743651, acc: 0.417969]\n",
"7397: [D loss: 0.706363, acc: 0.513672] [A loss: 0.882382, acc: 0.171875]\n",
"7398: [D loss: 0.696569, acc: 0.523438] [A loss: 0.813484, acc: 0.289062]\n",
"7399: [D loss: 0.696880, acc: 0.531250] [A loss: 0.894374, acc: 0.144531]\n",
"7400: [D loss: 0.696511, acc: 0.521484] [A loss: 0.698259, acc: 0.511719]\n",
"7401: [D loss: 0.705646, acc: 0.513672] [A loss: 0.922625, acc: 0.132812]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7402: [D loss: 0.711395, acc: 0.498047] [A loss: 0.695310, acc: 0.511719]\n",
"7403: [D loss: 0.720955, acc: 0.480469] [A loss: 0.889246, acc: 0.164062]\n",
"7404: [D loss: 0.698477, acc: 0.501953] [A loss: 0.723344, acc: 0.421875]\n",
"7405: [D loss: 0.704744, acc: 0.531250] [A loss: 0.830086, acc: 0.238281]\n",
"7406: [D loss: 0.703672, acc: 0.521484] [A loss: 0.774321, acc: 0.351562]\n",
"7407: [D loss: 0.702672, acc: 0.531250] [A loss: 0.771808, acc: 0.332031]\n",
"7408: [D loss: 0.715560, acc: 0.496094] [A loss: 0.804579, acc: 0.261719]\n",
"7409: [D loss: 0.689434, acc: 0.537109] [A loss: 0.755664, acc: 0.367188]\n",
"7410: [D loss: 0.698203, acc: 0.525391] [A loss: 0.832333, acc: 0.226562]\n",
"7411: [D loss: 0.709887, acc: 0.488281] [A loss: 0.824042, acc: 0.253906]\n",
"7412: [D loss: 0.710296, acc: 0.511719] [A loss: 0.865447, acc: 0.160156]\n",
"7413: [D loss: 0.705984, acc: 0.503906] [A loss: 0.735622, acc: 0.406250]\n",
"7414: [D loss: 0.716792, acc: 0.480469] [A loss: 0.821882, acc: 0.226562]\n",
"7415: [D loss: 0.700458, acc: 0.513672] [A loss: 0.807789, acc: 0.238281]\n",
"7416: [D loss: 0.690548, acc: 0.562500] [A loss: 0.836792, acc: 0.203125]\n",
"7417: [D loss: 0.695025, acc: 0.533203] [A loss: 0.769401, acc: 0.343750]\n",
"7418: [D loss: 0.708246, acc: 0.494141] [A loss: 0.857855, acc: 0.179688]\n",
"7419: [D loss: 0.704090, acc: 0.523438] [A loss: 0.724245, acc: 0.402344]\n",
"7420: [D loss: 0.722664, acc: 0.515625] [A loss: 0.916954, acc: 0.105469]\n",
"7421: [D loss: 0.691863, acc: 0.535156] [A loss: 0.676939, acc: 0.550781]\n",
"7422: [D loss: 0.729662, acc: 0.476562] [A loss: 0.956617, acc: 0.070312]\n",
"7423: [D loss: 0.703440, acc: 0.500000] [A loss: 0.706988, acc: 0.468750]\n",
"7424: [D loss: 0.699893, acc: 0.527344] [A loss: 0.801125, acc: 0.257812]\n",
"7425: [D loss: 0.701181, acc: 0.492188] [A loss: 0.769771, acc: 0.304688]\n",
"7426: [D loss: 0.704171, acc: 0.496094] [A loss: 0.802472, acc: 0.277344]\n",
"7427: [D loss: 0.693254, acc: 0.531250] [A loss: 0.863886, acc: 0.187500]\n",
"7428: [D loss: 0.692853, acc: 0.531250] [A loss: 0.741632, acc: 0.378906]\n",
"7429: [D loss: 0.706156, acc: 0.507812] [A loss: 0.935226, acc: 0.097656]\n",
"7430: [D loss: 0.700775, acc: 0.531250] [A loss: 0.692411, acc: 0.535156]\n",
"7431: [D loss: 0.702419, acc: 0.535156] [A loss: 0.858779, acc: 0.175781]\n",
"7432: [D loss: 0.685464, acc: 0.560547] [A loss: 0.759404, acc: 0.390625]\n",
"7433: [D loss: 0.698773, acc: 0.544922] [A loss: 0.814587, acc: 0.292969]\n",
"7434: [D loss: 0.694879, acc: 0.503906] [A loss: 0.825488, acc: 0.246094]\n",
"7435: [D loss: 0.692429, acc: 0.537109] [A loss: 0.823125, acc: 0.261719]\n",
"7436: [D loss: 0.702454, acc: 0.509766] [A loss: 0.806183, acc: 0.277344]\n",
"7437: [D loss: 0.696630, acc: 0.550781] [A loss: 0.820074, acc: 0.226562]\n",
"7438: [D loss: 0.699116, acc: 0.501953] [A loss: 0.754053, acc: 0.410156]\n",
"7439: [D loss: 0.697337, acc: 0.511719] [A loss: 0.884156, acc: 0.136719]\n",
"7440: [D loss: 0.684011, acc: 0.535156] [A loss: 0.702580, acc: 0.484375]\n",
"7441: [D loss: 0.710407, acc: 0.498047] [A loss: 0.981796, acc: 0.089844]\n",
"7442: [D loss: 0.697145, acc: 0.519531] [A loss: 0.701452, acc: 0.468750]\n",
"7443: [D loss: 0.715198, acc: 0.519531] [A loss: 0.863644, acc: 0.183594]\n",
"7444: [D loss: 0.693602, acc: 0.517578] [A loss: 0.774741, acc: 0.347656]\n",
"7445: [D loss: 0.702926, acc: 0.531250] [A loss: 0.872834, acc: 0.187500]\n",
"7446: [D loss: 0.721273, acc: 0.464844] [A loss: 0.791154, acc: 0.281250]\n",
"7447: [D loss: 0.693746, acc: 0.515625] [A loss: 0.839136, acc: 0.199219]\n",
"7448: [D loss: 0.697505, acc: 0.542969] [A loss: 0.772871, acc: 0.320312]\n",
"7449: [D loss: 0.690564, acc: 0.556641] [A loss: 0.784184, acc: 0.316406]\n",
"7450: [D loss: 0.699094, acc: 0.523438] [A loss: 0.878815, acc: 0.136719]\n",
"7451: [D loss: 0.708033, acc: 0.492188] [A loss: 0.726083, acc: 0.425781]\n",
"7452: [D loss: 0.719396, acc: 0.500000] [A loss: 0.948579, acc: 0.078125]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7453: [D loss: 0.698252, acc: 0.521484] [A loss: 0.725955, acc: 0.417969]\n",
"7454: [D loss: 0.706309, acc: 0.535156] [A loss: 0.843041, acc: 0.222656]\n",
"7455: [D loss: 0.691019, acc: 0.562500] [A loss: 0.748637, acc: 0.378906]\n",
"7456: [D loss: 0.697604, acc: 0.544922] [A loss: 0.923151, acc: 0.117188]\n",
"7457: [D loss: 0.704993, acc: 0.490234] [A loss: 0.739815, acc: 0.417969]\n",
"7458: [D loss: 0.721301, acc: 0.488281] [A loss: 0.902240, acc: 0.109375]\n",
"7459: [D loss: 0.690042, acc: 0.537109] [A loss: 0.804051, acc: 0.304688]\n",
"7460: [D loss: 0.708869, acc: 0.521484] [A loss: 0.773508, acc: 0.281250]\n",
"7461: [D loss: 0.720735, acc: 0.472656] [A loss: 0.851637, acc: 0.171875]\n",
"7462: [D loss: 0.686926, acc: 0.535156] [A loss: 0.762760, acc: 0.343750]\n",
"7463: [D loss: 0.708816, acc: 0.503906] [A loss: 0.933400, acc: 0.128906]\n",
"7464: [D loss: 0.711252, acc: 0.464844] [A loss: 0.701717, acc: 0.546875]\n",
"7465: [D loss: 0.709610, acc: 0.494141] [A loss: 0.842776, acc: 0.187500]\n",
"7466: [D loss: 0.695388, acc: 0.525391] [A loss: 0.730369, acc: 0.414062]\n",
"7467: [D loss: 0.697777, acc: 0.537109] [A loss: 0.806259, acc: 0.246094]\n",
"7468: [D loss: 0.698494, acc: 0.525391] [A loss: 0.800983, acc: 0.277344]\n",
"7469: [D loss: 0.690798, acc: 0.539062] [A loss: 0.834748, acc: 0.214844]\n",
"7470: [D loss: 0.703441, acc: 0.521484] [A loss: 0.835907, acc: 0.222656]\n",
"7471: [D loss: 0.699833, acc: 0.515625] [A loss: 0.805060, acc: 0.230469]\n",
"7472: [D loss: 0.712694, acc: 0.486328] [A loss: 0.840841, acc: 0.218750]\n",
"7473: [D loss: 0.694268, acc: 0.548828] [A loss: 0.818595, acc: 0.257812]\n",
"7474: [D loss: 0.691695, acc: 0.531250] [A loss: 0.790784, acc: 0.289062]\n",
"7475: [D loss: 0.705766, acc: 0.515625] [A loss: 0.899361, acc: 0.140625]\n",
"7476: [D loss: 0.700338, acc: 0.525391] [A loss: 0.708919, acc: 0.457031]\n",
"7477: [D loss: 0.705425, acc: 0.511719] [A loss: 0.931260, acc: 0.109375]\n",
"7478: [D loss: 0.698906, acc: 0.523438] [A loss: 0.715802, acc: 0.453125]\n",
"7479: [D loss: 0.709370, acc: 0.509766] [A loss: 0.887509, acc: 0.140625]\n",
"7480: [D loss: 0.706722, acc: 0.505859] [A loss: 0.726752, acc: 0.457031]\n",
"7481: [D loss: 0.697614, acc: 0.531250] [A loss: 0.921719, acc: 0.144531]\n",
"7482: [D loss: 0.693668, acc: 0.539062] [A loss: 0.757636, acc: 0.363281]\n",
"7483: [D loss: 0.707720, acc: 0.478516] [A loss: 0.942387, acc: 0.097656]\n",
"7484: [D loss: 0.687214, acc: 0.539062] [A loss: 0.745952, acc: 0.390625]\n",
"7485: [D loss: 0.697748, acc: 0.503906] [A loss: 0.826277, acc: 0.234375]\n",
"7486: [D loss: 0.706779, acc: 0.492188] [A loss: 0.714624, acc: 0.460938]\n",
"7487: [D loss: 0.710357, acc: 0.521484] [A loss: 0.888533, acc: 0.109375]\n",
"7488: [D loss: 0.699321, acc: 0.505859] [A loss: 0.732540, acc: 0.382812]\n",
"7489: [D loss: 0.713406, acc: 0.501953] [A loss: 0.854330, acc: 0.183594]\n",
"7490: [D loss: 0.693999, acc: 0.517578] [A loss: 0.720299, acc: 0.457031]\n",
"7491: [D loss: 0.700484, acc: 0.523438] [A loss: 0.861190, acc: 0.175781]\n",
"7492: [D loss: 0.707167, acc: 0.509766] [A loss: 0.704191, acc: 0.507812]\n",
"7493: [D loss: 0.713176, acc: 0.521484] [A loss: 0.958637, acc: 0.062500]\n",
"7494: [D loss: 0.690871, acc: 0.554688] [A loss: 0.697843, acc: 0.492188]\n",
"7495: [D loss: 0.729065, acc: 0.470703] [A loss: 0.844373, acc: 0.230469]\n",
"7496: [D loss: 0.696075, acc: 0.519531] [A loss: 0.712708, acc: 0.492188]\n",
"7497: [D loss: 0.704073, acc: 0.523438] [A loss: 0.980417, acc: 0.113281]\n",
"7498: [D loss: 0.689372, acc: 0.539062] [A loss: 0.746404, acc: 0.421875]\n",
"7499: [D loss: 0.713419, acc: 0.517578] [A loss: 0.938426, acc: 0.105469]\n",
"7500: [D loss: 0.692179, acc: 0.542969] [A loss: 0.733561, acc: 0.441406]\n",
"7501: [D loss: 0.708109, acc: 0.511719] [A loss: 0.896013, acc: 0.160156]\n",
"7502: [D loss: 0.705710, acc: 0.511719] [A loss: 0.739405, acc: 0.402344]\n",
"7503: [D loss: 0.706544, acc: 0.529297] [A loss: 0.868941, acc: 0.187500]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7504: [D loss: 0.691764, acc: 0.505859] [A loss: 0.736135, acc: 0.382812]\n",
"7505: [D loss: 0.698956, acc: 0.513672] [A loss: 0.847699, acc: 0.171875]\n",
"7506: [D loss: 0.706643, acc: 0.507812] [A loss: 0.794919, acc: 0.289062]\n",
"7507: [D loss: 0.700908, acc: 0.501953] [A loss: 0.825834, acc: 0.207031]\n",
"7508: [D loss: 0.694263, acc: 0.517578] [A loss: 0.767438, acc: 0.300781]\n",
"7509: [D loss: 0.697774, acc: 0.556641] [A loss: 0.885940, acc: 0.140625]\n",
"7510: [D loss: 0.694265, acc: 0.519531] [A loss: 0.758865, acc: 0.351562]\n",
"7511: [D loss: 0.696903, acc: 0.521484] [A loss: 0.804677, acc: 0.285156]\n",
"7512: [D loss: 0.693369, acc: 0.537109] [A loss: 0.803693, acc: 0.277344]\n",
"7513: [D loss: 0.690955, acc: 0.550781] [A loss: 0.787859, acc: 0.312500]\n",
"7514: [D loss: 0.699913, acc: 0.527344] [A loss: 0.796764, acc: 0.289062]\n",
"7515: [D loss: 0.699276, acc: 0.501953] [A loss: 0.790718, acc: 0.234375]\n",
"7516: [D loss: 0.706562, acc: 0.513672] [A loss: 0.797422, acc: 0.265625]\n",
"7517: [D loss: 0.694215, acc: 0.523438] [A loss: 0.733547, acc: 0.414062]\n",
"7518: [D loss: 0.714570, acc: 0.507812] [A loss: 1.008177, acc: 0.074219]\n",
"7519: [D loss: 0.705418, acc: 0.498047] [A loss: 0.601624, acc: 0.753906]\n",
"7520: [D loss: 0.760632, acc: 0.488281] [A loss: 0.943487, acc: 0.097656]\n",
"7521: [D loss: 0.706086, acc: 0.527344] [A loss: 0.753193, acc: 0.359375]\n",
"7522: [D loss: 0.712308, acc: 0.503906] [A loss: 0.793491, acc: 0.285156]\n",
"7523: [D loss: 0.711922, acc: 0.505859] [A loss: 0.791510, acc: 0.324219]\n",
"7524: [D loss: 0.700815, acc: 0.533203] [A loss: 0.754925, acc: 0.359375]\n",
"7525: [D loss: 0.710265, acc: 0.519531] [A loss: 0.801062, acc: 0.292969]\n",
"7526: [D loss: 0.694559, acc: 0.539062] [A loss: 0.785469, acc: 0.289062]\n",
"7527: [D loss: 0.700418, acc: 0.503906] [A loss: 0.818291, acc: 0.281250]\n",
"7528: [D loss: 0.689032, acc: 0.529297] [A loss: 0.875044, acc: 0.199219]\n",
"7529: [D loss: 0.704290, acc: 0.503906] [A loss: 0.765254, acc: 0.332031]\n",
"7530: [D loss: 0.699391, acc: 0.521484] [A loss: 0.864616, acc: 0.109375]\n",
"7531: [D loss: 0.686858, acc: 0.541016] [A loss: 0.736927, acc: 0.402344]\n",
"7532: [D loss: 0.714890, acc: 0.494141] [A loss: 0.874281, acc: 0.128906]\n",
"7533: [D loss: 0.694397, acc: 0.541016] [A loss: 0.740885, acc: 0.402344]\n",
"7534: [D loss: 0.698852, acc: 0.546875] [A loss: 0.854132, acc: 0.171875]\n",
"7535: [D loss: 0.697038, acc: 0.529297] [A loss: 0.753667, acc: 0.371094]\n",
"7536: [D loss: 0.703356, acc: 0.513672] [A loss: 0.957003, acc: 0.097656]\n",
"7537: [D loss: 0.680811, acc: 0.531250] [A loss: 0.726038, acc: 0.414062]\n",
"7538: [D loss: 0.704548, acc: 0.531250] [A loss: 0.807171, acc: 0.238281]\n",
"7539: [D loss: 0.703789, acc: 0.517578] [A loss: 0.755810, acc: 0.343750]\n",
"7540: [D loss: 0.703157, acc: 0.509766] [A loss: 0.857563, acc: 0.160156]\n",
"7541: [D loss: 0.704434, acc: 0.501953] [A loss: 0.703436, acc: 0.527344]\n",
"7542: [D loss: 0.704121, acc: 0.529297] [A loss: 0.893270, acc: 0.156250]\n",
"7543: [D loss: 0.700637, acc: 0.509766] [A loss: 0.717496, acc: 0.484375]\n",
"7544: [D loss: 0.707276, acc: 0.517578] [A loss: 0.860799, acc: 0.144531]\n",
"7545: [D loss: 0.685884, acc: 0.550781] [A loss: 0.808901, acc: 0.281250]\n",
"7546: [D loss: 0.711250, acc: 0.515625] [A loss: 0.901437, acc: 0.125000]\n",
"7547: [D loss: 0.703044, acc: 0.505859] [A loss: 0.726304, acc: 0.437500]\n",
"7548: [D loss: 0.727904, acc: 0.476562] [A loss: 0.844920, acc: 0.191406]\n",
"7549: [D loss: 0.715220, acc: 0.476562] [A loss: 0.790571, acc: 0.312500]\n",
"7550: [D loss: 0.689980, acc: 0.552734] [A loss: 0.778778, acc: 0.289062]\n",
"7551: [D loss: 0.693118, acc: 0.562500] [A loss: 0.888645, acc: 0.152344]\n",
"7552: [D loss: 0.699250, acc: 0.521484] [A loss: 0.748137, acc: 0.371094]\n",
"7553: [D loss: 0.707175, acc: 0.511719] [A loss: 0.933795, acc: 0.109375]\n",
"7554: [D loss: 0.711417, acc: 0.458984] [A loss: 0.749513, acc: 0.359375]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7555: [D loss: 0.710994, acc: 0.509766] [A loss: 0.838079, acc: 0.218750]\n",
"7556: [D loss: 0.699435, acc: 0.529297] [A loss: 0.702375, acc: 0.488281]\n",
"7557: [D loss: 0.706181, acc: 0.500000] [A loss: 0.853512, acc: 0.187500]\n",
"7558: [D loss: 0.706936, acc: 0.511719] [A loss: 0.781656, acc: 0.269531]\n",
"7559: [D loss: 0.698413, acc: 0.542969] [A loss: 0.893937, acc: 0.121094]\n",
"7560: [D loss: 0.698291, acc: 0.523438] [A loss: 0.821192, acc: 0.253906]\n",
"7561: [D loss: 0.707603, acc: 0.496094] [A loss: 0.879752, acc: 0.175781]\n",
"7562: [D loss: 0.695514, acc: 0.517578] [A loss: 0.723082, acc: 0.468750]\n",
"7563: [D loss: 0.714176, acc: 0.513672] [A loss: 0.913629, acc: 0.121094]\n",
"7564: [D loss: 0.701688, acc: 0.482422] [A loss: 0.720896, acc: 0.410156]\n",
"7565: [D loss: 0.711298, acc: 0.527344] [A loss: 0.804147, acc: 0.242188]\n",
"7566: [D loss: 0.693909, acc: 0.533203] [A loss: 0.792748, acc: 0.312500]\n",
"7567: [D loss: 0.695673, acc: 0.544922] [A loss: 0.815781, acc: 0.230469]\n",
"7568: [D loss: 0.712681, acc: 0.486328] [A loss: 0.832428, acc: 0.218750]\n",
"7569: [D loss: 0.703120, acc: 0.515625] [A loss: 0.777946, acc: 0.316406]\n",
"7570: [D loss: 0.694785, acc: 0.533203] [A loss: 0.875779, acc: 0.175781]\n",
"7571: [D loss: 0.688494, acc: 0.552734] [A loss: 0.794993, acc: 0.281250]\n",
"7572: [D loss: 0.714358, acc: 0.507812] [A loss: 0.954544, acc: 0.097656]\n",
"7573: [D loss: 0.688859, acc: 0.548828] [A loss: 0.668220, acc: 0.578125]\n",
"7574: [D loss: 0.706291, acc: 0.523438] [A loss: 0.883732, acc: 0.121094]\n",
"7575: [D loss: 0.691468, acc: 0.527344] [A loss: 0.801075, acc: 0.281250]\n",
"7576: [D loss: 0.710908, acc: 0.521484] [A loss: 0.813629, acc: 0.261719]\n",
"7577: [D loss: 0.691799, acc: 0.570312] [A loss: 0.793372, acc: 0.320312]\n",
"7578: [D loss: 0.690489, acc: 0.544922] [A loss: 0.784498, acc: 0.265625]\n",
"7579: [D loss: 0.685348, acc: 0.560547] [A loss: 0.748369, acc: 0.406250]\n",
"7580: [D loss: 0.706314, acc: 0.521484] [A loss: 0.944499, acc: 0.152344]\n",
"7581: [D loss: 0.705987, acc: 0.492188] [A loss: 0.703416, acc: 0.511719]\n",
"7582: [D loss: 0.697605, acc: 0.539062] [A loss: 0.835117, acc: 0.171875]\n",
"7583: [D loss: 0.696930, acc: 0.525391] [A loss: 0.701412, acc: 0.507812]\n",
"7584: [D loss: 0.707965, acc: 0.517578] [A loss: 0.861174, acc: 0.191406]\n",
"7585: [D loss: 0.699479, acc: 0.501953] [A loss: 0.766954, acc: 0.328125]\n",
"7586: [D loss: 0.699106, acc: 0.539062] [A loss: 0.824084, acc: 0.195312]\n",
"7587: [D loss: 0.707477, acc: 0.490234] [A loss: 0.804505, acc: 0.250000]\n",
"7588: [D loss: 0.704538, acc: 0.505859] [A loss: 0.801270, acc: 0.257812]\n",
"7589: [D loss: 0.702584, acc: 0.501953] [A loss: 0.785358, acc: 0.300781]\n",
"7590: [D loss: 0.701039, acc: 0.511719] [A loss: 0.794021, acc: 0.304688]\n",
"7591: [D loss: 0.707551, acc: 0.484375] [A loss: 0.766966, acc: 0.355469]\n",
"7592: [D loss: 0.705091, acc: 0.503906] [A loss: 0.815734, acc: 0.207031]\n",
"7593: [D loss: 0.705443, acc: 0.511719] [A loss: 0.846156, acc: 0.230469]\n",
"7594: [D loss: 0.696305, acc: 0.515625] [A loss: 0.815053, acc: 0.257812]\n",
"7595: [D loss: 0.698398, acc: 0.541016] [A loss: 0.864678, acc: 0.164062]\n",
"7596: [D loss: 0.690323, acc: 0.548828] [A loss: 0.850894, acc: 0.187500]\n",
"7597: [D loss: 0.689062, acc: 0.544922] [A loss: 0.733326, acc: 0.437500]\n",
"7598: [D loss: 0.715133, acc: 0.492188] [A loss: 0.911467, acc: 0.078125]\n",
"7599: [D loss: 0.703896, acc: 0.513672] [A loss: 0.689614, acc: 0.574219]\n",
"7600: [D loss: 0.712948, acc: 0.501953] [A loss: 0.890588, acc: 0.171875]\n",
"7601: [D loss: 0.696783, acc: 0.523438] [A loss: 0.702380, acc: 0.492188]\n",
"7602: [D loss: 0.706270, acc: 0.513672] [A loss: 0.826629, acc: 0.265625]\n",
"7603: [D loss: 0.713316, acc: 0.484375] [A loss: 0.782552, acc: 0.304688]\n",
"7604: [D loss: 0.708917, acc: 0.496094] [A loss: 0.794901, acc: 0.281250]\n",
"7605: [D loss: 0.697673, acc: 0.537109] [A loss: 0.835268, acc: 0.210938]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7606: [D loss: 0.693471, acc: 0.548828] [A loss: 0.777902, acc: 0.316406]\n",
"7607: [D loss: 0.688302, acc: 0.554688] [A loss: 0.898551, acc: 0.132812]\n",
"7608: [D loss: 0.704899, acc: 0.500000] [A loss: 0.727128, acc: 0.406250]\n",
"7609: [D loss: 0.702147, acc: 0.533203] [A loss: 0.941644, acc: 0.113281]\n",
"7610: [D loss: 0.694269, acc: 0.515625] [A loss: 0.701647, acc: 0.500000]\n",
"7611: [D loss: 0.706121, acc: 0.507812] [A loss: 0.891815, acc: 0.117188]\n",
"7612: [D loss: 0.683250, acc: 0.552734] [A loss: 0.732956, acc: 0.425781]\n",
"7613: [D loss: 0.702322, acc: 0.529297] [A loss: 0.843348, acc: 0.152344]\n",
"7614: [D loss: 0.712012, acc: 0.515625] [A loss: 0.816731, acc: 0.289062]\n",
"7615: [D loss: 0.711576, acc: 0.480469] [A loss: 0.828570, acc: 0.164062]\n",
"7616: [D loss: 0.704147, acc: 0.521484] [A loss: 0.858449, acc: 0.195312]\n",
"7617: [D loss: 0.709062, acc: 0.498047] [A loss: 0.839851, acc: 0.171875]\n",
"7618: [D loss: 0.699380, acc: 0.544922] [A loss: 0.842210, acc: 0.187500]\n",
"7619: [D loss: 0.706506, acc: 0.503906] [A loss: 0.738970, acc: 0.378906]\n",
"7620: [D loss: 0.696125, acc: 0.529297] [A loss: 0.848930, acc: 0.183594]\n",
"7621: [D loss: 0.690802, acc: 0.544922] [A loss: 0.735728, acc: 0.433594]\n",
"7622: [D loss: 0.705665, acc: 0.515625] [A loss: 0.865922, acc: 0.136719]\n",
"7623: [D loss: 0.698323, acc: 0.531250] [A loss: 0.729439, acc: 0.421875]\n",
"7624: [D loss: 0.705541, acc: 0.500000] [A loss: 0.848300, acc: 0.156250]\n",
"7625: [D loss: 0.687124, acc: 0.570312] [A loss: 0.868697, acc: 0.183594]\n",
"7626: [D loss: 0.696807, acc: 0.525391] [A loss: 0.746547, acc: 0.425781]\n",
"7627: [D loss: 0.688985, acc: 0.529297] [A loss: 0.864847, acc: 0.144531]\n",
"7628: [D loss: 0.704993, acc: 0.500000] [A loss: 0.745065, acc: 0.390625]\n",
"7629: [D loss: 0.706414, acc: 0.503906] [A loss: 0.898453, acc: 0.164062]\n",
"7630: [D loss: 0.703480, acc: 0.521484] [A loss: 0.845478, acc: 0.203125]\n",
"7631: [D loss: 0.687667, acc: 0.519531] [A loss: 0.754218, acc: 0.378906]\n",
"7632: [D loss: 0.710212, acc: 0.490234] [A loss: 0.866587, acc: 0.167969]\n",
"7633: [D loss: 0.703179, acc: 0.500000] [A loss: 0.734421, acc: 0.445312]\n",
"7634: [D loss: 0.714029, acc: 0.500000] [A loss: 0.929037, acc: 0.070312]\n",
"7635: [D loss: 0.700854, acc: 0.523438] [A loss: 0.712918, acc: 0.464844]\n",
"7636: [D loss: 0.707869, acc: 0.527344] [A loss: 0.868601, acc: 0.140625]\n",
"7637: [D loss: 0.702834, acc: 0.533203] [A loss: 0.695875, acc: 0.468750]\n",
"7638: [D loss: 0.720103, acc: 0.500000] [A loss: 0.837126, acc: 0.187500]\n",
"7639: [D loss: 0.710755, acc: 0.505859] [A loss: 0.882941, acc: 0.187500]\n",
"7640: [D loss: 0.708382, acc: 0.488281] [A loss: 0.725470, acc: 0.402344]\n",
"7641: [D loss: 0.697264, acc: 0.519531] [A loss: 0.794138, acc: 0.277344]\n",
"7642: [D loss: 0.686512, acc: 0.566406] [A loss: 0.830889, acc: 0.175781]\n",
"7643: [D loss: 0.699147, acc: 0.542969] [A loss: 0.799425, acc: 0.285156]\n",
"7644: [D loss: 0.697732, acc: 0.541016] [A loss: 0.849567, acc: 0.222656]\n",
"7645: [D loss: 0.698195, acc: 0.556641] [A loss: 0.753939, acc: 0.378906]\n",
"7646: [D loss: 0.711777, acc: 0.509766] [A loss: 0.887887, acc: 0.164062]\n",
"7647: [D loss: 0.695597, acc: 0.519531] [A loss: 0.790507, acc: 0.289062]\n",
"7648: [D loss: 0.712782, acc: 0.500000] [A loss: 0.837003, acc: 0.179688]\n",
"7649: [D loss: 0.691725, acc: 0.527344] [A loss: 0.779342, acc: 0.355469]\n",
"7650: [D loss: 0.705686, acc: 0.519531] [A loss: 0.915756, acc: 0.105469]\n",
"7651: [D loss: 0.704021, acc: 0.517578] [A loss: 0.715136, acc: 0.464844]\n",
"7652: [D loss: 0.708056, acc: 0.519531] [A loss: 0.881261, acc: 0.164062]\n",
"7653: [D loss: 0.702731, acc: 0.515625] [A loss: 0.694349, acc: 0.535156]\n",
"7654: [D loss: 0.706253, acc: 0.523438] [A loss: 0.899221, acc: 0.144531]\n",
"7655: [D loss: 0.706352, acc: 0.488281] [A loss: 0.719551, acc: 0.464844]\n",
"7656: [D loss: 0.719259, acc: 0.517578] [A loss: 0.889753, acc: 0.171875]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7657: [D loss: 0.686085, acc: 0.562500] [A loss: 0.831688, acc: 0.226562]\n",
"7658: [D loss: 0.699519, acc: 0.535156] [A loss: 0.845298, acc: 0.179688]\n",
"7659: [D loss: 0.705348, acc: 0.490234] [A loss: 0.768588, acc: 0.355469]\n",
"7660: [D loss: 0.705435, acc: 0.535156] [A loss: 0.795193, acc: 0.285156]\n",
"7661: [D loss: 0.702754, acc: 0.537109] [A loss: 0.791018, acc: 0.320312]\n",
"7662: [D loss: 0.686350, acc: 0.566406] [A loss: 0.776456, acc: 0.300781]\n",
"7663: [D loss: 0.710561, acc: 0.488281] [A loss: 0.800997, acc: 0.277344]\n",
"7664: [D loss: 0.712498, acc: 0.480469] [A loss: 0.788243, acc: 0.281250]\n",
"7665: [D loss: 0.702070, acc: 0.509766] [A loss: 0.748058, acc: 0.378906]\n",
"7666: [D loss: 0.700808, acc: 0.507812] [A loss: 0.876080, acc: 0.156250]\n",
"7667: [D loss: 0.690412, acc: 0.550781] [A loss: 0.781105, acc: 0.328125]\n",
"7668: [D loss: 0.695775, acc: 0.531250] [A loss: 0.825090, acc: 0.214844]\n",
"7669: [D loss: 0.687087, acc: 0.552734] [A loss: 0.792136, acc: 0.246094]\n",
"7670: [D loss: 0.698960, acc: 0.509766] [A loss: 0.786605, acc: 0.285156]\n",
"7671: [D loss: 0.690348, acc: 0.556641] [A loss: 0.758199, acc: 0.351562]\n",
"7672: [D loss: 0.700129, acc: 0.537109] [A loss: 0.911001, acc: 0.113281]\n",
"7673: [D loss: 0.690395, acc: 0.517578] [A loss: 0.678956, acc: 0.554688]\n",
"7674: [D loss: 0.739365, acc: 0.486328] [A loss: 1.027086, acc: 0.019531]\n",
"7675: [D loss: 0.712495, acc: 0.505859] [A loss: 0.746537, acc: 0.378906]\n",
"7676: [D loss: 0.708342, acc: 0.503906] [A loss: 0.785411, acc: 0.292969]\n",
"7677: [D loss: 0.692313, acc: 0.523438] [A loss: 0.770133, acc: 0.300781]\n",
"7678: [D loss: 0.694062, acc: 0.537109] [A loss: 0.773537, acc: 0.332031]\n",
"7679: [D loss: 0.708236, acc: 0.484375] [A loss: 0.799986, acc: 0.246094]\n",
"7680: [D loss: 0.697992, acc: 0.517578] [A loss: 0.795347, acc: 0.289062]\n",
"7681: [D loss: 0.700142, acc: 0.509766] [A loss: 0.869976, acc: 0.167969]\n",
"7682: [D loss: 0.708763, acc: 0.525391] [A loss: 0.806244, acc: 0.265625]\n",
"7683: [D loss: 0.697834, acc: 0.509766] [A loss: 0.826315, acc: 0.218750]\n",
"7684: [D loss: 0.705268, acc: 0.484375] [A loss: 0.803598, acc: 0.261719]\n",
"7685: [D loss: 0.689093, acc: 0.554688] [A loss: 0.844171, acc: 0.238281]\n",
"7686: [D loss: 0.691957, acc: 0.552734] [A loss: 0.811299, acc: 0.250000]\n",
"7687: [D loss: 0.700235, acc: 0.546875] [A loss: 0.813972, acc: 0.234375]\n",
"7688: [D loss: 0.690791, acc: 0.539062] [A loss: 0.834938, acc: 0.207031]\n",
"7689: [D loss: 0.704781, acc: 0.503906] [A loss: 0.811478, acc: 0.253906]\n",
"7690: [D loss: 0.698144, acc: 0.531250] [A loss: 0.843471, acc: 0.187500]\n",
"7691: [D loss: 0.689085, acc: 0.505859] [A loss: 0.757892, acc: 0.394531]\n",
"7692: [D loss: 0.704745, acc: 0.511719] [A loss: 0.860167, acc: 0.164062]\n",
"7693: [D loss: 0.686933, acc: 0.527344] [A loss: 0.788437, acc: 0.296875]\n",
"7694: [D loss: 0.706802, acc: 0.496094] [A loss: 0.901699, acc: 0.105469]\n",
"7695: [D loss: 0.686896, acc: 0.533203] [A loss: 0.795566, acc: 0.296875]\n",
"7696: [D loss: 0.696591, acc: 0.527344] [A loss: 0.895070, acc: 0.109375]\n",
"7697: [D loss: 0.708331, acc: 0.519531] [A loss: 0.766656, acc: 0.363281]\n",
"7698: [D loss: 0.688999, acc: 0.562500] [A loss: 0.834145, acc: 0.218750]\n",
"7699: [D loss: 0.698269, acc: 0.529297] [A loss: 0.814152, acc: 0.261719]\n",
"7700: [D loss: 0.711856, acc: 0.501953] [A loss: 0.852082, acc: 0.246094]\n",
"7701: [D loss: 0.697081, acc: 0.509766] [A loss: 0.772296, acc: 0.300781]\n",
"7702: [D loss: 0.717149, acc: 0.496094] [A loss: 0.861253, acc: 0.167969]\n",
"7703: [D loss: 0.691565, acc: 0.560547] [A loss: 0.742617, acc: 0.398438]\n",
"7704: [D loss: 0.708056, acc: 0.511719] [A loss: 0.896800, acc: 0.132812]\n",
"7705: [D loss: 0.713700, acc: 0.472656] [A loss: 0.864303, acc: 0.238281]\n",
"7706: [D loss: 0.715943, acc: 0.484375] [A loss: 0.797818, acc: 0.246094]\n",
"7707: [D loss: 0.710983, acc: 0.496094] [A loss: 0.851242, acc: 0.195312]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7708: [D loss: 0.690688, acc: 0.521484] [A loss: 0.771602, acc: 0.289062]\n",
"7709: [D loss: 0.710092, acc: 0.507812] [A loss: 0.926264, acc: 0.093750]\n",
"7710: [D loss: 0.699036, acc: 0.525391] [A loss: 0.707786, acc: 0.488281]\n",
"7711: [D loss: 0.713141, acc: 0.511719] [A loss: 0.871319, acc: 0.156250]\n",
"7712: [D loss: 0.702464, acc: 0.498047] [A loss: 0.730512, acc: 0.476562]\n",
"7713: [D loss: 0.700170, acc: 0.531250] [A loss: 0.872652, acc: 0.167969]\n",
"7714: [D loss: 0.701063, acc: 0.507812] [A loss: 0.804790, acc: 0.246094]\n",
"7715: [D loss: 0.691599, acc: 0.546875] [A loss: 0.809806, acc: 0.242188]\n",
"7716: [D loss: 0.696848, acc: 0.529297] [A loss: 0.846867, acc: 0.222656]\n",
"7717: [D loss: 0.698852, acc: 0.500000] [A loss: 0.784447, acc: 0.285156]\n",
"7718: [D loss: 0.697838, acc: 0.511719] [A loss: 0.858942, acc: 0.167969]\n",
"7719: [D loss: 0.683803, acc: 0.564453] [A loss: 0.816419, acc: 0.222656]\n",
"7720: [D loss: 0.693330, acc: 0.525391] [A loss: 0.904253, acc: 0.156250]\n",
"7721: [D loss: 0.702659, acc: 0.509766] [A loss: 0.806719, acc: 0.324219]\n",
"7722: [D loss: 0.710625, acc: 0.535156] [A loss: 0.892870, acc: 0.152344]\n",
"7723: [D loss: 0.699086, acc: 0.492188] [A loss: 0.768993, acc: 0.296875]\n",
"7724: [D loss: 0.694018, acc: 0.513672] [A loss: 0.829303, acc: 0.214844]\n",
"7725: [D loss: 0.697960, acc: 0.525391] [A loss: 0.753404, acc: 0.347656]\n",
"7726: [D loss: 0.698495, acc: 0.521484] [A loss: 0.875336, acc: 0.160156]\n",
"7727: [D loss: 0.697940, acc: 0.505859] [A loss: 0.694131, acc: 0.523438]\n",
"7728: [D loss: 0.709872, acc: 0.519531] [A loss: 0.886205, acc: 0.132812]\n",
"7729: [D loss: 0.706439, acc: 0.490234] [A loss: 0.755174, acc: 0.417969]\n",
"7730: [D loss: 0.715915, acc: 0.500000] [A loss: 0.900051, acc: 0.105469]\n",
"7731: [D loss: 0.697431, acc: 0.521484] [A loss: 0.730071, acc: 0.425781]\n",
"7732: [D loss: 0.704971, acc: 0.556641] [A loss: 0.873336, acc: 0.214844]\n",
"7733: [D loss: 0.705201, acc: 0.503906] [A loss: 0.850891, acc: 0.218750]\n",
"7734: [D loss: 0.710548, acc: 0.505859] [A loss: 0.807021, acc: 0.281250]\n",
"7735: [D loss: 0.712281, acc: 0.500000] [A loss: 0.779290, acc: 0.371094]\n",
"7736: [D loss: 0.702149, acc: 0.511719] [A loss: 0.864592, acc: 0.171875]\n",
"7737: [D loss: 0.696728, acc: 0.533203] [A loss: 0.829252, acc: 0.230469]\n",
"7738: [D loss: 0.695483, acc: 0.537109] [A loss: 0.790533, acc: 0.320312]\n",
"7739: [D loss: 0.705744, acc: 0.515625] [A loss: 0.825345, acc: 0.226562]\n",
"7740: [D loss: 0.687825, acc: 0.548828] [A loss: 0.791317, acc: 0.308594]\n",
"7741: [D loss: 0.699976, acc: 0.523438] [A loss: 0.840487, acc: 0.226562]\n",
"7742: [D loss: 0.694801, acc: 0.548828] [A loss: 0.758191, acc: 0.390625]\n",
"7743: [D loss: 0.715373, acc: 0.501953] [A loss: 0.911325, acc: 0.113281]\n",
"7744: [D loss: 0.705314, acc: 0.517578] [A loss: 0.699813, acc: 0.542969]\n",
"7745: [D loss: 0.726585, acc: 0.513672] [A loss: 0.895239, acc: 0.136719]\n",
"7746: [D loss: 0.691590, acc: 0.541016] [A loss: 0.735774, acc: 0.425781]\n",
"7747: [D loss: 0.709978, acc: 0.519531] [A loss: 0.815571, acc: 0.285156]\n",
"7748: [D loss: 0.700420, acc: 0.527344] [A loss: 0.740961, acc: 0.394531]\n",
"7749: [D loss: 0.698889, acc: 0.515625] [A loss: 0.911699, acc: 0.125000]\n",
"7750: [D loss: 0.693217, acc: 0.531250] [A loss: 0.684014, acc: 0.546875]\n",
"7751: [D loss: 0.717088, acc: 0.519531] [A loss: 0.887597, acc: 0.132812]\n",
"7752: [D loss: 0.706549, acc: 0.513672] [A loss: 0.763910, acc: 0.359375]\n",
"7753: [D loss: 0.693302, acc: 0.531250] [A loss: 0.829700, acc: 0.253906]\n",
"7754: [D loss: 0.694372, acc: 0.517578] [A loss: 0.737682, acc: 0.382812]\n",
"7755: [D loss: 0.703535, acc: 0.509766] [A loss: 0.846181, acc: 0.234375]\n",
"7756: [D loss: 0.706677, acc: 0.501953] [A loss: 0.814411, acc: 0.242188]\n",
"7757: [D loss: 0.698697, acc: 0.523438] [A loss: 0.842886, acc: 0.203125]\n",
"7758: [D loss: 0.686052, acc: 0.529297] [A loss: 0.817991, acc: 0.234375]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7759: [D loss: 0.692502, acc: 0.556641] [A loss: 0.820503, acc: 0.250000]\n",
"7760: [D loss: 0.702462, acc: 0.513672] [A loss: 0.840620, acc: 0.222656]\n",
"7761: [D loss: 0.708134, acc: 0.515625] [A loss: 0.869685, acc: 0.179688]\n",
"7762: [D loss: 0.689969, acc: 0.541016] [A loss: 0.786087, acc: 0.281250]\n",
"7763: [D loss: 0.711478, acc: 0.515625] [A loss: 0.879641, acc: 0.164062]\n",
"7764: [D loss: 0.709481, acc: 0.500000] [A loss: 0.753968, acc: 0.386719]\n",
"7765: [D loss: 0.699951, acc: 0.509766] [A loss: 0.825214, acc: 0.226562]\n",
"7766: [D loss: 0.702828, acc: 0.521484] [A loss: 0.884086, acc: 0.132812]\n",
"7767: [D loss: 0.705781, acc: 0.500000] [A loss: 0.719721, acc: 0.460938]\n",
"7768: [D loss: 0.703287, acc: 0.519531] [A loss: 0.936985, acc: 0.113281]\n",
"7769: [D loss: 0.693777, acc: 0.525391] [A loss: 0.704714, acc: 0.511719]\n",
"7770: [D loss: 0.707319, acc: 0.515625] [A loss: 0.885763, acc: 0.152344]\n",
"7771: [D loss: 0.699573, acc: 0.533203] [A loss: 0.730272, acc: 0.445312]\n",
"7772: [D loss: 0.711469, acc: 0.501953] [A loss: 0.824807, acc: 0.238281]\n",
"7773: [D loss: 0.696382, acc: 0.535156] [A loss: 0.782542, acc: 0.292969]\n",
"7774: [D loss: 0.692411, acc: 0.550781] [A loss: 0.779032, acc: 0.300781]\n",
"7775: [D loss: 0.709911, acc: 0.511719] [A loss: 0.792227, acc: 0.273438]\n",
"7776: [D loss: 0.715017, acc: 0.509766] [A loss: 0.848778, acc: 0.187500]\n",
"7777: [D loss: 0.687970, acc: 0.525391] [A loss: 0.799217, acc: 0.257812]\n",
"7778: [D loss: 0.719339, acc: 0.500000] [A loss: 0.879757, acc: 0.148438]\n",
"7779: [D loss: 0.694717, acc: 0.541016] [A loss: 0.796208, acc: 0.316406]\n",
"7780: [D loss: 0.702493, acc: 0.515625] [A loss: 0.838782, acc: 0.164062]\n",
"7781: [D loss: 0.699733, acc: 0.525391] [A loss: 0.838958, acc: 0.191406]\n",
"7782: [D loss: 0.706724, acc: 0.505859] [A loss: 0.776994, acc: 0.316406]\n",
"7783: [D loss: 0.688254, acc: 0.531250] [A loss: 0.824976, acc: 0.203125]\n",
"7784: [D loss: 0.704567, acc: 0.488281] [A loss: 0.810104, acc: 0.261719]\n",
"7785: [D loss: 0.692889, acc: 0.564453] [A loss: 0.865529, acc: 0.175781]\n",
"7786: [D loss: 0.692260, acc: 0.541016] [A loss: 0.683840, acc: 0.566406]\n",
"7787: [D loss: 0.715350, acc: 0.505859] [A loss: 0.940761, acc: 0.074219]\n",
"7788: [D loss: 0.703333, acc: 0.515625] [A loss: 0.698075, acc: 0.519531]\n",
"7789: [D loss: 0.718359, acc: 0.519531] [A loss: 0.909127, acc: 0.128906]\n",
"7790: [D loss: 0.707394, acc: 0.515625] [A loss: 0.718383, acc: 0.429688]\n",
"7791: [D loss: 0.718289, acc: 0.494141] [A loss: 0.850014, acc: 0.175781]\n",
"7792: [D loss: 0.707038, acc: 0.505859] [A loss: 0.810262, acc: 0.250000]\n",
"7793: [D loss: 0.695636, acc: 0.515625] [A loss: 0.800138, acc: 0.250000]\n",
"7794: [D loss: 0.706058, acc: 0.501953] [A loss: 0.828939, acc: 0.226562]\n",
"7795: [D loss: 0.707261, acc: 0.515625] [A loss: 0.792406, acc: 0.312500]\n",
"7796: [D loss: 0.699501, acc: 0.494141] [A loss: 0.822357, acc: 0.222656]\n",
"7797: [D loss: 0.697996, acc: 0.507812] [A loss: 0.809675, acc: 0.324219]\n",
"7798: [D loss: 0.706510, acc: 0.527344] [A loss: 0.919123, acc: 0.113281]\n",
"7799: [D loss: 0.701876, acc: 0.525391] [A loss: 0.669187, acc: 0.578125]\n",
"7800: [D loss: 0.714762, acc: 0.517578] [A loss: 0.921742, acc: 0.089844]\n",
"7801: [D loss: 0.694480, acc: 0.523438] [A loss: 0.714035, acc: 0.472656]\n",
"7802: [D loss: 0.712195, acc: 0.521484] [A loss: 0.842395, acc: 0.203125]\n",
"7803: [D loss: 0.691051, acc: 0.523438] [A loss: 0.759822, acc: 0.351562]\n",
"7804: [D loss: 0.700278, acc: 0.521484] [A loss: 0.801831, acc: 0.277344]\n",
"7805: [D loss: 0.701431, acc: 0.531250] [A loss: 0.791604, acc: 0.265625]\n",
"7806: [D loss: 0.689177, acc: 0.550781] [A loss: 0.749855, acc: 0.382812]\n",
"7807: [D loss: 0.694318, acc: 0.523438] [A loss: 0.807141, acc: 0.238281]\n",
"7808: [D loss: 0.692706, acc: 0.537109] [A loss: 0.805022, acc: 0.257812]\n",
"7809: [D loss: 0.697946, acc: 0.525391] [A loss: 0.797587, acc: 0.273438]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7810: [D loss: 0.714377, acc: 0.460938] [A loss: 0.815169, acc: 0.261719]\n",
"7811: [D loss: 0.701557, acc: 0.515625] [A loss: 0.767439, acc: 0.351562]\n",
"7812: [D loss: 0.699582, acc: 0.523438] [A loss: 0.894395, acc: 0.160156]\n",
"7813: [D loss: 0.704125, acc: 0.500000] [A loss: 0.785260, acc: 0.277344]\n",
"7814: [D loss: 0.707525, acc: 0.507812] [A loss: 0.912946, acc: 0.105469]\n",
"7815: [D loss: 0.707831, acc: 0.476562] [A loss: 0.729432, acc: 0.410156]\n",
"7816: [D loss: 0.710429, acc: 0.525391] [A loss: 0.916100, acc: 0.097656]\n",
"7817: [D loss: 0.706822, acc: 0.500000] [A loss: 0.716079, acc: 0.496094]\n",
"7818: [D loss: 0.692411, acc: 0.568359] [A loss: 0.819966, acc: 0.257812]\n",
"7819: [D loss: 0.704554, acc: 0.517578] [A loss: 0.836296, acc: 0.226562]\n",
"7820: [D loss: 0.702622, acc: 0.527344] [A loss: 0.811084, acc: 0.257812]\n",
"7821: [D loss: 0.712382, acc: 0.468750] [A loss: 0.786194, acc: 0.296875]\n",
"7822: [D loss: 0.711817, acc: 0.501953] [A loss: 0.847374, acc: 0.183594]\n",
"7823: [D loss: 0.697596, acc: 0.494141] [A loss: 0.794019, acc: 0.289062]\n",
"7824: [D loss: 0.703781, acc: 0.498047] [A loss: 0.802970, acc: 0.261719]\n",
"7825: [D loss: 0.704279, acc: 0.533203] [A loss: 0.869847, acc: 0.152344]\n",
"7826: [D loss: 0.707669, acc: 0.498047] [A loss: 0.771638, acc: 0.332031]\n",
"7827: [D loss: 0.687337, acc: 0.558594] [A loss: 0.869938, acc: 0.171875]\n",
"7828: [D loss: 0.706233, acc: 0.507812] [A loss: 0.767173, acc: 0.398438]\n",
"7829: [D loss: 0.722293, acc: 0.511719] [A loss: 0.972390, acc: 0.062500]\n",
"7830: [D loss: 0.694992, acc: 0.515625] [A loss: 0.735330, acc: 0.437500]\n",
"7831: [D loss: 0.703281, acc: 0.533203] [A loss: 0.946535, acc: 0.093750]\n",
"7832: [D loss: 0.695286, acc: 0.525391] [A loss: 0.754010, acc: 0.394531]\n",
"7833: [D loss: 0.712687, acc: 0.498047] [A loss: 0.830924, acc: 0.226562]\n",
"7834: [D loss: 0.708007, acc: 0.505859] [A loss: 0.742041, acc: 0.421875]\n",
"7835: [D loss: 0.709383, acc: 0.511719] [A loss: 0.821132, acc: 0.203125]\n",
"7836: [D loss: 0.685737, acc: 0.529297] [A loss: 0.753277, acc: 0.351562]\n",
"7837: [D loss: 0.707254, acc: 0.525391] [A loss: 0.860147, acc: 0.152344]\n",
"7838: [D loss: 0.690702, acc: 0.548828] [A loss: 0.740253, acc: 0.390625]\n",
"7839: [D loss: 0.711506, acc: 0.486328] [A loss: 0.871342, acc: 0.164062]\n",
"7840: [D loss: 0.706663, acc: 0.500000] [A loss: 0.791733, acc: 0.289062]\n",
"7841: [D loss: 0.706603, acc: 0.501953] [A loss: 0.866140, acc: 0.195312]\n",
"7842: [D loss: 0.701629, acc: 0.503906] [A loss: 0.734695, acc: 0.421875]\n",
"7843: [D loss: 0.692713, acc: 0.542969] [A loss: 0.812751, acc: 0.281250]\n",
"7844: [D loss: 0.708100, acc: 0.503906] [A loss: 0.881857, acc: 0.171875]\n",
"7845: [D loss: 0.706982, acc: 0.492188] [A loss: 0.832499, acc: 0.187500]\n",
"7846: [D loss: 0.685972, acc: 0.552734] [A loss: 0.753147, acc: 0.386719]\n",
"7847: [D loss: 0.692277, acc: 0.519531] [A loss: 0.851321, acc: 0.179688]\n",
"7848: [D loss: 0.696847, acc: 0.519531] [A loss: 0.782619, acc: 0.355469]\n",
"7849: [D loss: 0.701606, acc: 0.517578] [A loss: 0.891602, acc: 0.144531]\n",
"7850: [D loss: 0.696579, acc: 0.548828] [A loss: 0.781986, acc: 0.335938]\n",
"7851: [D loss: 0.687667, acc: 0.552734] [A loss: 0.902454, acc: 0.105469]\n",
"7852: [D loss: 0.708669, acc: 0.500000] [A loss: 0.753469, acc: 0.378906]\n",
"7853: [D loss: 0.709873, acc: 0.521484] [A loss: 0.927639, acc: 0.105469]\n",
"7854: [D loss: 0.693422, acc: 0.513672] [A loss: 0.693342, acc: 0.531250]\n",
"7855: [D loss: 0.736399, acc: 0.500000] [A loss: 1.012954, acc: 0.027344]\n",
"7856: [D loss: 0.714282, acc: 0.490234] [A loss: 0.764585, acc: 0.382812]\n",
"7857: [D loss: 0.716907, acc: 0.501953] [A loss: 0.859615, acc: 0.179688]\n",
"7858: [D loss: 0.694821, acc: 0.513672] [A loss: 0.707443, acc: 0.492188]\n",
"7859: [D loss: 0.703271, acc: 0.501953] [A loss: 0.844364, acc: 0.226562]\n",
"7860: [D loss: 0.706890, acc: 0.501953] [A loss: 0.749728, acc: 0.390625]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7861: [D loss: 0.703010, acc: 0.484375] [A loss: 0.850967, acc: 0.187500]\n",
"7862: [D loss: 0.695590, acc: 0.525391] [A loss: 0.764494, acc: 0.335938]\n",
"7863: [D loss: 0.707624, acc: 0.509766] [A loss: 0.785497, acc: 0.308594]\n",
"7864: [D loss: 0.711778, acc: 0.513672] [A loss: 0.921907, acc: 0.144531]\n",
"7865: [D loss: 0.694400, acc: 0.537109] [A loss: 0.747174, acc: 0.410156]\n",
"7866: [D loss: 0.693140, acc: 0.548828] [A loss: 0.832448, acc: 0.218750]\n",
"7867: [D loss: 0.710249, acc: 0.498047] [A loss: 0.785610, acc: 0.300781]\n",
"7868: [D loss: 0.701959, acc: 0.542969] [A loss: 0.787764, acc: 0.308594]\n",
"7869: [D loss: 0.701407, acc: 0.517578] [A loss: 0.830443, acc: 0.218750]\n",
"7870: [D loss: 0.698104, acc: 0.527344] [A loss: 0.767366, acc: 0.382812]\n",
"7871: [D loss: 0.707128, acc: 0.517578] [A loss: 0.916023, acc: 0.125000]\n",
"7872: [D loss: 0.690582, acc: 0.537109] [A loss: 0.739648, acc: 0.410156]\n",
"7873: [D loss: 0.706303, acc: 0.501953] [A loss: 0.897689, acc: 0.125000]\n",
"7874: [D loss: 0.693904, acc: 0.505859] [A loss: 0.730807, acc: 0.476562]\n",
"7875: [D loss: 0.695319, acc: 0.544922] [A loss: 0.824929, acc: 0.257812]\n",
"7876: [D loss: 0.715222, acc: 0.501953] [A loss: 0.831614, acc: 0.226562]\n",
"7877: [D loss: 0.703903, acc: 0.490234] [A loss: 0.814218, acc: 0.285156]\n",
"7878: [D loss: 0.700380, acc: 0.507812] [A loss: 0.827090, acc: 0.250000]\n",
"7879: [D loss: 0.698900, acc: 0.517578] [A loss: 0.831325, acc: 0.261719]\n",
"7880: [D loss: 0.692511, acc: 0.519531] [A loss: 0.819686, acc: 0.234375]\n",
"7881: [D loss: 0.699531, acc: 0.527344] [A loss: 0.797894, acc: 0.292969]\n",
"7882: [D loss: 0.702963, acc: 0.503906] [A loss: 0.808822, acc: 0.203125]\n",
"7883: [D loss: 0.689981, acc: 0.523438] [A loss: 0.808468, acc: 0.253906]\n",
"7884: [D loss: 0.690594, acc: 0.552734] [A loss: 0.841366, acc: 0.203125]\n",
"7885: [D loss: 0.705346, acc: 0.521484] [A loss: 0.875918, acc: 0.203125]\n",
"7886: [D loss: 0.695603, acc: 0.523438] [A loss: 0.779816, acc: 0.296875]\n",
"7887: [D loss: 0.711806, acc: 0.531250] [A loss: 0.954179, acc: 0.082031]\n",
"7888: [D loss: 0.705298, acc: 0.503906] [A loss: 0.690320, acc: 0.535156]\n",
"7889: [D loss: 0.725634, acc: 0.517578] [A loss: 0.915971, acc: 0.093750]\n",
"7890: [D loss: 0.693268, acc: 0.535156] [A loss: 0.724065, acc: 0.433594]\n",
"7891: [D loss: 0.717721, acc: 0.513672] [A loss: 0.848863, acc: 0.214844]\n",
"7892: [D loss: 0.700097, acc: 0.544922] [A loss: 0.776382, acc: 0.308594]\n",
"7893: [D loss: 0.698124, acc: 0.511719] [A loss: 0.818546, acc: 0.222656]\n",
"7894: [D loss: 0.691181, acc: 0.548828] [A loss: 0.716191, acc: 0.503906]\n",
"7895: [D loss: 0.716461, acc: 0.509766] [A loss: 0.933024, acc: 0.109375]\n",
"7896: [D loss: 0.701102, acc: 0.517578] [A loss: 0.697369, acc: 0.511719]\n",
"7897: [D loss: 0.720791, acc: 0.492188] [A loss: 0.804464, acc: 0.304688]\n",
"7898: [D loss: 0.697995, acc: 0.505859] [A loss: 0.790467, acc: 0.296875]\n",
"7899: [D loss: 0.706736, acc: 0.521484] [A loss: 0.782159, acc: 0.312500]\n",
"7900: [D loss: 0.710050, acc: 0.511719] [A loss: 0.842225, acc: 0.187500]\n",
"7901: [D loss: 0.693694, acc: 0.533203] [A loss: 0.768939, acc: 0.343750]\n",
"7902: [D loss: 0.704238, acc: 0.511719] [A loss: 0.849425, acc: 0.195312]\n",
"7903: [D loss: 0.706651, acc: 0.503906] [A loss: 0.770868, acc: 0.320312]\n",
"7904: [D loss: 0.700084, acc: 0.521484] [A loss: 0.853285, acc: 0.187500]\n",
"7905: [D loss: 0.702853, acc: 0.531250] [A loss: 0.761751, acc: 0.375000]\n",
"7906: [D loss: 0.696736, acc: 0.533203] [A loss: 0.778797, acc: 0.316406]\n",
"7907: [D loss: 0.711706, acc: 0.515625] [A loss: 0.821352, acc: 0.238281]\n",
"7908: [D loss: 0.689911, acc: 0.539062] [A loss: 0.844247, acc: 0.175781]\n",
"7909: [D loss: 0.698865, acc: 0.521484] [A loss: 0.770381, acc: 0.343750]\n",
"7910: [D loss: 0.702086, acc: 0.531250] [A loss: 0.827640, acc: 0.199219]\n",
"7911: [D loss: 0.700074, acc: 0.509766] [A loss: 0.769969, acc: 0.343750]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7912: [D loss: 0.704677, acc: 0.496094] [A loss: 0.860045, acc: 0.156250]\n",
"7913: [D loss: 0.687388, acc: 0.560547] [A loss: 0.718230, acc: 0.480469]\n",
"7914: [D loss: 0.700669, acc: 0.554688] [A loss: 0.932136, acc: 0.097656]\n",
"7915: [D loss: 0.699346, acc: 0.542969] [A loss: 0.688429, acc: 0.496094]\n",
"7916: [D loss: 0.717266, acc: 0.517578] [A loss: 0.897071, acc: 0.148438]\n",
"7917: [D loss: 0.698609, acc: 0.533203] [A loss: 0.729763, acc: 0.433594]\n",
"7918: [D loss: 0.712154, acc: 0.511719] [A loss: 0.947186, acc: 0.097656]\n",
"7919: [D loss: 0.699642, acc: 0.525391] [A loss: 0.693064, acc: 0.566406]\n",
"7920: [D loss: 0.720452, acc: 0.482422] [A loss: 0.864251, acc: 0.222656]\n",
"7921: [D loss: 0.700271, acc: 0.509766] [A loss: 0.741385, acc: 0.406250]\n",
"7922: [D loss: 0.707604, acc: 0.492188] [A loss: 0.829332, acc: 0.238281]\n",
"7923: [D loss: 0.704424, acc: 0.511719] [A loss: 0.770884, acc: 0.351562]\n",
"7924: [D loss: 0.705536, acc: 0.500000] [A loss: 0.806350, acc: 0.273438]\n",
"7925: [D loss: 0.709467, acc: 0.513672] [A loss: 0.810093, acc: 0.250000]\n",
"7926: [D loss: 0.694750, acc: 0.525391] [A loss: 0.797294, acc: 0.269531]\n",
"7927: [D loss: 0.711257, acc: 0.513672] [A loss: 0.841120, acc: 0.203125]\n",
"7928: [D loss: 0.705613, acc: 0.500000] [A loss: 0.788168, acc: 0.308594]\n",
"7929: [D loss: 0.705459, acc: 0.490234] [A loss: 0.848002, acc: 0.175781]\n",
"7930: [D loss: 0.696056, acc: 0.515625] [A loss: 0.751121, acc: 0.359375]\n",
"7931: [D loss: 0.708170, acc: 0.484375] [A loss: 0.800677, acc: 0.230469]\n",
"7932: [D loss: 0.688200, acc: 0.531250] [A loss: 0.817918, acc: 0.210938]\n",
"7933: [D loss: 0.700421, acc: 0.511719] [A loss: 0.796380, acc: 0.246094]\n",
"7934: [D loss: 0.700754, acc: 0.541016] [A loss: 0.860305, acc: 0.183594]\n",
"7935: [D loss: 0.694513, acc: 0.521484] [A loss: 0.821645, acc: 0.277344]\n",
"7936: [D loss: 0.696705, acc: 0.529297] [A loss: 0.801804, acc: 0.292969]\n",
"7937: [D loss: 0.709459, acc: 0.523438] [A loss: 0.791184, acc: 0.296875]\n",
"7938: [D loss: 0.698729, acc: 0.531250] [A loss: 0.833624, acc: 0.234375]\n",
"7939: [D loss: 0.715465, acc: 0.466797] [A loss: 0.867188, acc: 0.187500]\n",
"7940: [D loss: 0.703630, acc: 0.515625] [A loss: 0.792267, acc: 0.308594]\n",
"7941: [D loss: 0.711375, acc: 0.523438] [A loss: 0.932840, acc: 0.097656]\n",
"7942: [D loss: 0.686956, acc: 0.572266] [A loss: 0.735719, acc: 0.414062]\n",
"7943: [D loss: 0.718528, acc: 0.498047] [A loss: 0.903408, acc: 0.128906]\n",
"7944: [D loss: 0.693266, acc: 0.535156] [A loss: 0.761691, acc: 0.351562]\n",
"7945: [D loss: 0.719932, acc: 0.492188] [A loss: 0.914109, acc: 0.144531]\n",
"7946: [D loss: 0.700298, acc: 0.515625] [A loss: 0.691996, acc: 0.507812]\n",
"7947: [D loss: 0.713890, acc: 0.494141] [A loss: 0.868980, acc: 0.160156]\n",
"7948: [D loss: 0.692222, acc: 0.548828] [A loss: 0.730428, acc: 0.445312]\n",
"7949: [D loss: 0.719730, acc: 0.496094] [A loss: 0.831292, acc: 0.230469]\n",
"7950: [D loss: 0.697240, acc: 0.533203] [A loss: 0.758862, acc: 0.351562]\n",
"7951: [D loss: 0.714663, acc: 0.492188] [A loss: 0.896288, acc: 0.171875]\n",
"7952: [D loss: 0.702022, acc: 0.531250] [A loss: 0.772675, acc: 0.347656]\n",
"7953: [D loss: 0.695630, acc: 0.556641] [A loss: 0.825353, acc: 0.226562]\n",
"7954: [D loss: 0.691093, acc: 0.542969] [A loss: 0.787964, acc: 0.308594]\n",
"7955: [D loss: 0.699422, acc: 0.519531] [A loss: 0.817229, acc: 0.269531]\n",
"7956: [D loss: 0.691802, acc: 0.533203] [A loss: 0.789094, acc: 0.320312]\n",
"7957: [D loss: 0.700718, acc: 0.523438] [A loss: 0.814510, acc: 0.226562]\n",
"7958: [D loss: 0.693443, acc: 0.546875] [A loss: 0.823155, acc: 0.238281]\n",
"7959: [D loss: 0.714105, acc: 0.490234] [A loss: 0.809837, acc: 0.277344]\n",
"7960: [D loss: 0.707788, acc: 0.494141] [A loss: 0.879981, acc: 0.171875]\n",
"7961: [D loss: 0.701141, acc: 0.515625] [A loss: 0.728918, acc: 0.460938]\n",
"7962: [D loss: 0.700248, acc: 0.527344] [A loss: 0.904825, acc: 0.125000]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"7963: [D loss: 0.706458, acc: 0.507812] [A loss: 0.736130, acc: 0.441406]\n",
"7964: [D loss: 0.699536, acc: 0.513672] [A loss: 0.869514, acc: 0.148438]\n",
"7965: [D loss: 0.705722, acc: 0.505859] [A loss: 0.806831, acc: 0.257812]\n",
"7966: [D loss: 0.693616, acc: 0.533203] [A loss: 0.827568, acc: 0.218750]\n",
"7967: [D loss: 0.701046, acc: 0.533203] [A loss: 0.737395, acc: 0.402344]\n",
"7968: [D loss: 0.711024, acc: 0.494141] [A loss: 0.964784, acc: 0.054688]\n",
"7969: [D loss: 0.693938, acc: 0.535156] [A loss: 0.664044, acc: 0.617188]\n",
"7970: [D loss: 0.735313, acc: 0.496094] [A loss: 0.960697, acc: 0.078125]\n",
"7971: [D loss: 0.704699, acc: 0.501953] [A loss: 0.712789, acc: 0.464844]\n",
"7972: [D loss: 0.708027, acc: 0.503906] [A loss: 0.835715, acc: 0.222656]\n",
"7973: [D loss: 0.709530, acc: 0.484375] [A loss: 0.715768, acc: 0.484375]\n",
"7974: [D loss: 0.719754, acc: 0.513672] [A loss: 0.884299, acc: 0.152344]\n",
"7975: [D loss: 0.693061, acc: 0.533203] [A loss: 0.733810, acc: 0.390625]\n",
"7976: [D loss: 0.731177, acc: 0.480469] [A loss: 0.867545, acc: 0.164062]\n",
"7977: [D loss: 0.696796, acc: 0.521484] [A loss: 0.776352, acc: 0.335938]\n",
"7978: [D loss: 0.706955, acc: 0.525391] [A loss: 0.779906, acc: 0.316406]\n",
"7979: [D loss: 0.704361, acc: 0.517578] [A loss: 0.754432, acc: 0.359375]\n",
"7980: [D loss: 0.712077, acc: 0.490234] [A loss: 0.828801, acc: 0.214844]\n",
"7981: [D loss: 0.691491, acc: 0.519531] [A loss: 0.789281, acc: 0.324219]\n",
"7982: [D loss: 0.693075, acc: 0.537109] [A loss: 0.847104, acc: 0.207031]\n",
"7983: [D loss: 0.692490, acc: 0.515625] [A loss: 0.741521, acc: 0.410156]\n",
"7984: [D loss: 0.709066, acc: 0.503906] [A loss: 0.863168, acc: 0.195312]\n",
"7985: [D loss: 0.698040, acc: 0.527344] [A loss: 0.746395, acc: 0.390625]\n",
"7986: [D loss: 0.714180, acc: 0.500000] [A loss: 0.848338, acc: 0.207031]\n",
"7987: [D loss: 0.708652, acc: 0.511719] [A loss: 0.775926, acc: 0.328125]\n",
"7988: [D loss: 0.705599, acc: 0.525391] [A loss: 0.874889, acc: 0.167969]\n",
"7989: [D loss: 0.713085, acc: 0.492188] [A loss: 0.742990, acc: 0.402344]\n",
"7990: [D loss: 0.714349, acc: 0.511719] [A loss: 0.832892, acc: 0.210938]\n",
"7991: [D loss: 0.703544, acc: 0.509766] [A loss: 0.752873, acc: 0.375000]\n",
"7992: [D loss: 0.700125, acc: 0.525391] [A loss: 0.861471, acc: 0.179688]\n",
"7993: [D loss: 0.703398, acc: 0.509766] [A loss: 0.734265, acc: 0.421875]\n",
"7994: [D loss: 0.715900, acc: 0.490234] [A loss: 0.833103, acc: 0.183594]\n",
"7995: [D loss: 0.697281, acc: 0.539062] [A loss: 0.782594, acc: 0.273438]\n",
"7996: [D loss: 0.706037, acc: 0.519531] [A loss: 0.821517, acc: 0.226562]\n",
"7997: [D loss: 0.699549, acc: 0.509766] [A loss: 0.769158, acc: 0.324219]\n",
"7998: [D loss: 0.700380, acc: 0.517578] [A loss: 0.835844, acc: 0.167969]\n",
"7999: [D loss: 0.696719, acc: 0.523438] [A loss: 0.787322, acc: 0.277344]\n",
"8000: [D loss: 0.692686, acc: 0.503906] [A loss: 0.792371, acc: 0.304688]\n",
"8001: [D loss: 0.710381, acc: 0.537109] [A loss: 0.855055, acc: 0.195312]\n",
"8002: [D loss: 0.709267, acc: 0.488281] [A loss: 0.857030, acc: 0.164062]\n",
"8003: [D loss: 0.700271, acc: 0.513672] [A loss: 0.740598, acc: 0.417969]\n",
"8004: [D loss: 0.711560, acc: 0.490234] [A loss: 0.833359, acc: 0.230469]\n",
"8005: [D loss: 0.691201, acc: 0.539062] [A loss: 0.775564, acc: 0.277344]\n",
"8006: [D loss: 0.706100, acc: 0.511719] [A loss: 0.873967, acc: 0.148438]\n",
"8007: [D loss: 0.701597, acc: 0.513672] [A loss: 0.732830, acc: 0.445312]\n",
"8008: [D loss: 0.704767, acc: 0.509766] [A loss: 0.900933, acc: 0.132812]\n",
"8009: [D loss: 0.688559, acc: 0.542969] [A loss: 0.708678, acc: 0.433594]\n",
"8010: [D loss: 0.697971, acc: 0.533203] [A loss: 0.893333, acc: 0.167969]\n",
"8011: [D loss: 0.696713, acc: 0.525391] [A loss: 0.753124, acc: 0.390625]\n",
"8012: [D loss: 0.721324, acc: 0.519531] [A loss: 0.881398, acc: 0.164062]\n",
"8013: [D loss: 0.706943, acc: 0.474609] [A loss: 0.765582, acc: 0.382812]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8014: [D loss: 0.716035, acc: 0.488281] [A loss: 0.808329, acc: 0.289062]\n",
"8015: [D loss: 0.690817, acc: 0.503906] [A loss: 0.751174, acc: 0.347656]\n",
"8016: [D loss: 0.726831, acc: 0.458984] [A loss: 0.875246, acc: 0.187500]\n",
"8017: [D loss: 0.694612, acc: 0.529297] [A loss: 0.761690, acc: 0.347656]\n",
"8018: [D loss: 0.700113, acc: 0.525391] [A loss: 0.823764, acc: 0.234375]\n",
"8019: [D loss: 0.707201, acc: 0.492188] [A loss: 0.738176, acc: 0.425781]\n",
"8020: [D loss: 0.706503, acc: 0.503906] [A loss: 0.758680, acc: 0.367188]\n",
"8021: [D loss: 0.697188, acc: 0.539062] [A loss: 0.829961, acc: 0.230469]\n",
"8022: [D loss: 0.707456, acc: 0.492188] [A loss: 0.751341, acc: 0.382812]\n",
"8023: [D loss: 0.713036, acc: 0.505859] [A loss: 0.907634, acc: 0.140625]\n",
"8024: [D loss: 0.704176, acc: 0.492188] [A loss: 0.732511, acc: 0.441406]\n",
"8025: [D loss: 0.706302, acc: 0.517578] [A loss: 0.874995, acc: 0.144531]\n",
"8026: [D loss: 0.697093, acc: 0.513672] [A loss: 0.764741, acc: 0.371094]\n",
"8027: [D loss: 0.697046, acc: 0.525391] [A loss: 0.885087, acc: 0.156250]\n",
"8028: [D loss: 0.689294, acc: 0.550781] [A loss: 0.705777, acc: 0.500000]\n",
"8029: [D loss: 0.712656, acc: 0.515625] [A loss: 0.904800, acc: 0.125000]\n",
"8030: [D loss: 0.698793, acc: 0.529297] [A loss: 0.898818, acc: 0.218750]\n",
"8031: [D loss: 0.701059, acc: 0.527344] [A loss: 0.816709, acc: 0.265625]\n",
"8032: [D loss: 0.700705, acc: 0.523438] [A loss: 0.886460, acc: 0.117188]\n",
"8033: [D loss: 0.692410, acc: 0.550781] [A loss: 0.724795, acc: 0.445312]\n",
"8034: [D loss: 0.697690, acc: 0.525391] [A loss: 0.823657, acc: 0.238281]\n",
"8035: [D loss: 0.710674, acc: 0.513672] [A loss: 0.820381, acc: 0.250000]\n",
"8036: [D loss: 0.707138, acc: 0.513672] [A loss: 0.825631, acc: 0.230469]\n",
"8037: [D loss: 0.693270, acc: 0.542969] [A loss: 0.745497, acc: 0.390625]\n",
"8038: [D loss: 0.695856, acc: 0.554688] [A loss: 0.805888, acc: 0.273438]\n",
"8039: [D loss: 0.703301, acc: 0.529297] [A loss: 0.801398, acc: 0.273438]\n",
"8040: [D loss: 0.697232, acc: 0.515625] [A loss: 0.777209, acc: 0.316406]\n",
"8041: [D loss: 0.708546, acc: 0.507812] [A loss: 0.891667, acc: 0.222656]\n",
"8042: [D loss: 0.695300, acc: 0.531250] [A loss: 1.045876, acc: 0.082031]\n",
"8043: [D loss: 0.692778, acc: 0.535156] [A loss: 0.764143, acc: 0.316406]\n",
"8044: [D loss: 0.701178, acc: 0.535156] [A loss: 0.817903, acc: 0.218750]\n",
"8045: [D loss: 0.703149, acc: 0.498047] [A loss: 0.824564, acc: 0.207031]\n",
"8046: [D loss: 0.706861, acc: 0.513672] [A loss: 0.745862, acc: 0.382812]\n",
"8047: [D loss: 0.699605, acc: 0.531250] [A loss: 0.866728, acc: 0.167969]\n",
"8048: [D loss: 0.699313, acc: 0.546875] [A loss: 0.722242, acc: 0.437500]\n",
"8049: [D loss: 0.713346, acc: 0.505859] [A loss: 0.873307, acc: 0.183594]\n",
"8050: [D loss: 0.707554, acc: 0.501953] [A loss: 0.752988, acc: 0.375000]\n",
"8051: [D loss: 0.709646, acc: 0.494141] [A loss: 0.814491, acc: 0.265625]\n",
"8052: [D loss: 0.698863, acc: 0.523438] [A loss: 0.751842, acc: 0.367188]\n",
"8053: [D loss: 0.693970, acc: 0.519531] [A loss: 0.840378, acc: 0.234375]\n",
"8054: [D loss: 0.692030, acc: 0.521484] [A loss: 0.754507, acc: 0.406250]\n",
"8055: [D loss: 0.722267, acc: 0.509766] [A loss: 0.964587, acc: 0.113281]\n",
"8056: [D loss: 0.695850, acc: 0.556641] [A loss: 0.657927, acc: 0.617188]\n",
"8057: [D loss: 0.722666, acc: 0.494141] [A loss: 0.882536, acc: 0.148438]\n",
"8058: [D loss: 0.697058, acc: 0.519531] [A loss: 0.725779, acc: 0.417969]\n",
"8059: [D loss: 0.714634, acc: 0.490234] [A loss: 0.817631, acc: 0.242188]\n",
"8060: [D loss: 0.702592, acc: 0.496094] [A loss: 0.767842, acc: 0.332031]\n",
"8061: [D loss: 0.699859, acc: 0.505859] [A loss: 0.803495, acc: 0.273438]\n",
"8062: [D loss: 0.688290, acc: 0.544922] [A loss: 0.784722, acc: 0.296875]\n",
"8063: [D loss: 0.702451, acc: 0.509766] [A loss: 0.799736, acc: 0.265625]\n",
"8064: [D loss: 0.707606, acc: 0.519531] [A loss: 0.743174, acc: 0.386719]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8065: [D loss: 0.699245, acc: 0.531250] [A loss: 0.880951, acc: 0.164062]\n",
"8066: [D loss: 0.699268, acc: 0.531250] [A loss: 0.701445, acc: 0.507812]\n",
"8067: [D loss: 0.725471, acc: 0.498047] [A loss: 0.910738, acc: 0.148438]\n",
"8068: [D loss: 0.699752, acc: 0.501953] [A loss: 0.771348, acc: 0.347656]\n",
"8069: [D loss: 0.696016, acc: 0.519531] [A loss: 0.818073, acc: 0.230469]\n",
"8070: [D loss: 0.712738, acc: 0.492188] [A loss: 0.797958, acc: 0.289062]\n",
"8071: [D loss: 0.694402, acc: 0.511719] [A loss: 0.802481, acc: 0.246094]\n",
"8072: [D loss: 0.698051, acc: 0.541016] [A loss: 0.814559, acc: 0.234375]\n",
"8073: [D loss: 0.692461, acc: 0.529297] [A loss: 0.754268, acc: 0.371094]\n",
"8074: [D loss: 0.697397, acc: 0.519531] [A loss: 0.959167, acc: 0.101562]\n",
"8075: [D loss: 0.710418, acc: 0.488281] [A loss: 0.759705, acc: 0.343750]\n",
"8076: [D loss: 0.717672, acc: 0.527344] [A loss: 0.835894, acc: 0.218750]\n",
"8077: [D loss: 0.709605, acc: 0.490234] [A loss: 0.767133, acc: 0.375000]\n",
"8078: [D loss: 0.711659, acc: 0.517578] [A loss: 0.811413, acc: 0.273438]\n",
"8079: [D loss: 0.709372, acc: 0.505859] [A loss: 0.815224, acc: 0.234375]\n",
"8080: [D loss: 0.690613, acc: 0.527344] [A loss: 0.781939, acc: 0.269531]\n",
"8081: [D loss: 0.707039, acc: 0.521484] [A loss: 0.849621, acc: 0.179688]\n",
"8082: [D loss: 0.699999, acc: 0.496094] [A loss: 0.737037, acc: 0.394531]\n",
"8083: [D loss: 0.705280, acc: 0.498047] [A loss: 0.933682, acc: 0.101562]\n",
"8084: [D loss: 0.692553, acc: 0.505859] [A loss: 0.738203, acc: 0.394531]\n",
"8085: [D loss: 0.713760, acc: 0.492188] [A loss: 0.860733, acc: 0.140625]\n",
"8086: [D loss: 0.703989, acc: 0.505859] [A loss: 0.740554, acc: 0.375000]\n",
"8087: [D loss: 0.710346, acc: 0.503906] [A loss: 0.911072, acc: 0.109375]\n",
"8088: [D loss: 0.691218, acc: 0.544922] [A loss: 0.724006, acc: 0.453125]\n",
"8089: [D loss: 0.715810, acc: 0.503906] [A loss: 0.915399, acc: 0.105469]\n",
"8090: [D loss: 0.709100, acc: 0.474609] [A loss: 0.721795, acc: 0.476562]\n",
"8091: [D loss: 0.710489, acc: 0.515625] [A loss: 0.869831, acc: 0.144531]\n",
"8092: [D loss: 0.704602, acc: 0.488281] [A loss: 0.759576, acc: 0.359375]\n",
"8093: [D loss: 0.682359, acc: 0.572266] [A loss: 0.853144, acc: 0.203125]\n",
"8094: [D loss: 0.694770, acc: 0.525391] [A loss: 0.842350, acc: 0.187500]\n",
"8095: [D loss: 0.702630, acc: 0.513672] [A loss: 0.767278, acc: 0.343750]\n",
"8096: [D loss: 0.699038, acc: 0.517578] [A loss: 0.855738, acc: 0.183594]\n",
"8097: [D loss: 0.695719, acc: 0.515625] [A loss: 0.768147, acc: 0.316406]\n",
"8098: [D loss: 0.702449, acc: 0.535156] [A loss: 0.823439, acc: 0.222656]\n",
"8099: [D loss: 0.697172, acc: 0.535156] [A loss: 0.802169, acc: 0.273438]\n",
"8100: [D loss: 0.701868, acc: 0.500000] [A loss: 0.840744, acc: 0.156250]\n",
"8101: [D loss: 0.704079, acc: 0.476562] [A loss: 0.787371, acc: 0.269531]\n",
"8102: [D loss: 0.690909, acc: 0.542969] [A loss: 0.781017, acc: 0.312500]\n",
"8103: [D loss: 0.700613, acc: 0.517578] [A loss: 0.802431, acc: 0.269531]\n",
"8104: [D loss: 0.681673, acc: 0.564453] [A loss: 0.784673, acc: 0.304688]\n",
"8105: [D loss: 0.705627, acc: 0.513672] [A loss: 0.900197, acc: 0.132812]\n",
"8106: [D loss: 0.699609, acc: 0.513672] [A loss: 0.741129, acc: 0.371094]\n",
"8107: [D loss: 0.709599, acc: 0.492188] [A loss: 0.854425, acc: 0.214844]\n",
"8108: [D loss: 0.696871, acc: 0.539062] [A loss: 0.779426, acc: 0.285156]\n",
"8109: [D loss: 0.709151, acc: 0.503906] [A loss: 0.846444, acc: 0.179688]\n",
"8110: [D loss: 0.699316, acc: 0.521484] [A loss: 0.804370, acc: 0.261719]\n",
"8111: [D loss: 0.692315, acc: 0.523438] [A loss: 0.826577, acc: 0.214844]\n",
"8112: [D loss: 0.710233, acc: 0.494141] [A loss: 0.857645, acc: 0.179688]\n",
"8113: [D loss: 0.703683, acc: 0.535156] [A loss: 0.834803, acc: 0.199219]\n",
"8114: [D loss: 0.712064, acc: 0.474609] [A loss: 0.894635, acc: 0.109375]\n",
"8115: [D loss: 0.684758, acc: 0.558594] [A loss: 0.744218, acc: 0.386719]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8116: [D loss: 0.706399, acc: 0.511719] [A loss: 1.000049, acc: 0.093750]\n",
"8117: [D loss: 0.695664, acc: 0.527344] [A loss: 0.683609, acc: 0.531250]\n",
"8118: [D loss: 0.702129, acc: 0.515625] [A loss: 0.877661, acc: 0.140625]\n",
"8119: [D loss: 0.698301, acc: 0.519531] [A loss: 0.773365, acc: 0.316406]\n",
"8120: [D loss: 0.699387, acc: 0.537109] [A loss: 0.851722, acc: 0.152344]\n",
"8121: [D loss: 0.698295, acc: 0.509766] [A loss: 0.762857, acc: 0.320312]\n",
"8122: [D loss: 0.702679, acc: 0.511719] [A loss: 0.873514, acc: 0.152344]\n",
"8123: [D loss: 0.700085, acc: 0.488281] [A loss: 0.727380, acc: 0.425781]\n",
"8124: [D loss: 0.706886, acc: 0.500000] [A loss: 0.870742, acc: 0.144531]\n",
"8125: [D loss: 0.696987, acc: 0.498047] [A loss: 0.737243, acc: 0.421875]\n",
"8126: [D loss: 0.699849, acc: 0.513672] [A loss: 0.894622, acc: 0.136719]\n",
"8127: [D loss: 0.712024, acc: 0.494141] [A loss: 0.759095, acc: 0.382812]\n",
"8128: [D loss: 0.712295, acc: 0.517578] [A loss: 0.864905, acc: 0.179688]\n",
"8129: [D loss: 0.703440, acc: 0.505859] [A loss: 0.768976, acc: 0.351562]\n",
"8130: [D loss: 0.706472, acc: 0.482422] [A loss: 0.886435, acc: 0.117188]\n",
"8131: [D loss: 0.702879, acc: 0.498047] [A loss: 0.735725, acc: 0.378906]\n",
"8132: [D loss: 0.701799, acc: 0.523438] [A loss: 0.829088, acc: 0.210938]\n",
"8133: [D loss: 0.703862, acc: 0.500000] [A loss: 0.802262, acc: 0.285156]\n",
"8134: [D loss: 0.695001, acc: 0.542969] [A loss: 0.789273, acc: 0.308594]\n",
"8135: [D loss: 0.702529, acc: 0.484375] [A loss: 0.874661, acc: 0.140625]\n",
"8136: [D loss: 0.691680, acc: 0.548828] [A loss: 0.810387, acc: 0.242188]\n",
"8137: [D loss: 0.705011, acc: 0.503906] [A loss: 0.886593, acc: 0.156250]\n",
"8138: [D loss: 0.700385, acc: 0.513672] [A loss: 0.788224, acc: 0.253906]\n",
"8139: [D loss: 0.700194, acc: 0.529297] [A loss: 0.862258, acc: 0.195312]\n",
"8140: [D loss: 0.696219, acc: 0.529297] [A loss: 0.742976, acc: 0.390625]\n",
"8141: [D loss: 0.692286, acc: 0.552734] [A loss: 0.839644, acc: 0.187500]\n",
"8142: [D loss: 0.703219, acc: 0.496094] [A loss: 0.818723, acc: 0.242188]\n",
"8143: [D loss: 0.701941, acc: 0.523438] [A loss: 0.823542, acc: 0.207031]\n",
"8144: [D loss: 0.707197, acc: 0.496094] [A loss: 0.795121, acc: 0.281250]\n",
"8145: [D loss: 0.705833, acc: 0.515625] [A loss: 0.839324, acc: 0.191406]\n",
"8146: [D loss: 0.691873, acc: 0.556641] [A loss: 0.777308, acc: 0.316406]\n",
"8147: [D loss: 0.704550, acc: 0.519531] [A loss: 0.838948, acc: 0.226562]\n",
"8148: [D loss: 0.691158, acc: 0.539062] [A loss: 0.809443, acc: 0.257812]\n",
"8149: [D loss: 0.699916, acc: 0.527344] [A loss: 0.854255, acc: 0.179688]\n",
"8150: [D loss: 0.694660, acc: 0.558594] [A loss: 0.749673, acc: 0.402344]\n",
"8151: [D loss: 0.717574, acc: 0.484375] [A loss: 0.907068, acc: 0.101562]\n",
"8152: [D loss: 0.707839, acc: 0.492188] [A loss: 0.667294, acc: 0.578125]\n",
"8153: [D loss: 0.708090, acc: 0.531250] [A loss: 0.888381, acc: 0.128906]\n",
"8154: [D loss: 0.699474, acc: 0.501953] [A loss: 0.683495, acc: 0.558594]\n",
"8155: [D loss: 0.709790, acc: 0.517578] [A loss: 0.923772, acc: 0.144531]\n",
"8156: [D loss: 0.722775, acc: 0.474609] [A loss: 0.849854, acc: 0.234375]\n",
"8157: [D loss: 0.708389, acc: 0.517578] [A loss: 0.803861, acc: 0.257812]\n",
"8158: [D loss: 0.696706, acc: 0.521484] [A loss: 0.774489, acc: 0.324219]\n",
"8159: [D loss: 0.708393, acc: 0.498047] [A loss: 0.788126, acc: 0.285156]\n",
"8160: [D loss: 0.697453, acc: 0.537109] [A loss: 0.794973, acc: 0.281250]\n",
"8161: [D loss: 0.701061, acc: 0.498047] [A loss: 0.786992, acc: 0.277344]\n",
"8162: [D loss: 0.696960, acc: 0.535156] [A loss: 0.831194, acc: 0.226562]\n",
"8163: [D loss: 0.699178, acc: 0.513672] [A loss: 0.763223, acc: 0.351562]\n",
"8164: [D loss: 0.713885, acc: 0.488281] [A loss: 0.955921, acc: 0.082031]\n",
"8165: [D loss: 0.696877, acc: 0.523438] [A loss: 0.729904, acc: 0.445312]\n",
"8166: [D loss: 0.720155, acc: 0.507812] [A loss: 0.869968, acc: 0.179688]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8167: [D loss: 0.697130, acc: 0.513672] [A loss: 0.714118, acc: 0.492188]\n",
"8168: [D loss: 0.710811, acc: 0.525391] [A loss: 0.888877, acc: 0.132812]\n",
"8169: [D loss: 0.691552, acc: 0.529297] [A loss: 0.705574, acc: 0.496094]\n",
"8170: [D loss: 0.700917, acc: 0.544922] [A loss: 0.839837, acc: 0.179688]\n",
"8171: [D loss: 0.691016, acc: 0.535156] [A loss: 0.747654, acc: 0.386719]\n",
"8172: [D loss: 0.703892, acc: 0.515625] [A loss: 0.831033, acc: 0.207031]\n",
"8173: [D loss: 0.699486, acc: 0.525391] [A loss: 0.750774, acc: 0.359375]\n",
"8174: [D loss: 0.702029, acc: 0.542969] [A loss: 0.826337, acc: 0.214844]\n",
"8175: [D loss: 0.700853, acc: 0.511719] [A loss: 0.764178, acc: 0.332031]\n",
"8176: [D loss: 0.703542, acc: 0.494141] [A loss: 0.833022, acc: 0.214844]\n",
"8177: [D loss: 0.707315, acc: 0.484375] [A loss: 0.757224, acc: 0.402344]\n",
"8178: [D loss: 0.699106, acc: 0.511719] [A loss: 0.891529, acc: 0.152344]\n",
"8179: [D loss: 0.704363, acc: 0.478516] [A loss: 0.700956, acc: 0.492188]\n",
"8180: [D loss: 0.712534, acc: 0.501953] [A loss: 0.874518, acc: 0.101562]\n",
"8181: [D loss: 0.698982, acc: 0.527344] [A loss: 0.740584, acc: 0.386719]\n",
"8182: [D loss: 0.698848, acc: 0.541016] [A loss: 0.845978, acc: 0.195312]\n",
"8183: [D loss: 0.691047, acc: 0.525391] [A loss: 0.723235, acc: 0.437500]\n",
"8184: [D loss: 0.706021, acc: 0.519531] [A loss: 0.851572, acc: 0.171875]\n",
"8185: [D loss: 0.703857, acc: 0.507812] [A loss: 0.706183, acc: 0.476562]\n",
"8186: [D loss: 0.715796, acc: 0.513672] [A loss: 0.921405, acc: 0.078125]\n",
"8187: [D loss: 0.700449, acc: 0.527344] [A loss: 0.705750, acc: 0.500000]\n",
"8188: [D loss: 0.723658, acc: 0.503906] [A loss: 0.909108, acc: 0.097656]\n",
"8189: [D loss: 0.692255, acc: 0.527344] [A loss: 0.698954, acc: 0.500000]\n",
"8190: [D loss: 0.687660, acc: 0.546875] [A loss: 0.845521, acc: 0.179688]\n",
"8191: [D loss: 0.693104, acc: 0.546875] [A loss: 0.803859, acc: 0.234375]\n",
"8192: [D loss: 0.702560, acc: 0.525391] [A loss: 0.840824, acc: 0.214844]\n",
"8193: [D loss: 0.694726, acc: 0.525391] [A loss: 0.748766, acc: 0.371094]\n",
"8194: [D loss: 0.697288, acc: 0.515625] [A loss: 0.845090, acc: 0.210938]\n",
"8195: [D loss: 0.686490, acc: 0.556641] [A loss: 0.722776, acc: 0.429688]\n",
"8196: [D loss: 0.696335, acc: 0.546875] [A loss: 0.817548, acc: 0.234375]\n",
"8197: [D loss: 0.697673, acc: 0.511719] [A loss: 0.766161, acc: 0.312500]\n",
"8198: [D loss: 0.692897, acc: 0.542969] [A loss: 0.838079, acc: 0.218750]\n",
"8199: [D loss: 0.699813, acc: 0.531250] [A loss: 0.852352, acc: 0.230469]\n",
"8200: [D loss: 0.685621, acc: 0.550781] [A loss: 0.792439, acc: 0.324219]\n",
"8201: [D loss: 0.695628, acc: 0.517578] [A loss: 0.795551, acc: 0.265625]\n",
"8202: [D loss: 0.699114, acc: 0.509766] [A loss: 0.815673, acc: 0.218750]\n",
"8203: [D loss: 0.705981, acc: 0.505859] [A loss: 0.829220, acc: 0.199219]\n",
"8204: [D loss: 0.706288, acc: 0.490234] [A loss: 0.756738, acc: 0.367188]\n",
"8205: [D loss: 0.707454, acc: 0.509766] [A loss: 0.791529, acc: 0.300781]\n",
"8206: [D loss: 0.695302, acc: 0.515625] [A loss: 0.899697, acc: 0.171875]\n",
"8207: [D loss: 0.707758, acc: 0.498047] [A loss: 0.778549, acc: 0.320312]\n",
"8208: [D loss: 0.702487, acc: 0.503906] [A loss: 0.839738, acc: 0.226562]\n",
"8209: [D loss: 0.683043, acc: 0.562500] [A loss: 0.740673, acc: 0.394531]\n",
"8210: [D loss: 0.698398, acc: 0.515625] [A loss: 0.861311, acc: 0.160156]\n",
"8211: [D loss: 0.707210, acc: 0.468750] [A loss: 0.752337, acc: 0.394531]\n",
"8212: [D loss: 0.719596, acc: 0.492188] [A loss: 0.884739, acc: 0.152344]\n",
"8213: [D loss: 0.695247, acc: 0.546875] [A loss: 0.766310, acc: 0.371094]\n",
"8214: [D loss: 0.694099, acc: 0.501953] [A loss: 0.826810, acc: 0.238281]\n",
"8215: [D loss: 0.697307, acc: 0.535156] [A loss: 0.804197, acc: 0.230469]\n",
"8216: [D loss: 0.691785, acc: 0.546875] [A loss: 0.822003, acc: 0.234375]\n",
"8217: [D loss: 0.708807, acc: 0.474609] [A loss: 0.788582, acc: 0.261719]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8218: [D loss: 0.703201, acc: 0.507812] [A loss: 0.803699, acc: 0.230469]\n",
"8219: [D loss: 0.689765, acc: 0.560547] [A loss: 0.767837, acc: 0.363281]\n",
"8220: [D loss: 0.690375, acc: 0.541016] [A loss: 0.908674, acc: 0.136719]\n",
"8221: [D loss: 0.710681, acc: 0.488281] [A loss: 0.822794, acc: 0.203125]\n",
"8222: [D loss: 0.691480, acc: 0.539062] [A loss: 0.823352, acc: 0.250000]\n",
"8223: [D loss: 0.680686, acc: 0.585938] [A loss: 0.824844, acc: 0.226562]\n",
"8224: [D loss: 0.699426, acc: 0.521484] [A loss: 0.849939, acc: 0.242188]\n",
"8225: [D loss: 0.702498, acc: 0.523438] [A loss: 0.822325, acc: 0.242188]\n",
"8226: [D loss: 0.703258, acc: 0.509766] [A loss: 0.872948, acc: 0.171875]\n",
"8227: [D loss: 0.701810, acc: 0.494141] [A loss: 0.755596, acc: 0.355469]\n",
"8228: [D loss: 0.690779, acc: 0.576172] [A loss: 0.911518, acc: 0.097656]\n",
"8229: [D loss: 0.682840, acc: 0.556641] [A loss: 0.757811, acc: 0.367188]\n",
"8230: [D loss: 0.702526, acc: 0.527344] [A loss: 0.850378, acc: 0.191406]\n",
"8231: [D loss: 0.700403, acc: 0.517578] [A loss: 0.762531, acc: 0.371094]\n",
"8232: [D loss: 0.714952, acc: 0.478516] [A loss: 0.935795, acc: 0.136719]\n",
"8233: [D loss: 0.704002, acc: 0.519531] [A loss: 0.731174, acc: 0.425781]\n",
"8234: [D loss: 0.701940, acc: 0.511719] [A loss: 0.780759, acc: 0.316406]\n",
"8235: [D loss: 0.705860, acc: 0.507812] [A loss: 0.805672, acc: 0.265625]\n",
"8236: [D loss: 0.704842, acc: 0.527344] [A loss: 0.818267, acc: 0.242188]\n",
"8237: [D loss: 0.701840, acc: 0.515625] [A loss: 0.794407, acc: 0.289062]\n",
"8238: [D loss: 0.702894, acc: 0.523438] [A loss: 0.835741, acc: 0.203125]\n",
"8239: [D loss: 0.701850, acc: 0.511719] [A loss: 0.705397, acc: 0.488281]\n",
"8240: [D loss: 0.699285, acc: 0.541016] [A loss: 0.893759, acc: 0.152344]\n",
"8241: [D loss: 0.688540, acc: 0.533203] [A loss: 0.690041, acc: 0.488281]\n",
"8242: [D loss: 0.708457, acc: 0.531250] [A loss: 0.877350, acc: 0.175781]\n",
"8243: [D loss: 0.696984, acc: 0.541016] [A loss: 0.691077, acc: 0.546875]\n",
"8244: [D loss: 0.714373, acc: 0.503906] [A loss: 0.839001, acc: 0.218750]\n",
"8245: [D loss: 0.692911, acc: 0.517578] [A loss: 0.810177, acc: 0.242188]\n",
"8246: [D loss: 0.696993, acc: 0.546875] [A loss: 0.780416, acc: 0.304688]\n",
"8247: [D loss: 0.696266, acc: 0.509766] [A loss: 0.808019, acc: 0.261719]\n",
"8248: [D loss: 0.709435, acc: 0.498047] [A loss: 0.730764, acc: 0.453125]\n",
"8249: [D loss: 0.698644, acc: 0.501953] [A loss: 0.916207, acc: 0.089844]\n",
"8250: [D loss: 0.695795, acc: 0.527344] [A loss: 0.776811, acc: 0.304688]\n",
"8251: [D loss: 0.706675, acc: 0.525391] [A loss: 0.882444, acc: 0.148438]\n",
"8252: [D loss: 0.692625, acc: 0.535156] [A loss: 0.773742, acc: 0.277344]\n",
"8253: [D loss: 0.702085, acc: 0.521484] [A loss: 0.826119, acc: 0.230469]\n",
"8254: [D loss: 0.700224, acc: 0.517578] [A loss: 0.798224, acc: 0.253906]\n",
"8255: [D loss: 0.692599, acc: 0.531250] [A loss: 0.751209, acc: 0.355469]\n",
"8256: [D loss: 0.714048, acc: 0.517578] [A loss: 0.838194, acc: 0.238281]\n",
"8257: [D loss: 0.698114, acc: 0.517578] [A loss: 0.744629, acc: 0.343750]\n",
"8258: [D loss: 0.707230, acc: 0.517578] [A loss: 0.876016, acc: 0.183594]\n",
"8259: [D loss: 0.693616, acc: 0.509766] [A loss: 0.765003, acc: 0.343750]\n",
"8260: [D loss: 0.703166, acc: 0.544922] [A loss: 0.849807, acc: 0.191406]\n",
"8261: [D loss: 0.696287, acc: 0.529297] [A loss: 0.825971, acc: 0.214844]\n",
"8262: [D loss: 0.708599, acc: 0.507812] [A loss: 0.818854, acc: 0.242188]\n",
"8263: [D loss: 0.706110, acc: 0.498047] [A loss: 0.818023, acc: 0.203125]\n",
"8264: [D loss: 0.693848, acc: 0.533203] [A loss: 0.792847, acc: 0.265625]\n",
"8265: [D loss: 0.690079, acc: 0.537109] [A loss: 0.772270, acc: 0.304688]\n",
"8266: [D loss: 0.707535, acc: 0.505859] [A loss: 0.829380, acc: 0.226562]\n",
"8267: [D loss: 0.688730, acc: 0.546875] [A loss: 0.836594, acc: 0.214844]\n",
"8268: [D loss: 0.701390, acc: 0.505859] [A loss: 0.747829, acc: 0.367188]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8269: [D loss: 0.716597, acc: 0.507812] [A loss: 0.900162, acc: 0.113281]\n",
"8270: [D loss: 0.700944, acc: 0.513672] [A loss: 0.734383, acc: 0.398438]\n",
"8271: [D loss: 0.710569, acc: 0.496094] [A loss: 0.864108, acc: 0.179688]\n",
"8272: [D loss: 0.703697, acc: 0.500000] [A loss: 0.747684, acc: 0.398438]\n",
"8273: [D loss: 0.704501, acc: 0.521484] [A loss: 0.866541, acc: 0.156250]\n",
"8274: [D loss: 0.695578, acc: 0.519531] [A loss: 0.754944, acc: 0.355469]\n",
"8275: [D loss: 0.709952, acc: 0.498047] [A loss: 0.869333, acc: 0.152344]\n",
"8276: [D loss: 0.707863, acc: 0.494141] [A loss: 0.767072, acc: 0.328125]\n",
"8277: [D loss: 0.715091, acc: 0.507812] [A loss: 0.848320, acc: 0.183594]\n",
"8278: [D loss: 0.704357, acc: 0.515625] [A loss: 0.725673, acc: 0.453125]\n",
"8279: [D loss: 0.695819, acc: 0.533203] [A loss: 0.876693, acc: 0.144531]\n",
"8280: [D loss: 0.699422, acc: 0.498047] [A loss: 0.730761, acc: 0.445312]\n",
"8281: [D loss: 0.709310, acc: 0.476562] [A loss: 0.801879, acc: 0.285156]\n",
"8282: [D loss: 0.699900, acc: 0.513672] [A loss: 0.811245, acc: 0.253906]\n",
"8283: [D loss: 0.712071, acc: 0.468750] [A loss: 0.784755, acc: 0.300781]\n",
"8284: [D loss: 0.706496, acc: 0.501953] [A loss: 0.781435, acc: 0.292969]\n",
"8285: [D loss: 0.705070, acc: 0.519531] [A loss: 0.762651, acc: 0.363281]\n",
"8286: [D loss: 0.714198, acc: 0.474609] [A loss: 0.864803, acc: 0.136719]\n",
"8287: [D loss: 0.701125, acc: 0.509766] [A loss: 0.679619, acc: 0.562500]\n",
"8288: [D loss: 0.703524, acc: 0.535156] [A loss: 0.871854, acc: 0.144531]\n",
"8289: [D loss: 0.703302, acc: 0.515625] [A loss: 0.730403, acc: 0.468750]\n",
"8290: [D loss: 0.702622, acc: 0.509766] [A loss: 0.805810, acc: 0.265625]\n",
"8291: [D loss: 0.702875, acc: 0.492188] [A loss: 0.813977, acc: 0.238281]\n",
"8292: [D loss: 0.692497, acc: 0.542969] [A loss: 0.735232, acc: 0.371094]\n",
"8293: [D loss: 0.718753, acc: 0.488281] [A loss: 0.852164, acc: 0.132812]\n",
"8294: [D loss: 0.705690, acc: 0.511719] [A loss: 0.736391, acc: 0.386719]\n",
"8295: [D loss: 0.708592, acc: 0.507812] [A loss: 0.864348, acc: 0.128906]\n",
"8296: [D loss: 0.695425, acc: 0.541016] [A loss: 0.774206, acc: 0.320312]\n",
"8297: [D loss: 0.704689, acc: 0.500000] [A loss: 0.818303, acc: 0.214844]\n",
"8298: [D loss: 0.698516, acc: 0.484375] [A loss: 0.738609, acc: 0.421875]\n",
"8299: [D loss: 0.710606, acc: 0.548828] [A loss: 0.869120, acc: 0.156250]\n",
"8300: [D loss: 0.698159, acc: 0.507812] [A loss: 0.760553, acc: 0.367188]\n",
"8301: [D loss: 0.721846, acc: 0.468750] [A loss: 0.881902, acc: 0.113281]\n",
"8302: [D loss: 0.691382, acc: 0.539062] [A loss: 0.755684, acc: 0.363281]\n",
"8303: [D loss: 0.702724, acc: 0.511719] [A loss: 0.857144, acc: 0.175781]\n",
"8304: [D loss: 0.701656, acc: 0.519531] [A loss: 0.743639, acc: 0.406250]\n",
"8305: [D loss: 0.708637, acc: 0.517578] [A loss: 0.874706, acc: 0.148438]\n",
"8306: [D loss: 0.705567, acc: 0.476562] [A loss: 0.826500, acc: 0.218750]\n",
"8307: [D loss: 0.699866, acc: 0.529297] [A loss: 0.806711, acc: 0.257812]\n",
"8308: [D loss: 0.701506, acc: 0.513672] [A loss: 0.775955, acc: 0.320312]\n",
"8309: [D loss: 0.704831, acc: 0.509766] [A loss: 0.798880, acc: 0.238281]\n",
"8310: [D loss: 0.689837, acc: 0.533203] [A loss: 0.766812, acc: 0.359375]\n",
"8311: [D loss: 0.699243, acc: 0.507812] [A loss: 0.841694, acc: 0.214844]\n",
"8312: [D loss: 0.694930, acc: 0.509766] [A loss: 0.762940, acc: 0.296875]\n",
"8313: [D loss: 0.705115, acc: 0.525391] [A loss: 0.888582, acc: 0.128906]\n",
"8314: [D loss: 0.692295, acc: 0.570312] [A loss: 0.721722, acc: 0.421875]\n",
"8315: [D loss: 0.704484, acc: 0.517578] [A loss: 0.849732, acc: 0.183594]\n",
"8316: [D loss: 0.694307, acc: 0.560547] [A loss: 0.752101, acc: 0.367188]\n",
"8317: [D loss: 0.719640, acc: 0.500000] [A loss: 0.883325, acc: 0.125000]\n",
"8318: [D loss: 0.701910, acc: 0.507812] [A loss: 0.719028, acc: 0.425781]\n",
"8319: [D loss: 0.709987, acc: 0.513672] [A loss: 0.883115, acc: 0.132812]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8320: [D loss: 0.688800, acc: 0.519531] [A loss: 0.699202, acc: 0.527344]\n",
"8321: [D loss: 0.717209, acc: 0.500000] [A loss: 0.836498, acc: 0.171875]\n",
"8322: [D loss: 0.696609, acc: 0.542969] [A loss: 0.744414, acc: 0.402344]\n",
"8323: [D loss: 0.718057, acc: 0.480469] [A loss: 0.851026, acc: 0.183594]\n",
"8324: [D loss: 0.707499, acc: 0.500000] [A loss: 0.784297, acc: 0.304688]\n",
"8325: [D loss: 0.704889, acc: 0.535156] [A loss: 0.908003, acc: 0.046875]\n",
"8326: [D loss: 0.698425, acc: 0.513672] [A loss: 0.743490, acc: 0.382812]\n",
"8327: [D loss: 0.699398, acc: 0.511719] [A loss: 0.787900, acc: 0.300781]\n",
"8328: [D loss: 0.708724, acc: 0.500000] [A loss: 0.843815, acc: 0.179688]\n",
"8329: [D loss: 0.703157, acc: 0.507812] [A loss: 0.802318, acc: 0.238281]\n",
"8330: [D loss: 0.705454, acc: 0.542969] [A loss: 0.872819, acc: 0.148438]\n",
"8331: [D loss: 0.700185, acc: 0.539062] [A loss: 0.788159, acc: 0.300781]\n",
"8332: [D loss: 0.709987, acc: 0.509766] [A loss: 0.872721, acc: 0.152344]\n",
"8333: [D loss: 0.709494, acc: 0.480469] [A loss: 0.721497, acc: 0.460938]\n",
"8334: [D loss: 0.711380, acc: 0.484375] [A loss: 0.872627, acc: 0.132812]\n",
"8335: [D loss: 0.687683, acc: 0.546875] [A loss: 0.715020, acc: 0.480469]\n",
"8336: [D loss: 0.704945, acc: 0.519531] [A loss: 0.817448, acc: 0.238281]\n",
"8337: [D loss: 0.691539, acc: 0.550781] [A loss: 0.782906, acc: 0.289062]\n",
"8338: [D loss: 0.703149, acc: 0.498047] [A loss: 0.816735, acc: 0.238281]\n",
"8339: [D loss: 0.701130, acc: 0.492188] [A loss: 0.746140, acc: 0.382812]\n",
"8340: [D loss: 0.706459, acc: 0.531250] [A loss: 0.899844, acc: 0.136719]\n",
"8341: [D loss: 0.692903, acc: 0.542969] [A loss: 0.742226, acc: 0.390625]\n",
"8342: [D loss: 0.708953, acc: 0.486328] [A loss: 0.820046, acc: 0.207031]\n",
"8343: [D loss: 0.701560, acc: 0.517578] [A loss: 0.815140, acc: 0.210938]\n",
"8344: [D loss: 0.692894, acc: 0.556641] [A loss: 0.797835, acc: 0.261719]\n",
"8345: [D loss: 0.700693, acc: 0.533203] [A loss: 0.864489, acc: 0.171875]\n",
"8346: [D loss: 0.698449, acc: 0.533203] [A loss: 0.871687, acc: 0.203125]\n",
"8347: [D loss: 0.697924, acc: 0.511719] [A loss: 0.767582, acc: 0.332031]\n",
"8348: [D loss: 0.689203, acc: 0.537109] [A loss: 0.778297, acc: 0.296875]\n",
"8349: [D loss: 0.696127, acc: 0.542969] [A loss: 0.850721, acc: 0.171875]\n",
"8350: [D loss: 0.703229, acc: 0.523438] [A loss: 0.730288, acc: 0.437500]\n",
"8351: [D loss: 0.710541, acc: 0.501953] [A loss: 0.891479, acc: 0.105469]\n",
"8352: [D loss: 0.701647, acc: 0.492188] [A loss: 0.714360, acc: 0.476562]\n",
"8353: [D loss: 0.706147, acc: 0.535156] [A loss: 0.829658, acc: 0.203125]\n",
"8354: [D loss: 0.688153, acc: 0.566406] [A loss: 0.722663, acc: 0.433594]\n",
"8355: [D loss: 0.701431, acc: 0.548828] [A loss: 0.916000, acc: 0.089844]\n",
"8356: [D loss: 0.695356, acc: 0.523438] [A loss: 0.680770, acc: 0.554688]\n",
"8357: [D loss: 0.711340, acc: 0.513672] [A loss: 0.918069, acc: 0.105469]\n",
"8358: [D loss: 0.692317, acc: 0.566406] [A loss: 0.718530, acc: 0.441406]\n",
"8359: [D loss: 0.708787, acc: 0.511719] [A loss: 0.834570, acc: 0.207031]\n",
"8360: [D loss: 0.695519, acc: 0.507812] [A loss: 0.763091, acc: 0.378906]\n",
"8361: [D loss: 0.709850, acc: 0.523438] [A loss: 0.919784, acc: 0.152344]\n",
"8362: [D loss: 0.699117, acc: 0.513672] [A loss: 0.778930, acc: 0.308594]\n",
"8363: [D loss: 0.700489, acc: 0.488281] [A loss: 0.826146, acc: 0.199219]\n",
"8364: [D loss: 0.691288, acc: 0.523438] [A loss: 0.709651, acc: 0.464844]\n",
"8365: [D loss: 0.698308, acc: 0.535156] [A loss: 0.831425, acc: 0.214844]\n",
"8366: [D loss: 0.681966, acc: 0.558594] [A loss: 0.720041, acc: 0.425781]\n",
"8367: [D loss: 0.709562, acc: 0.496094] [A loss: 0.838390, acc: 0.203125]\n",
"8368: [D loss: 0.696412, acc: 0.537109] [A loss: 0.767850, acc: 0.363281]\n",
"8369: [D loss: 0.699388, acc: 0.517578] [A loss: 0.812744, acc: 0.234375]\n",
"8370: [D loss: 0.709155, acc: 0.478516] [A loss: 0.803075, acc: 0.277344]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8371: [D loss: 0.696921, acc: 0.507812] [A loss: 0.843389, acc: 0.191406]\n",
"8372: [D loss: 0.698068, acc: 0.509766] [A loss: 0.761455, acc: 0.324219]\n",
"8373: [D loss: 0.718617, acc: 0.501953] [A loss: 0.917657, acc: 0.117188]\n",
"8374: [D loss: 0.693742, acc: 0.531250] [A loss: 0.676210, acc: 0.554688]\n",
"8375: [D loss: 0.705096, acc: 0.498047] [A loss: 0.911905, acc: 0.074219]\n",
"8376: [D loss: 0.691738, acc: 0.537109] [A loss: 0.679261, acc: 0.558594]\n",
"8377: [D loss: 0.719803, acc: 0.513672] [A loss: 0.879350, acc: 0.128906]\n",
"8378: [D loss: 0.709947, acc: 0.480469] [A loss: 0.767275, acc: 0.347656]\n",
"8379: [D loss: 0.706414, acc: 0.513672] [A loss: 0.775360, acc: 0.320312]\n",
"8380: [D loss: 0.695483, acc: 0.539062] [A loss: 0.785462, acc: 0.296875]\n",
"8381: [D loss: 0.702529, acc: 0.490234] [A loss: 0.807384, acc: 0.257812]\n",
"8382: [D loss: 0.696938, acc: 0.523438] [A loss: 0.735895, acc: 0.417969]\n",
"8383: [D loss: 0.699074, acc: 0.525391] [A loss: 0.816025, acc: 0.257812]\n",
"8384: [D loss: 0.699344, acc: 0.542969] [A loss: 0.759819, acc: 0.347656]\n",
"8385: [D loss: 0.709350, acc: 0.490234] [A loss: 0.787077, acc: 0.292969]\n",
"8386: [D loss: 0.697962, acc: 0.509766] [A loss: 0.809187, acc: 0.250000]\n",
"8387: [D loss: 0.707400, acc: 0.509766] [A loss: 0.747304, acc: 0.386719]\n",
"8388: [D loss: 0.689514, acc: 0.544922] [A loss: 0.796951, acc: 0.246094]\n",
"8389: [D loss: 0.700432, acc: 0.527344] [A loss: 0.806437, acc: 0.246094]\n",
"8390: [D loss: 0.697572, acc: 0.515625] [A loss: 0.770425, acc: 0.308594]\n",
"8391: [D loss: 0.696926, acc: 0.552734] [A loss: 0.825765, acc: 0.214844]\n",
"8392: [D loss: 0.714746, acc: 0.470703] [A loss: 0.776182, acc: 0.308594]\n",
"8393: [D loss: 0.702599, acc: 0.521484] [A loss: 0.796022, acc: 0.273438]\n",
"8394: [D loss: 0.707808, acc: 0.503906] [A loss: 0.758937, acc: 0.363281]\n",
"8395: [D loss: 0.703048, acc: 0.503906] [A loss: 0.890761, acc: 0.121094]\n",
"8396: [D loss: 0.697302, acc: 0.521484] [A loss: 0.764879, acc: 0.339844]\n",
"8397: [D loss: 0.716162, acc: 0.486328] [A loss: 0.888543, acc: 0.148438]\n",
"8398: [D loss: 0.690897, acc: 0.537109] [A loss: 0.708608, acc: 0.515625]\n",
"8399: [D loss: 0.708998, acc: 0.513672] [A loss: 0.919120, acc: 0.085938]\n",
"8400: [D loss: 0.686792, acc: 0.544922] [A loss: 0.674963, acc: 0.550781]\n",
"8401: [D loss: 0.717305, acc: 0.507812] [A loss: 0.930780, acc: 0.085938]\n",
"8402: [D loss: 0.698112, acc: 0.517578] [A loss: 0.751113, acc: 0.335938]\n",
"8403: [D loss: 0.697603, acc: 0.523438] [A loss: 0.813836, acc: 0.230469]\n",
"8404: [D loss: 0.698257, acc: 0.529297] [A loss: 0.798775, acc: 0.332031]\n",
"8405: [D loss: 0.695033, acc: 0.539062] [A loss: 0.779857, acc: 0.316406]\n",
"8406: [D loss: 0.703405, acc: 0.498047] [A loss: 0.857021, acc: 0.144531]\n",
"8407: [D loss: 0.697659, acc: 0.527344] [A loss: 0.750922, acc: 0.398438]\n",
"8408: [D loss: 0.701709, acc: 0.541016] [A loss: 0.841828, acc: 0.210938]\n",
"8409: [D loss: 0.701394, acc: 0.505859] [A loss: 0.771873, acc: 0.292969]\n",
"8410: [D loss: 0.699893, acc: 0.529297] [A loss: 0.813724, acc: 0.261719]\n",
"8411: [D loss: 0.697514, acc: 0.488281] [A loss: 0.802496, acc: 0.246094]\n",
"8412: [D loss: 0.695263, acc: 0.535156] [A loss: 0.812491, acc: 0.226562]\n",
"8413: [D loss: 0.698912, acc: 0.513672] [A loss: 0.765125, acc: 0.343750]\n",
"8414: [D loss: 0.694632, acc: 0.525391] [A loss: 0.840016, acc: 0.195312]\n",
"8415: [D loss: 0.683901, acc: 0.548828] [A loss: 0.746877, acc: 0.441406]\n",
"8416: [D loss: 0.702699, acc: 0.515625] [A loss: 0.865791, acc: 0.148438]\n",
"8417: [D loss: 0.700332, acc: 0.496094] [A loss: 0.732924, acc: 0.433594]\n",
"8418: [D loss: 0.702184, acc: 0.511719] [A loss: 0.833148, acc: 0.230469]\n",
"8419: [D loss: 0.700292, acc: 0.492188] [A loss: 0.735736, acc: 0.394531]\n",
"8420: [D loss: 0.724049, acc: 0.503906] [A loss: 0.896607, acc: 0.125000]\n",
"8421: [D loss: 0.696100, acc: 0.525391] [A loss: 0.706183, acc: 0.468750]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8422: [D loss: 0.710297, acc: 0.537109] [A loss: 0.873982, acc: 0.156250]\n",
"8423: [D loss: 0.690866, acc: 0.533203] [A loss: 0.742729, acc: 0.394531]\n",
"8424: [D loss: 0.720127, acc: 0.500000] [A loss: 0.869354, acc: 0.160156]\n",
"8425: [D loss: 0.691153, acc: 0.519531] [A loss: 0.772914, acc: 0.308594]\n",
"8426: [D loss: 0.711610, acc: 0.482422] [A loss: 0.806022, acc: 0.234375]\n",
"8427: [D loss: 0.699227, acc: 0.533203] [A loss: 0.784315, acc: 0.304688]\n",
"8428: [D loss: 0.708791, acc: 0.498047] [A loss: 0.799650, acc: 0.332031]\n",
"8429: [D loss: 0.704066, acc: 0.527344] [A loss: 0.875790, acc: 0.160156]\n",
"8430: [D loss: 0.695790, acc: 0.521484] [A loss: 0.735206, acc: 0.410156]\n",
"8431: [D loss: 0.702836, acc: 0.537109] [A loss: 0.877193, acc: 0.144531]\n",
"8432: [D loss: 0.691500, acc: 0.533203] [A loss: 0.726617, acc: 0.417969]\n",
"8433: [D loss: 0.707596, acc: 0.529297] [A loss: 0.937290, acc: 0.082031]\n",
"8434: [D loss: 0.696866, acc: 0.519531] [A loss: 0.701718, acc: 0.511719]\n",
"8435: [D loss: 0.713774, acc: 0.507812] [A loss: 0.859028, acc: 0.191406]\n",
"8436: [D loss: 0.711528, acc: 0.525391] [A loss: 0.764115, acc: 0.316406]\n",
"8437: [D loss: 0.703686, acc: 0.517578] [A loss: 0.823197, acc: 0.218750]\n",
"8438: [D loss: 0.690583, acc: 0.576172] [A loss: 0.813665, acc: 0.300781]\n",
"8439: [D loss: 0.699257, acc: 0.509766] [A loss: 0.814299, acc: 0.222656]\n",
"8440: [D loss: 0.700454, acc: 0.503906] [A loss: 0.747469, acc: 0.437500]\n",
"8441: [D loss: 0.691358, acc: 0.539062] [A loss: 0.855268, acc: 0.167969]\n",
"8442: [D loss: 0.699132, acc: 0.529297] [A loss: 0.737182, acc: 0.367188]\n",
"8443: [D loss: 0.716176, acc: 0.500000] [A loss: 0.840840, acc: 0.175781]\n",
"8444: [D loss: 0.695865, acc: 0.542969] [A loss: 0.813421, acc: 0.234375]\n",
"8445: [D loss: 0.715207, acc: 0.488281] [A loss: 0.842194, acc: 0.179688]\n",
"8446: [D loss: 0.697527, acc: 0.513672] [A loss: 0.727079, acc: 0.464844]\n",
"8447: [D loss: 0.712737, acc: 0.496094] [A loss: 0.866033, acc: 0.132812]\n",
"8448: [D loss: 0.696057, acc: 0.525391] [A loss: 0.736955, acc: 0.410156]\n",
"8449: [D loss: 0.695786, acc: 0.505859] [A loss: 0.883630, acc: 0.113281]\n",
"8450: [D loss: 0.697983, acc: 0.539062] [A loss: 0.712263, acc: 0.500000]\n",
"8451: [D loss: 0.700330, acc: 0.537109] [A loss: 0.885869, acc: 0.128906]\n",
"8452: [D loss: 0.698919, acc: 0.529297] [A loss: 0.813710, acc: 0.257812]\n",
"8453: [D loss: 0.711158, acc: 0.519531] [A loss: 0.846845, acc: 0.171875]\n",
"8454: [D loss: 0.697632, acc: 0.509766] [A loss: 0.757663, acc: 0.359375]\n",
"8455: [D loss: 0.694948, acc: 0.541016] [A loss: 0.823747, acc: 0.195312]\n",
"8456: [D loss: 0.695499, acc: 0.523438] [A loss: 0.731647, acc: 0.406250]\n",
"8457: [D loss: 0.700177, acc: 0.531250] [A loss: 0.855014, acc: 0.187500]\n",
"8458: [D loss: 0.686916, acc: 0.529297] [A loss: 0.733381, acc: 0.382812]\n",
"8459: [D loss: 0.718101, acc: 0.494141] [A loss: 0.882066, acc: 0.140625]\n",
"8460: [D loss: 0.692410, acc: 0.542969] [A loss: 0.747295, acc: 0.386719]\n",
"8461: [D loss: 0.711258, acc: 0.509766] [A loss: 0.797358, acc: 0.300781]\n",
"8462: [D loss: 0.699769, acc: 0.509766] [A loss: 0.827711, acc: 0.257812]\n",
"8463: [D loss: 0.709532, acc: 0.480469] [A loss: 0.768849, acc: 0.320312]\n",
"8464: [D loss: 0.694155, acc: 0.542969] [A loss: 0.785981, acc: 0.261719]\n",
"8465: [D loss: 0.708129, acc: 0.525391] [A loss: 0.788169, acc: 0.242188]\n",
"8466: [D loss: 0.702622, acc: 0.537109] [A loss: 0.801520, acc: 0.289062]\n",
"8467: [D loss: 0.716004, acc: 0.517578] [A loss: 0.905608, acc: 0.152344]\n",
"8468: [D loss: 0.691782, acc: 0.542969] [A loss: 0.766788, acc: 0.355469]\n",
"8469: [D loss: 0.706159, acc: 0.515625] [A loss: 0.859609, acc: 0.175781]\n",
"8470: [D loss: 0.691497, acc: 0.546875] [A loss: 0.756731, acc: 0.332031]\n",
"8471: [D loss: 0.713117, acc: 0.501953] [A loss: 0.888814, acc: 0.128906]\n",
"8472: [D loss: 0.706851, acc: 0.488281] [A loss: 0.742615, acc: 0.324219]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8473: [D loss: 0.706918, acc: 0.505859] [A loss: 0.841348, acc: 0.210938]\n",
"8474: [D loss: 0.705295, acc: 0.511719] [A loss: 0.705831, acc: 0.503906]\n",
"8475: [D loss: 0.706821, acc: 0.500000] [A loss: 0.853673, acc: 0.203125]\n",
"8476: [D loss: 0.698611, acc: 0.531250] [A loss: 0.873317, acc: 0.148438]\n",
"8477: [D loss: 0.694101, acc: 0.523438] [A loss: 0.786252, acc: 0.292969]\n",
"8478: [D loss: 0.698810, acc: 0.531250] [A loss: 0.815317, acc: 0.250000]\n",
"8479: [D loss: 0.705811, acc: 0.503906] [A loss: 0.922907, acc: 0.089844]\n",
"8480: [D loss: 0.707403, acc: 0.482422] [A loss: 0.692298, acc: 0.484375]\n",
"8481: [D loss: 0.707801, acc: 0.509766] [A loss: 0.871968, acc: 0.144531]\n",
"8482: [D loss: 0.704477, acc: 0.503906] [A loss: 0.735490, acc: 0.421875]\n",
"8483: [D loss: 0.711113, acc: 0.511719] [A loss: 0.845433, acc: 0.187500]\n",
"8484: [D loss: 0.700434, acc: 0.515625] [A loss: 0.749233, acc: 0.398438]\n",
"8485: [D loss: 0.705660, acc: 0.509766] [A loss: 0.781941, acc: 0.312500]\n",
"8486: [D loss: 0.702471, acc: 0.500000] [A loss: 0.791305, acc: 0.265625]\n",
"8487: [D loss: 0.695102, acc: 0.513672] [A loss: 0.853493, acc: 0.187500]\n",
"8488: [D loss: 0.701055, acc: 0.533203] [A loss: 0.795899, acc: 0.281250]\n",
"8489: [D loss: 0.698591, acc: 0.544922] [A loss: 0.822550, acc: 0.238281]\n",
"8490: [D loss: 0.708334, acc: 0.511719] [A loss: 0.925667, acc: 0.105469]\n",
"8491: [D loss: 0.702239, acc: 0.501953] [A loss: 0.795367, acc: 0.273438]\n",
"8492: [D loss: 0.718138, acc: 0.484375] [A loss: 0.827964, acc: 0.195312]\n",
"8493: [D loss: 0.679360, acc: 0.566406] [A loss: 0.778533, acc: 0.324219]\n",
"8494: [D loss: 0.700679, acc: 0.541016] [A loss: 0.877954, acc: 0.109375]\n",
"8495: [D loss: 0.700847, acc: 0.503906] [A loss: 0.738187, acc: 0.414062]\n",
"8496: [D loss: 0.711631, acc: 0.517578] [A loss: 0.907510, acc: 0.097656]\n",
"8497: [D loss: 0.699759, acc: 0.484375] [A loss: 0.716475, acc: 0.421875]\n",
"8498: [D loss: 0.716964, acc: 0.492188] [A loss: 0.906100, acc: 0.105469]\n",
"8499: [D loss: 0.698331, acc: 0.537109] [A loss: 0.705556, acc: 0.480469]\n",
"8500: [D loss: 0.715385, acc: 0.503906] [A loss: 0.995684, acc: 0.050781]\n",
"8501: [D loss: 0.701543, acc: 0.513672] [A loss: 0.695676, acc: 0.488281]\n",
"8502: [D loss: 0.714711, acc: 0.482422] [A loss: 0.852889, acc: 0.179688]\n",
"8503: [D loss: 0.696091, acc: 0.519531] [A loss: 0.707567, acc: 0.496094]\n",
"8504: [D loss: 0.709652, acc: 0.505859] [A loss: 0.825797, acc: 0.199219]\n",
"8505: [D loss: 0.706241, acc: 0.503906] [A loss: 0.751686, acc: 0.355469]\n",
"8506: [D loss: 0.699459, acc: 0.544922] [A loss: 0.800958, acc: 0.277344]\n",
"8507: [D loss: 0.692231, acc: 0.537109] [A loss: 0.761161, acc: 0.332031]\n",
"8508: [D loss: 0.696341, acc: 0.554688] [A loss: 0.796475, acc: 0.289062]\n",
"8509: [D loss: 0.686366, acc: 0.576172] [A loss: 0.764196, acc: 0.351562]\n",
"8510: [D loss: 0.697411, acc: 0.533203] [A loss: 0.773730, acc: 0.320312]\n",
"8511: [D loss: 0.708331, acc: 0.521484] [A loss: 0.827080, acc: 0.218750]\n",
"8512: [D loss: 0.691650, acc: 0.556641] [A loss: 0.799776, acc: 0.253906]\n",
"8513: [D loss: 0.698543, acc: 0.494141] [A loss: 0.806237, acc: 0.230469]\n",
"8514: [D loss: 0.693106, acc: 0.515625] [A loss: 0.782446, acc: 0.269531]\n",
"8515: [D loss: 0.704534, acc: 0.498047] [A loss: 0.782897, acc: 0.316406]\n",
"8516: [D loss: 0.696214, acc: 0.525391] [A loss: 0.874273, acc: 0.160156]\n",
"8517: [D loss: 0.682875, acc: 0.570312] [A loss: 0.744932, acc: 0.414062]\n",
"8518: [D loss: 0.708511, acc: 0.498047] [A loss: 0.876868, acc: 0.125000]\n",
"8519: [D loss: 0.686921, acc: 0.558594] [A loss: 0.714299, acc: 0.480469]\n",
"8520: [D loss: 0.722679, acc: 0.484375] [A loss: 0.830568, acc: 0.203125]\n",
"8521: [D loss: 0.698079, acc: 0.527344] [A loss: 0.695428, acc: 0.519531]\n",
"8522: [D loss: 0.707290, acc: 0.476562] [A loss: 0.838265, acc: 0.199219]\n",
"8523: [D loss: 0.696642, acc: 0.531250] [A loss: 0.818381, acc: 0.250000]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8524: [D loss: 0.705500, acc: 0.527344] [A loss: 0.782120, acc: 0.324219]\n",
"8525: [D loss: 0.706947, acc: 0.496094] [A loss: 0.803800, acc: 0.253906]\n",
"8526: [D loss: 0.689990, acc: 0.542969] [A loss: 0.767293, acc: 0.332031]\n",
"8527: [D loss: 0.711291, acc: 0.474609] [A loss: 0.838582, acc: 0.199219]\n",
"8528: [D loss: 0.699807, acc: 0.531250] [A loss: 0.722922, acc: 0.437500]\n",
"8529: [D loss: 0.718685, acc: 0.505859] [A loss: 0.864337, acc: 0.164062]\n",
"8530: [D loss: 0.689727, acc: 0.544922] [A loss: 0.683170, acc: 0.527344]\n",
"8531: [D loss: 0.712138, acc: 0.494141] [A loss: 0.880665, acc: 0.164062]\n",
"8532: [D loss: 0.704334, acc: 0.503906] [A loss: 0.703871, acc: 0.496094]\n",
"8533: [D loss: 0.708621, acc: 0.537109] [A loss: 0.853042, acc: 0.183594]\n",
"8534: [D loss: 0.684841, acc: 0.552734] [A loss: 0.718445, acc: 0.484375]\n",
"8535: [D loss: 0.711846, acc: 0.494141] [A loss: 0.844737, acc: 0.199219]\n",
"8536: [D loss: 0.712045, acc: 0.503906] [A loss: 0.748890, acc: 0.375000]\n",
"8537: [D loss: 0.697967, acc: 0.533203] [A loss: 0.818499, acc: 0.218750]\n",
"8538: [D loss: 0.691138, acc: 0.542969] [A loss: 0.772596, acc: 0.335938]\n",
"8539: [D loss: 0.707511, acc: 0.509766] [A loss: 0.833185, acc: 0.183594]\n",
"8540: [D loss: 0.692295, acc: 0.531250] [A loss: 0.754927, acc: 0.371094]\n",
"8541: [D loss: 0.693896, acc: 0.531250] [A loss: 0.867319, acc: 0.136719]\n",
"8542: [D loss: 0.706153, acc: 0.482422] [A loss: 0.764361, acc: 0.343750]\n",
"8543: [D loss: 0.714160, acc: 0.486328] [A loss: 0.850536, acc: 0.171875]\n",
"8544: [D loss: 0.688025, acc: 0.523438] [A loss: 0.727429, acc: 0.441406]\n",
"8545: [D loss: 0.695180, acc: 0.548828] [A loss: 0.868044, acc: 0.179688]\n",
"8546: [D loss: 0.688948, acc: 0.544922] [A loss: 0.709841, acc: 0.488281]\n",
"8547: [D loss: 0.706908, acc: 0.490234] [A loss: 0.835195, acc: 0.191406]\n",
"8548: [D loss: 0.713468, acc: 0.498047] [A loss: 0.730401, acc: 0.453125]\n",
"8549: [D loss: 0.695407, acc: 0.546875] [A loss: 0.841937, acc: 0.183594]\n",
"8550: [D loss: 0.692781, acc: 0.523438] [A loss: 0.738366, acc: 0.414062]\n",
"8551: [D loss: 0.702871, acc: 0.498047] [A loss: 0.874675, acc: 0.148438]\n",
"8552: [D loss: 0.687757, acc: 0.533203] [A loss: 0.699293, acc: 0.523438]\n",
"8553: [D loss: 0.729637, acc: 0.496094] [A loss: 0.949868, acc: 0.039062]\n",
"8554: [D loss: 0.691778, acc: 0.541016] [A loss: 0.691406, acc: 0.535156]\n",
"8555: [D loss: 0.706916, acc: 0.521484] [A loss: 0.792724, acc: 0.238281]\n",
"8556: [D loss: 0.695782, acc: 0.501953] [A loss: 0.704817, acc: 0.460938]\n",
"8557: [D loss: 0.700278, acc: 0.533203] [A loss: 0.825479, acc: 0.222656]\n",
"8558: [D loss: 0.709712, acc: 0.501953] [A loss: 0.783196, acc: 0.292969]\n",
"8559: [D loss: 0.714584, acc: 0.492188] [A loss: 0.789895, acc: 0.285156]\n",
"8560: [D loss: 0.701375, acc: 0.505859] [A loss: 0.806159, acc: 0.238281]\n",
"8561: [D loss: 0.702514, acc: 0.527344] [A loss: 0.747337, acc: 0.343750]\n",
"8562: [D loss: 0.690374, acc: 0.566406] [A loss: 0.801919, acc: 0.281250]\n",
"8563: [D loss: 0.697034, acc: 0.548828] [A loss: 0.826751, acc: 0.175781]\n",
"8564: [D loss: 0.692565, acc: 0.548828] [A loss: 0.736366, acc: 0.351562]\n",
"8565: [D loss: 0.695223, acc: 0.525391] [A loss: 0.805266, acc: 0.230469]\n",
"8566: [D loss: 0.703363, acc: 0.494141] [A loss: 0.734400, acc: 0.406250]\n",
"8567: [D loss: 0.696183, acc: 0.544922] [A loss: 0.882097, acc: 0.125000]\n",
"8568: [D loss: 0.700580, acc: 0.464844] [A loss: 0.760762, acc: 0.339844]\n",
"8569: [D loss: 0.698106, acc: 0.519531] [A loss: 0.874650, acc: 0.183594]\n",
"8570: [D loss: 0.700898, acc: 0.496094] [A loss: 0.832330, acc: 0.183594]\n",
"8571: [D loss: 0.712452, acc: 0.496094] [A loss: 0.778729, acc: 0.332031]\n",
"8572: [D loss: 0.708497, acc: 0.482422] [A loss: 0.807474, acc: 0.226562]\n",
"8573: [D loss: 0.714853, acc: 0.494141] [A loss: 0.833779, acc: 0.195312]\n",
"8574: [D loss: 0.705425, acc: 0.503906] [A loss: 0.727710, acc: 0.402344]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8575: [D loss: 0.708130, acc: 0.523438] [A loss: 0.851519, acc: 0.179688]\n",
"8576: [D loss: 0.697954, acc: 0.517578] [A loss: 0.749439, acc: 0.367188]\n",
"8577: [D loss: 0.690812, acc: 0.535156] [A loss: 0.788361, acc: 0.269531]\n",
"8578: [D loss: 0.679442, acc: 0.556641] [A loss: 0.742936, acc: 0.429688]\n",
"8579: [D loss: 0.697465, acc: 0.544922] [A loss: 0.892811, acc: 0.148438]\n",
"8580: [D loss: 0.695705, acc: 0.533203] [A loss: 0.712170, acc: 0.476562]\n",
"8581: [D loss: 0.715895, acc: 0.519531] [A loss: 0.936956, acc: 0.085938]\n",
"8582: [D loss: 0.688206, acc: 0.535156] [A loss: 0.710610, acc: 0.449219]\n",
"8583: [D loss: 0.713443, acc: 0.531250] [A loss: 0.880060, acc: 0.164062]\n",
"8584: [D loss: 0.708595, acc: 0.509766] [A loss: 0.764799, acc: 0.339844]\n",
"8585: [D loss: 0.706050, acc: 0.490234] [A loss: 0.860742, acc: 0.195312]\n",
"8586: [D loss: 0.692476, acc: 0.539062] [A loss: 0.772294, acc: 0.332031]\n",
"8587: [D loss: 0.693861, acc: 0.541016] [A loss: 0.828050, acc: 0.238281]\n",
"8588: [D loss: 0.683402, acc: 0.572266] [A loss: 0.743693, acc: 0.414062]\n",
"8589: [D loss: 0.712109, acc: 0.507812] [A loss: 0.848788, acc: 0.238281]\n",
"8590: [D loss: 0.705303, acc: 0.511719] [A loss: 0.869688, acc: 0.175781]\n",
"8591: [D loss: 0.703999, acc: 0.513672] [A loss: 0.740134, acc: 0.425781]\n",
"8592: [D loss: 0.716566, acc: 0.517578] [A loss: 0.888701, acc: 0.144531]\n",
"8593: [D loss: 0.696287, acc: 0.492188] [A loss: 0.757764, acc: 0.402344]\n",
"8594: [D loss: 0.712282, acc: 0.533203] [A loss: 0.884696, acc: 0.101562]\n",
"8595: [D loss: 0.700517, acc: 0.505859] [A loss: 0.735831, acc: 0.437500]\n",
"8596: [D loss: 0.727301, acc: 0.486328] [A loss: 0.837648, acc: 0.183594]\n",
"8597: [D loss: 0.698298, acc: 0.533203] [A loss: 0.749794, acc: 0.371094]\n",
"8598: [D loss: 0.716833, acc: 0.484375] [A loss: 0.848700, acc: 0.199219]\n",
"8599: [D loss: 0.706009, acc: 0.476562] [A loss: 0.758471, acc: 0.375000]\n",
"8600: [D loss: 0.696248, acc: 0.527344] [A loss: 0.821347, acc: 0.210938]\n",
"8601: [D loss: 0.697519, acc: 0.517578] [A loss: 0.777278, acc: 0.328125]\n",
"8602: [D loss: 0.696381, acc: 0.535156] [A loss: 0.804491, acc: 0.265625]\n",
"8603: [D loss: 0.706993, acc: 0.511719] [A loss: 0.741139, acc: 0.390625]\n",
"8604: [D loss: 0.696027, acc: 0.531250] [A loss: 0.810525, acc: 0.273438]\n",
"8605: [D loss: 0.701119, acc: 0.498047] [A loss: 0.806854, acc: 0.246094]\n",
"8606: [D loss: 0.709296, acc: 0.476562] [A loss: 0.780121, acc: 0.296875]\n",
"8607: [D loss: 0.697043, acc: 0.548828] [A loss: 0.752523, acc: 0.378906]\n",
"8608: [D loss: 0.713934, acc: 0.515625] [A loss: 0.860056, acc: 0.128906]\n",
"8609: [D loss: 0.684644, acc: 0.554688] [A loss: 0.710075, acc: 0.445312]\n",
"8610: [D loss: 0.715268, acc: 0.498047] [A loss: 0.861283, acc: 0.175781]\n",
"8611: [D loss: 0.703469, acc: 0.509766] [A loss: 0.723376, acc: 0.425781]\n",
"8612: [D loss: 0.703887, acc: 0.515625] [A loss: 0.839541, acc: 0.246094]\n",
"8613: [D loss: 0.691760, acc: 0.542969] [A loss: 0.752296, acc: 0.367188]\n",
"8614: [D loss: 0.703311, acc: 0.513672] [A loss: 0.831247, acc: 0.203125]\n",
"8615: [D loss: 0.686912, acc: 0.552734] [A loss: 0.713495, acc: 0.480469]\n",
"8616: [D loss: 0.707430, acc: 0.521484] [A loss: 0.886078, acc: 0.097656]\n",
"8617: [D loss: 0.690833, acc: 0.515625] [A loss: 0.709048, acc: 0.480469]\n",
"8618: [D loss: 0.717777, acc: 0.507812] [A loss: 0.965320, acc: 0.117188]\n",
"8619: [D loss: 0.709298, acc: 0.490234] [A loss: 0.764099, acc: 0.335938]\n",
"8620: [D loss: 0.695148, acc: 0.525391] [A loss: 0.778860, acc: 0.292969]\n",
"8621: [D loss: 0.702461, acc: 0.498047] [A loss: 0.771106, acc: 0.316406]\n",
"8622: [D loss: 0.709459, acc: 0.496094] [A loss: 0.791304, acc: 0.250000]\n",
"8623: [D loss: 0.702994, acc: 0.529297] [A loss: 0.764614, acc: 0.335938]\n",
"8624: [D loss: 0.710533, acc: 0.501953] [A loss: 0.792789, acc: 0.292969]\n",
"8625: [D loss: 0.690199, acc: 0.554688] [A loss: 0.778713, acc: 0.296875]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8626: [D loss: 0.703812, acc: 0.515625] [A loss: 0.815850, acc: 0.234375]\n",
"8627: [D loss: 0.709408, acc: 0.480469] [A loss: 0.784368, acc: 0.265625]\n",
"8628: [D loss: 0.708181, acc: 0.462891] [A loss: 0.785670, acc: 0.261719]\n",
"8629: [D loss: 0.704130, acc: 0.500000] [A loss: 0.712910, acc: 0.449219]\n",
"8630: [D loss: 0.713710, acc: 0.496094] [A loss: 0.875104, acc: 0.160156]\n",
"8631: [D loss: 0.698303, acc: 0.537109] [A loss: 0.705524, acc: 0.476562]\n",
"8632: [D loss: 0.705692, acc: 0.519531] [A loss: 0.859836, acc: 0.160156]\n",
"8633: [D loss: 0.686763, acc: 0.550781] [A loss: 0.739967, acc: 0.378906]\n",
"8634: [D loss: 0.717455, acc: 0.523438] [A loss: 0.867066, acc: 0.167969]\n",
"8635: [D loss: 0.694792, acc: 0.513672] [A loss: 0.818170, acc: 0.273438]\n",
"8636: [D loss: 0.708637, acc: 0.492188] [A loss: 0.887228, acc: 0.167969]\n",
"8637: [D loss: 0.680919, acc: 0.546875] [A loss: 0.818492, acc: 0.261719]\n",
"8638: [D loss: 0.687808, acc: 0.546875] [A loss: 0.836599, acc: 0.226562]\n",
"8639: [D loss: 0.704678, acc: 0.533203] [A loss: 0.745025, acc: 0.390625]\n",
"8640: [D loss: 0.698128, acc: 0.535156] [A loss: 0.836342, acc: 0.210938]\n",
"8641: [D loss: 0.699875, acc: 0.531250] [A loss: 0.741951, acc: 0.390625]\n",
"8642: [D loss: 0.696121, acc: 0.533203] [A loss: 0.834230, acc: 0.218750]\n",
"8643: [D loss: 0.695497, acc: 0.515625] [A loss: 0.773814, acc: 0.312500]\n",
"8644: [D loss: 0.703557, acc: 0.523438] [A loss: 0.904064, acc: 0.105469]\n",
"8645: [D loss: 0.702169, acc: 0.523438] [A loss: 0.667956, acc: 0.585938]\n",
"8646: [D loss: 0.719203, acc: 0.515625] [A loss: 0.927807, acc: 0.093750]\n",
"8647: [D loss: 0.712260, acc: 0.464844] [A loss: 0.688450, acc: 0.496094]\n",
"8648: [D loss: 0.710637, acc: 0.498047] [A loss: 0.831976, acc: 0.175781]\n",
"8649: [D loss: 0.697154, acc: 0.511719] [A loss: 0.746665, acc: 0.375000]\n",
"8650: [D loss: 0.713554, acc: 0.486328] [A loss: 0.787033, acc: 0.261719]\n",
"8651: [D loss: 0.691591, acc: 0.541016] [A loss: 0.752375, acc: 0.328125]\n",
"8652: [D loss: 0.686119, acc: 0.533203] [A loss: 0.782308, acc: 0.285156]\n",
"8653: [D loss: 0.696392, acc: 0.527344] [A loss: 0.794849, acc: 0.246094]\n",
"8654: [D loss: 0.683575, acc: 0.544922] [A loss: 0.789494, acc: 0.312500]\n",
"8655: [D loss: 0.700327, acc: 0.523438] [A loss: 0.834162, acc: 0.191406]\n",
"8656: [D loss: 0.692452, acc: 0.558594] [A loss: 0.797033, acc: 0.289062]\n",
"8657: [D loss: 0.692266, acc: 0.542969] [A loss: 0.806760, acc: 0.261719]\n",
"8658: [D loss: 0.695576, acc: 0.535156] [A loss: 0.783853, acc: 0.312500]\n",
"8659: [D loss: 0.705587, acc: 0.517578] [A loss: 0.862408, acc: 0.164062]\n",
"8660: [D loss: 0.699308, acc: 0.505859] [A loss: 0.721798, acc: 0.453125]\n",
"8661: [D loss: 0.704618, acc: 0.527344] [A loss: 0.875222, acc: 0.152344]\n",
"8662: [D loss: 0.704878, acc: 0.498047] [A loss: 0.810608, acc: 0.285156]\n",
"8663: [D loss: 0.704637, acc: 0.523438] [A loss: 0.803188, acc: 0.253906]\n",
"8664: [D loss: 0.691411, acc: 0.564453] [A loss: 0.799631, acc: 0.281250]\n",
"8665: [D loss: 0.700727, acc: 0.505859] [A loss: 0.773028, acc: 0.320312]\n",
"8666: [D loss: 0.691916, acc: 0.511719] [A loss: 0.752546, acc: 0.371094]\n",
"8667: [D loss: 0.706089, acc: 0.507812] [A loss: 0.776728, acc: 0.308594]\n",
"8668: [D loss: 0.692365, acc: 0.525391] [A loss: 0.785637, acc: 0.308594]\n",
"8669: [D loss: 0.698634, acc: 0.517578] [A loss: 0.839341, acc: 0.160156]\n",
"8670: [D loss: 0.696624, acc: 0.517578] [A loss: 0.728683, acc: 0.460938]\n",
"8671: [D loss: 0.715173, acc: 0.488281] [A loss: 0.935423, acc: 0.097656]\n",
"8672: [D loss: 0.697208, acc: 0.511719] [A loss: 0.694499, acc: 0.472656]\n",
"8673: [D loss: 0.715730, acc: 0.511719] [A loss: 0.863672, acc: 0.179688]\n",
"8674: [D loss: 0.685326, acc: 0.562500] [A loss: 0.714501, acc: 0.476562]\n",
"8675: [D loss: 0.708594, acc: 0.486328] [A loss: 0.840625, acc: 0.207031]\n",
"8676: [D loss: 0.700568, acc: 0.531250] [A loss: 0.753087, acc: 0.367188]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8677: [D loss: 0.701350, acc: 0.523438] [A loss: 0.868500, acc: 0.167969]\n",
"8678: [D loss: 0.711194, acc: 0.462891] [A loss: 0.765076, acc: 0.316406]\n",
"8679: [D loss: 0.694660, acc: 0.546875] [A loss: 0.796702, acc: 0.296875]\n",
"8680: [D loss: 0.697396, acc: 0.517578] [A loss: 0.753916, acc: 0.394531]\n",
"8681: [D loss: 0.696694, acc: 0.527344] [A loss: 0.757491, acc: 0.343750]\n",
"8682: [D loss: 0.704539, acc: 0.505859] [A loss: 0.806031, acc: 0.234375]\n",
"8683: [D loss: 0.693889, acc: 0.527344] [A loss: 0.687645, acc: 0.546875]\n",
"8684: [D loss: 0.703651, acc: 0.544922] [A loss: 0.906254, acc: 0.125000]\n",
"8685: [D loss: 0.687073, acc: 0.521484] [A loss: 0.694827, acc: 0.523438]\n",
"8686: [D loss: 0.716589, acc: 0.503906] [A loss: 0.817845, acc: 0.214844]\n",
"8687: [D loss: 0.694901, acc: 0.537109] [A loss: 0.787705, acc: 0.289062]\n",
"8688: [D loss: 0.700795, acc: 0.517578] [A loss: 0.768635, acc: 0.324219]\n",
"8689: [D loss: 0.695299, acc: 0.535156] [A loss: 0.844952, acc: 0.230469]\n",
"8690: [D loss: 0.705070, acc: 0.494141] [A loss: 0.747598, acc: 0.371094]\n",
"8691: [D loss: 0.713123, acc: 0.509766] [A loss: 0.817372, acc: 0.222656]\n",
"8692: [D loss: 0.701519, acc: 0.505859] [A loss: 0.798945, acc: 0.296875]\n",
"8693: [D loss: 0.701549, acc: 0.519531] [A loss: 0.854825, acc: 0.191406]\n",
"8694: [D loss: 0.709211, acc: 0.468750] [A loss: 0.799370, acc: 0.257812]\n",
"8695: [D loss: 0.705587, acc: 0.513672] [A loss: 0.751373, acc: 0.375000]\n",
"8696: [D loss: 0.699917, acc: 0.546875] [A loss: 0.859609, acc: 0.179688]\n",
"8697: [D loss: 0.710774, acc: 0.500000] [A loss: 0.744523, acc: 0.375000]\n",
"8698: [D loss: 0.700622, acc: 0.521484] [A loss: 0.845620, acc: 0.207031]\n",
"8699: [D loss: 0.699179, acc: 0.541016] [A loss: 0.758710, acc: 0.355469]\n",
"8700: [D loss: 0.726240, acc: 0.490234] [A loss: 0.829375, acc: 0.195312]\n",
"8701: [D loss: 0.694451, acc: 0.519531] [A loss: 0.721780, acc: 0.417969]\n",
"8702: [D loss: 0.695911, acc: 0.527344] [A loss: 0.831238, acc: 0.187500]\n",
"8703: [D loss: 0.694153, acc: 0.501953] [A loss: 0.779548, acc: 0.292969]\n",
"8704: [D loss: 0.697290, acc: 0.523438] [A loss: 0.789108, acc: 0.285156]\n",
"8705: [D loss: 0.704919, acc: 0.501953] [A loss: 0.853629, acc: 0.160156]\n",
"8706: [D loss: 0.703388, acc: 0.490234] [A loss: 0.765133, acc: 0.351562]\n",
"8707: [D loss: 0.692766, acc: 0.539062] [A loss: 0.771923, acc: 0.339844]\n",
"8708: [D loss: 0.711752, acc: 0.500000] [A loss: 0.855309, acc: 0.160156]\n",
"8709: [D loss: 0.706987, acc: 0.484375] [A loss: 0.765476, acc: 0.355469]\n",
"8710: [D loss: 0.725277, acc: 0.480469] [A loss: 0.881288, acc: 0.144531]\n",
"8711: [D loss: 0.696332, acc: 0.529297] [A loss: 0.708815, acc: 0.476562]\n",
"8712: [D loss: 0.709526, acc: 0.488281] [A loss: 0.841673, acc: 0.191406]\n",
"8713: [D loss: 0.706814, acc: 0.521484] [A loss: 0.791504, acc: 0.269531]\n",
"8714: [D loss: 0.710989, acc: 0.515625] [A loss: 0.789222, acc: 0.308594]\n",
"8715: [D loss: 0.704348, acc: 0.507812] [A loss: 0.774700, acc: 0.289062]\n",
"8716: [D loss: 0.694551, acc: 0.533203] [A loss: 0.792573, acc: 0.277344]\n",
"8717: [D loss: 0.689358, acc: 0.566406] [A loss: 0.816150, acc: 0.238281]\n",
"8718: [D loss: 0.688483, acc: 0.548828] [A loss: 0.804895, acc: 0.269531]\n",
"8719: [D loss: 0.706010, acc: 0.517578] [A loss: 0.790160, acc: 0.289062]\n",
"8720: [D loss: 0.694709, acc: 0.537109] [A loss: 0.829726, acc: 0.210938]\n",
"8721: [D loss: 0.704662, acc: 0.501953] [A loss: 0.810553, acc: 0.261719]\n",
"8722: [D loss: 0.712289, acc: 0.505859] [A loss: 0.860980, acc: 0.160156]\n",
"8723: [D loss: 0.698398, acc: 0.523438] [A loss: 0.717194, acc: 0.480469]\n",
"8724: [D loss: 0.704986, acc: 0.509766] [A loss: 0.875345, acc: 0.125000]\n",
"8725: [D loss: 0.705751, acc: 0.486328] [A loss: 0.720024, acc: 0.476562]\n",
"8726: [D loss: 0.717396, acc: 0.531250] [A loss: 0.900078, acc: 0.105469]\n",
"8727: [D loss: 0.702826, acc: 0.537109] [A loss: 0.753657, acc: 0.351562]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8728: [D loss: 0.688056, acc: 0.521484] [A loss: 0.805590, acc: 0.265625]\n",
"8729: [D loss: 0.693400, acc: 0.541016] [A loss: 0.790887, acc: 0.289062]\n",
"8730: [D loss: 0.706923, acc: 0.496094] [A loss: 0.810900, acc: 0.281250]\n",
"8731: [D loss: 0.705094, acc: 0.500000] [A loss: 0.810451, acc: 0.230469]\n",
"8732: [D loss: 0.695341, acc: 0.513672] [A loss: 0.808997, acc: 0.265625]\n",
"8733: [D loss: 0.711488, acc: 0.494141] [A loss: 0.819500, acc: 0.238281]\n",
"8734: [D loss: 0.690822, acc: 0.550781] [A loss: 0.821842, acc: 0.261719]\n",
"8735: [D loss: 0.706948, acc: 0.517578] [A loss: 0.865543, acc: 0.128906]\n",
"8736: [D loss: 0.705572, acc: 0.496094] [A loss: 0.760125, acc: 0.386719]\n",
"8737: [D loss: 0.714932, acc: 0.494141] [A loss: 0.906809, acc: 0.113281]\n",
"8738: [D loss: 0.705192, acc: 0.494141] [A loss: 0.735700, acc: 0.410156]\n",
"8739: [D loss: 0.726015, acc: 0.498047] [A loss: 0.951670, acc: 0.054688]\n",
"8740: [D loss: 0.705406, acc: 0.492188] [A loss: 0.688812, acc: 0.562500]\n",
"8741: [D loss: 0.713677, acc: 0.494141] [A loss: 0.830033, acc: 0.218750]\n",
"8742: [D loss: 0.699484, acc: 0.521484] [A loss: 0.749968, acc: 0.371094]\n",
"8743: [D loss: 0.708347, acc: 0.517578] [A loss: 0.794937, acc: 0.281250]\n",
"8744: [D loss: 0.699678, acc: 0.519531] [A loss: 0.817612, acc: 0.210938]\n",
"8745: [D loss: 0.695261, acc: 0.529297] [A loss: 0.801841, acc: 0.246094]\n",
"8746: [D loss: 0.708192, acc: 0.521484] [A loss: 0.759711, acc: 0.300781]\n",
"8747: [D loss: 0.707639, acc: 0.503906] [A loss: 0.843387, acc: 0.156250]\n",
"8748: [D loss: 0.697882, acc: 0.527344] [A loss: 0.794929, acc: 0.289062]\n",
"8749: [D loss: 0.701665, acc: 0.490234] [A loss: 0.815947, acc: 0.207031]\n",
"8750: [D loss: 0.696991, acc: 0.517578] [A loss: 0.790132, acc: 0.269531]\n",
"8751: [D loss: 0.699515, acc: 0.509766] [A loss: 0.755053, acc: 0.367188]\n",
"8752: [D loss: 0.718351, acc: 0.478516] [A loss: 0.813542, acc: 0.214844]\n",
"8753: [D loss: 0.700992, acc: 0.519531] [A loss: 0.738811, acc: 0.398438]\n",
"8754: [D loss: 0.714444, acc: 0.498047] [A loss: 0.860554, acc: 0.156250]\n",
"8755: [D loss: 0.690401, acc: 0.560547] [A loss: 0.748215, acc: 0.394531]\n",
"8756: [D loss: 0.698823, acc: 0.525391] [A loss: 0.863535, acc: 0.175781]\n",
"8757: [D loss: 0.704262, acc: 0.509766] [A loss: 0.739043, acc: 0.441406]\n",
"8758: [D loss: 0.706826, acc: 0.517578] [A loss: 0.806755, acc: 0.207031]\n",
"8759: [D loss: 0.707402, acc: 0.503906] [A loss: 0.807354, acc: 0.226562]\n",
"8760: [D loss: 0.711066, acc: 0.503906] [A loss: 0.848045, acc: 0.195312]\n",
"8761: [D loss: 0.693971, acc: 0.523438] [A loss: 0.744688, acc: 0.375000]\n",
"8762: [D loss: 0.706493, acc: 0.513672] [A loss: 0.890633, acc: 0.121094]\n",
"8763: [D loss: 0.699117, acc: 0.511719] [A loss: 0.719499, acc: 0.429688]\n",
"8764: [D loss: 0.703449, acc: 0.505859] [A loss: 0.849668, acc: 0.156250]\n",
"8765: [D loss: 0.701550, acc: 0.527344] [A loss: 0.748998, acc: 0.378906]\n",
"8766: [D loss: 0.702473, acc: 0.509766] [A loss: 0.879428, acc: 0.140625]\n",
"8767: [D loss: 0.701598, acc: 0.515625] [A loss: 0.674619, acc: 0.589844]\n",
"8768: [D loss: 0.712727, acc: 0.501953] [A loss: 0.831386, acc: 0.207031]\n",
"8769: [D loss: 0.707160, acc: 0.505859] [A loss: 0.745003, acc: 0.378906]\n",
"8770: [D loss: 0.715982, acc: 0.511719] [A loss: 0.836613, acc: 0.183594]\n",
"8771: [D loss: 0.700816, acc: 0.517578] [A loss: 0.740396, acc: 0.378906]\n",
"8772: [D loss: 0.719228, acc: 0.511719] [A loss: 0.857169, acc: 0.183594]\n",
"8773: [D loss: 0.704436, acc: 0.498047] [A loss: 0.718084, acc: 0.429688]\n",
"8774: [D loss: 0.708938, acc: 0.513672] [A loss: 0.824119, acc: 0.230469]\n",
"8775: [D loss: 0.703052, acc: 0.486328] [A loss: 0.760149, acc: 0.351562]\n",
"8776: [D loss: 0.710137, acc: 0.498047] [A loss: 0.795304, acc: 0.253906]\n",
"8777: [D loss: 0.704854, acc: 0.519531] [A loss: 0.807262, acc: 0.265625]\n",
"8778: [D loss: 0.696672, acc: 0.546875] [A loss: 0.778938, acc: 0.281250]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8779: [D loss: 0.703103, acc: 0.521484] [A loss: 0.765606, acc: 0.300781]\n",
"8780: [D loss: 0.702099, acc: 0.527344] [A loss: 0.838876, acc: 0.207031]\n",
"8781: [D loss: 0.707648, acc: 0.486328] [A loss: 0.733697, acc: 0.468750]\n",
"8782: [D loss: 0.723347, acc: 0.484375] [A loss: 0.857078, acc: 0.121094]\n",
"8783: [D loss: 0.704031, acc: 0.515625] [A loss: 0.715491, acc: 0.472656]\n",
"8784: [D loss: 0.714994, acc: 0.503906] [A loss: 0.850145, acc: 0.140625]\n",
"8785: [D loss: 0.698089, acc: 0.511719] [A loss: 0.701789, acc: 0.519531]\n",
"8786: [D loss: 0.713989, acc: 0.521484] [A loss: 0.990452, acc: 0.050781]\n",
"8787: [D loss: 0.700932, acc: 0.507812] [A loss: 0.663485, acc: 0.597656]\n",
"8788: [D loss: 0.730364, acc: 0.519531] [A loss: 0.858940, acc: 0.140625]\n",
"8789: [D loss: 0.702042, acc: 0.517578] [A loss: 0.716762, acc: 0.468750]\n",
"8790: [D loss: 0.721265, acc: 0.470703] [A loss: 0.828313, acc: 0.199219]\n",
"8791: [D loss: 0.695411, acc: 0.515625] [A loss: 0.743807, acc: 0.375000]\n",
"8792: [D loss: 0.705106, acc: 0.503906] [A loss: 0.762783, acc: 0.339844]\n",
"8793: [D loss: 0.701175, acc: 0.529297] [A loss: 0.780120, acc: 0.292969]\n",
"8794: [D loss: 0.700809, acc: 0.486328] [A loss: 0.769087, acc: 0.335938]\n",
"8795: [D loss: 0.697957, acc: 0.539062] [A loss: 0.783263, acc: 0.289062]\n",
"8796: [D loss: 0.701000, acc: 0.517578] [A loss: 0.777627, acc: 0.277344]\n",
"8797: [D loss: 0.691474, acc: 0.531250] [A loss: 0.797077, acc: 0.277344]\n",
"8798: [D loss: 0.697478, acc: 0.541016] [A loss: 0.774885, acc: 0.296875]\n",
"8799: [D loss: 0.698799, acc: 0.505859] [A loss: 0.773354, acc: 0.292969]\n",
"8800: [D loss: 0.708868, acc: 0.503906] [A loss: 0.834878, acc: 0.218750]\n",
"8801: [D loss: 0.701793, acc: 0.500000] [A loss: 0.770858, acc: 0.328125]\n",
"8802: [D loss: 0.699079, acc: 0.531250] [A loss: 0.857148, acc: 0.136719]\n",
"8803: [D loss: 0.708278, acc: 0.501953] [A loss: 0.811229, acc: 0.230469]\n",
"8804: [D loss: 0.698962, acc: 0.521484] [A loss: 0.812742, acc: 0.195312]\n",
"8805: [D loss: 0.700163, acc: 0.507812] [A loss: 0.758463, acc: 0.347656]\n",
"8806: [D loss: 0.693998, acc: 0.529297] [A loss: 0.842026, acc: 0.171875]\n",
"8807: [D loss: 0.702841, acc: 0.505859] [A loss: 0.751480, acc: 0.402344]\n",
"8808: [D loss: 0.704825, acc: 0.519531] [A loss: 0.865610, acc: 0.140625]\n",
"8809: [D loss: 0.690048, acc: 0.517578] [A loss: 0.764002, acc: 0.304688]\n",
"8810: [D loss: 0.701360, acc: 0.503906] [A loss: 0.890690, acc: 0.105469]\n",
"8811: [D loss: 0.692991, acc: 0.542969] [A loss: 0.736294, acc: 0.398438]\n",
"8812: [D loss: 0.702604, acc: 0.531250] [A loss: 0.831075, acc: 0.195312]\n",
"8813: [D loss: 0.685762, acc: 0.542969] [A loss: 0.700879, acc: 0.500000]\n",
"8814: [D loss: 0.707716, acc: 0.507812] [A loss: 0.880754, acc: 0.164062]\n",
"8815: [D loss: 0.698429, acc: 0.513672] [A loss: 0.737177, acc: 0.414062]\n",
"8816: [D loss: 0.710323, acc: 0.521484] [A loss: 0.825477, acc: 0.214844]\n",
"8817: [D loss: 0.692393, acc: 0.527344] [A loss: 0.768324, acc: 0.351562]\n",
"8818: [D loss: 0.698440, acc: 0.509766] [A loss: 0.829756, acc: 0.230469]\n",
"8819: [D loss: 0.683227, acc: 0.542969] [A loss: 0.835471, acc: 0.242188]\n",
"8820: [D loss: 0.695154, acc: 0.546875] [A loss: 0.842994, acc: 0.175781]\n",
"8821: [D loss: 0.694601, acc: 0.535156] [A loss: 0.832121, acc: 0.179688]\n",
"8822: [D loss: 0.709856, acc: 0.490234] [A loss: 0.774625, acc: 0.308594]\n",
"8823: [D loss: 0.703565, acc: 0.525391] [A loss: 0.805756, acc: 0.265625]\n",
"8824: [D loss: 0.707857, acc: 0.494141] [A loss: 0.753906, acc: 0.332031]\n",
"8825: [D loss: 0.694371, acc: 0.548828] [A loss: 0.778471, acc: 0.316406]\n",
"8826: [D loss: 0.704151, acc: 0.515625] [A loss: 0.836178, acc: 0.179688]\n",
"8827: [D loss: 0.704153, acc: 0.513672] [A loss: 0.795071, acc: 0.250000]\n",
"8828: [D loss: 0.703997, acc: 0.505859] [A loss: 0.846410, acc: 0.179688]\n",
"8829: [D loss: 0.708393, acc: 0.484375] [A loss: 0.831906, acc: 0.191406]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8830: [D loss: 0.696703, acc: 0.509766] [A loss: 0.801870, acc: 0.253906]\n",
"8831: [D loss: 0.710213, acc: 0.496094] [A loss: 0.839025, acc: 0.218750]\n",
"8832: [D loss: 0.704110, acc: 0.500000] [A loss: 0.816546, acc: 0.207031]\n",
"8833: [D loss: 0.691579, acc: 0.513672] [A loss: 0.875319, acc: 0.144531]\n",
"8834: [D loss: 0.684488, acc: 0.548828] [A loss: 0.759870, acc: 0.312500]\n",
"8835: [D loss: 0.707349, acc: 0.498047] [A loss: 0.867514, acc: 0.171875]\n",
"8836: [D loss: 0.703698, acc: 0.480469] [A loss: 0.718784, acc: 0.464844]\n",
"8837: [D loss: 0.706167, acc: 0.501953] [A loss: 0.863513, acc: 0.175781]\n",
"8838: [D loss: 0.699442, acc: 0.513672] [A loss: 0.723162, acc: 0.414062]\n",
"8839: [D loss: 0.719607, acc: 0.490234] [A loss: 0.921093, acc: 0.093750]\n",
"8840: [D loss: 0.704969, acc: 0.505859] [A loss: 0.680221, acc: 0.566406]\n",
"8841: [D loss: 0.708980, acc: 0.500000] [A loss: 0.795005, acc: 0.269531]\n",
"8842: [D loss: 0.702109, acc: 0.523438] [A loss: 0.744788, acc: 0.378906]\n",
"8843: [D loss: 0.703111, acc: 0.527344] [A loss: 0.803223, acc: 0.246094]\n",
"8844: [D loss: 0.691950, acc: 0.537109] [A loss: 0.743770, acc: 0.386719]\n",
"8845: [D loss: 0.705816, acc: 0.496094] [A loss: 0.820829, acc: 0.187500]\n",
"8846: [D loss: 0.714670, acc: 0.488281] [A loss: 0.759846, acc: 0.343750]\n",
"8847: [D loss: 0.708919, acc: 0.509766] [A loss: 0.893366, acc: 0.121094]\n",
"8848: [D loss: 0.699789, acc: 0.470703] [A loss: 0.782782, acc: 0.285156]\n",
"8849: [D loss: 0.693713, acc: 0.517578] [A loss: 0.855329, acc: 0.144531]\n",
"8850: [D loss: 0.702536, acc: 0.507812] [A loss: 0.747459, acc: 0.375000]\n",
"8851: [D loss: 0.702283, acc: 0.517578] [A loss: 0.884295, acc: 0.113281]\n",
"8852: [D loss: 0.691255, acc: 0.527344] [A loss: 0.719347, acc: 0.460938]\n",
"8853: [D loss: 0.700865, acc: 0.533203] [A loss: 0.927594, acc: 0.074219]\n",
"8854: [D loss: 0.689660, acc: 0.550781] [A loss: 0.712889, acc: 0.425781]\n",
"8855: [D loss: 0.703500, acc: 0.523438] [A loss: 0.891193, acc: 0.144531]\n",
"8856: [D loss: 0.701481, acc: 0.498047] [A loss: 0.751298, acc: 0.382812]\n",
"8857: [D loss: 0.708638, acc: 0.494141] [A loss: 0.807989, acc: 0.226562]\n",
"8858: [D loss: 0.705370, acc: 0.503906] [A loss: 0.786359, acc: 0.261719]\n",
"8859: [D loss: 0.686415, acc: 0.560547] [A loss: 0.776310, acc: 0.343750]\n",
"8860: [D loss: 0.699600, acc: 0.492188] [A loss: 0.745152, acc: 0.394531]\n",
"8861: [D loss: 0.704524, acc: 0.505859] [A loss: 0.816407, acc: 0.226562]\n",
"8862: [D loss: 0.703099, acc: 0.511719] [A loss: 0.785840, acc: 0.281250]\n",
"8863: [D loss: 0.706357, acc: 0.511719] [A loss: 0.828112, acc: 0.199219]\n",
"8864: [D loss: 0.701867, acc: 0.523438] [A loss: 0.791882, acc: 0.289062]\n",
"8865: [D loss: 0.699932, acc: 0.521484] [A loss: 0.814435, acc: 0.207031]\n",
"8866: [D loss: 0.700152, acc: 0.484375] [A loss: 0.779324, acc: 0.316406]\n",
"8867: [D loss: 0.705256, acc: 0.501953] [A loss: 0.841090, acc: 0.171875]\n",
"8868: [D loss: 0.695756, acc: 0.529297] [A loss: 0.734822, acc: 0.417969]\n",
"8869: [D loss: 0.710584, acc: 0.505859] [A loss: 0.885402, acc: 0.136719]\n",
"8870: [D loss: 0.689151, acc: 0.541016] [A loss: 0.759879, acc: 0.335938]\n",
"8871: [D loss: 0.704625, acc: 0.517578] [A loss: 0.815568, acc: 0.238281]\n",
"8872: [D loss: 0.700844, acc: 0.494141] [A loss: 0.709292, acc: 0.468750]\n",
"8873: [D loss: 0.708984, acc: 0.503906] [A loss: 0.879661, acc: 0.125000]\n",
"8874: [D loss: 0.692796, acc: 0.521484] [A loss: 0.769807, acc: 0.320312]\n",
"8875: [D loss: 0.710767, acc: 0.511719] [A loss: 0.857725, acc: 0.152344]\n",
"8876: [D loss: 0.698158, acc: 0.501953] [A loss: 0.748191, acc: 0.367188]\n",
"8877: [D loss: 0.701887, acc: 0.527344] [A loss: 0.853884, acc: 0.152344]\n",
"8878: [D loss: 0.699911, acc: 0.548828] [A loss: 0.763814, acc: 0.367188]\n",
"8879: [D loss: 0.693669, acc: 0.527344] [A loss: 0.786279, acc: 0.296875]\n",
"8880: [D loss: 0.705378, acc: 0.498047] [A loss: 0.817732, acc: 0.253906]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8881: [D loss: 0.692323, acc: 0.529297] [A loss: 0.749478, acc: 0.367188]\n",
"8882: [D loss: 0.720965, acc: 0.494141] [A loss: 0.870754, acc: 0.144531]\n",
"8883: [D loss: 0.701395, acc: 0.500000] [A loss: 0.760881, acc: 0.332031]\n",
"8884: [D loss: 0.715416, acc: 0.470703] [A loss: 0.842861, acc: 0.183594]\n",
"8885: [D loss: 0.695972, acc: 0.529297] [A loss: 0.788655, acc: 0.296875]\n",
"8886: [D loss: 0.703377, acc: 0.515625] [A loss: 0.778226, acc: 0.292969]\n",
"8887: [D loss: 0.700797, acc: 0.517578] [A loss: 0.831438, acc: 0.179688]\n",
"8888: [D loss: 0.702793, acc: 0.507812] [A loss: 0.690039, acc: 0.535156]\n",
"8889: [D loss: 0.720736, acc: 0.519531] [A loss: 0.951042, acc: 0.054688]\n",
"8890: [D loss: 0.702042, acc: 0.511719] [A loss: 0.710982, acc: 0.476562]\n",
"8891: [D loss: 0.723013, acc: 0.488281] [A loss: 0.856433, acc: 0.171875]\n",
"8892: [D loss: 0.704365, acc: 0.496094] [A loss: 0.745860, acc: 0.359375]\n",
"8893: [D loss: 0.699464, acc: 0.531250] [A loss: 0.839997, acc: 0.210938]\n",
"8894: [D loss: 0.702706, acc: 0.494141] [A loss: 0.757485, acc: 0.328125]\n",
"8895: [D loss: 0.710185, acc: 0.507812] [A loss: 0.804626, acc: 0.273438]\n",
"8896: [D loss: 0.700207, acc: 0.525391] [A loss: 0.776255, acc: 0.312500]\n",
"8897: [D loss: 0.704132, acc: 0.482422] [A loss: 0.790461, acc: 0.312500]\n",
"8898: [D loss: 0.701780, acc: 0.513672] [A loss: 0.795362, acc: 0.285156]\n",
"8899: [D loss: 0.694479, acc: 0.513672] [A loss: 0.763468, acc: 0.367188]\n",
"8900: [D loss: 0.691193, acc: 0.539062] [A loss: 0.814356, acc: 0.269531]\n",
"8901: [D loss: 0.690769, acc: 0.541016] [A loss: 0.804720, acc: 0.261719]\n",
"8902: [D loss: 0.694296, acc: 0.537109] [A loss: 0.779563, acc: 0.296875]\n",
"8903: [D loss: 0.687604, acc: 0.537109] [A loss: 0.782458, acc: 0.320312]\n",
"8904: [D loss: 0.693204, acc: 0.509766] [A loss: 0.804280, acc: 0.269531]\n",
"8905: [D loss: 0.688775, acc: 0.531250] [A loss: 0.757779, acc: 0.347656]\n",
"8906: [D loss: 0.697283, acc: 0.527344] [A loss: 0.841830, acc: 0.183594]\n",
"8907: [D loss: 0.709277, acc: 0.500000] [A loss: 0.803778, acc: 0.265625]\n",
"8908: [D loss: 0.711813, acc: 0.453125] [A loss: 0.783810, acc: 0.292969]\n",
"8909: [D loss: 0.714468, acc: 0.507812] [A loss: 0.929817, acc: 0.070312]\n",
"8910: [D loss: 0.691510, acc: 0.519531] [A loss: 0.693747, acc: 0.535156]\n",
"8911: [D loss: 0.723208, acc: 0.500000] [A loss: 0.838654, acc: 0.179688]\n",
"8912: [D loss: 0.692875, acc: 0.566406] [A loss: 0.759160, acc: 0.363281]\n",
"8913: [D loss: 0.702257, acc: 0.513672] [A loss: 0.814365, acc: 0.238281]\n",
"8914: [D loss: 0.699799, acc: 0.537109] [A loss: 0.773396, acc: 0.347656]\n",
"8915: [D loss: 0.719589, acc: 0.460938] [A loss: 0.837363, acc: 0.187500]\n",
"8916: [D loss: 0.696819, acc: 0.525391] [A loss: 0.727982, acc: 0.449219]\n",
"8917: [D loss: 0.723326, acc: 0.464844] [A loss: 0.877860, acc: 0.160156]\n",
"8918: [D loss: 0.703664, acc: 0.486328] [A loss: 0.790337, acc: 0.261719]\n",
"8919: [D loss: 0.702543, acc: 0.496094] [A loss: 0.803018, acc: 0.257812]\n",
"8920: [D loss: 0.698069, acc: 0.521484] [A loss: 0.797148, acc: 0.296875]\n",
"8921: [D loss: 0.698943, acc: 0.535156] [A loss: 0.864243, acc: 0.171875]\n",
"8922: [D loss: 0.715077, acc: 0.468750] [A loss: 0.775362, acc: 0.312500]\n",
"8923: [D loss: 0.697347, acc: 0.523438] [A loss: 0.795189, acc: 0.289062]\n",
"8924: [D loss: 0.705871, acc: 0.509766] [A loss: 0.803410, acc: 0.234375]\n",
"8925: [D loss: 0.696195, acc: 0.513672] [A loss: 0.760510, acc: 0.359375]\n",
"8926: [D loss: 0.707247, acc: 0.519531] [A loss: 0.902001, acc: 0.113281]\n",
"8927: [D loss: 0.685834, acc: 0.556641] [A loss: 0.717836, acc: 0.441406]\n",
"8928: [D loss: 0.705151, acc: 0.537109] [A loss: 0.915923, acc: 0.113281]\n",
"8929: [D loss: 0.690251, acc: 0.535156] [A loss: 0.751816, acc: 0.343750]\n",
"8930: [D loss: 0.698343, acc: 0.539062] [A loss: 0.843743, acc: 0.207031]\n",
"8931: [D loss: 0.690214, acc: 0.554688] [A loss: 0.756455, acc: 0.347656]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8932: [D loss: 0.719908, acc: 0.503906] [A loss: 0.844922, acc: 0.171875]\n",
"8933: [D loss: 0.705240, acc: 0.482422] [A loss: 0.759163, acc: 0.359375]\n",
"8934: [D loss: 0.706932, acc: 0.505859] [A loss: 0.860772, acc: 0.214844]\n",
"8935: [D loss: 0.700500, acc: 0.513672] [A loss: 0.768006, acc: 0.316406]\n",
"8936: [D loss: 0.702464, acc: 0.503906] [A loss: 0.839235, acc: 0.179688]\n",
"8937: [D loss: 0.688673, acc: 0.537109] [A loss: 0.735616, acc: 0.402344]\n",
"8938: [D loss: 0.699954, acc: 0.503906] [A loss: 0.822659, acc: 0.183594]\n",
"8939: [D loss: 0.712414, acc: 0.478516] [A loss: 0.766837, acc: 0.316406]\n",
"8940: [D loss: 0.702818, acc: 0.511719] [A loss: 0.859203, acc: 0.156250]\n",
"8941: [D loss: 0.697182, acc: 0.507812] [A loss: 0.725876, acc: 0.429688]\n",
"8942: [D loss: 0.711632, acc: 0.511719] [A loss: 0.886193, acc: 0.117188]\n",
"8943: [D loss: 0.693721, acc: 0.535156] [A loss: 0.719182, acc: 0.468750]\n",
"8944: [D loss: 0.709077, acc: 0.492188] [A loss: 0.849190, acc: 0.171875]\n",
"8945: [D loss: 0.702807, acc: 0.513672] [A loss: 0.718551, acc: 0.421875]\n",
"8946: [D loss: 0.703829, acc: 0.507812] [A loss: 0.822765, acc: 0.214844]\n",
"8947: [D loss: 0.701105, acc: 0.515625] [A loss: 0.774329, acc: 0.308594]\n",
"8948: [D loss: 0.716493, acc: 0.521484] [A loss: 0.825705, acc: 0.210938]\n",
"8949: [D loss: 0.705567, acc: 0.498047] [A loss: 0.803197, acc: 0.273438]\n",
"8950: [D loss: 0.701223, acc: 0.511719] [A loss: 0.819082, acc: 0.234375]\n",
"8951: [D loss: 0.700596, acc: 0.488281] [A loss: 0.748196, acc: 0.375000]\n",
"8952: [D loss: 0.715310, acc: 0.494141] [A loss: 0.847021, acc: 0.179688]\n",
"8953: [D loss: 0.698975, acc: 0.523438] [A loss: 0.735688, acc: 0.390625]\n",
"8954: [D loss: 0.707926, acc: 0.521484] [A loss: 0.882208, acc: 0.113281]\n",
"8955: [D loss: 0.704311, acc: 0.505859] [A loss: 0.729554, acc: 0.414062]\n",
"8956: [D loss: 0.706800, acc: 0.515625] [A loss: 0.839979, acc: 0.195312]\n",
"8957: [D loss: 0.699316, acc: 0.509766] [A loss: 0.794069, acc: 0.226562]\n",
"8958: [D loss: 0.704751, acc: 0.501953] [A loss: 0.830874, acc: 0.195312]\n",
"8959: [D loss: 0.681681, acc: 0.574219] [A loss: 0.740589, acc: 0.398438]\n",
"8960: [D loss: 0.703224, acc: 0.513672] [A loss: 0.856578, acc: 0.175781]\n",
"8961: [D loss: 0.703419, acc: 0.509766] [A loss: 0.763144, acc: 0.335938]\n",
"8962: [D loss: 0.702567, acc: 0.507812] [A loss: 0.781418, acc: 0.285156]\n",
"8963: [D loss: 0.711485, acc: 0.513672] [A loss: 0.752394, acc: 0.351562]\n",
"8964: [D loss: 0.700280, acc: 0.505859] [A loss: 0.765908, acc: 0.316406]\n",
"8965: [D loss: 0.704501, acc: 0.503906] [A loss: 0.806810, acc: 0.296875]\n",
"8966: [D loss: 0.702243, acc: 0.517578] [A loss: 0.766276, acc: 0.289062]\n",
"8967: [D loss: 0.711790, acc: 0.519531] [A loss: 0.886885, acc: 0.156250]\n",
"8968: [D loss: 0.694528, acc: 0.541016] [A loss: 0.697273, acc: 0.527344]\n",
"8969: [D loss: 0.714478, acc: 0.523438] [A loss: 0.904398, acc: 0.062500]\n",
"8970: [D loss: 0.697121, acc: 0.513672] [A loss: 0.673790, acc: 0.566406]\n",
"8971: [D loss: 0.705236, acc: 0.517578] [A loss: 0.843798, acc: 0.175781]\n",
"8972: [D loss: 0.708503, acc: 0.509766] [A loss: 0.804460, acc: 0.246094]\n",
"8973: [D loss: 0.699567, acc: 0.527344] [A loss: 0.760183, acc: 0.347656]\n",
"8974: [D loss: 0.699780, acc: 0.519531] [A loss: 0.805110, acc: 0.257812]\n",
"8975: [D loss: 0.690896, acc: 0.527344] [A loss: 0.756579, acc: 0.347656]\n",
"8976: [D loss: 0.701776, acc: 0.498047] [A loss: 0.820694, acc: 0.253906]\n",
"8977: [D loss: 0.703108, acc: 0.509766] [A loss: 0.711249, acc: 0.460938]\n",
"8978: [D loss: 0.710477, acc: 0.548828] [A loss: 0.880590, acc: 0.140625]\n",
"8979: [D loss: 0.686924, acc: 0.507812] [A loss: 0.714646, acc: 0.457031]\n",
"8980: [D loss: 0.712614, acc: 0.513672] [A loss: 0.816738, acc: 0.250000]\n",
"8981: [D loss: 0.699878, acc: 0.535156] [A loss: 0.761460, acc: 0.300781]\n",
"8982: [D loss: 0.711311, acc: 0.476562] [A loss: 0.831257, acc: 0.195312]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"8983: [D loss: 0.702659, acc: 0.525391] [A loss: 0.819976, acc: 0.230469]\n",
"8984: [D loss: 0.707335, acc: 0.492188] [A loss: 0.771084, acc: 0.296875]\n",
"8985: [D loss: 0.712253, acc: 0.474609] [A loss: 0.789612, acc: 0.285156]\n",
"8986: [D loss: 0.699097, acc: 0.515625] [A loss: 0.753899, acc: 0.386719]\n",
"8987: [D loss: 0.711243, acc: 0.537109] [A loss: 0.872068, acc: 0.140625]\n",
"8988: [D loss: 0.701655, acc: 0.488281] [A loss: 0.761934, acc: 0.339844]\n",
"8989: [D loss: 0.700433, acc: 0.533203] [A loss: 0.767032, acc: 0.351562]\n",
"8990: [D loss: 0.706537, acc: 0.523438] [A loss: 0.856279, acc: 0.152344]\n",
"8991: [D loss: 0.697900, acc: 0.531250] [A loss: 0.779559, acc: 0.257812]\n",
"8992: [D loss: 0.694994, acc: 0.552734] [A loss: 0.760644, acc: 0.367188]\n",
"8993: [D loss: 0.704014, acc: 0.523438] [A loss: 0.875789, acc: 0.128906]\n",
"8994: [D loss: 0.698865, acc: 0.519531] [A loss: 0.718235, acc: 0.468750]\n",
"8995: [D loss: 0.718537, acc: 0.490234] [A loss: 0.920861, acc: 0.105469]\n",
"8996: [D loss: 0.709122, acc: 0.496094] [A loss: 0.839584, acc: 0.171875]\n",
"8997: [D loss: 0.700920, acc: 0.501953] [A loss: 0.810479, acc: 0.242188]\n",
"8998: [D loss: 0.689673, acc: 0.564453] [A loss: 0.774377, acc: 0.308594]\n",
"8999: [D loss: 0.700679, acc: 0.541016] [A loss: 0.857454, acc: 0.144531]\n",
"9000: [D loss: 0.688203, acc: 0.535156] [A loss: 0.716707, acc: 0.457031]\n",
"9001: [D loss: 0.702343, acc: 0.515625] [A loss: 0.874266, acc: 0.105469]\n",
"9002: [D loss: 0.697617, acc: 0.515625] [A loss: 0.705398, acc: 0.480469]\n",
"9003: [D loss: 0.719931, acc: 0.490234] [A loss: 0.891371, acc: 0.117188]\n",
"9004: [D loss: 0.697460, acc: 0.542969] [A loss: 0.753191, acc: 0.390625]\n",
"9005: [D loss: 0.721407, acc: 0.488281] [A loss: 0.793661, acc: 0.261719]\n",
"9006: [D loss: 0.699771, acc: 0.500000] [A loss: 0.820632, acc: 0.230469]\n",
"9007: [D loss: 0.691324, acc: 0.505859] [A loss: 0.835575, acc: 0.156250]\n",
"9008: [D loss: 0.709953, acc: 0.460938] [A loss: 0.779160, acc: 0.343750]\n",
"9009: [D loss: 0.700956, acc: 0.517578] [A loss: 0.809103, acc: 0.265625]\n",
"9010: [D loss: 0.706860, acc: 0.519531] [A loss: 0.832411, acc: 0.226562]\n",
"9011: [D loss: 0.699353, acc: 0.515625] [A loss: 0.773040, acc: 0.324219]\n",
"9012: [D loss: 0.695515, acc: 0.533203] [A loss: 0.806240, acc: 0.277344]\n",
"9013: [D loss: 0.705847, acc: 0.498047] [A loss: 0.788254, acc: 0.296875]\n",
"9014: [D loss: 0.697862, acc: 0.531250] [A loss: 0.777020, acc: 0.312500]\n",
"9015: [D loss: 0.700088, acc: 0.515625] [A loss: 0.830029, acc: 0.218750]\n",
"9016: [D loss: 0.708281, acc: 0.484375] [A loss: 0.810668, acc: 0.253906]\n",
"9017: [D loss: 0.696101, acc: 0.501953] [A loss: 0.793797, acc: 0.246094]\n",
"9018: [D loss: 0.700141, acc: 0.519531] [A loss: 0.784333, acc: 0.300781]\n",
"9019: [D loss: 0.704131, acc: 0.523438] [A loss: 0.807255, acc: 0.261719]\n",
"9020: [D loss: 0.699440, acc: 0.515625] [A loss: 0.762126, acc: 0.363281]\n",
"9021: [D loss: 0.695954, acc: 0.548828] [A loss: 0.802572, acc: 0.210938]\n",
"9022: [D loss: 0.704902, acc: 0.509766] [A loss: 0.763951, acc: 0.335938]\n",
"9023: [D loss: 0.704010, acc: 0.500000] [A loss: 0.797463, acc: 0.250000]\n",
"9024: [D loss: 0.704620, acc: 0.525391] [A loss: 0.784510, acc: 0.300781]\n",
"9025: [D loss: 0.701523, acc: 0.535156] [A loss: 0.845464, acc: 0.214844]\n",
"9026: [D loss: 0.698029, acc: 0.513672] [A loss: 0.743045, acc: 0.394531]\n",
"9027: [D loss: 0.707987, acc: 0.488281] [A loss: 0.927576, acc: 0.101562]\n",
"9028: [D loss: 0.688170, acc: 0.556641] [A loss: 0.687252, acc: 0.546875]\n",
"9029: [D loss: 0.702737, acc: 0.523438] [A loss: 0.889899, acc: 0.128906]\n",
"9030: [D loss: 0.700044, acc: 0.503906] [A loss: 0.708383, acc: 0.468750]\n",
"9031: [D loss: 0.704780, acc: 0.507812] [A loss: 0.895226, acc: 0.148438]\n",
"9032: [D loss: 0.696858, acc: 0.539062] [A loss: 0.834687, acc: 0.246094]\n",
"9033: [D loss: 0.701538, acc: 0.515625] [A loss: 0.839407, acc: 0.191406]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"9034: [D loss: 0.715227, acc: 0.494141] [A loss: 0.895817, acc: 0.105469]\n",
"9035: [D loss: 0.698347, acc: 0.496094] [A loss: 0.731518, acc: 0.417969]\n",
"9036: [D loss: 0.708105, acc: 0.525391] [A loss: 0.867986, acc: 0.136719]\n",
"9037: [D loss: 0.699282, acc: 0.500000] [A loss: 0.734105, acc: 0.375000]\n",
"9038: [D loss: 0.708074, acc: 0.476562] [A loss: 0.855629, acc: 0.183594]\n",
"9039: [D loss: 0.700990, acc: 0.476562] [A loss: 0.763273, acc: 0.375000]\n",
"9040: [D loss: 0.703237, acc: 0.503906] [A loss: 0.868818, acc: 0.164062]\n",
"9041: [D loss: 0.692836, acc: 0.535156] [A loss: 0.738617, acc: 0.410156]\n",
"9042: [D loss: 0.700922, acc: 0.531250] [A loss: 0.878438, acc: 0.148438]\n",
"9043: [D loss: 0.699900, acc: 0.521484] [A loss: 0.724784, acc: 0.433594]\n",
"9044: [D loss: 0.699973, acc: 0.544922] [A loss: 0.841845, acc: 0.140625]\n",
"9045: [D loss: 0.699218, acc: 0.507812] [A loss: 0.754167, acc: 0.355469]\n",
"9046: [D loss: 0.723520, acc: 0.462891] [A loss: 0.826923, acc: 0.183594]\n",
"9047: [D loss: 0.699674, acc: 0.525391] [A loss: 0.766459, acc: 0.316406]\n",
"9048: [D loss: 0.705936, acc: 0.478516] [A loss: 0.803780, acc: 0.253906]\n",
"9049: [D loss: 0.694725, acc: 0.519531] [A loss: 0.778716, acc: 0.332031]\n",
"9050: [D loss: 0.701550, acc: 0.529297] [A loss: 0.807759, acc: 0.281250]\n",
"9051: [D loss: 0.693527, acc: 0.523438] [A loss: 0.769843, acc: 0.324219]\n",
"9052: [D loss: 0.702607, acc: 0.509766] [A loss: 0.784689, acc: 0.281250]\n",
"9053: [D loss: 0.693207, acc: 0.533203] [A loss: 0.838064, acc: 0.199219]\n",
"9054: [D loss: 0.716175, acc: 0.455078] [A loss: 0.841825, acc: 0.175781]\n",
"9055: [D loss: 0.711768, acc: 0.496094] [A loss: 0.816959, acc: 0.304688]\n",
"9056: [D loss: 0.700899, acc: 0.521484] [A loss: 0.761151, acc: 0.343750]\n",
"9057: [D loss: 0.700777, acc: 0.517578] [A loss: 0.829386, acc: 0.253906]\n",
"9058: [D loss: 0.687200, acc: 0.542969] [A loss: 0.744043, acc: 0.355469]\n",
"9059: [D loss: 0.707630, acc: 0.498047] [A loss: 0.870214, acc: 0.140625]\n",
"9060: [D loss: 0.706971, acc: 0.507812] [A loss: 0.740141, acc: 0.363281]\n",
"9061: [D loss: 0.709591, acc: 0.498047] [A loss: 0.871171, acc: 0.144531]\n",
"9062: [D loss: 0.709078, acc: 0.472656] [A loss: 0.755319, acc: 0.359375]\n",
"9063: [D loss: 0.700623, acc: 0.500000] [A loss: 0.805032, acc: 0.246094]\n",
"9064: [D loss: 0.691399, acc: 0.574219] [A loss: 0.805543, acc: 0.265625]\n",
"9065: [D loss: 0.696091, acc: 0.507812] [A loss: 0.796026, acc: 0.281250]\n",
"9066: [D loss: 0.700311, acc: 0.525391] [A loss: 0.878646, acc: 0.156250]\n",
"9067: [D loss: 0.703468, acc: 0.507812] [A loss: 0.845645, acc: 0.292969]\n",
"9068: [D loss: 0.705141, acc: 0.511719] [A loss: 0.898207, acc: 0.167969]\n",
"9069: [D loss: 0.694565, acc: 0.531250] [A loss: 0.728956, acc: 0.453125]\n",
"9070: [D loss: 0.730315, acc: 0.500000] [A loss: 0.975315, acc: 0.035156]\n",
"9071: [D loss: 0.701364, acc: 0.509766] [A loss: 0.711621, acc: 0.453125]\n",
"9072: [D loss: 0.719686, acc: 0.484375] [A loss: 0.935239, acc: 0.074219]\n",
"9073: [D loss: 0.686623, acc: 0.546875] [A loss: 0.733660, acc: 0.445312]\n",
"9074: [D loss: 0.721681, acc: 0.480469] [A loss: 0.849225, acc: 0.179688]\n",
"9075: [D loss: 0.698051, acc: 0.505859] [A loss: 0.742705, acc: 0.386719]\n",
"9076: [D loss: 0.701997, acc: 0.535156] [A loss: 0.757691, acc: 0.355469]\n",
"9077: [D loss: 0.715618, acc: 0.492188] [A loss: 0.827070, acc: 0.207031]\n",
"9078: [D loss: 0.698630, acc: 0.509766] [A loss: 0.741609, acc: 0.406250]\n",
"9079: [D loss: 0.698888, acc: 0.527344] [A loss: 0.890409, acc: 0.121094]\n",
"9080: [D loss: 0.692532, acc: 0.533203] [A loss: 0.702287, acc: 0.468750]\n",
"9081: [D loss: 0.724200, acc: 0.501953] [A loss: 0.947252, acc: 0.058594]\n",
"9082: [D loss: 0.694813, acc: 0.523438] [A loss: 0.671549, acc: 0.550781]\n",
"9083: [D loss: 0.715195, acc: 0.511719] [A loss: 0.854827, acc: 0.148438]\n",
"9084: [D loss: 0.704558, acc: 0.476562] [A loss: 0.783333, acc: 0.308594]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"9085: [D loss: 0.708877, acc: 0.486328] [A loss: 0.804864, acc: 0.218750]\n",
"9086: [D loss: 0.690976, acc: 0.531250] [A loss: 0.745670, acc: 0.375000]\n",
"9087: [D loss: 0.708094, acc: 0.503906] [A loss: 0.809095, acc: 0.210938]\n",
"9088: [D loss: 0.703020, acc: 0.500000] [A loss: 0.749976, acc: 0.359375]\n",
"9089: [D loss: 0.697857, acc: 0.519531] [A loss: 0.826368, acc: 0.226562]\n",
"9090: [D loss: 0.708952, acc: 0.507812] [A loss: 0.806633, acc: 0.277344]\n",
"9091: [D loss: 0.717271, acc: 0.490234] [A loss: 0.828578, acc: 0.214844]\n",
"9092: [D loss: 0.692527, acc: 0.535156] [A loss: 0.802863, acc: 0.281250]\n",
"9093: [D loss: 0.717725, acc: 0.490234] [A loss: 0.843403, acc: 0.167969]\n",
"9094: [D loss: 0.709903, acc: 0.482422] [A loss: 0.799056, acc: 0.277344]\n",
"9095: [D loss: 0.702313, acc: 0.490234] [A loss: 0.804598, acc: 0.257812]\n",
"9096: [D loss: 0.695236, acc: 0.501953] [A loss: 0.804640, acc: 0.265625]\n",
"9097: [D loss: 0.686420, acc: 0.544922] [A loss: 0.791511, acc: 0.273438]\n",
"9098: [D loss: 0.706709, acc: 0.509766] [A loss: 0.826001, acc: 0.187500]\n",
"9099: [D loss: 0.703714, acc: 0.511719] [A loss: 0.731973, acc: 0.445312]\n",
"9100: [D loss: 0.703416, acc: 0.478516] [A loss: 0.830968, acc: 0.183594]\n",
"9101: [D loss: 0.702017, acc: 0.533203] [A loss: 0.768056, acc: 0.371094]\n",
"9102: [D loss: 0.706518, acc: 0.521484] [A loss: 0.856316, acc: 0.179688]\n",
"9103: [D loss: 0.712531, acc: 0.470703] [A loss: 0.740413, acc: 0.414062]\n",
"9104: [D loss: 0.721576, acc: 0.501953] [A loss: 0.954592, acc: 0.078125]\n",
"9105: [D loss: 0.709864, acc: 0.501953] [A loss: 0.704268, acc: 0.511719]\n",
"9106: [D loss: 0.713807, acc: 0.523438] [A loss: 0.850217, acc: 0.199219]\n",
"9107: [D loss: 0.696224, acc: 0.542969] [A loss: 0.772646, acc: 0.300781]\n",
"9108: [D loss: 0.705524, acc: 0.517578] [A loss: 0.778250, acc: 0.312500]\n",
"9109: [D loss: 0.713700, acc: 0.492188] [A loss: 0.845247, acc: 0.207031]\n",
"9110: [D loss: 0.703613, acc: 0.494141] [A loss: 0.729626, acc: 0.429688]\n",
"9111: [D loss: 0.706171, acc: 0.492188] [A loss: 0.810357, acc: 0.242188]\n",
"9112: [D loss: 0.680511, acc: 0.574219] [A loss: 0.724771, acc: 0.433594]\n",
"9113: [D loss: 0.713848, acc: 0.509766] [A loss: 0.914500, acc: 0.101562]\n",
"9114: [D loss: 0.697669, acc: 0.527344] [A loss: 0.710576, acc: 0.472656]\n",
"9115: [D loss: 0.708857, acc: 0.525391] [A loss: 0.829772, acc: 0.226562]\n",
"9116: [D loss: 0.702228, acc: 0.521484] [A loss: 0.756415, acc: 0.363281]\n",
"9117: [D loss: 0.699495, acc: 0.525391] [A loss: 0.878243, acc: 0.160156]\n",
"9118: [D loss: 0.692192, acc: 0.537109] [A loss: 0.705623, acc: 0.488281]\n",
"9119: [D loss: 0.713211, acc: 0.509766] [A loss: 0.891145, acc: 0.113281]\n",
"9120: [D loss: 0.693702, acc: 0.533203] [A loss: 0.759699, acc: 0.359375]\n",
"9121: [D loss: 0.708050, acc: 0.494141] [A loss: 0.864985, acc: 0.195312]\n",
"9122: [D loss: 0.692849, acc: 0.546875] [A loss: 0.727480, acc: 0.457031]\n",
"9123: [D loss: 0.729748, acc: 0.486328] [A loss: 0.863495, acc: 0.175781]\n",
"9124: [D loss: 0.698653, acc: 0.515625] [A loss: 0.751433, acc: 0.347656]\n",
"9125: [D loss: 0.706678, acc: 0.501953] [A loss: 0.799403, acc: 0.269531]\n",
"9126: [D loss: 0.691776, acc: 0.527344] [A loss: 0.752802, acc: 0.347656]\n",
"9127: [D loss: 0.705152, acc: 0.537109] [A loss: 0.800924, acc: 0.214844]\n",
"9128: [D loss: 0.694836, acc: 0.535156] [A loss: 0.734803, acc: 0.421875]\n",
"9129: [D loss: 0.715043, acc: 0.464844] [A loss: 0.834224, acc: 0.210938]\n",
"9130: [D loss: 0.702140, acc: 0.505859] [A loss: 0.739059, acc: 0.390625]\n",
"9131: [D loss: 0.710764, acc: 0.509766] [A loss: 0.873489, acc: 0.152344]\n",
"9132: [D loss: 0.692262, acc: 0.517578] [A loss: 0.696834, acc: 0.535156]\n",
"9133: [D loss: 0.718971, acc: 0.488281] [A loss: 0.909588, acc: 0.132812]\n",
"9134: [D loss: 0.698869, acc: 0.537109] [A loss: 0.669312, acc: 0.589844]\n",
"9135: [D loss: 0.718051, acc: 0.492188] [A loss: 0.899896, acc: 0.109375]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"9136: [D loss: 0.695074, acc: 0.529297] [A loss: 0.685444, acc: 0.515625]\n",
"9137: [D loss: 0.700337, acc: 0.533203] [A loss: 0.846375, acc: 0.160156]\n",
"9138: [D loss: 0.710776, acc: 0.468750] [A loss: 0.760573, acc: 0.328125]\n",
"9139: [D loss: 0.698658, acc: 0.525391] [A loss: 0.757278, acc: 0.382812]\n",
"9140: [D loss: 0.700626, acc: 0.521484] [A loss: 0.797726, acc: 0.281250]\n",
"9141: [D loss: 0.705177, acc: 0.507812] [A loss: 0.749125, acc: 0.425781]\n",
"9142: [D loss: 0.707574, acc: 0.525391] [A loss: 0.801739, acc: 0.257812]\n",
"9143: [D loss: 0.709659, acc: 0.484375] [A loss: 0.756354, acc: 0.363281]\n",
"9144: [D loss: 0.702074, acc: 0.519531] [A loss: 0.808066, acc: 0.265625]\n",
"9145: [D loss: 0.708177, acc: 0.511719] [A loss: 0.750349, acc: 0.398438]\n",
"9146: [D loss: 0.712316, acc: 0.500000] [A loss: 0.830481, acc: 0.230469]\n",
"9147: [D loss: 0.702309, acc: 0.496094] [A loss: 0.716343, acc: 0.464844]\n",
"9148: [D loss: 0.716813, acc: 0.500000] [A loss: 0.842066, acc: 0.179688]\n",
"9149: [D loss: 0.696243, acc: 0.507812] [A loss: 0.754170, acc: 0.367188]\n",
"9150: [D loss: 0.704640, acc: 0.523438] [A loss: 0.820984, acc: 0.246094]\n",
"9151: [D loss: 0.708692, acc: 0.480469] [A loss: 0.757016, acc: 0.347656]\n",
"9152: [D loss: 0.703314, acc: 0.529297] [A loss: 0.853632, acc: 0.195312]\n",
"9153: [D loss: 0.704995, acc: 0.498047] [A loss: 0.767984, acc: 0.308594]\n",
"9154: [D loss: 0.712696, acc: 0.503906] [A loss: 0.910268, acc: 0.144531]\n",
"9155: [D loss: 0.696999, acc: 0.533203] [A loss: 0.676239, acc: 0.566406]\n",
"9156: [D loss: 0.715736, acc: 0.525391] [A loss: 0.923140, acc: 0.085938]\n",
"9157: [D loss: 0.706222, acc: 0.486328] [A loss: 0.696198, acc: 0.488281]\n",
"9158: [D loss: 0.718882, acc: 0.486328] [A loss: 0.877655, acc: 0.156250]\n",
"9159: [D loss: 0.704661, acc: 0.498047] [A loss: 0.733522, acc: 0.406250]\n",
"9160: [D loss: 0.717713, acc: 0.503906] [A loss: 0.843726, acc: 0.148438]\n",
"9161: [D loss: 0.699383, acc: 0.507812] [A loss: 0.771657, acc: 0.316406]\n",
"9162: [D loss: 0.701773, acc: 0.548828] [A loss: 0.871623, acc: 0.148438]\n",
"9163: [D loss: 0.708988, acc: 0.494141] [A loss: 0.718925, acc: 0.441406]\n",
"9164: [D loss: 0.703097, acc: 0.523438] [A loss: 0.880339, acc: 0.132812]\n",
"9165: [D loss: 0.705500, acc: 0.494141] [A loss: 0.764633, acc: 0.296875]\n",
"9166: [D loss: 0.698883, acc: 0.537109] [A loss: 0.786529, acc: 0.285156]\n",
"9167: [D loss: 0.697036, acc: 0.517578] [A loss: 0.815390, acc: 0.234375]\n",
"9168: [D loss: 0.702683, acc: 0.498047] [A loss: 0.779573, acc: 0.316406]\n",
"9169: [D loss: 0.710132, acc: 0.503906] [A loss: 0.808384, acc: 0.234375]\n",
"9170: [D loss: 0.705311, acc: 0.496094] [A loss: 0.764485, acc: 0.339844]\n",
"9171: [D loss: 0.718628, acc: 0.484375] [A loss: 0.832127, acc: 0.164062]\n",
"9172: [D loss: 0.704316, acc: 0.501953] [A loss: 0.795496, acc: 0.265625]\n",
"9173: [D loss: 0.694878, acc: 0.537109] [A loss: 0.818027, acc: 0.199219]\n",
"9174: [D loss: 0.700457, acc: 0.509766] [A loss: 0.825826, acc: 0.175781]\n",
"9175: [D loss: 0.696161, acc: 0.535156] [A loss: 0.750455, acc: 0.371094]\n",
"9176: [D loss: 0.709291, acc: 0.482422] [A loss: 0.872315, acc: 0.160156]\n",
"9177: [D loss: 0.689172, acc: 0.550781] [A loss: 0.699735, acc: 0.507812]\n",
"9178: [D loss: 0.718912, acc: 0.476562] [A loss: 0.840158, acc: 0.175781]\n",
"9179: [D loss: 0.708206, acc: 0.449219] [A loss: 0.748966, acc: 0.363281]\n",
"9180: [D loss: 0.701510, acc: 0.515625] [A loss: 0.786671, acc: 0.281250]\n",
"9181: [D loss: 0.706913, acc: 0.498047] [A loss: 0.817548, acc: 0.207031]\n",
"9182: [D loss: 0.695769, acc: 0.535156] [A loss: 0.776792, acc: 0.335938]\n",
"9183: [D loss: 0.714412, acc: 0.478516] [A loss: 0.833634, acc: 0.187500]\n",
"9184: [D loss: 0.701279, acc: 0.513672] [A loss: 0.763272, acc: 0.343750]\n",
"9185: [D loss: 0.707818, acc: 0.500000] [A loss: 0.818526, acc: 0.207031]\n",
"9186: [D loss: 0.685275, acc: 0.544922] [A loss: 0.741987, acc: 0.394531]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"9187: [D loss: 0.709319, acc: 0.500000] [A loss: 0.996133, acc: 0.035156]\n",
"9188: [D loss: 0.703393, acc: 0.511719] [A loss: 0.672217, acc: 0.570312]\n",
"9189: [D loss: 0.732652, acc: 0.496094] [A loss: 0.909359, acc: 0.089844]\n",
"9190: [D loss: 0.698117, acc: 0.507812] [A loss: 0.701254, acc: 0.519531]\n",
"9191: [D loss: 0.697328, acc: 0.517578] [A loss: 0.813653, acc: 0.226562]\n",
"9192: [D loss: 0.709185, acc: 0.476562] [A loss: 0.706828, acc: 0.488281]\n",
"9193: [D loss: 0.709557, acc: 0.490234] [A loss: 0.811440, acc: 0.222656]\n",
"9194: [D loss: 0.699255, acc: 0.503906] [A loss: 0.714535, acc: 0.476562]\n",
"9195: [D loss: 0.709369, acc: 0.498047] [A loss: 0.814854, acc: 0.222656]\n",
"9196: [D loss: 0.703246, acc: 0.525391] [A loss: 0.749722, acc: 0.367188]\n",
"9197: [D loss: 0.702962, acc: 0.523438] [A loss: 0.863186, acc: 0.128906]\n",
"9198: [D loss: 0.703924, acc: 0.486328] [A loss: 0.689503, acc: 0.515625]\n",
"9199: [D loss: 0.710829, acc: 0.521484] [A loss: 0.872326, acc: 0.117188]\n",
"9200: [D loss: 0.699777, acc: 0.527344] [A loss: 0.696676, acc: 0.472656]\n",
"9201: [D loss: 0.715849, acc: 0.492188] [A loss: 0.825430, acc: 0.203125]\n",
"9202: [D loss: 0.698416, acc: 0.533203] [A loss: 0.741213, acc: 0.375000]\n",
"9203: [D loss: 0.716593, acc: 0.498047] [A loss: 0.789747, acc: 0.257812]\n",
"9204: [D loss: 0.702754, acc: 0.517578] [A loss: 0.733433, acc: 0.398438]\n",
"9205: [D loss: 0.697718, acc: 0.523438] [A loss: 0.807181, acc: 0.203125]\n",
"9206: [D loss: 0.701113, acc: 0.513672] [A loss: 0.775780, acc: 0.289062]\n",
"9207: [D loss: 0.715106, acc: 0.501953] [A loss: 0.844087, acc: 0.183594]\n",
"9208: [D loss: 0.707426, acc: 0.498047] [A loss: 0.773678, acc: 0.312500]\n",
"9209: [D loss: 0.706961, acc: 0.513672] [A loss: 0.867232, acc: 0.148438]\n",
"9210: [D loss: 0.687508, acc: 0.566406] [A loss: 0.739061, acc: 0.386719]\n",
"9211: [D loss: 0.712527, acc: 0.494141] [A loss: 0.870519, acc: 0.132812]\n",
"9212: [D loss: 0.706902, acc: 0.484375] [A loss: 0.751448, acc: 0.363281]\n",
"9213: [D loss: 0.706436, acc: 0.507812] [A loss: 0.887924, acc: 0.097656]\n",
"9214: [D loss: 0.699863, acc: 0.513672] [A loss: 0.699849, acc: 0.472656]\n",
"9215: [D loss: 0.706462, acc: 0.523438] [A loss: 0.818645, acc: 0.214844]\n",
"9216: [D loss: 0.688605, acc: 0.521484] [A loss: 0.766295, acc: 0.296875]\n",
"9217: [D loss: 0.699143, acc: 0.539062] [A loss: 0.821001, acc: 0.218750]\n",
"9218: [D loss: 0.695682, acc: 0.525391] [A loss: 0.739078, acc: 0.402344]\n",
"9219: [D loss: 0.711526, acc: 0.507812] [A loss: 0.775485, acc: 0.316406]\n",
"9220: [D loss: 0.689233, acc: 0.544922] [A loss: 0.766834, acc: 0.324219]\n",
"9221: [D loss: 0.685962, acc: 0.539062] [A loss: 0.797316, acc: 0.320312]\n",
"9222: [D loss: 0.706519, acc: 0.480469] [A loss: 0.808501, acc: 0.230469]\n",
"9223: [D loss: 0.706683, acc: 0.501953] [A loss: 0.747106, acc: 0.382812]\n",
"9224: [D loss: 0.699417, acc: 0.513672] [A loss: 0.822351, acc: 0.242188]\n",
"9225: [D loss: 0.704895, acc: 0.529297] [A loss: 0.772468, acc: 0.339844]\n",
"9226: [D loss: 0.703844, acc: 0.505859] [A loss: 0.741100, acc: 0.398438]\n",
"9227: [D loss: 0.711029, acc: 0.511719] [A loss: 0.825853, acc: 0.191406]\n",
"9228: [D loss: 0.697942, acc: 0.509766] [A loss: 0.762937, acc: 0.320312]\n",
"9229: [D loss: 0.697295, acc: 0.496094] [A loss: 0.817517, acc: 0.203125]\n",
"9230: [D loss: 0.692838, acc: 0.539062] [A loss: 0.789312, acc: 0.285156]\n",
"9231: [D loss: 0.700467, acc: 0.517578] [A loss: 0.885812, acc: 0.164062]\n",
"9232: [D loss: 0.695546, acc: 0.517578] [A loss: 0.726521, acc: 0.457031]\n",
"9233: [D loss: 0.707181, acc: 0.527344] [A loss: 0.897745, acc: 0.121094]\n",
"9234: [D loss: 0.698564, acc: 0.509766] [A loss: 0.694656, acc: 0.527344]\n",
"9235: [D loss: 0.710931, acc: 0.517578] [A loss: 0.823866, acc: 0.207031]\n",
"9236: [D loss: 0.702589, acc: 0.474609] [A loss: 0.788627, acc: 0.316406]\n",
"9237: [D loss: 0.701384, acc: 0.503906] [A loss: 0.768562, acc: 0.308594]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"9238: [D loss: 0.699367, acc: 0.535156] [A loss: 0.817156, acc: 0.261719]\n",
"9239: [D loss: 0.706994, acc: 0.509766] [A loss: 0.809062, acc: 0.218750]\n",
"9240: [D loss: 0.698642, acc: 0.505859] [A loss: 0.790433, acc: 0.273438]\n",
"9241: [D loss: 0.703730, acc: 0.509766] [A loss: 0.777558, acc: 0.324219]\n",
"9242: [D loss: 0.702042, acc: 0.521484] [A loss: 0.867599, acc: 0.191406]\n",
"9243: [D loss: 0.684460, acc: 0.560547] [A loss: 0.723338, acc: 0.496094]\n",
"9244: [D loss: 0.716284, acc: 0.515625] [A loss: 0.970787, acc: 0.074219]\n",
"9245: [D loss: 0.699079, acc: 0.511719] [A loss: 0.682269, acc: 0.523438]\n",
"9246: [D loss: 0.706383, acc: 0.513672] [A loss: 0.851509, acc: 0.152344]\n",
"9247: [D loss: 0.693879, acc: 0.500000] [A loss: 0.743300, acc: 0.398438]\n",
"9248: [D loss: 0.703346, acc: 0.494141] [A loss: 0.798551, acc: 0.261719]\n",
"9249: [D loss: 0.697242, acc: 0.527344] [A loss: 0.790576, acc: 0.289062]\n",
"9250: [D loss: 0.693866, acc: 0.548828] [A loss: 0.749792, acc: 0.386719]\n",
"9251: [D loss: 0.686818, acc: 0.556641] [A loss: 0.824533, acc: 0.257812]\n",
"9252: [D loss: 0.692471, acc: 0.535156] [A loss: 0.782353, acc: 0.308594]\n",
"9253: [D loss: 0.704764, acc: 0.494141] [A loss: 0.872964, acc: 0.136719]\n",
"9254: [D loss: 0.692474, acc: 0.515625] [A loss: 0.723451, acc: 0.437500]\n",
"9255: [D loss: 0.696453, acc: 0.515625] [A loss: 0.808168, acc: 0.257812]\n",
"9256: [D loss: 0.688499, acc: 0.556641] [A loss: 0.775671, acc: 0.335938]\n",
"9257: [D loss: 0.710306, acc: 0.492188] [A loss: 0.750304, acc: 0.390625]\n",
"9258: [D loss: 0.694890, acc: 0.546875] [A loss: 0.813610, acc: 0.253906]\n",
"9259: [D loss: 0.696250, acc: 0.505859] [A loss: 0.789730, acc: 0.257812]\n",
"9260: [D loss: 0.719402, acc: 0.492188] [A loss: 0.816119, acc: 0.242188]\n",
"9261: [D loss: 0.705979, acc: 0.513672] [A loss: 0.763635, acc: 0.355469]\n",
"9262: [D loss: 0.707214, acc: 0.500000] [A loss: 0.852426, acc: 0.218750]\n",
"9263: [D loss: 0.705837, acc: 0.505859] [A loss: 0.806299, acc: 0.281250]\n",
"9264: [D loss: 0.711233, acc: 0.488281] [A loss: 0.762864, acc: 0.371094]\n",
"9265: [D loss: 0.699666, acc: 0.503906] [A loss: 0.813558, acc: 0.230469]\n",
"9266: [D loss: 0.713112, acc: 0.498047] [A loss: 0.820173, acc: 0.253906]\n",
"9267: [D loss: 0.694461, acc: 0.550781] [A loss: 0.807578, acc: 0.277344]\n",
"9268: [D loss: 0.692959, acc: 0.519531] [A loss: 0.778485, acc: 0.269531]\n",
"9269: [D loss: 0.715718, acc: 0.478516] [A loss: 0.805857, acc: 0.250000]\n",
"9270: [D loss: 0.708306, acc: 0.517578] [A loss: 0.791727, acc: 0.289062]\n",
"9271: [D loss: 0.700146, acc: 0.527344] [A loss: 0.765115, acc: 0.320312]\n",
"9272: [D loss: 0.716265, acc: 0.486328] [A loss: 0.819212, acc: 0.187500]\n",
"9273: [D loss: 0.701378, acc: 0.515625] [A loss: 0.786682, acc: 0.265625]\n",
"9274: [D loss: 0.705612, acc: 0.515625] [A loss: 0.784730, acc: 0.292969]\n",
"9275: [D loss: 0.700476, acc: 0.546875] [A loss: 0.787311, acc: 0.230469]\n",
"9276: [D loss: 0.701069, acc: 0.500000] [A loss: 0.820126, acc: 0.214844]\n",
"9277: [D loss: 0.690682, acc: 0.533203] [A loss: 0.761393, acc: 0.351562]\n",
"9278: [D loss: 0.715057, acc: 0.482422] [A loss: 0.910193, acc: 0.113281]\n",
"9279: [D loss: 0.700917, acc: 0.511719] [A loss: 0.670792, acc: 0.636719]\n",
"9280: [D loss: 0.722960, acc: 0.490234] [A loss: 0.896536, acc: 0.113281]\n",
"9281: [D loss: 0.700730, acc: 0.523438] [A loss: 0.706587, acc: 0.507812]\n",
"9282: [D loss: 0.729350, acc: 0.488281] [A loss: 0.895371, acc: 0.136719]\n",
"9283: [D loss: 0.696441, acc: 0.517578] [A loss: 0.717305, acc: 0.488281]\n",
"9284: [D loss: 0.721936, acc: 0.531250] [A loss: 0.946549, acc: 0.050781]\n",
"9285: [D loss: 0.703809, acc: 0.527344] [A loss: 0.724709, acc: 0.468750]\n",
"9286: [D loss: 0.706388, acc: 0.500000] [A loss: 0.773995, acc: 0.285156]\n",
"9287: [D loss: 0.705946, acc: 0.484375] [A loss: 0.747182, acc: 0.398438]\n",
"9288: [D loss: 0.700668, acc: 0.531250] [A loss: 0.785606, acc: 0.320312]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"9289: [D loss: 0.694197, acc: 0.531250] [A loss: 0.760702, acc: 0.316406]\n",
"9290: [D loss: 0.717444, acc: 0.505859] [A loss: 0.818105, acc: 0.230469]\n",
"9291: [D loss: 0.689956, acc: 0.521484] [A loss: 0.831022, acc: 0.199219]\n",
"9292: [D loss: 0.694703, acc: 0.505859] [A loss: 0.797622, acc: 0.261719]\n",
"9293: [D loss: 0.700765, acc: 0.496094] [A loss: 0.748835, acc: 0.394531]\n",
"9294: [D loss: 0.718781, acc: 0.486328] [A loss: 0.894423, acc: 0.117188]\n",
"9295: [D loss: 0.708896, acc: 0.498047] [A loss: 0.699655, acc: 0.519531]\n",
"9296: [D loss: 0.717290, acc: 0.515625] [A loss: 0.841651, acc: 0.195312]\n",
"9297: [D loss: 0.696330, acc: 0.505859] [A loss: 0.726239, acc: 0.437500]\n",
"9298: [D loss: 0.727409, acc: 0.501953] [A loss: 0.885436, acc: 0.136719]\n",
"9299: [D loss: 0.709663, acc: 0.494141] [A loss: 0.751310, acc: 0.371094]\n",
"9300: [D loss: 0.713595, acc: 0.496094] [A loss: 0.815814, acc: 0.230469]\n",
"9301: [D loss: 0.706113, acc: 0.494141] [A loss: 0.768940, acc: 0.312500]\n",
"9302: [D loss: 0.702444, acc: 0.501953] [A loss: 0.799533, acc: 0.289062]\n",
"9303: [D loss: 0.714904, acc: 0.503906] [A loss: 0.795454, acc: 0.308594]\n",
"9304: [D loss: 0.701113, acc: 0.511719] [A loss: 0.801935, acc: 0.277344]\n",
"9305: [D loss: 0.698572, acc: 0.527344] [A loss: 0.807794, acc: 0.277344]\n",
"9306: [D loss: 0.693705, acc: 0.537109] [A loss: 0.766617, acc: 0.382812]\n",
"9307: [D loss: 0.711572, acc: 0.503906] [A loss: 0.775148, acc: 0.347656]\n",
"9308: [D loss: 0.691205, acc: 0.552734] [A loss: 0.793645, acc: 0.316406]\n",
"9309: [D loss: 0.689405, acc: 0.539062] [A loss: 0.772014, acc: 0.359375]\n",
"9310: [D loss: 0.697717, acc: 0.523438] [A loss: 0.856234, acc: 0.210938]\n",
"9311: [D loss: 0.708821, acc: 0.453125] [A loss: 0.712523, acc: 0.480469]\n",
"9312: [D loss: 0.713963, acc: 0.509766] [A loss: 0.834016, acc: 0.203125]\n",
"9313: [D loss: 0.699981, acc: 0.517578] [A loss: 0.748178, acc: 0.390625]\n",
"9314: [D loss: 0.707304, acc: 0.486328] [A loss: 0.800962, acc: 0.281250]\n",
"9315: [D loss: 0.702134, acc: 0.513672] [A loss: 0.751132, acc: 0.394531]\n",
"9316: [D loss: 0.705952, acc: 0.496094] [A loss: 0.834898, acc: 0.179688]\n",
"9317: [D loss: 0.699757, acc: 0.521484] [A loss: 0.708053, acc: 0.527344]\n",
"9318: [D loss: 0.717441, acc: 0.517578] [A loss: 0.857038, acc: 0.152344]\n",
"9319: [D loss: 0.698127, acc: 0.527344] [A loss: 0.703458, acc: 0.476562]\n",
"9320: [D loss: 0.722201, acc: 0.498047] [A loss: 0.895645, acc: 0.085938]\n",
"9321: [D loss: 0.700750, acc: 0.501953] [A loss: 0.712689, acc: 0.472656]\n",
"9322: [D loss: 0.724142, acc: 0.496094] [A loss: 0.840022, acc: 0.160156]\n",
"9323: [D loss: 0.690813, acc: 0.521484] [A loss: 0.725797, acc: 0.425781]\n",
"9324: [D loss: 0.700243, acc: 0.548828] [A loss: 0.817488, acc: 0.207031]\n",
"9325: [D loss: 0.692154, acc: 0.529297] [A loss: 0.769835, acc: 0.355469]\n",
"9326: [D loss: 0.702967, acc: 0.513672] [A loss: 0.878963, acc: 0.109375]\n",
"9327: [D loss: 0.706046, acc: 0.500000] [A loss: 0.762874, acc: 0.339844]\n",
"9328: [D loss: 0.702000, acc: 0.537109] [A loss: 0.874195, acc: 0.121094]\n",
"9329: [D loss: 0.697267, acc: 0.515625] [A loss: 0.724039, acc: 0.406250]\n",
"9330: [D loss: 0.710079, acc: 0.503906] [A loss: 0.832651, acc: 0.218750]\n",
"9331: [D loss: 0.700961, acc: 0.523438] [A loss: 0.652868, acc: 0.609375]\n",
"9332: [D loss: 0.724995, acc: 0.513672] [A loss: 0.881496, acc: 0.117188]\n",
"9333: [D loss: 0.703825, acc: 0.484375] [A loss: 0.757244, acc: 0.347656]\n",
"9334: [D loss: 0.706174, acc: 0.527344] [A loss: 0.767493, acc: 0.316406]\n",
"9335: [D loss: 0.702626, acc: 0.525391] [A loss: 0.845988, acc: 0.160156]\n",
"9336: [D loss: 0.700022, acc: 0.501953] [A loss: 0.717290, acc: 0.417969]\n",
"9337: [D loss: 0.693905, acc: 0.531250] [A loss: 0.755454, acc: 0.335938]\n",
"9338: [D loss: 0.701470, acc: 0.529297] [A loss: 0.800014, acc: 0.265625]\n",
"9339: [D loss: 0.699768, acc: 0.501953] [A loss: 0.770440, acc: 0.292969]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"9340: [D loss: 0.699186, acc: 0.519531] [A loss: 0.801811, acc: 0.285156]\n",
"9341: [D loss: 0.715211, acc: 0.474609] [A loss: 0.760437, acc: 0.312500]\n",
"9342: [D loss: 0.716447, acc: 0.484375] [A loss: 0.811075, acc: 0.199219]\n",
"9343: [D loss: 0.695109, acc: 0.505859] [A loss: 0.778145, acc: 0.296875]\n",
"9344: [D loss: 0.700906, acc: 0.521484] [A loss: 0.851644, acc: 0.187500]\n",
"9345: [D loss: 0.691846, acc: 0.539062] [A loss: 0.766972, acc: 0.328125]\n",
"9346: [D loss: 0.702066, acc: 0.515625] [A loss: 0.815541, acc: 0.226562]\n",
"9347: [D loss: 0.691754, acc: 0.517578] [A loss: 0.787438, acc: 0.273438]\n",
"9348: [D loss: 0.708434, acc: 0.484375] [A loss: 0.791282, acc: 0.246094]\n",
"9349: [D loss: 0.707118, acc: 0.539062] [A loss: 0.908860, acc: 0.089844]\n",
"9350: [D loss: 0.701046, acc: 0.523438] [A loss: 0.722484, acc: 0.445312]\n",
"9351: [D loss: 0.709569, acc: 0.496094] [A loss: 0.852702, acc: 0.171875]\n",
"9352: [D loss: 0.696220, acc: 0.513672] [A loss: 0.711394, acc: 0.476562]\n",
"9353: [D loss: 0.708140, acc: 0.501953] [A loss: 0.832453, acc: 0.199219]\n",
"9354: [D loss: 0.697665, acc: 0.486328] [A loss: 0.730913, acc: 0.390625]\n",
"9355: [D loss: 0.720636, acc: 0.500000] [A loss: 0.908783, acc: 0.109375]\n",
"9356: [D loss: 0.701269, acc: 0.498047] [A loss: 0.719879, acc: 0.472656]\n",
"9357: [D loss: 0.703251, acc: 0.521484] [A loss: 0.794709, acc: 0.242188]\n",
"9358: [D loss: 0.702314, acc: 0.529297] [A loss: 0.774731, acc: 0.304688]\n",
"9359: [D loss: 0.716914, acc: 0.460938] [A loss: 0.858801, acc: 0.128906]\n",
"9360: [D loss: 0.707111, acc: 0.507812] [A loss: 0.729783, acc: 0.414062]\n",
"9361: [D loss: 0.711162, acc: 0.488281] [A loss: 0.834716, acc: 0.171875]\n",
"9362: [D loss: 0.696384, acc: 0.501953] [A loss: 0.787171, acc: 0.285156]\n",
"9363: [D loss: 0.713780, acc: 0.517578] [A loss: 0.836236, acc: 0.187500]\n",
"9364: [D loss: 0.695558, acc: 0.541016] [A loss: 0.719624, acc: 0.453125]\n",
"9365: [D loss: 0.703001, acc: 0.533203] [A loss: 0.876970, acc: 0.121094]\n",
"9366: [D loss: 0.710481, acc: 0.482422] [A loss: 0.737011, acc: 0.398438]\n",
"9367: [D loss: 0.701417, acc: 0.509766] [A loss: 0.807899, acc: 0.238281]\n",
"9368: [D loss: 0.688402, acc: 0.535156] [A loss: 0.759558, acc: 0.398438]\n",
"9369: [D loss: 0.693706, acc: 0.539062] [A loss: 0.811378, acc: 0.214844]\n",
"9370: [D loss: 0.701083, acc: 0.515625] [A loss: 0.764922, acc: 0.312500]\n",
"9371: [D loss: 0.703251, acc: 0.523438] [A loss: 0.775746, acc: 0.332031]\n",
"9372: [D loss: 0.704269, acc: 0.474609] [A loss: 0.780962, acc: 0.277344]\n",
"9373: [D loss: 0.697060, acc: 0.507812] [A loss: 0.759917, acc: 0.355469]\n",
"9374: [D loss: 0.703537, acc: 0.494141] [A loss: 0.772972, acc: 0.300781]\n",
"9375: [D loss: 0.703131, acc: 0.509766] [A loss: 0.807768, acc: 0.250000]\n",
"9376: [D loss: 0.688869, acc: 0.556641] [A loss: 0.816873, acc: 0.230469]\n",
"9377: [D loss: 0.711120, acc: 0.494141] [A loss: 0.788302, acc: 0.289062]\n",
"9378: [D loss: 0.692786, acc: 0.527344] [A loss: 0.791724, acc: 0.265625]\n",
"9379: [D loss: 0.700610, acc: 0.521484] [A loss: 0.816532, acc: 0.242188]\n",
"9380: [D loss: 0.709713, acc: 0.513672] [A loss: 0.794268, acc: 0.292969]\n",
"9381: [D loss: 0.702313, acc: 0.517578] [A loss: 0.782557, acc: 0.316406]\n",
"9382: [D loss: 0.702138, acc: 0.515625] [A loss: 0.804542, acc: 0.238281]\n",
"9383: [D loss: 0.717580, acc: 0.478516] [A loss: 0.850120, acc: 0.179688]\n",
"9384: [D loss: 0.710691, acc: 0.496094] [A loss: 0.744821, acc: 0.355469]\n",
"9385: [D loss: 0.705032, acc: 0.511719] [A loss: 0.839880, acc: 0.191406]\n",
"9386: [D loss: 0.698883, acc: 0.484375] [A loss: 0.763793, acc: 0.375000]\n",
"9387: [D loss: 0.701223, acc: 0.507812] [A loss: 0.789375, acc: 0.292969]\n",
"9388: [D loss: 0.693606, acc: 0.539062] [A loss: 0.891796, acc: 0.128906]\n",
"9389: [D loss: 0.706085, acc: 0.509766] [A loss: 0.655271, acc: 0.613281]\n",
"9390: [D loss: 0.739076, acc: 0.511719] [A loss: 1.037162, acc: 0.046875]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"9391: [D loss: 0.705302, acc: 0.505859] [A loss: 0.736427, acc: 0.429688]\n",
"9392: [D loss: 0.702873, acc: 0.521484] [A loss: 0.805202, acc: 0.246094]\n",
"9393: [D loss: 0.705069, acc: 0.490234] [A loss: 0.753375, acc: 0.355469]\n",
"9394: [D loss: 0.702704, acc: 0.523438] [A loss: 0.805908, acc: 0.222656]\n",
"9395: [D loss: 0.695169, acc: 0.521484] [A loss: 0.740223, acc: 0.386719]\n",
"9396: [D loss: 0.706551, acc: 0.488281] [A loss: 0.812525, acc: 0.253906]\n",
"9397: [D loss: 0.704106, acc: 0.492188] [A loss: 0.728329, acc: 0.433594]\n",
"9398: [D loss: 0.699476, acc: 0.527344] [A loss: 0.801994, acc: 0.253906]\n",
"9399: [D loss: 0.694867, acc: 0.513672] [A loss: 0.758459, acc: 0.355469]\n",
"9400: [D loss: 0.715297, acc: 0.474609] [A loss: 0.793642, acc: 0.277344]\n",
"9401: [D loss: 0.709343, acc: 0.501953] [A loss: 0.814572, acc: 0.210938]\n",
"9402: [D loss: 0.687976, acc: 0.560547] [A loss: 0.770798, acc: 0.289062]\n",
"9403: [D loss: 0.707754, acc: 0.478516] [A loss: 0.798000, acc: 0.289062]\n",
"9404: [D loss: 0.697845, acc: 0.531250] [A loss: 0.759706, acc: 0.343750]\n",
"9405: [D loss: 0.701127, acc: 0.507812] [A loss: 0.840540, acc: 0.230469]\n",
"9406: [D loss: 0.715051, acc: 0.482422] [A loss: 0.808277, acc: 0.238281]\n",
"9407: [D loss: 0.706904, acc: 0.505859] [A loss: 0.832143, acc: 0.218750]\n",
"9408: [D loss: 0.696681, acc: 0.535156] [A loss: 0.748812, acc: 0.363281]\n",
"9409: [D loss: 0.713888, acc: 0.509766] [A loss: 0.892736, acc: 0.132812]\n",
"9410: [D loss: 0.704410, acc: 0.476562] [A loss: 0.711365, acc: 0.496094]\n",
"9411: [D loss: 0.713717, acc: 0.509766] [A loss: 0.861257, acc: 0.128906]\n",
"9412: [D loss: 0.692519, acc: 0.537109] [A loss: 0.733378, acc: 0.394531]\n",
"9413: [D loss: 0.704166, acc: 0.498047] [A loss: 0.806251, acc: 0.214844]\n",
"9414: [D loss: 0.690911, acc: 0.544922] [A loss: 0.796530, acc: 0.269531]\n",
"9415: [D loss: 0.702208, acc: 0.492188] [A loss: 0.813756, acc: 0.238281]\n",
"9416: [D loss: 0.708929, acc: 0.501953] [A loss: 0.841323, acc: 0.183594]\n",
"9417: [D loss: 0.696417, acc: 0.529297] [A loss: 0.745329, acc: 0.339844]\n",
"9418: [D loss: 0.709344, acc: 0.486328] [A loss: 0.833218, acc: 0.214844]\n",
"9419: [D loss: 0.698133, acc: 0.529297] [A loss: 0.725905, acc: 0.425781]\n",
"9420: [D loss: 0.711024, acc: 0.511719] [A loss: 0.896155, acc: 0.121094]\n",
"9421: [D loss: 0.699973, acc: 0.484375] [A loss: 0.722662, acc: 0.449219]\n",
"9422: [D loss: 0.721634, acc: 0.466797] [A loss: 0.903574, acc: 0.070312]\n",
"9423: [D loss: 0.716498, acc: 0.468750] [A loss: 0.669217, acc: 0.593750]\n",
"9424: [D loss: 0.716821, acc: 0.517578] [A loss: 0.864149, acc: 0.164062]\n",
"9425: [D loss: 0.696912, acc: 0.488281] [A loss: 0.721371, acc: 0.464844]\n",
"9426: [D loss: 0.706715, acc: 0.505859] [A loss: 0.861232, acc: 0.144531]\n",
"9427: [D loss: 0.705686, acc: 0.482422] [A loss: 0.726876, acc: 0.425781]\n",
"9428: [D loss: 0.714535, acc: 0.544922] [A loss: 0.854370, acc: 0.152344]\n",
"9429: [D loss: 0.695587, acc: 0.517578] [A loss: 0.758940, acc: 0.363281]\n",
"9430: [D loss: 0.699021, acc: 0.501953] [A loss: 0.853965, acc: 0.191406]\n",
"9431: [D loss: 0.702874, acc: 0.500000] [A loss: 0.770582, acc: 0.375000]\n",
"9432: [D loss: 0.712773, acc: 0.503906] [A loss: 0.799286, acc: 0.277344]\n",
"9433: [D loss: 0.690182, acc: 0.515625] [A loss: 0.768327, acc: 0.347656]\n",
"9434: [D loss: 0.701150, acc: 0.511719] [A loss: 0.782455, acc: 0.328125]\n",
"9435: [D loss: 0.700910, acc: 0.527344] [A loss: 0.788787, acc: 0.285156]\n",
"9436: [D loss: 0.702559, acc: 0.509766] [A loss: 0.780489, acc: 0.304688]\n",
"9437: [D loss: 0.709684, acc: 0.488281] [A loss: 0.784357, acc: 0.285156]\n",
"9438: [D loss: 0.700877, acc: 0.523438] [A loss: 0.842390, acc: 0.199219]\n",
"9439: [D loss: 0.693068, acc: 0.544922] [A loss: 0.835832, acc: 0.203125]\n",
"9440: [D loss: 0.688142, acc: 0.525391] [A loss: 0.806447, acc: 0.246094]\n",
"9441: [D loss: 0.709625, acc: 0.494141] [A loss: 0.892753, acc: 0.097656]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"9442: [D loss: 0.704594, acc: 0.500000] [A loss: 0.706300, acc: 0.480469]\n",
"9443: [D loss: 0.717986, acc: 0.500000] [A loss: 0.893557, acc: 0.132812]\n",
"9444: [D loss: 0.710859, acc: 0.513672] [A loss: 0.769343, acc: 0.328125]\n",
"9445: [D loss: 0.704774, acc: 0.521484] [A loss: 0.878814, acc: 0.117188]\n",
"9446: [D loss: 0.695501, acc: 0.529297] [A loss: 0.742105, acc: 0.378906]\n",
"9447: [D loss: 0.695186, acc: 0.533203] [A loss: 0.807083, acc: 0.253906]\n",
"9448: [D loss: 0.708195, acc: 0.513672] [A loss: 0.771790, acc: 0.304688]\n",
"9449: [D loss: 0.721568, acc: 0.494141] [A loss: 0.932814, acc: 0.066406]\n",
"9450: [D loss: 0.699999, acc: 0.521484] [A loss: 0.763444, acc: 0.367188]\n",
"9451: [D loss: 0.702397, acc: 0.535156] [A loss: 0.874182, acc: 0.121094]\n",
"9452: [D loss: 0.703972, acc: 0.517578] [A loss: 0.718935, acc: 0.410156]\n",
"9453: [D loss: 0.717513, acc: 0.470703] [A loss: 0.873667, acc: 0.136719]\n",
"9454: [D loss: 0.715531, acc: 0.443359] [A loss: 0.755365, acc: 0.339844]\n",
"9455: [D loss: 0.705315, acc: 0.507812] [A loss: 0.822939, acc: 0.214844]\n",
"9456: [D loss: 0.696745, acc: 0.537109] [A loss: 0.782709, acc: 0.296875]\n",
"9457: [D loss: 0.715364, acc: 0.498047] [A loss: 0.875836, acc: 0.160156]\n",
"9458: [D loss: 0.696779, acc: 0.529297] [A loss: 0.736630, acc: 0.402344]\n",
"9459: [D loss: 0.682910, acc: 0.548828] [A loss: 0.851951, acc: 0.207031]\n",
"9460: [D loss: 0.697112, acc: 0.529297] [A loss: 0.749503, acc: 0.406250]\n",
"9461: [D loss: 0.695100, acc: 0.533203] [A loss: 0.803753, acc: 0.246094]\n",
"9462: [D loss: 0.701876, acc: 0.513672] [A loss: 0.827434, acc: 0.160156]\n",
"9463: [D loss: 0.688269, acc: 0.548828] [A loss: 0.764161, acc: 0.359375]\n",
"9464: [D loss: 0.696198, acc: 0.548828] [A loss: 0.844885, acc: 0.179688]\n",
"9465: [D loss: 0.697195, acc: 0.531250] [A loss: 0.774788, acc: 0.343750]\n",
"9466: [D loss: 0.710932, acc: 0.484375] [A loss: 0.818773, acc: 0.203125]\n",
"9467: [D loss: 0.708511, acc: 0.503906] [A loss: 0.756839, acc: 0.375000]\n",
"9468: [D loss: 0.690324, acc: 0.535156] [A loss: 0.873673, acc: 0.160156]\n",
"9469: [D loss: 0.694266, acc: 0.537109] [A loss: 0.737554, acc: 0.375000]\n",
"9470: [D loss: 0.706014, acc: 0.494141] [A loss: 0.850589, acc: 0.132812]\n",
"9471: [D loss: 0.706717, acc: 0.503906] [A loss: 0.750741, acc: 0.406250]\n",
"9472: [D loss: 0.718960, acc: 0.501953] [A loss: 0.927740, acc: 0.089844]\n",
"9473: [D loss: 0.693879, acc: 0.539062] [A loss: 0.678278, acc: 0.562500]\n",
"9474: [D loss: 0.721467, acc: 0.490234] [A loss: 0.828417, acc: 0.214844]\n",
"9475: [D loss: 0.699778, acc: 0.529297] [A loss: 0.746582, acc: 0.382812]\n",
"9476: [D loss: 0.704542, acc: 0.509766] [A loss: 0.822581, acc: 0.187500]\n",
"9477: [D loss: 0.693797, acc: 0.513672] [A loss: 0.784770, acc: 0.312500]\n",
"9478: [D loss: 0.699212, acc: 0.515625] [A loss: 0.824701, acc: 0.203125]\n",
"9479: [D loss: 0.697221, acc: 0.531250] [A loss: 0.784389, acc: 0.312500]\n",
"9480: [D loss: 0.707716, acc: 0.482422] [A loss: 0.841681, acc: 0.210938]\n",
"9481: [D loss: 0.715902, acc: 0.478516] [A loss: 0.776993, acc: 0.316406]\n",
"9482: [D loss: 0.699143, acc: 0.527344] [A loss: 0.806067, acc: 0.242188]\n",
"9483: [D loss: 0.703579, acc: 0.500000] [A loss: 0.804741, acc: 0.246094]\n",
"9484: [D loss: 0.708049, acc: 0.509766] [A loss: 0.870785, acc: 0.179688]\n",
"9485: [D loss: 0.700276, acc: 0.511719] [A loss: 0.735689, acc: 0.398438]\n",
"9486: [D loss: 0.701577, acc: 0.523438] [A loss: 0.890684, acc: 0.128906]\n",
"9487: [D loss: 0.694468, acc: 0.541016] [A loss: 0.697366, acc: 0.519531]\n",
"9488: [D loss: 0.722440, acc: 0.503906] [A loss: 0.925274, acc: 0.066406]\n",
"9489: [D loss: 0.704031, acc: 0.503906] [A loss: 0.701793, acc: 0.527344]\n",
"9490: [D loss: 0.716613, acc: 0.523438] [A loss: 0.914846, acc: 0.089844]\n",
"9491: [D loss: 0.701683, acc: 0.505859] [A loss: 0.717216, acc: 0.480469]\n",
"9492: [D loss: 0.715254, acc: 0.494141] [A loss: 0.867608, acc: 0.167969]\n"
],
"name": "stdout"
},
{
"output_type": "stream",
"text": [
"9493: [D loss: 0.704450, acc: 0.507812] [A loss: 0.728420, acc: 0.445312]\n",
"9494: [D loss: 0.710740, acc: 0.515625] [A loss: 0.897375, acc: 0.128906]\n",
"9495: [D loss: 0.697659, acc: 0.523438] [A loss: 0.675909, acc: 0.542969]\n",
"9496: [D loss: 0.719921, acc: 0.519531] [A loss: 0.826123, acc: 0.210938]\n",
"9497: [D loss: 0.680664, acc: 0.568359] [A loss: 0.746240, acc: 0.382812]\n",
"9498: [D loss: 0.707859, acc: 0.515625] [A loss: 0.873617, acc: 0.207031]\n",
"9499: [D loss: 0.696889, acc: 0.517578] [A loss: 0.763512, acc: 0.304688]\n",
"9500: [D loss: 0.711182, acc: 0.521484] [A loss: 0.805462, acc: 0.234375]\n",
"9501: [D loss: 0.690703, acc: 0.515625] [A loss: 0.809114, acc: 0.234375]\n",
"9502: [D loss: 0.709943, acc: 0.492188] [A loss: 0.809055, acc: 0.238281]\n",
"9503: [D loss: 0.717363, acc: 0.470703] [A loss: 0.849488, acc: 0.191406]\n",
"9504: [D loss: 0.690007, acc: 0.531250] [A loss: 0.749575, acc: 0.378906]\n",
"9505: [D loss: 0.710609, acc: 0.519531] [A loss: 0.898717, acc: 0.156250]\n",
"9506: [D loss: 0.687754, acc: 0.562500] [A loss: 0.729313, acc: 0.433594]\n",
"9507: [D loss: 0.712994, acc: 0.539062] [A loss: 0.886627, acc: 0.148438]\n",
"9508: [D loss: 0.699554, acc: 0.525391] [A loss: 0.693353, acc: 0.527344]\n",
"9509: [D loss: 0.706329, acc: 0.533203] [A loss: 0.910965, acc: 0.125000]\n",
"9510: [D loss: 0.704521, acc: 0.509766] [A loss: 0.708189, acc: 0.480469]\n",
"9511: [D loss: 0.707647, acc: 0.507812] [A loss: 0.770818, acc: 0.312500]\n",
"9512: [D loss: 0.704925, acc: 0.503906] [A loss: 0.766961, acc: 0.339844]\n",
"9513: [D loss: 0.713390, acc: 0.474609] [A loss: 0.860958, acc: 0.167969]\n",
"9514: [D loss: 0.696824, acc: 0.523438] [A loss: 0.807532, acc: 0.246094]\n",
"9515: [D loss: 0.690577, acc: 0.511719] [A loss: 0.753048, acc: 0.351562]\n",
"9516: [D loss: 0.698833, acc: 0.537109] [A loss: 0.812668, acc: 0.250000]\n",
"9517: [D loss: 0.706093, acc: 0.511719] [A loss: 0.771824, acc: 0.304688]\n",
"9518: [D loss: 0.703484, acc: 0.505859] [A loss: 0.805212, acc: 0.242188]\n",
"9519: [D loss: 0.709326, acc: 0.482422] [A loss: 0.763774, acc: 0.382812]\n",
"9520: [D loss: 0.713175, acc: 0.511719] [A loss: 0.872727, acc: 0.125000]\n",
"9521: [D loss: 0.708818, acc: 0.476562] [A loss: 0.752300, acc: 0.378906]\n",
"9522: [D loss: 0.701967, acc: 0.517578] [A loss: 0.859075, acc: 0.183594]\n",
"9523: [D loss: 0.710141, acc: 0.466797] [A loss: 0.721787, acc: 0.425781]\n",
"9524: [D loss: 0.699804, acc: 0.527344] [A loss: 0.944442, acc: 0.066406]\n",
"9525: [D loss: 0.710440, acc: 0.466797] [A loss: 0.673216, acc: 0.605469]\n",
"9526: [D loss: 0.728608, acc: 0.513672] [A loss: 0.943906, acc: 0.058594]\n