@monajalal
Created March 28, 2019 05:30
category_prediction_using_transfer_learning
{
"cells": [
{
"cell_type": "code",
"execution_count": 66,
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"torch.backends.cudnn.deterministic = True\n",
"torch.backends.cudnn.benchmark = False\n",
"torch.manual_seed(2809)\n",
"\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": 67,
"metadata": {},
"outputs": [],
"source": [
"from graphviz import Digraph\n",
"\n",
"from torch.autograd import Variable\n",
"\n",
"\n",
"# make_dot was moved to https://github.com/szagoruyko/pytorchviz\n",
"from torchviz import make_dot"
]
},
{
"cell_type": "code",
"execution_count": 68,
"metadata": {},
"outputs": [],
"source": [
"# -*- coding: utf-8 -*-\n",
"\"\"\"\n",
"Transfer Learning Tutorial\n",
"==========================\n",
"**Author**: `Sasank Chilamkurthy <https://chsasank.github.io>`_\n",
"\n",
"In this tutorial, you will learn how to train your network using\n",
"transfer learning. You can read more about the transfer learning at `cs231n\n",
"notes <http://cs231n.github.io/transfer-learning/>`__\n",
"\n",
"Quoting these notes,\n",
"\n",
" In practice, very few people train an entire Convolutional Network\n",
" from scratch (with random initialization), because it is relatively\n",
" rare to have a dataset of sufficient size. Instead, it is common to\n",
" pretrain a ConvNet on a very large dataset (e.g. ImageNet, which\n",
" contains 1.2 million images with 1000 categories), and then use the\n",
" ConvNet either as an initialization or a fixed feature extractor for\n",
" the task of interest.\n",
"\n",
"These two major transfer learning scenarios look as follows:\n",
"\n",
"- **Finetuning the convnet**: Instead of random initializaion, we\n",
" initialize the network with a pretrained network, like the one that is\n",
" trained on imagenet 1000 dataset. Rest of the training looks as\n",
" usual.\n",
"- **ConvNet as fixed feature extractor**: Here, we will freeze the weights\n",
" for all of the network except that of the final fully connected\n",
" layer. This last fully connected layer is replaced with a new one\n",
" with random weights and only this layer is trained.\n",
"\n",
"\"\"\"\n",
"# License: BSD\n",
"# Author: Sasank Chilamkurthy\n",
"\n",
"from __future__ import print_function, division\n",
"\n",
"import torch\n",
"import torch.nn as nn\n",
"import torch.optim as optim\n",
"from torch.optim import lr_scheduler\n",
"import numpy as np\n",
"import torchvision\n",
"from torchvision import datasets, models, transforms\n",
"import matplotlib.pyplot as plt\n",
"import time\n",
"import os\n",
"import copy\n",
"\n",
"plt.ion() # interactive mode"
]
},
{
"cell_type": "code",
"execution_count": 69,
"metadata": {},
"outputs": [],
"source": [
"######################################################################\n",
"# Load Data\n",
"# ---------\n",
"#\n",
"# We will use torchvision and torch.utils.data packages for loading the\n",
"# data.\n",
"#\n",
"# The problem we're going to solve today is to train a model to classify\n",
"# **ants** and **bees**. We have about 120 training images each for ants and bees.\n",
"# There are 75 validation images for each class. Usually, this is a very\n",
"# small dataset to generalize upon, if trained from scratch. Since we\n",
"# are using transfer learning, we should be able to generalize reasonably\n",
"# well.\n",
"#\n",
"# This dataset is a very small subset of imagenet.\n",
"#\n",
"# .. Note ::\n",
"# Download the data from\n",
"# `here <https://download.pytorch.org/tutorial/hymenoptera_data.zip>`_\n",
"# and extract it to the current directory.\n",
"\n",
"# Data augmentation and normalization for training\n",
"# Just normalization for validation\n",
"\n",
"\n",
"#using the weight sampling method from https://github.com/ptrblck/pytorch_misc/blob/master/weighted_sampling.py#L25\n",
"\n",
"# transforms.RandomRotation(20), does it reduce or increase acc\n",
"# should I use transforms.RandomRotation(10) instead?\n",
"data_transforms = {\n",
" 'train': transforms.Compose([\n",
" transforms.RandomResizedCrop(224),\n",
" transforms.RandomHorizontalFlip(),\n",
" transforms.RandomRotation(20),\n",
" transforms.ColorJitter(0.3, 0.3, 0.3),\n",
" transforms.ToTensor(),\n",
" transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n",
" ]),\n",
"# 'val': transforms.Compose([\n",
"# transforms.Resize(256),\n",
"# transforms.CenterCrop(224),\n",
"# transforms.ToTensor(),\n",
"# transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n",
"# ]),\n",
" \n",
" 'test': transforms.Compose([\n",
" transforms.Resize(256),\n",
" transforms.CenterCrop(224),\n",
" transforms.ToTensor(),\n",
" transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n",
" ]),\n",
"}\n",
"\n",
"\n",
"data_dir = \"../5fold_CV/images/fold0\"\n",
"\n",
"class MonaDataset(datasets.folder.ImageFolder):\n",
" def __init__(self, root, transform=None, target_transform=None,\n",
" loader=datasets.folder.default_loader):\n",
" super(MonaDataset, self).__init__(root, transform, target_transform, loader)\n",
"\n",
" def __getitem__(self, index):\n",
" path, target = self.samples[index]\n",
" sample = self.loader(path)\n",
" if self.transform is not None:\n",
" sample = self.transform(sample)\n",
" if self.target_transform is not None:\n",
" target = self.target_transform(target)\n",
" return sample, target, path\n",
"\n",
"\n",
"image_datasets = {x: MonaDataset(os.path.join(data_dir, x),\n",
" data_transforms[x])\n",
" for x in ['train', 'test']}\n",
"\n",
"\n",
"\n",
"\n",
"dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x], batch_size=4,\n",
" shuffle=True, num_workers=4) ###, sampler = sampler\n",
" for x in ['train', 'test']}\n",
"dataset_sizes = {x: len(image_datasets[x]) for x in ['train', 'test']}\n",
"\n",
"\n",
"class_names = image_datasets['train'].classes\n",
"\n",
"device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")"
]
},
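{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Hedged sketch, not part of the original run: the DataLoader above keeps\n",
"# ``sampler = sampler`` commented out. In the spirit of the weighted-sampling\n",
"# gist referenced earlier, a class-balanced sampler could be built from the\n",
"# training labels as below; ``targets``, ``class_sample_counts``,\n",
"# ``sample_weights`` and ``sampler`` are illustrative names.\n",
"from collections import Counter\n",
"\n",
"targets = [label for _, label in image_datasets['train'].samples]\n",
"class_sample_counts = Counter(targets)\n",
"sample_weights = torch.tensor([1.0 / class_sample_counts[t] for t in targets],\n",
"                              dtype=torch.double)\n",
"sampler = torch.utils.data.WeightedRandomSampler(sample_weights,\n",
"                                                 num_samples=len(sample_weights),\n",
"                                                 replacement=True)\n",
"# To enable it, pass ``sampler=sampler`` to the 'train' DataLoader and drop\n",
"# ``shuffle=True`` (a DataLoader cannot use both)."
]
},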
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"######################################################################\n",
"# Visualize a few images\n",
"# ^^^^^^^^^^^^^^^^^^^^^^\n",
"# Let's visualize a few training images so as to understand the data\n",
"# augmentations.\n",
"\n",
"def imshow(inp, title=None):\n",
" \"\"\"Imshow for Tensor.\"\"\"\n",
" inp = inp.numpy().transpose((1, 2, 0))\n",
" mean = np.array([0.485, 0.456, 0.406])\n",
" std = np.array([0.229, 0.224, 0.225])\n",
" inp = std * inp + mean\n",
" inp = np.clip(inp, 0, 1)\n",
" plt.imshow(inp)\n",
" if title is not None:\n",
" plt.title(title)\n",
" plt.pause(0.001) # pause a bit so that plots are updated\n",
"\n",
"\n",
"# Get a batch of training data\n",
"inputs, classes, im_paths = next(iter(dataloaders['train']))\n",
"\n",
"# Make a grid from batch\n",
"out = torchvision.utils.make_grid(inputs)\n",
"\n",
"imshow(out, title=[class_names[x] for x in classes])\n"
]
},
{
"cell_type": "code",
"execution_count": 71,
"metadata": {},
"outputs": [],
"source": [
"######################################################################\n",
"# Training the model\n",
"# ------------------\n",
"#\n",
"# Now, let's write a general function to train a model. Here, we will\n",
"# illustrate:\n",
"#\n",
"# - Scheduling the learning rate\n",
"# - Saving the best model\n",
"#\n",
"# In the following, parameter ``scheduler`` is an LR scheduler object from\n",
"# ``torch.optim.lr_scheduler``.\n",
"\n",
"\n",
"def train_model(model, criterion, optimizer, scheduler, num_epochs=25):\n",
" since = time.time()\n",
"\n",
" best_model_wts = copy.deepcopy(model.state_dict())\n",
" best_acc = 0.0\n",
"\n",
" for epoch in range(num_epochs):\n",
" print('Epoch {}/{}'.format(epoch, num_epochs - 1))\n",
" print('-' * 10)\n",
"\n",
" # Each epoch has a training and validation phase\n",
" ##for phase in ['train', 'test']:\n",
" for phase in ['train']:\n",
" if phase == 'train':\n",
" scheduler.step()\n",
" model.train() # Set model to training mode\n",
" else:\n",
" model.eval() # Set model to evaluate mode\n",
"\n",
" running_loss = 0.0\n",
" running_corrects = 0\n",
"\n",
" # Iterate over data.\n",
" for inputs, labels, im_paths in dataloaders[phase]:\n",
" inputs = inputs.to(device)\n",
" labels = labels.to(device)\n",
"\n",
" # zero the parameter gradients\n",
" optimizer.zero_grad()\n",
"\n",
" # forward\n",
" # track history if only in train\n",
" with torch.set_grad_enabled(phase == 'train'):\n",
" outputs = model(inputs)\n",
" _, preds = torch.max(outputs, 1)\n",
" loss = criterion(outputs, labels)\n",
"\n",
" # backward + optimize only if in training phase\n",
" if phase == 'train':\n",
" loss.backward()\n",
" optimizer.step()\n",
"\n",
" # statistics\n",
" running_loss += loss.item() * inputs.size(0)\n",
" running_corrects += torch.sum(preds == labels.data)\n",
"\n",
" epoch_loss = running_loss / dataset_sizes[phase]\n",
" epoch_acc = running_corrects.double() / dataset_sizes[phase]\n",
"\n",
" print('{} Loss: {:.4f} Acc: {:.4f}'.format(\n",
" phase, epoch_loss, epoch_acc))\n",
"\n",
" # deep copy the model\n",
" # if phase == 'val' and epoch_acc > best_acc:\n",
" # best_acc = epoch_acc\n",
" # best_model_wts = copy.deepcopy(model.state_dict())\n",
"\n",
" print()\n",
"\n",
" time_elapsed = time.time() - since\n",
" print('Training complete in {:.0f}m {:.0f}s'.format(\n",
" time_elapsed // 60, time_elapsed % 60))\n",
"# print('Best val Acc: {:4f}'.format(best_acc))\n",
"\n",
" # load best model weights\n",
"# model.load_state_dict(best_model_wts)\n",
" return model"
]
},
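{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Hedged sketch: the comments above mention saving the best model, but the\n",
"# corresponding lines are commented out because this run has no 'val' phase.\n",
"# A simple alternative is to checkpoint whatever weights training ends with;\n",
"# the helper and file name below are illustrative, not from the original notebook.\n",
"def save_checkpoint(model, path='resnet50_fold0.pth'):\n",
"    # state_dict() keeps only the learned parameters, the usual way to\n",
"    # persist a fine-tuned torchvision model.\n",
"    torch.save(model.state_dict(), path)\n",
"\n",
"# Example usage after training:\n",
"# save_checkpoint(model_ft)"
]
},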
{
"cell_type": "code",
"execution_count": 72,
"metadata": {},
"outputs": [],
"source": [
"\n",
"######################################################################\n",
"# Visualizing the model predictions\n",
"# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
"#\n",
"# Generic function to display predictions for a few images\n",
"#\n",
"\n",
"def visualize_model(model, num_images=6):\n",
" was_training = model.training\n",
" model.eval()\n",
" images_so_far = 0\n",
" fig = plt.figure()\n",
"\n",
" with torch.no_grad():\n",
" #for i, (inputs, labels) in enumerate(dataloaders['test]):\n",
" for i, (inputs, labels) in enumerate(dataloaders['train']):\n",
"\n",
" inputs = inputs.to(device)\n",
" labels = labels.to(device)\n",
"\n",
" outputs = model(inputs)\n",
" _, preds = torch.max(outputs, 1)\n",
"\n",
" for j in range(inputs.size()[0]):\n",
" images_so_far += 1\n",
" ax = plt.subplot(num_images//2, 2, images_so_far)\n",
" ax.axis('off')\n",
" ax.set_title('predicted: {}'.format(class_names[preds[j]]))\n",
" imshow(inputs.cpu().data[j])\n",
"\n",
" if images_so_far == num_images:\n",
" model.train(mode=was_training)\n",
" return\n",
" model.train(mode=was_training)"
]
},
{
"cell_type": "code",
"execution_count": 73,
"metadata": {},
"outputs": [],
"source": [
"######################################################################\n",
"# Finetuning the convnet\n",
"# ----------------------\n",
"#\n",
"# Load a pretrained model and reset final fully connected layer.\n",
"#\n",
"\n",
"###class_weights = torch.FloatTensor(weight).cuda() # for weighted loss function use this\n",
"#model_ft = models.resnet18(pretrained=True)\n",
"model_ft = models.resnet50(pretrained=True)\n",
"###model_ft = models.densenet161(pretrained=True)\n",
"\n",
"\n",
"num_ftrs = model_ft.fc.in_features\n",
"###num_ftrs = model_ft.classifier.in_features #for densenet161\n",
"model_ft.fc = nn.Linear(num_ftrs, 16)\n",
"###model_ft.classifier = nn.Linear(num_ftrs, 9) # for densenet161\n",
"\n",
"model_ft = model_ft.to(device)\n",
"\n",
"###criterion = nn.CrossEntropyLoss(weight=class_weights) #for weighted loss function use this\n",
"criterion = nn.CrossEntropyLoss()\n",
"\n",
"# Observe that all parameters are being optimized\n",
"optimizer_ft = optim.SGD(model_ft.parameters(), lr=0.001, momentum=0.9) #use SGD\n",
"###optimizer_ft = optim.Adam(params=model_ft.parameters(), amsgrad=True, lr=0.001) #use ADAM\n",
"# Decay LR by a factor of 0.1 every 7 epochs\n",
"exp_lr_scheduler = lr_scheduler.StepLR(optimizer_ft, step_size=7, gamma=0.1)\n"
]
},
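{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Hedged sketch of the second scenario described at the top of the notebook\n",
"# (ConvNet as fixed feature extractor), which this notebook does not run:\n",
"# freeze the pretrained backbone and train only a freshly initialized final\n",
"# fully connected layer. ``model_conv``, ``optimizer_conv`` and\n",
"# ``exp_lr_scheduler_conv`` are illustrative names.\n",
"model_conv = models.resnet50(pretrained=True)\n",
"for param in model_conv.parameters():\n",
"    param.requires_grad = False  # freeze all pretrained layers\n",
"\n",
"# Parameters of newly constructed modules have requires_grad=True by default.\n",
"model_conv.fc = nn.Linear(model_conv.fc.in_features, 16)\n",
"model_conv = model_conv.to(device)\n",
"\n",
"# Only the parameters of the final layer are passed to the optimizer.\n",
"optimizer_conv = optim.SGD(model_conv.fc.parameters(), lr=0.001, momentum=0.9)\n",
"exp_lr_scheduler_conv = lr_scheduler.StepLR(optimizer_conv, step_size=7, gamma=0.1)"
]
},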
{
"cell_type": "code",
"execution_count": 74,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 0/49\n",
"----------\n",
"train Loss: 2.2980 Acc: 0.3395\n",
"\n",
"Epoch 1/49\n",
"----------\n",
"train Loss: 2.0134 Acc: 0.4193\n",
"\n",
"Epoch 2/49\n",
"----------\n",
"train Loss: 1.8726 Acc: 0.4378\n",
"\n",
"Epoch 3/49\n",
"----------\n",
"train Loss: 1.7556 Acc: 0.4527\n",
"\n",
"Epoch 4/49\n",
"----------\n",
"train Loss: 1.6287 Acc: 0.5065\n",
"\n",
"Epoch 5/49\n",
"----------\n",
"train Loss: 1.6297 Acc: 0.5158\n",
"\n",
"Epoch 6/49\n",
"----------\n",
"train Loss: 1.5273 Acc: 0.5380\n",
"\n",
"Epoch 7/49\n",
"----------\n",
"train Loss: 1.3633 Acc: 0.5807\n",
"\n",
"Epoch 8/49\n",
"----------\n",
"train Loss: 1.1810 Acc: 0.6568\n",
"\n",
"Epoch 9/49\n",
"----------\n",
"train Loss: 1.0792 Acc: 0.6920\n",
"\n",
"Epoch 10/49\n",
"----------\n",
"train Loss: 1.1572 Acc: 0.6735\n",
"\n",
"Epoch 11/49\n",
"----------\n",
"train Loss: 1.0163 Acc: 0.6790\n",
"\n",
"Epoch 12/49\n",
"----------\n",
"train Loss: 0.9810 Acc: 0.7161\n",
"\n",
"Epoch 13/49\n",
"----------\n",
"train Loss: 1.0172 Acc: 0.6957\n",
"\n",
"Epoch 14/49\n",
"----------\n",
"train Loss: 1.0113 Acc: 0.7087\n",
"\n",
"Epoch 15/49\n",
"----------\n",
"train Loss: 0.9567 Acc: 0.7106\n",
"\n",
"Epoch 16/49\n",
"----------\n",
"train Loss: 1.0587 Acc: 0.6846\n",
"\n",
"Epoch 17/49\n",
"----------\n",
"train Loss: 1.0007 Acc: 0.7087\n",
"\n",
"Epoch 18/49\n",
"----------\n",
"train Loss: 0.9456 Acc: 0.7124\n",
"\n",
"Epoch 19/49\n",
"----------\n",
"train Loss: 0.8935 Acc: 0.7384\n",
"\n",
"Epoch 20/49\n",
"----------\n",
"train Loss: 0.9623 Acc: 0.7143\n",
"\n",
"Epoch 21/49\n",
"----------\n",
"train Loss: 0.9756 Acc: 0.7013\n",
"\n",
"Epoch 22/49\n",
"----------\n",
"train Loss: 0.9551 Acc: 0.7161\n",
"\n",
"Epoch 23/49\n",
"----------\n",
"train Loss: 0.9509 Acc: 0.7236\n",
"\n",
"Epoch 24/49\n",
"----------\n",
"train Loss: 0.9670 Acc: 0.6957\n",
"\n",
"Epoch 25/49\n",
"----------\n",
"train Loss: 0.9041 Acc: 0.7180\n",
"\n",
"Epoch 26/49\n",
"----------\n",
"train Loss: 0.9233 Acc: 0.7403\n",
"\n",
"Epoch 27/49\n",
"----------\n",
"train Loss: 0.9275 Acc: 0.7087\n",
"\n",
"Epoch 28/49\n",
"----------\n",
"train Loss: 0.9083 Acc: 0.7087\n",
"\n",
"Epoch 29/49\n",
"----------\n",
"train Loss: 0.9706 Acc: 0.7124\n",
"\n",
"Epoch 30/49\n",
"----------\n",
"train Loss: 0.9871 Acc: 0.6920\n",
"\n",
"Epoch 31/49\n",
"----------\n",
"train Loss: 0.9317 Acc: 0.7069\n",
"\n",
"Epoch 32/49\n",
"----------\n",
"train Loss: 0.9124 Acc: 0.7217\n",
"\n",
"Epoch 33/49\n",
"----------\n",
"train Loss: 0.9505 Acc: 0.7236\n",
"\n",
"Epoch 34/49\n",
"----------\n",
"train Loss: 0.9392 Acc: 0.7124\n",
"\n",
"Epoch 35/49\n",
"----------\n",
"train Loss: 0.9165 Acc: 0.7199\n",
"\n",
"Epoch 36/49\n",
"----------\n",
"train Loss: 0.9208 Acc: 0.7180\n",
"\n",
"Epoch 37/49\n",
"----------\n",
"train Loss: 0.9267 Acc: 0.7347\n",
"\n",
"Epoch 38/49\n",
"----------\n",
"train Loss: 0.8939 Acc: 0.7310\n",
"\n",
"Epoch 39/49\n",
"----------\n",
"train Loss: 1.0188 Acc: 0.6902\n",
"\n",
"Epoch 40/49\n",
"----------\n",
"train Loss: 0.9464 Acc: 0.7143\n",
"\n",
"Epoch 41/49\n",
"----------\n",
"train Loss: 0.9299 Acc: 0.7161\n",
"\n",
"Epoch 42/49\n",
"----------\n",
"train Loss: 0.9700 Acc: 0.7180\n",
"\n",
"Epoch 43/49\n",
"----------\n",
"train Loss: 0.9873 Acc: 0.7069\n",
"\n",
"Epoch 44/49\n",
"----------\n",
"train Loss: 0.8863 Acc: 0.7477\n",
"\n",
"Epoch 45/49\n",
"----------\n",
"train Loss: 0.9600 Acc: 0.6994\n",
"\n",
"Epoch 46/49\n",
"----------\n",
"train Loss: 0.9633 Acc: 0.7384\n",
"\n",
"Epoch 47/49\n",
"----------\n",
"train Loss: 0.9168 Acc: 0.7384\n",
"\n",
"Epoch 48/49\n",
"----------\n",
"train Loss: 0.9476 Acc: 0.7310\n",
"\n",
"Epoch 49/49\n",
"----------\n",
"train Loss: 0.9221 Acc: 0.7254\n",
"\n",
"Training complete in 5m 28s\n"
]
}
],
"source": [
"model_ft = train_model(model_ft, criterion, optimizer_ft, exp_lr_scheduler, num_epochs=50)"
]
},
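{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Illustrative usage, not part of the original run: inspect a few predictions\n",
"# with the ``visualize_model`` helper defined above.\n",
"visualize_model(model_ft)\n",
"\n",
"plt.ioff()\n",
"plt.show()"
]
},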
{
"cell_type": "code",
"execution_count": 75,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"tensor([[ 4., 1., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0., 0.,\n",
" 0., 0.],\n",
" [ 0., 2., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 0.,\n",
" 0., 1.],\n",
" [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
" 0., 0.],\n",
" [ 0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 0.,\n",
" 0., 0.],\n",
" [ 0., 1., 1., 0., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
" 0., 0.],\n",
" [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 4., 1., 0., 0.,\n",
" 0., 1.],\n",
" [ 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 1., 0., 0., 0.,\n",
" 0., 0.],\n",
" [ 0., 0., 0., 0., 0., 0., 1., 1., 0., 2., 0., 0., 0., 0.,\n",
" 0., 0.],\n",
" [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 1., 0.,\n",
" 0., 0.],\n",
" [ 0., 0., 0., 0., 0., 0., 0., 1., 0., 2., 1., 0., 0., 0.,\n",
" 0., 0.],\n",
" [ 0., 0., 0., 0., 0., 1., 0., 0., 0., 4., 44., 0., 0., 0.,\n",
" 0., 0.],\n",
" [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 3., 0., 0.,\n",
" 0., 0.],\n",
" [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 2.,\n",
" 0., 0.],\n",
" [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 2., 8.,\n",
" 1., 2.],\n",
" [ 0., 1., 0., 0., 0., 0., 0., 1., 0., 1., 2., 2., 0., 0.,\n",
" 12., 7.],\n",
" [ 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
" 0., 8.]])\n",
"0.6667,0.5000,nan,0.0000,0.3333,0.0000,0.3333,0.2500,0.0000,0.5000,0.8980,1.0000,0.0000,0.5714,0.4615,1.0000\n",
"class 1 --> accuracy: 66.67, correct predictions: 4, all: 6\n",
"class 2 --> accuracy: 50.00, correct predictions: 2, all: 4\n",
"class 3 --> accuracy: nan, correct predictions: 0, all: 0\n",
"class 4 --> accuracy: 0.00, correct predictions: 0, all: 1\n",
"class 5 --> accuracy: 33.33, correct predictions: 1, all: 3\n",
"class 6 --> accuracy: 0.00, correct predictions: 0, all: 6\n",
"class 7 --> accuracy: 33.33, correct predictions: 1, all: 3\n",
"class 8 --> accuracy: 25.00, correct predictions: 1, all: 4\n",
"class 9 --> accuracy: 0.00, correct predictions: 0, all: 2\n",
"class 10 --> accuracy: 50.00, correct predictions: 2, all: 4\n",
"class 11 --> accuracy: 89.80, correct predictions: 44, all: 49\n",
"class 12 --> accuracy: 100.00, correct predictions: 3, all: 3\n",
"class 13 --> accuracy: 0.00, correct predictions: 0, all: 2\n",
"class 14 --> accuracy: 57.14, correct predictions: 8, all: 14\n",
"class 15 --> accuracy: 46.15, correct predictions: 12, all: 26\n",
"class 16 --> accuracy: 100.00, correct predictions: 8, all: 8\n",
"total correct: 86, total samples: 135\n",
"length is: 135\n",
"10392.jpg, [2.3662103387778188e-07, 0.00038598585524596274, 0.009592914022505283, 2.5076751626329497e-05, 4.398878809297457e-05, 0.0006519253365695477, 0.0007828503730706871, 9.565110667608678e-05, 4.192246706224978e-05, 4.41701968156849e-06, 0.0001288605126319453, 0.9384024739265442, 1.8088884417011286e-06, 1.5409565094159916e-05, 0.049244627356529236, 0.0005818487843498588]\n",
"10364.jpg, [5.774360033683479e-06, 0.0002590364310890436, 0.0005065830773673952, 1.0322162779630162e-05, 4.504207754507661e-05, 0.019380373880267143, 0.0012390341144055128, 0.1040545329451561, 2.811997546814382e-05, 4.8697453166823834e-05, 0.8218845129013062, 0.00520968297496438, 7.457417086698115e-05, 7.432698112097569e-06, 0.02207004278898239, 0.025176208466291428]\n",
"10279.jpg, [0.0005778370541520417, 0.00013909349218010902, 3.7535510273301043e-06, 2.2178002723194368e-07, 5.076999968878226e-06, 0.00015210402489174157, 3.234890073144925e-06, 0.02278214506804943, 1.7927446833709837e-06, 0.010878806933760643, 0.9642300009727478, 4.25838743467466e-06, 1.3645423280195246e-07, 3.683533407183859e-07, 0.0011604143073782325, 6.097558434703387e-05]\n",
"10035.jpg, [4.480835116282833e-07, 6.53391452942742e-06, 1.0490780510963305e-07, 2.570743049545854e-07, 1.9169640097516094e-07, 2.5672923129604897e-06, 3.275225424204109e-08, 1.3308404959389009e-05, 2.1054412968624092e-07, 0.00011978958355030045, 0.9997987151145935, 5.6839278840925545e-05, 1.1284917444953635e-09, 6.850141431868906e-08, 7.703587812102342e-07, 1.4984746599111531e-07]\n",
"10200.jpg, [3.0352875910466537e-05, 0.0077527048997581005, 0.005127150099724531, 0.006198175251483917, 0.01626967266201973, 0.008694437332451344, 0.05434386432170868, 0.0037796692922711372, 0.0011614925460889935, 0.0006593497237190604, 0.0018823753343895078, 0.007597063668072224, 1.633515603316482e-05, 5.7993132941192016e-05, 0.05764371529221535, 0.8287856578826904]\n",
"10238.jpg, [0.00021925698092672974, 0.007181731518357992, 0.0019638114608824253, 0.015105202794075012, 0.033843498677015305, 0.008112002164125443, 0.016040831804275513, 0.0020500770770013332, 0.008174190297722816, 0.0018779357196763158, 0.011004706844687462, 0.003683317918330431, 0.012794270180165768, 0.8753437995910645, 0.001911175437271595, 0.0006942842155694962]\n",
"10044.jpg, [0.0003932445833925158, 0.017303043976426125, 0.0003207740664947778, 0.0001561827230034396, 0.011038982309401035, 0.0001680395071161911, 0.00013343241880647838, 0.00789665337651968, 0.0019822095055133104, 0.8916022777557373, 0.06124800071120262, 0.00016339236753992736, 0.00017846477567218244, 0.0003059201699215919, 0.0040648626163601875, 0.0030445444863289595]\n",
"10379.jpg, [1.0707956299427224e-08, 3.862972153001465e-05, 2.064959744529915e-06, 2.130278886625092e-08, 9.403516543216028e-08, 7.796796808179352e-07, 8.281580790026055e-07, 0.00036582519533112645, 1.1773759744926338e-09, 3.309293242637068e-05, 0.017309438437223434, 7.599703621963272e-08, 1.6842653660642526e-10, 2.09347508217661e-08, 0.640988826751709, 0.3412603735923767]\n",
"10220.jpg, [1.457064399801311e-06, 0.006285868585109711, 9.658561612013727e-05, 0.000504953961353749, 0.0007287762127816677, 7.643261778866872e-05, 2.995071918121539e-05, 0.002330547431483865, 1.3420311006484553e-05, 0.013558966107666492, 0.9741233587265015, 0.00046008292702026665, 3.2469267807755386e-07, 9.069169209396932e-06, 0.000949468812905252, 0.000830760458484292]\n",
"10352.jpg, [0.002016589976847172, 0.14071935415267944, 0.10351946204900742, 0.01260767038911581, 0.02766476944088936, 0.013308885507285595, 0.028410449624061584, 0.4056636095046997, 0.004367390181869268, 0.011801925487816334, 0.03721325471997261, 0.03740841522812843, 0.0075413682498037815, 0.002643406391143799, 0.1365506947040558, 0.02856278233230114]\n",
"10087.jpg, [1.2425649401848204e-05, 0.0011317201424390078, 0.0009420408168807626, 0.0003223929088562727, 0.0016625416465103626, 0.002989661181345582, 0.019247908145189285, 0.004186161328107119, 0.00017351502901874483, 9.967113874154165e-05, 0.0008248639642260969, 0.00039974451647140086, 0.0003303753328509629, 0.00023812537256162614, 0.05882115289568901, 0.9086177349090576]\n",
"10230.jpg, [1.5348749856227705e-20, 5.225625011478209e-18, 2.839345842654102e-18, 7.0745853773787285e-22, 9.541874127295221e-18, 2.7521039306287466e-18, 9.705095986567696e-16, 8.80261914249815e-14, 3.488647054741889e-18, 1.3296075316783225e-18, 7.377219557477038e-17, 1.936527809443728e-22, 9.217500151773916e-22, 2.993058049999658e-20, 3.6333414055178537e-09, 1.0]\n",
"10048.jpg, [3.1133266020333394e-05, 5.558280463446863e-06, 2.0558636606438085e-06, 1.1553111107787117e-06, 2.551872057665605e-06, 4.942762097925879e-05, 1.7059026049537351e-06, 9.066429629456252e-05, 0.007634083740413189, 0.001862559700384736, 0.9899153709411621, 0.00034963482175953686, 2.7992895411443897e-05, 1.3754644896835089e-05, 2.948260544144432e-06, 9.366998710902408e-06]\n",
"10077.jpg, [0.0003644978569354862, 0.02445174567401409, 0.0268173199146986, 0.14815036952495575, 0.07324934750795364, 0.07534733414649963, 0.22453418374061584, 0.02273745834827423, 0.006346749607473612, 0.006140034645795822, 0.12124200165271759, 0.10881874710321426, 0.009120793081820011, 0.0034856752026826143, 0.06262478232383728, 0.08656901121139526]\n",
"10007.jpg, [7.899571130565164e-08, 1.227828579430934e-05, 8.768344628151681e-08, 2.4529640541004483e-07, 8.780428117916017e-08, 7.529866707045585e-05, 3.0113790217001224e-07, 1.2599723959283438e-05, 2.0504334941051638e-07, 2.0584528101608157e-05, 0.9998664855957031, 1.0761074008769356e-05, 1.342658180192302e-07, 1.5935702890601533e-07, 6.26017254035105e-07, 1.5471520953269646e-07]\n",
"10015.jpg, [0.0032403150107711554, 0.0288600604981184, 0.0021949985530227423, 0.00032055555493570864, 0.03435240685939789, 0.00026923054247163236, 0.0006741328397765756, 0.3909558057785034, 0.0001465187524445355, 0.12931789457798004, 0.028202980756759644, 0.0002709380933083594, 2.2979071218287572e-05, 0.00011648586223600432, 0.3657223582267761, 0.015332368202507496]\n",
"10059.jpg, [4.9704078264767304e-05, 0.011441941373050213, 0.00988064706325531, 0.0005590837099589407, 0.0038043225649744272, 0.003703582566231489, 0.030242012813687325, 0.0033373399637639523, 0.0011565325548872352, 0.001852830988354981, 0.006263152230530977, 0.9099429249763489, 5.003160549676977e-05, 0.005532979499548674, 0.011499474756419659, 0.0006833692314103246]\n",
"10141.jpg, [0.009165968745946884, 0.05008544772863388, 0.01266980729997158, 0.006539083551615477, 0.08343607932329178, 0.06937951594591141, 0.06997251510620117, 0.34547385573387146, 0.008465387858450413, 0.019665086641907692, 0.22624975442886353, 0.008796795271337032, 0.0172390379011631, 0.01278378814458847, 0.04592789709568024, 0.014150058850646019]\n",
"10294.jpg, [0.0018009612103924155, 0.02445993945002556, 0.008783867582678795, 0.0010851873084902763, 0.023507453501224518, 0.025955891236662865, 0.016016509383916855, 0.014436563476920128, 0.014382454566657543, 0.009536507539451122, 0.016019323840737343, 0.002431788481771946, 0.008049115538597107, 0.16679614782333374, 0.6639260649681091, 0.0028122372459620237]\n",
"10227.jpg, [0.0005911809857934713, 0.06276290863752365, 0.028155475854873657, 0.0029935534112155437, 0.0051326141692698, 0.01292156707495451, 0.005371398292481899, 0.10780508816242218, 0.002294480800628662, 0.030484579503536224, 0.23203317821025848, 0.08406145870685577, 0.000735031149815768, 0.0009355318616144359, 0.3938409686088562, 0.02988089993596077]\n",
"10136.jpg, [0.012743860483169556, 0.12299606949090958, 0.0067620305344462395, 0.0006394183728843927, 0.06727176159620285, 0.012692786753177643, 0.004568834789097309, 0.03208662196993828, 0.10785427689552307, 0.3719356656074524, 0.13334907591342926, 0.017148084938526154, 0.01880061812698841, 0.003456198377534747, 0.011175709776580334, 0.07651911675930023]\n",
"10351.jpg, [5.002025318390224e-06, 7.0254868660413194e-06, 7.999187801033258e-07, 1.0837701438504155e-06, 2.2568558051716536e-05, 7.856325282773469e-06, 1.501744236520608e-06, 0.000730845145881176, 3.3170704227813985e-06, 0.0021327934227883816, 0.9970544576644897, 7.201700100267772e-06, 4.199989334097154e-08, 1.6003497194105876e-06, 2.2617512513534166e-05, 1.3867899042452336e-06]\n",
"10186.jpg, [1.899654978609086e-15, 3.9145214847380316e-11, 8.479692642504588e-12, 9.986288732792906e-14, 2.760477176705356e-12, 2.637373826275269e-12, 1.7663229212594445e-10, 1.8809870327984868e-10, 2.540761654887258e-13, 1.926697299967084e-12, 3.072015558602814e-11, 6.278280033442801e-14, 1.9418121789708468e-16, 2.4975605012410827e-14, 1.6434576082247077e-06, 0.9999983310699463]\n",
"10336.jpg, [1.3199670320318546e-05, 0.00010489251872058958, 2.100847450492438e-05, 4.004499351140112e-05, 5.695884465239942e-05, 0.00047568572335876524, 3.185767127433792e-05, 0.00021199198090471327, 0.00014170209760777652, 0.0003197429177816957, 0.9963706731796265, 0.0018942506285384297, 1.9195042114006355e-05, 6.796503294026479e-05, 0.00018921356240753084, 4.1600789700169116e-05]\n",
"10127.jpg, [6.530682003358379e-05, 0.0005433352198451757, 0.0011793270241469145, 0.0011712831910699606, 0.0008695712895132601, 0.008260768838226795, 0.0018326687859371305, 0.0007604543352499604, 0.005894558038562536, 0.0008431478054262698, 0.945715606212616, 0.02724176086485386, 0.0002039292303379625, 0.0033132960088551044, 0.001711126882582903, 0.00039377613575197756]\n",
"10001.jpg, [0.0027381174731999636, 0.5029097199440002, 0.022561436519026756, 0.00910989660769701, 0.005293773952871561, 0.007079236209392548, 0.004390011075884104, 0.10494288802146912, 0.0030555555131286383, 0.07121900469064713, 0.1289222240447998, 0.029954055324196815, 0.0009370127227157354, 0.002838773187249899, 0.08552909642457962, 0.018519146367907524]\n",
"10129.jpg, [0.0006523057818412781, 0.00013735111861024052, 7.433637074427679e-05, 4.7359564632643014e-05, 0.0004616275546140969, 0.0008694917196407914, 0.00011103509314125404, 0.001453689532354474, 0.005858565215021372, 0.004164485726505518, 0.9831820130348206, 0.0003935312561225146, 0.0001187418238259852, 0.0001975198247237131, 0.0012508925283327699, 0.0010270841885358095]\n",
"10076.jpg, [0.030817149206995964, 0.0033551163505762815, 0.00015815292135812342, 8.67400158313103e-05, 0.008297493681311607, 0.0021624951623380184, 0.00016946857795119286, 0.04139275103807449, 0.06334786117076874, 0.57809978723526, 0.24777518212795258, 0.0007712304941378534, 0.000886624155100435, 0.0006893952377140522, 0.020705565810203552, 0.0012849865015596151]\n",
"10013.jpg, [1.785103631846141e-05, 0.0006282267277128994, 5.509292532224208e-05, 7.688842015340924e-05, 5.5107691878220066e-05, 0.0011304788058623672, 0.00010044296504929662, 0.0008507733582518995, 5.778988270321861e-05, 0.0013202551053836942, 0.9937394857406616, 0.0005525192827917635, 3.428917261771858e-05, 8.067394810495898e-05, 0.0012364562135189772, 6.380246486514807e-05]\n",
"10004.jpg, [1.2250703548488673e-05, 0.9968633651733398, 0.00020249559020157903, 6.01823630859144e-05, 9.225884241459426e-06, 9.573976421961561e-05, 0.000133973837364465, 0.00033870129846036434, 4.543060640571639e-06, 0.0008338919724337757, 0.0006888517527841032, 0.00016546559345442802, 2.5396420824108645e-05, 9.083253189601237e-07, 0.00021197772002778947, 0.00035300187300890684]\n",
"10252.jpg, [2.703834616113454e-05, 4.0641614759806544e-05, 5.755739766755141e-05, 7.657427340745926e-05, 0.00017262458277400583, 0.0006342959241010249, 0.00022750890639144927, 3.6143297620583326e-05, 0.007224060595035553, 4.759254807140678e-05, 0.0004904040251858532, 4.829879617318511e-05, 0.5076502561569214, 0.4830228090286255, 0.0001955411134986207, 4.85984273836948e-05]\n",
"10027.jpg, [2.093270978775763e-07, 0.0012333086924627423, 0.00011650726082734764, 2.2175581761985086e-06, 1.1602668337218347e-06, 0.49685877561569214, 0.00016006217629183084, 0.000921034486964345, 2.294825435456005e-06, 1.6927950127865188e-05, 0.3541717529296875, 0.006495743524283171, 4.866560061600467e-07, 7.743671631033067e-06, 0.1398996114730835, 0.00011216977145522833]\n",
"10104.jpg, [4.180963878752664e-05, 0.0016643863637000322, 0.0005144530441612005, 1.7318816389888525e-05, 8.052388147916645e-05, 0.0004144623235333711, 0.0001515878102509305, 0.006583126727491617, 6.744464826624608e-06, 0.0004225088923703879, 0.016904838383197784, 4.60282972198911e-05, 2.9421591989375884e-06, 5.003967817174271e-05, 0.9712285995483398, 0.0018706123810261488]\n",
"10113.jpg, [1.4389291624547496e-12, 3.4254510339337685e-09, 1.6204096953131852e-09, 1.8497838677467016e-11, 6.407997465984749e-10, 1.4980080509374716e-09, 2.142106048097503e-08, 1.6006373115828865e-08, 1.2305126362299745e-10, 1.9572160558922747e-10, 1.3674841259359027e-09, 4.852615531625304e-11, 3.0857662345479264e-13, 5.358715207681719e-12, 2.056018820439931e-05, 0.9999793767929077]\n",
"10047.jpg, [0.0003826146712526679, 0.00040501542389392853, 1.7747866877471097e-05, 1.383415929012699e-05, 7.732924132142216e-06, 0.0002678916498553008, 1.2869753845734522e-05, 0.00042471004417166114, 5.5134776630438864e-05, 0.003000334370881319, 0.994802713394165, 0.0001339715818176046, 3.0133098334772512e-05, 6.324930291157216e-05, 0.00034267656155861914, 3.934240885428153e-05]\n",
"10394.jpg, [8.428519322478678e-06, 0.0007189973839558661, 9.583450446370989e-05, 7.22407094144728e-06, 0.00010344368638470769, 0.0005778741906397045, 0.0008312552236020565, 0.00014689727686345577, 0.0002507077588234097, 9.641009819461033e-05, 0.0006686623673886061, 5.015869191993261e-06, 0.00013779876462649554, 0.00034772735671140254, 0.4992031157016754, 0.49680066108703613]\n",
"10010.jpg, [4.93110237584915e-05, 0.00010449026012793183, 8.22518122731708e-05, 5.928620521444827e-05, 8.435546624241397e-05, 0.0003794771328102797, 7.516259211115539e-05, 0.000545869639609009, 0.0020611067302525043, 0.006407959386706352, 0.9876348376274109, 0.0016691962955519557, 0.00024730071891099215, 0.0003837412514258176, 0.000203433315618895, 1.2193666407256387e-05]\n",
"10264.jpg, [0.00016230742039624602, 0.47328850626945496, 0.017722811549901962, 0.0016612482722848654, 0.0034047402441501617, 0.001232443260960281, 0.00048279244219884276, 0.09639338403940201, 0.002989652333781123, 0.1455003172159195, 0.1867930144071579, 0.04072830080986023, 0.01578502170741558, 0.00043919088784605265, 0.011289591901004314, 0.0021265640389174223]\n",
"10343.jpg, [3.4376835174043663e-06, 0.0005708526587113738, 0.0011438828660175204, 0.00014659030421171337, 0.00041802911437116563, 0.0021152524277567863, 0.00013458391185849905, 0.00194056227337569, 0.0001223249564645812, 0.0009625931270420551, 0.8553116321563721, 0.1208595484495163, 1.293440982408356e-05, 7.11926695657894e-05, 0.015241855755448341, 0.0009447227348573506]\n",
"10316.jpg, [0.000866521499119699, 0.016415782272815704, 0.0022246232256293297, 0.006846033502370119, 0.07294214516878128, 0.002823470626026392, 0.016426075249910355, 0.009730714373290539, 0.05365626513957977, 0.01029286626726389, 0.10300940275192261, 0.0018208952387794852, 0.23591944575309753, 0.44080567359924316, 0.0068956720642745495, 0.01932441256940365]\n",
"10183.jpg, [2.8364871695885086e-07, 7.526599006268953e-07, 4.204422111797612e-06, 8.768778570811264e-06, 4.916123998555122e-06, 7.556197488156613e-06, 1.3104726349411067e-05, 3.5862578329215467e-07, 0.00035084501723758876, 9.167543169041892e-08, 0.00014701735926792026, 0.00014887993165757507, 0.04677369445562363, 0.9525372385978699, 2.077296585412114e-06, 1.933084377014893e-07]\n",
"10234.jpg, [0.0001343231851933524, 6.331712938845158e-06, 1.0757552217910415e-06, 3.632579591794638e-06, 0.0001750396768329665, 0.00011765871749958023, 1.4165676475386135e-05, 4.6349410695256665e-05, 0.0006469573127105832, 0.0003287337895017117, 0.9983891248703003, 8.604738104622811e-05, 1.556623465148732e-05, 2.6303026970708743e-05, 3.6686267321783816e-06, 4.968122084392235e-06]\n",
"10022.jpg, [2.3723734557279386e-05, 5.472366319736466e-05, 1.3165166819817387e-05, 3.288956213509664e-05, 3.631904110079631e-05, 0.0003591762506403029, 4.172942863078788e-05, 0.00023004482500255108, 0.0004376605502329767, 0.0004228940815664828, 0.9976205229759216, 0.00034999355557374656, 9.224272071151063e-05, 0.00017445179400965571, 9.458675049245358e-05, 1.5830717529752292e-05]\n",
"10096.jpg, [0.00018485960026737303, 0.0025104032829403877, 0.0001159685998572968, 7.58478490752168e-05, 9.269289876101539e-05, 0.0015897535486146808, 0.0001121572422562167, 0.009617559611797333, 0.0003177307662554085, 0.007554300595074892, 0.9744083881378174, 0.0017869503935799003, 0.00026121168048121035, 0.00012010141654172912, 0.0011238643201068044, 0.00012812178465537727]\n",
"10145.jpg, [2.1130879268582703e-09, 9.300292731495574e-05, 6.265395495574921e-05, 5.564310185945942e-07, 4.654157237382606e-06, 0.0001909439597511664, 0.0002920975675806403, 0.00013530311116483063, 5.985643838357646e-06, 5.485493238666095e-05, 0.00020333942666184157, 8.718996832612902e-05, 3.274128914654284e-07, 1.8338241716264747e-06, 0.38143622875213623, 0.6174310445785522]\n",
"10381.jpg, [3.226422268198803e-05, 0.0005080882692709565, 0.00010585742711555213, 0.0002069370384560898, 6.345763540593907e-05, 0.0005897311493754387, 0.0001691485522314906, 0.00029455189360305667, 0.00039997848216444254, 0.0003791485505644232, 0.9916382431983948, 0.0036886173766106367, 0.0005274401046335697, 0.0008145103347487748, 0.0004959283396601677, 8.617786079412326e-05]\n",
"10125.jpg, [0.00013930261775385588, 0.2515762448310852, 0.0030223182402551174, 0.01973848231136799, 0.0016189597081393003, 0.0007732564117759466, 0.0012871228391304612, 0.032207194715738297, 0.0003789589391089976, 0.41519901156425476, 0.23470133543014526, 0.0020905949641019106, 0.0005517948884516954, 0.0004974020412191749, 0.02193273790180683, 0.0142851946875453]\n",
"10386.jpg, [2.044540633505676e-05, 0.0010500203352421522, 2.2857273506815545e-05, 7.462880603270605e-05, 3.5294200642965734e-05, 3.9440608816221356e-05, 1.2563897143991198e-05, 0.0006167640094645321, 4.797516521648504e-05, 0.04247310012578964, 0.954916775226593, 0.0005743228830397129, 2.7504629542818293e-06, 7.69861726439558e-05, 2.6638894269126467e-05, 9.437982953386381e-06]\n",
"10154.jpg, [8.700591570232064e-05, 0.020561009645462036, 0.0005944213480688632, 0.0006931889220140874, 0.00020638154819607735, 0.0012879130663350224, 0.00016570748994126916, 0.11037842929363251, 0.0001084769974113442, 0.04400576651096344, 0.8092337846755981, 0.00041764293564483523, 0.00017021549865603447, 0.00011924177670152858, 0.011556516401469707, 0.00041440484346821904]\n",
"10361.jpg, [5.470879305136123e-09, 2.8536535410239594e-06, 4.069121587235713e-06, 1.770481219409703e-07, 2.31866924877977e-05, 2.6014933609985746e-05, 0.004129077773541212, 6.033711088093696e-06, 3.2821117201820016e-06, 2.9420280611702765e-07, 4.418052981236542e-07, 1.637375021346088e-06, 8.589177724616093e-08, 6.3076577134779654e-06, 0.007103512529283762, 0.9886929392814636]\n",
"10287.jpg, [0.0013669065665453672, 0.023253660649061203, 0.026880845427513123, 0.007922778837382793, 0.019411031156778336, 0.5289040207862854, 0.10183040797710419, 0.028697403147816658, 0.008858729153871536, 0.0008344368543475866, 0.04595407471060753, 0.009633292444050312, 0.12508611381053925, 0.02283370867371559, 0.041948750615119934, 0.006583916489034891]\n",
"10023.jpg, [0.0013019134057685733, 0.1463937610387802, 0.001185258268378675, 0.00017512444173917174, 0.0030424988362938166, 0.00837793480604887, 0.002558588283136487, 0.1325272172689438, 0.015313920564949512, 0.5408850908279419, 0.06884763389825821, 0.0008326125680468976, 4.102943421457894e-05, 0.004570946097373962, 0.06823746114969254, 0.00570900971069932]\n",
"10118.jpg, [2.118509291904047e-05, 3.892234599334188e-05, 1.5050300135044381e-05, 1.550307024444919e-05, 5.5463537137256935e-05, 0.00016506608517374843, 3.128053140244447e-05, 0.0001953080209204927, 0.00016479617625009269, 0.0002978338743560016, 0.9985419511795044, 0.00025067332899197936, 3.843659214908257e-06, 5.2604755182983354e-05, 0.00013882220082450658, 1.1761293535528239e-05]\n",
"10245.jpg, [0.0009727155556902289, 0.04998866096138954, 0.013433277606964111, 0.0029487828724086285, 0.012386366724967957, 0.0005897539667785168, 0.00320812058635056, 0.019893832504749298, 0.00437384657561779, 0.00912451557815075, 0.04245847836136818, 0.8225048184394836, 4.300007640267722e-05, 0.0005444894195534289, 0.015267984941601753, 0.0022612982429564]\n",
"10332.jpg, [1.3157755063275545e-07, 7.974974323587958e-06, 2.8180233258012777e-08, 5.479209335135238e-07, 3.4149766179325525e-06, 1.048591116159514e-06, 3.252253577556985e-08, 3.964733878092375e-06, 9.540927749185357e-06, 0.012322158552706242, 0.9876170754432678, 2.5906476366799325e-05, 3.5552272237282523e-09, 6.797148671466857e-06, 4.3874368316210166e-07, 8.866916232364019e-07]\n",
"10189.jpg, [8.892385267245118e-06, 0.0018486377084627748, 0.0010001802584156394, 0.00011882706894539297, 4.556999556371011e-05, 0.1385233849287033, 0.003396841464564204, 0.001177480211481452, 0.000151173779158853, 1.2157069249951746e-05, 0.011051422916352749, 8.029973832890391e-05, 0.0028609861619770527, 0.007637915667146444, 0.8317030072212219, 0.00038317704456858337]\n",
"10085.jpg, [1.584913547958422e-06, 1.0582926734059583e-05, 1.7437043879908742e-06, 2.2711803921993123e-06, 7.337014721997548e-06, 0.0025467341765761375, 3.6623250707634725e-06, 0.00011129820632049814, 0.0001502221275586635, 0.00011876093776663765, 0.9967417120933533, 0.00010147927241632715, 7.74902873672545e-05, 3.918303991667926e-05, 8.206224447349086e-05, 3.85792191082146e-06]\n",
"10366.jpg, [1.5478265370794109e-12, 4.905545303302006e-09, 2.2826297796374462e-10, 1.2887538258787856e-12, 1.6264845026370267e-10, 5.397723379374497e-10, 1.9470713930047623e-09, 1.7298877708071814e-07, 1.2119741321647837e-10, 2.8125239914800204e-09, 5.766724342493035e-08, 1.0458886361142117e-12, 2.2502313868581064e-13, 5.63471977410579e-11, 0.0003842430596705526, 0.9996154308319092]\n",
"10175.jpg, [0.0004483280936256051, 0.0008083119755610824, 0.0002859096275642514, 8.743433136260137e-05, 0.00047439130139537156, 0.04145652800798416, 0.0006637874175794423, 0.005725516006350517, 0.0021981389727443457, 0.0010314300889149308, 0.9298980236053467, 0.002430432243272662, 7.19176750862971e-05, 0.0002780473150778562, 0.013893664814531803, 0.00024811990442685783]\n",
"10079.jpg, [9.941408014456279e-15, 1.053570425346706e-11, 9.681313910270273e-13, 1.5374363735505981e-15, 9.377116054129497e-13, 5.393809314122033e-13, 4.006798712263837e-11, 4.427806565843184e-09, 2.8675496764433983e-13, 1.8350401493477664e-12, 7.826488068740645e-11, 1.7949921995145555e-15, 5.541779231860264e-16, 9.87458473821444e-15, 2.2627052658208413e-06, 0.9999977350234985]\n",
"10207.jpg, [0.5767257213592529, 0.012871929444372654, 9.673924796516076e-05, 0.00010513181041460484, 0.013031533919274807, 0.0015462684677913785, 0.00013639466487802565, 0.004866274073719978, 0.02430129423737526, 0.19062355160713196, 0.10350057482719421, 0.0003305183199699968, 4.547013759292895e-06, 0.0022492539137601852, 0.06853236258029938, 0.001077885739505291]\n",
"10061.jpg, [2.3207199774333276e-05, 0.000755063898395747, 8.809185965219513e-05, 7.512595038861036e-05, 5.1462480769259855e-05, 0.0034883683547377586, 0.00012758641969412565, 0.0005565377068705857, 0.00031521826167590916, 0.00043290856410749257, 0.9888918995857239, 0.002916884608566761, 7.553047908004373e-05, 0.00035125622525811195, 0.0017403906676918268, 0.00011040578101528808]\n",
"10221.jpg, [0.026239974424242973, 0.5219356417655945, 0.008356872946023941, 0.0012039607390761375, 0.01298408955335617, 0.002846291521564126, 0.002588798990473151, 0.1779753416776657, 0.0006880820728838444, 0.17258328199386597, 0.018426012247800827, 0.0070276628248393536, 0.004754816647619009, 0.0003617266775108874, 0.03671636804938316, 0.005310948938131332]\n",
"10173.jpg, [4.597003226081142e-06, 0.0008293899009004235, 0.0019829492084681988, 0.0008146176114678383, 7.71750055719167e-05, 0.00834614410996437, 0.0004037338658235967, 0.0006985099171288311, 0.0001503207313362509, 0.00026539855753071606, 0.1274547427892685, 0.0004329823423177004, 0.0006127020460553467, 0.00371816148981452, 0.8536615967750549, 0.0005469819880090654]\n",
"10312.jpg, [0.0077765630558133125, 0.025545863434672356, 0.007216276600956917, 0.010424897074699402, 0.0119712445884943, 0.1978495866060257, 0.013543698936700821, 0.02610148675739765, 0.02365339919924736, 0.008000746369361877, 0.42891719937324524, 0.15969087183475494, 0.04953968897461891, 0.02487916871905327, 0.0034125251695513725, 0.0014768521068617702]\n",
"10176.jpg, [5.51916355107096e-06, 2.533789120207075e-05, 0.00012627523392438889, 1.5401850760099478e-05, 0.00037782973959110677, 0.00035640300484374166, 0.00023104231513570994, 2.79932046396425e-05, 0.002741672098636627, 2.8838154321420006e-05, 0.0007824767963029444, 0.00024477526312693954, 0.6570984125137329, 0.3377908766269684, 0.00014401006046682596, 3.1754552765050903e-06]\n",
"10126.jpg, [0.00016388072981499135, 0.013270805589854717, 0.025777781382203102, 0.05126370117068291, 0.04911147058010101, 0.028212621808052063, 0.3523378372192383, 0.010543112643063068, 0.0011150984792038798, 0.00030164606869220734, 0.012784527614712715, 0.41565102338790894, 0.0001272453519050032, 0.002293567406013608, 0.027977515012025833, 0.009068104438483715]\n",
"10167.jpg, [9.139266232693899e-09, 2.0365671389299678e-06, 2.6841453859560716e-07, 3.4132664694652703e-08, 6.1611949604412075e-06, 5.619041075988207e-06, 1.6536829207325354e-05, 1.1618627695497707e-06, 1.6019735085137654e-06, 4.303603418520652e-06, 9.65527433436364e-05, 3.447221388341859e-07, 9.078666707296179e-09, 6.357875292906101e-08, 0.0020189688075333834, 0.9978463649749756]\n",
"10117.jpg, [0.005369726102799177, 0.013005425222218037, 0.0003352126805111766, 3.2830786949489266e-05, 0.0014755668817088008, 0.005669860634952784, 0.0002074961521429941, 0.30143073201179504, 0.00035907758865505457, 0.027496758848428726, 0.4515710473060608, 0.0006494983681477606, 2.936342752946075e-06, 7.197637023637071e-05, 0.15746022760868073, 0.03486163541674614]\n",
"10282.jpg, [0.0002306406677234918, 0.018608741462230682, 0.0003555947623681277, 0.00011081349657615647, 0.0021579868625849485, 0.0008349153213202953, 0.0002843266411218792, 0.0029669504147022963, 0.0342293381690979, 0.8291582465171814, 0.09390408545732498, 0.0014679018640890718, 0.008013089187443256, 0.004809547681361437, 0.0012504223268479109, 0.001617390662431717]\n",
"10214.jpg, [8.226291538449004e-05, 0.0023045376874506474, 0.00030402105767279863, 0.00030985489138402045, 0.000949404202401638, 0.006756152492016554, 0.0004719794378615916, 0.00037257702206261456, 0.015824902802705765, 0.0018564204219728708, 0.00859471969306469, 0.0002352431183680892, 0.20172260701656342, 0.7556692361831665, 0.003625825047492981, 0.0009202006040140986]\n",
"10280.jpg, [3.839080818579532e-06, 0.00011092209024354815, 0.00026538423844613135, 1.761321800586302e-05, 0.0003446084156166762, 0.0003051514213439077, 0.0048922947607934475, 0.00165621901396662, 7.383635966107249e-05, 1.8455129975336604e-05, 0.0003761693660635501, 0.00014375345199368894, 1.1477720363473054e-05, 2.3202575903269462e-05, 0.011061158962547779, 0.980695903301239]\n",
"10062.jpg, [0.0032643566373735666, 0.002058225916698575, 0.0030060347635298967, 0.0008612240781076252, 0.019359063357114792, 0.045058660209178925, 0.0032336742151528597, 0.08693987876176834, 0.1799137145280838, 0.023998556658625603, 0.5599909424781799, 0.0038836223538964987, 0.002627618843689561, 0.0061825113371014595, 0.05137940123677254, 0.008242609910666943]\n",
"10160.jpg, [0.00889623910188675, 0.0753079503774643, 0.14570188522338867, 0.0214473158121109, 0.13920117914676666, 0.04952811077237129, 0.08749538660049438, 0.07382135838270187, 0.027870209887623787, 0.018860429525375366, 0.10895142704248428, 0.133144348859787, 0.016766779124736786, 0.06055077910423279, 0.014781924895942211, 0.017674630507826805]\n",
"10236.jpg, [1.1792743862315547e-05, 0.0002495796652510762, 3.5775858123088256e-05, 3.365167140145786e-05, 3.812288196058944e-05, 0.0005557794356718659, 5.0312126404605806e-05, 0.0004199664981570095, 2.6234652978018858e-05, 0.0023971886839717627, 0.9938109517097473, 0.002233328530564904, 7.06917853676714e-05, 1.0553923857514746e-05, 3.7380312278401107e-05, 1.8592725609778427e-05]\n",
"10120.jpg, [0.0066380626522004604, 0.001172131858766079, 0.00011798242485383525, 0.00012496570707298815, 0.00016283561126329005, 0.006690907292068005, 0.00021607070812024176, 0.0014034409541636705, 0.0007249744958244264, 0.004902949556708336, 0.9730252027511597, 0.0009508621878921986, 0.00031809290521778166, 0.0007333849207498133, 0.0025408912915736437, 0.0002772004227153957]\n",
"10174.jpg, [1.3780714652966708e-05, 5.8091118262382224e-05, 3.860930974042276e-06, 1.555132712383056e-06, 2.906929330492858e-05, 3.42529165209271e-05, 2.992483814523439e-06, 0.002677074633538723, 8.40075226733461e-05, 0.011598491109907627, 0.9854031801223755, 2.7466116080177017e-05, 4.406670939260948e-07, 7.325061687879497e-06, 5.2608029363909736e-05, 5.797574431198882e-06]\n",
"10008.jpg, [5.888802115805447e-05, 7.568578439531848e-05, 4.364436972537078e-05, 0.00011239431478315964, 4.95739295729436e-05, 0.0016566894482821226, 7.795036799507216e-05, 0.00019493364379741251, 0.0015035920077934861, 0.0004186955629847944, 0.993320107460022, 0.0006274934858083725, 0.0002454044297337532, 0.001135044964030385, 0.000372051727026701, 0.00010793764522532001]\n",
"10384.jpg, [1.6124997273436747e-05, 0.0035701238084584475, 0.005474956706166267, 0.0028008371591567993, 0.010586713440716267, 0.04819971323013306, 0.11525597423315048, 0.010810592211782932, 0.0005204853950999677, 0.003486133646219969, 0.44851982593536377, 0.3084457516670227, 0.0001383403578074649, 0.000950573303271085, 0.03861961513757706, 0.0026042056269943714]\n",
"10054.jpg, [0.0011063436977565289, 0.006097661796957254, 0.0015767245786264539, 0.0009414374362677336, 0.0019877280574291945, 0.1509505957365036, 0.0029820173513144255, 0.010890978388488293, 0.00284176180139184, 0.0009859849233180285, 0.06888839602470398, 0.001119781518355012, 0.03707796707749367, 0.006440005265176296, 0.7055715918540955, 0.0005409740260802209]\n",
"10184.jpg, [4.336402525950689e-08, 4.126111161895096e-05, 9.941462849383242e-06, 1.7021150711116206e-07, 2.456445145071484e-06, 3.58423640136607e-05, 9.900618351821322e-06, 0.0006038054125383496, 1.542645236440876e-06, 1.859844269347377e-05, 0.0009024296305142343, 5.110128427077143e-07, 4.05032167805075e-08, 5.760026056123024e-07, 0.7305200099945068, 0.26785287261009216]\n",
"10226.jpg, [0.0006050397059880197, 0.0023481829557567835, 0.00017000619845930487, 0.0034681493416428566, 0.0010746557964012027, 0.02549276128411293, 0.0011775345774367452, 0.0012812252389267087, 0.0017054866766557097, 0.0008361748186871409, 0.9562750458717346, 0.0033012395724654198, 0.00015380106924567372, 0.0011645684717223048, 0.0008383906097151339, 0.00010776735143736005]\n",
"10302.jpg, [1.0311171081411885e-06, 0.00011149768397444859, 0.0022723879665136337, 1.1552706382644828e-05, 0.0007078676135279238, 0.0013697484973818064, 0.0003418856067582965, 0.0002928659087046981, 8.960709237726405e-06, 2.525324271118734e-05, 0.00022339544375427067, 0.8137299418449402, 3.355811273308973e-08, 7.1739364102541e-06, 0.1808464676141739, 5.000142482458614e-05]\n",
"10363.jpg, [0.0009523102198727429, 0.001107946620322764, 0.0007476542959921062, 0.00098155636806041, 0.0013525269459933043, 0.028987620025873184, 0.0019274252699688077, 0.007038922049105167, 0.004105864558368921, 0.0025398375000804663, 0.93068528175354, 0.0016792905516922474, 0.011318485252559185, 0.0008223343174904585, 0.003514002775773406, 0.0022389302030205727]\n",
"10056.jpg, [0.0007707754266448319, 0.00022346642799675465, 1.635187436477281e-05, 1.5397421520901844e-05, 3.691937308758497e-05, 9.044838225236163e-05, 3.751428812392987e-05, 0.002942192368209362, 9.560711987433024e-06, 0.002041302155703306, 0.9932041168212891, 0.00022709245968144387, 1.2869584224972641e-06, 2.88530281977728e-06, 5.3995292546460405e-05, 0.00032682548044249415]\n",
"10203.jpg, [1.146257454820443e-05, 0.0009559733443893492, 0.00019043720385525376, 0.0005270781693980098, 0.00012245337711647153, 0.012927031144499779, 0.0001861456985352561, 0.0007343121105805039, 0.0005108005716465414, 0.00036097364500164986, 0.9619876146316528, 0.01997591368854046, 0.00016846430662553757, 0.00028339712298475206, 0.0010438900208100677, 1.394179435010301e-05]\n",
"10223.jpg, [0.002475389279425144, 0.01974891684949398, 0.00649876007810235, 0.004846375901252031, 0.02154904417693615, 0.0024947768542915583, 0.015124908648431301, 0.1453721523284912, 0.004212534055113792, 0.02250445820391178, 0.03982570022344589, 0.010226413607597351, 9.399111877428368e-05, 0.0011928713647648692, 0.2799997925758362, 0.4238338768482208]\n",
"10377.jpg, [3.1447350465896307e-06, 0.0007816455326974392, 0.000645093503408134, 0.00022142693342175335, 5.7333709264639765e-05, 0.030467912554740906, 0.0006287266151048243, 0.0009096600115299225, 0.00010301276779500768, 7.757753337500617e-05, 0.040243685245513916, 0.0001436623715562746, 0.001279685297049582, 0.0014005863340571523, 0.9226260185241699, 0.0004107426502741873]\n",
"10119.jpg, [4.447725586942397e-06, 0.0003830821078736335, 1.378939577989513e-05, 6.840234709670767e-05, 1.0865451258723624e-05, 0.0001779339072527364, 2.4077291527646594e-05, 0.00019991800945717841, 0.0001667539618210867, 0.0028913121204823256, 0.9953461289405823, 0.000498601293656975, 4.150648237555288e-05, 0.0001312211388722062, 3.9456808735849336e-05, 2.4202815893659135e-06]\n",
"10163.jpg, [0.00011488082964206114, 0.0009697185014374554, 2.8277316232561134e-05, 3.585355443647131e-05, 0.0001222313439939171, 0.0023739079479128122, 6.306234718067572e-05, 0.0015871685463935137, 0.0016537579940631986, 0.01984327845275402, 0.9674223065376282, 0.0003578574687708169, 3.678699067677371e-05, 3.348082100274041e-05, 0.0020166714675724506, 0.003340789582580328]\n",
"10073.jpg, [0.9884530901908875, 2.9532937332987785e-05, 6.586616564163705e-06, 2.906125018853345e-06, 3.159830885124393e-05, 0.0001831393747124821, 1.892635009426158e-05, 0.006002450827509165, 0.0035196749959141016, 3.953319537686184e-05, 0.001493335934355855, 3.053746695513837e-05, 7.979183465067763e-06, 4.177809842076385e-06, 0.00015467793855350465, 2.174277324229479e-05]\n",
"10161.jpg, [1.6783586431756703e-07, 4.0582749534223694e-06, 1.68046335602412e-05, 2.253295861009974e-05, 1.3154957741789985e-05, 1.1558458936633542e-05, 7.067253318382427e-05, 5.443698682938702e-07, 0.009842091239988804, 1.3716659168494516e-06, 0.00016319569840561599, 0.00023397344921249896, 0.35315802693367004, 0.6364336013793945, 2.500735172361601e-05, 3.1947197385306936e-06]\n",
"10218.jpg, [0.0004178789386060089, 0.03679988905787468, 0.001279712887480855, 0.0010683971922844648, 0.0005478765815496445, 0.03640294820070267, 0.0016852322733029723, 0.029369020834565163, 0.00023511756444349885, 0.17697730660438538, 0.5373448133468628, 0.001305371173657477, 0.0006906931521371007, 0.007295260671526194, 0.1614920049905777, 0.007088434416800737]\n",
"10139.jpg, [4.868746031339022e-11, 4.717661283848429e-08, 3.469899638730567e-07, 7.552452552239686e-10, 5.547112991166614e-08, 4.2255280163772113e-07, 3.1340084660769207e-06, 9.737502296047751e-06, 7.901149956524023e-09, 1.0234139580234114e-07, 1.9166161564498907e-06, 2.0968732883375196e-07, 3.8253580858516045e-11, 1.1705171054643415e-08, 0.2237502634525299, 0.7762337923049927]\n",
"10165.jpg, [0.0012155747972428799, 0.02820424921810627, 0.01631278544664383, 0.004329435992985964, 0.015489335171878338, 0.063621386885643, 0.10842784494161606, 0.5832122564315796, 0.003119507571682334, 0.0077187973074615, 0.07692669332027435, 0.005518998950719833, 0.010629001073539257, 0.009373774752020836, 0.042794544249773026, 0.023105917498469353]\n",
"10321.jpg, [6.877223768242402e-06, 0.00023871248413342983, 6.12458388786763e-05, 5.03339251736179e-05, 1.5408342733280733e-05, 0.0017478683730587363, 0.0001223563012899831, 0.00020625005709007382, 0.00010028461110778153, 0.0002591021766420454, 0.9945800304412842, 0.00026211136719211936, 0.00016925505769904703, 0.00010210488107986748, 0.001881202100776136, 0.0001968297437997535]\n",
"10368.jpg, [1.444157793351053e-09, 1.92505993368286e-07, 4.637737731627567e-07, 2.116162800547272e-09, 1.0883665169103551e-07, 7.107167334652331e-07, 6.097252480685711e-06, 1.6796520867501386e-05, 5.897127408616143e-08, 3.308146290237346e-08, 6.875576673337491e-06, 2.160928680439156e-08, 1.2897172219084041e-08, 6.142109043594246e-08, 0.04068143665790558, 0.9592871069908142]\n",
"10102.jpg, [2.529335915824049e-06, 6.784737342968583e-05, 7.388036465272307e-06, 7.603520771226613e-06, 1.2529967534646858e-05, 7.78243484091945e-05, 1.655424784985371e-05, 0.00010839362948900089, 2.3139602490118705e-05, 0.0022301971912384033, 0.9962053894996643, 5.5781205446692184e-05, 9.296772986999713e-07, 0.00046653987374156713, 0.0007030706037767231, 1.4261648175306618e-05]\n",
"10093.jpg, [1.7796912743506255e-06, 0.0012960686581209302, 0.0016703952569514513, 5.4456399084301665e-05, 4.548227661871351e-05, 0.00038984985440038145, 7.963774260133505e-05, 0.0014404857065528631, 0.0001908472622744739, 0.0017536694649606943, 0.005740403663367033, 0.9791856408119202, 7.681977876927704e-05, 0.0006974730058573186, 0.007369545754045248, 7.458900654455647e-06]\n",
"10009.jpg, [1.2553593933262164e-06, 0.0004220835689920932, 8.754272857913747e-05, 1.0339178516005632e-05, 1.6234876966336742e-05, 0.0009650018182583153, 0.00020527673768810928, 0.0011049845488741994, 3.5759539969149046e-06, 0.0003142592904623598, 0.9718862771987915, 0.0011070624459534883, 1.2461473488656338e-06, 1.0699167432903778e-05, 0.019834034144878387, 0.004030080046504736]\n",
"10058.jpg, [2.3533633793704212e-05, 9.762274567037821e-05, 0.00021959359582979232, 2.5463572455919348e-05, 3.0593026167480275e-05, 0.0025320195127278566, 0.0006467008497565985, 0.0023421975784003735, 7.061002543196082e-05, 0.00012848347250837833, 0.910842776298523, 0.07940474897623062, 1.2351032410151674e-06, 8.90670926310122e-05, 0.003440441098064184, 0.00010492456931388006]\n",
"10253.jpg, [1.6987061826512218e-05, 3.6169767554383725e-05, 1.0753897186077666e-05, 6.7621049311128445e-06, 4.365992936072871e-06, 9.077361755771562e-05, 1.4397414815903176e-05, 0.00012197044998174533, 9.990403486881405e-05, 0.0003536313015501946, 0.998873770236969, 0.00022791644732933491, 0.00010166200809180737, 1.703287307464052e-05, 1.901198447740171e-05, 4.7503858695563395e-06]\n",
"10219.jpg, [0.00016073753067757934, 0.012983039021492004, 0.0011920292163267732, 0.0010370055679231882, 0.0003845236205961555, 0.009263935498893261, 0.0013255377998575568, 0.0016609503654763103, 0.0005734304431825876, 0.0019220688845962286, 0.9494528770446777, 0.009990529157221317, 0.00016661953122820705, 0.0002621698076836765, 0.005502638407051563, 0.004122044891119003]\n",
"10369.jpg, [4.969159019132348e-16, 3.899688731656692e-13, 5.18930818454999e-13, 5.177209369219317e-16, 9.932856539227389e-14, 5.258455888605684e-13, 3.01088112775183e-11, 1.9567434339506917e-09, 1.3617885912344842e-14, 4.68940783828007e-14, 2.017073140458958e-11, 4.588101130777503e-16, 2.910950484225571e-16, 2.622712795571458e-15, 1.3332555681699887e-06, 0.9999986886978149]\n",
"10029.jpg, [0.0009353131172247231, 0.4567667543888092, 0.02592535875737667, 0.2523742616176605, 0.10219879448413849, 0.0014440991217270494, 0.0140247056260705, 0.01760115660727024, 0.00043611094588413835, 0.0049012587405741215, 0.07356606423854828, 0.037477537989616394, 0.00025227590231224895, 0.000458774680737406, 0.004112400580197573, 0.007525131572037935]\n",
"10395.jpg, [0.9994350075721741, 2.0373940060380846e-05, 1.8222857534055947e-07, 6.870160884631105e-09, 2.7044277430832153e-06, 1.9281187633168884e-06, 2.6134770791941264e-07, 4.1998100641649216e-05, 1.2438890735211316e-06, 0.0003953605191782117, 3.7070367397973314e-05, 1.094035837923002e-06, 1.4042393914337481e-08, 7.465718709909197e-08, 6.150655099190772e-05, 1.1710502576534054e-06]\n",
"10265.jpg, [2.043115898686665e-08, 1.468299188900346e-07, 5.015257897866832e-07, 6.027626113791484e-07, 1.8521325273468392e-07, 2.6927650651487056e-06, 1.0786453685796005e-06, 3.186273289657038e-08, 0.0001826835796236992, 1.1143077927044942e-07, 8.70062576723285e-05, 5.208618858887348e-06, 0.020081287249922752, 0.9796363115310669, 2.0402421796461567e-06, 9.736625372624985e-08]\n",
"10092.jpg, [1.254388098459458e-05, 0.0006079184822738171, 0.00011016703501809388, 3.992826805188088e-06, 1.7115735317929648e-05, 0.00032214337261393666, 0.00013859324099030346, 0.010379728861153126, 5.0488291890360415e-05, 0.0016484124353155494, 0.024006789550185204, 2.744144876487553e-05, 4.158048795943614e-06, 1.7763193682185374e-05, 0.19885697960853577, 0.763795793056488]\n",
"10019.jpg, [4.0688948502065614e-05, 0.00010168847802560776, 4.919779712508898e-06, 1.2793227142537944e-05, 3.1377414870803477e-06, 0.0006094414857216179, 5.790153863927117e-06, 0.00019586617418099195, 9.234312528860755e-06, 6.310929165920243e-05, 0.998583197593689, 9.422664879821241e-05, 1.770627477526432e-06, 9.298852091887966e-06, 0.00023225590121001005, 3.248426946811378e-05]\n",
"10255.jpg, [0.9976490139961243, 4.270090812497074e-06, 7.132907171580882e-07, 6.136578463156184e-07, 9.270558621210512e-06, 3.077437213505618e-05, 3.5402390494709834e-06, 0.0009058431605808437, 0.0003945516364183277, 6.441612640628591e-05, 0.000896244018804282, 4.48773516836809e-06, 9.329070280728047e-07, 1.6033974361562287e-06, 2.549141936469823e-05, 8.057788363657892e-06]\n",
"10037.jpg, [0.009334099479019642, 0.19816836714744568, 0.0022002256009727716, 0.0002400255179964006, 0.0038929623551666737, 0.0008099130354821682, 0.0003856632101815194, 0.12729282677173615, 0.012591256760060787, 0.4510643184185028, 0.1551603078842163, 0.003134573809802532, 0.0002322419168194756, 0.00018409940821584314, 0.01826215349137783, 0.017046919092535973]\n",
"10339.jpg, [0.0026877224445343018, 0.16605620086193085, 0.03304905816912651, 0.005897960625588894, 0.19229523837566376, 0.020698102191090584, 0.055761080235242844, 0.32944703102111816, 0.0057463813573122025, 0.017212843522429466, 0.08819117397069931, 0.006576557643711567, 0.011164836585521698, 0.015569331124424934, 0.033872418105602264, 0.015774164348840714]\n",
"10389.jpg, [3.2037437449616846e-06, 0.0007873764843679965, 0.0004011306446045637, 1.5250020624080207e-05, 0.0009206821559928358, 0.0014891648897901177, 0.0015781017718836665, 0.00023034280457068235, 0.0002565717149991542, 0.00024394357751589268, 0.00027030124329030514, 7.925082900328562e-05, 8.935607183957472e-05, 2.7989761292701587e-05, 0.04416860267519951, 0.9494386911392212]\n",
"10170.jpg, [9.349452767537514e-08, 1.5575207612528175e-07, 5.444225052997353e-07, 6.472003377666624e-08, 3.734665142474114e-06, 2.579085048637353e-05, 8.122948997879575e-07, 6.052749085938558e-05, 8.684917247592239e-07, 1.258012889593374e-05, 0.9997739195823669, 1.0573324288998265e-05, 9.470614514839326e-09, 3.55482541181118e-07, 0.00010676866804715246, 3.238016233808594e-06]\n",
"10164.jpg, [0.001990415621548891, 0.013517196290194988, 0.012698830105364323, 0.25997039675712585, 0.5032905340194702, 0.0397786982357502, 0.07516337186098099, 0.022390300408005714, 0.009280560538172722, 0.0019866637885570526, 0.0255331601947546, 0.022633899003267288, 0.0016968253767117858, 0.004029085859656334, 0.0017864387482404709, 0.004253501538187265]\n",
"10116.jpg, [0.0007803646731190383, 0.34787002205848694, 0.011192727833986282, 0.0033785600680857897, 0.010182847268879414, 0.004065539222210646, 0.013940884731709957, 0.006131972186267376, 0.0026692224200814962, 0.03027540072798729, 0.01747868023812771, 0.00861517246812582, 0.004017725586891174, 0.007224285509437323, 0.03203781694173813, 0.5001388192176819]\n",
"10202.jpg, [2.4242037852673093e-06, 1.9560757209546864e-05, 3.1145536922849715e-06, 5.58037345399498e-06, 5.405497176980134e-06, 0.0005438131629489362, 8.243051524914335e-06, 4.171870750724338e-05, 6.425358151318505e-05, 4.783858094015159e-05, 0.9979037046432495, 5.312806024448946e-05, 1.0880890840780921e-05, 9.532201511319727e-05, 0.0011360483476892114, 5.899079405935481e-05]\n",
"10309.jpg, [4.654773874790408e-05, 0.0004048944392707199, 0.00012017769768135622, 0.00017991771164815873, 0.00011306328815408051, 0.001276034046895802, 0.00039241655031219125, 0.00038686711923219264, 0.0011933159548789263, 0.0003597448521759361, 0.9861415028572083, 0.0009280455415137112, 0.004134484566748142, 0.0030022142454981804, 0.0009154144208878279, 0.0004054595483466983]\n",
"10391.jpg, [0.000540565000846982, 0.001567790168337524, 0.005514351651072502, 0.007875142619013786, 0.18126101791858673, 0.16433919966220856, 0.4488925039768219, 0.007667723577469587, 0.0115865683183074, 0.000372345675714314, 0.018090685829520226, 0.10681840777397156, 0.0035128069575875998, 0.004859611392021179, 0.025567838922142982, 0.011533387936651707]\n",
"10078.jpg, [2.0510431131270934e-09, 1.797664367586549e-06, 1.0606639989418909e-05, 4.940332232195033e-08, 7.173147196226637e-07, 7.410689249809366e-06, 7.164366252254695e-06, 0.0002012818440562114, 8.563279862983109e-08, 8.837990208121482e-06, 0.0017591032665222883, 2.8817679776693694e-05, 1.3566628931371838e-09, 2.3185110364920547e-07, 0.9759501814842224, 0.022023726254701614]\n",
"10237.jpg, [7.07471335772425e-05, 0.015222528018057346, 0.01181960292160511, 0.005763694178313017, 0.005671087186783552, 0.005945301614701748, 0.016986610367894173, 0.046226151287555695, 8.000570232979953e-05, 0.0033282979857176542, 0.13550962507724762, 0.05313640460371971, 6.729494634782895e-05, 0.0008391444571316242, 0.6414346098899841, 0.0578988641500473]\n",
"10204.jpg, [2.609656803542748e-06, 0.00016960434732027352, 0.00039698227192275226, 2.095825766446069e-05, 1.3291460163600277e-05, 0.005179240368306637, 0.0007571852183900774, 0.0003775365767069161, 3.228936475352384e-05, 1.6638077795505524e-05, 0.02225513570010662, 2.680780744412914e-05, 0.0008040383108891547, 0.0014567745383828878, 0.9682289958000183, 0.00026189940399490297]\n",
"10303.jpg, [3.352572093717754e-06, 0.00270079355686903, 2.936762393801473e-05, 3.307535371277481e-05, 0.0011689235689118505, 7.281691068783402e-05, 1.3113489330862649e-05, 0.033824604004621506, 0.0013854707358404994, 0.8272922039031982, 0.13278301060199738, 4.460339914658107e-05, 5.261053502181312e-06, 3.900304000126198e-05, 0.0005437033833004534, 6.088039299356751e-05]\n",
"10005.jpg, [8.42644993781505e-08, 6.578899956366513e-06, 3.7984721075190464e-06, 1.404222302880953e-06, 6.25863367531565e-06, 1.975963095901534e-05, 1.5247043847921304e-05, 5.255879500509764e-07, 0.0002231095131719485, 4.586278009810485e-06, 6.651212606811896e-05, 1.667298420215957e-05, 0.0011911168694496155, 0.9983282685279846, 0.00011360087228240445, 2.3876400518929586e-06]\n",
"10367.jpg, [0.018498066812753677, 0.03274774178862572, 0.003294856520369649, 0.003062034025788307, 0.005471142940223217, 0.0020893269684165716, 0.0009636316681280732, 0.025899365544319153, 0.047692157328128815, 0.012577896937727928, 0.2737575173377991, 0.5622816681861877, 4.8947193135973066e-05, 0.00032455287873744965, 0.010119395330548286, 0.0011716961162164807]\n",
"10081.jpg, [0.0002648504450917244, 0.1451336294412613, 0.0009655309841036797, 0.0006926925852894783, 0.003230591071769595, 0.0013755068648606539, 0.0005919093964621425, 0.04398081824183464, 0.0007996093481779099, 0.5576682686805725, 0.24041831493377686, 0.0024263053201138973, 0.00015286689449567348, 0.0002189871738664806, 0.0013749554054811597, 0.0007052195724099874]\n",
"10305.jpg, [1.195418874999632e-09, 5.7533174668833453e-08, 2.2191171566987578e-08, 4.799069941441303e-08, 1.739228849828578e-08, 1.766412260906236e-08, 4.594457436724042e-08, 4.451433888164047e-09, 3.570169064914808e-05, 1.1591841264646519e-08, 1.126623283198569e-05, 1.1900501704076305e-05, 3.2372434361604974e-05, 0.9999085664749146, 5.055137819454103e-08, 1.8672223767168816e-09]\n",
"10277.jpg, [8.566875476390123e-05, 0.003030042164027691, 0.006140801124274731, 0.0028874026611447334, 0.009456130675971508, 0.011643719859421253, 0.0072844927199184895, 0.006395626813173294, 0.09536251425743103, 0.004692770540714264, 0.22324995696544647, 0.08216381818056107, 0.2221924066543579, 0.30766141414642334, 0.01592942699790001, 0.0018237741896882653]\n",
"10318.jpg, [3.075363565585576e-05, 0.0007959450013004243, 0.0008911059121601284, 0.00044816677109338343, 0.0002143415476894006, 0.0566374808549881, 0.00494889123365283, 0.0009665301186032593, 0.0006996531737968326, 0.000491741462610662, 0.5615449547767639, 0.12538672983646393, 9.001621947390959e-06, 0.0006585841183550656, 0.23922011256217957, 0.007055996917188168]\n",
"10080.jpg, [0.0001093325627152808, 0.0011564693413674831, 0.00048719902406446636, 0.00010238502727588639, 0.0001187266971101053, 0.004604402929544449, 0.0006107544177211821, 0.003312992863357067, 0.0001262418954866007, 0.0008456819923594594, 0.9799331426620483, 0.004761998075991869, 0.0004001095949206501, 0.0002469921892043203, 0.003031848929822445, 0.0001517399650765583]\n",
"10148.jpg, [0.0021100114099681377, 0.019694793969392776, 0.0005462245317175984, 0.0004830900579690933, 0.0015471380902454257, 0.0001295117981499061, 0.00026122923009097576, 0.17955230176448822, 0.0002522243303246796, 0.7059442400932312, 0.08425071835517883, 0.00251186010427773, 4.488038939598482e-06, 5.489787508849986e-05, 0.0018040398135781288, 0.0008533476502634585]\n",
"10156.jpg, [0.04540877044200897, 0.0001229752815561369, 1.1622359124885406e-05, 1.3800023225485347e-05, 0.06997022032737732, 0.000140715143061243, 0.00010594412015052512, 0.004022679291665554, 0.7726542949676514, 0.07907912880182266, 0.02758282981812954, 0.00016902702918741852, 0.0004264961462467909, 0.00019399718439672142, 2.2554630049853586e-05, 7.499327330151573e-05]\n",
"10383.jpg, [0.0001705976901575923, 0.0007248555775731802, 0.0018342466792091727, 0.004213429056107998, 0.00137982121668756, 0.0014535568188875914, 0.0026452867314219475, 0.0005807472043670714, 0.21963462233543396, 0.000741242547519505, 0.18102899193763733, 0.00428790133446455, 0.4707147181034088, 0.1041199117898941, 0.0033946866169571877, 0.0030753272585570812]\n",
"10083.jpg, [1.8183895957989832e-13, 8.59173843181793e-11, 2.2346863248756677e-10, 4.2183693604175287e-13, 1.4401872923397452e-11, 1.1231159752211894e-10, 3.964213401985717e-09, 1.2018719530715316e-07, 8.046393412675101e-12, 2.000634900800602e-11, 3.753850563725791e-09, 3.153369926289784e-12, 1.7262034087296013e-13, 3.300942418710262e-12, 0.0005316542810760438, 0.9994682669639587]\n",
"10063.jpg, [9.86210071940441e-11, 3.419716065877765e-08, 1.1391761489676355e-07, 1.362756307798918e-07, 3.681808191657865e-09, 1.2716466812889848e-07, 1.6282628223507345e-07, 3.826985395249949e-09, 0.00010558970097918063, 7.020105385890929e-09, 3.787561581702903e-05, 1.1224159607081674e-05, 0.04749355837702751, 0.9523510336875916, 1.3715963120830565e-07, 1.0734047428684335e-08]\n"
]
}
],
"source": [
"import ntpath\n",
"from torch.utils.data.sampler import WeightedRandomSampler\n",
"\n",
"model_ft.eval()\n",
"\n",
"nb_classes = 16\n",
"\n",
"import torch.nn.functional as F\n",
"\n",
"confusion_matrix = torch.zeros(nb_classes, nb_classes)\n",
"\n",
"_classes = []\n",
"_preds = []\n",
"predicted_labels = []\n",
"\n",
"class_probs = torch.Tensor()\n",
"\n",
"\n",
"\n",
"\n",
"im_paths = []\n",
"with torch.no_grad():\n",
" for i, (inputs, classes, im_path) in enumerate(dataloaders['test']):\n",
" \n",
"\n",
" im_paths.append(im_path)\n",
" inputs = inputs.to(device)\n",
" \n",
" classes = classes.to(device)\n",
" classes_list = classes.cpu().detach().numpy().tolist()\n",
" _classes[:]=[i+1 for i in classes_list]\n",
" outputs = model_ft(inputs)\n",
" \n",
" \n",
"\n",
" class_probs = class_probs.cuda()\n",
" \n",
" class_probs = torch.cat((class_probs, F.softmax(outputs, 1)))\n",
" \n",
" _, preds = torch.max(outputs, 1)\n",
" preds_list = preds.cpu().detach().numpy().tolist()\n",
" _preds[:]=[i+1 for i in preds_list]\n",
" \n",
" predicted_labels.append(preds.cpu().detach().numpy().tolist())\n",
" for t, p in zip(classes.view(-1), preds.view(-1)):\n",
" confusion_matrix[t.long(), p.long()] += 1\n",
" \n",
"print(confusion_matrix)\n",
"per_class_accuracies = (confusion_matrix.diag()/confusion_matrix.sum(1)).cpu().detach().numpy().tolist()\n",
"\n",
"print(','.join(\"{:2.04f}\".format(x) for x in per_class_accuracies))\n",
"total_correct = 0\n",
"total = 0\n",
"for i in range(nb_classes):\n",
" total_correct += int(confusion_matrix[i][i].numpy())\n",
" total += int(confusion_matrix.sum(dim=1)[i].numpy())\n",
" print(\"class {:d} --> accuracy: {:.2f}, correct predictions: {:d}, all: {:d}\".format(i+1, (confusion_matrix.diag()/confusion_matrix.sum(1))[i]*100, int(confusion_matrix[i][i].numpy()), int(confusion_matrix.sum(dim=1)[i].numpy())))\n",
" \n",
"\n",
"print(\"total correct: {}, total samples: {}\".format(total_correct, total))\n",
"\n",
"flattened_im_paths = flattened = [item for sublist in im_paths for item in sublist]\n",
"\n",
"print(\"length is: \", len(flattened_im_paths))\n",
"for i in range(len(flattened_im_paths)):\n",
" class_p = class_probs[i].cpu().detach().numpy().tolist()\n",
"\n",
" print('{}, {}'.format(ntpath.basename(flattened_im_paths[i]), class_p))\n",
"\n"
]
},
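{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional cross-check, not part of the original pipeline: a minimal sketch\n",
"# (assuming scikit-learn is available) that recomputes the confusion matrix\n",
"# and per-class recall with sklearn.metrics, using the 1-indexed labels\n",
"# accumulated in _classes and _preds above. The import is aliased so it does\n",
"# not shadow the torch confusion_matrix tensor built in the previous cell.\n",
"from sklearn.metrics import confusion_matrix as sk_confusion_matrix, classification_report\n",
"\n",
"print(sk_confusion_matrix(_classes, _preds, labels=list(range(1, nb_classes + 1))))\n",
"print(classification_report(_classes, _preds, labels=list(range(1, nb_classes + 1))))"
]
},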
{
"cell_type": "code",
"execution_count": 134,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/scratch/sjn-p3/anaconda/anaconda3/lib/python3.6/site-packages/torchvision-0.2.1-py3.6.egg/torchvision/transforms/transforms.py:188: UserWarning: The use of the transforms.Scale transform is deprecated, please use transforms.Resize instead.\n"
]
}
],
"source": [
"import pandas as pd\n",
"from PIL import Image\n",
"input_path = \"/scratch2/NAACL2018/fine_tuning/official_tut/10folds/9_test_all/\"\n",
"vector_fh = open('correct_fold9_test_resnet_9class.txt', 'a+')\n",
"df = pd.read_csv('../../10fold_mona/test-9.csv')\n",
"\n",
"\n",
"scaler = transforms.Scale((224, 224))\n",
"normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n",
"to_tensor = transforms.ToTensor()\n",
"\n",
"\n",
"for i in range(df.shape[0]):\n",
" theme = df.iloc[i]['Q3 Theme']\n",
" filename = input_path + str(df.iloc[i]['ID']) + '.jpg'\n",
" img = Image.open(filename)\n",
" image = normalize(to_tensor(scaler(img))).unsqueeze(0).to(device)\n",
" model_ft = model_ft.cuda()\n",
" tf_last_layer_chopped = nn.Sequential(*list(model_ft.children())[:-1])\n",
" output = tf_last_layer_chopped(image)\n",
" nd_arr = output.cpu().detach().numpy().reshape(1, 2048)\n",
" np.savetxt(vector_fh, nd_arr)"
]
},
{
"cell_type": "code",
"execution_count": 135,
"metadata": {},
"outputs": [],
"source": [
"input_path =\"/scratch2/NAACL2018/fine_tuning/official_tut/10folds/9_train_all/\"\n",
"vector_fh = open('correct_fold9_train_resnet_9class.txt', 'a+')\n",
"df = pd.read_csv('../../10fold_mona/train-9.csv')\n",
"\n",
"for i in range(df.shape[0]):\n",
" theme = df.iloc[i]['Q3 Theme']\n",
" filename = input_path + str(df.iloc[i]['ID']) + '.jpg'\n",
" img = Image.open(filename)\n",
" image = normalize(to_tensor(scaler(img))).unsqueeze(0).to(device)\n",
" model_ft = model_ft.cuda()\n",
" tf_last_layer_chopped = nn.Sequential(*list(model_ft.children())[:-1])\n",
" output = tf_last_layer_chopped(image)\n",
" nd_arr = output.cpu().detach().numpy().reshape(1, 2048)\n",
" np.savetxt(vector_fh, nd_arr)"
]
},
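{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Quick sanity check, not in the original notebook: reload the feature files\n",
"# written above and confirm every row is a 2048-dimensional vector. Since the\n",
"# files were opened in append mode, re-running the extraction cells keeps\n",
"# adding rows.\n",
"test_feats = np.loadtxt('correct_fold9_test_resnet_9class.txt')\n",
"train_feats = np.loadtxt('correct_fold9_train_resnet_9class.txt')\n",
"print(test_feats.shape, train_feats.shape)"
]
},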
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fold 0 \n",
"0.0000,0.2857,0.6923,0.0000,0.0909,0.5000,0.3125,0.0000,0.1429\n",
"class 1 --> accuracy: 0.00, correct predictions: 0, all: 3\n",
"class 2 --> accuracy: 28.57, correct predictions: 4, all: 14\n",
"class 3 --> accuracy: 69.23, correct predictions: 18, all: 26\n",
"class 4 --> accuracy: 0.00, correct predictions: 0, all: 5\n",
"class 5 --> accuracy: 9.09, correct predictions: 1, all: 11\n",
"class 6 --> accuracy: 50.00, correct predictions: 3, all: 6\n",
"class 7 --> accuracy: 31.25, correct predictions: 5, all: 16\n",
"class 8 --> accuracy: 0.00, correct predictions: 0, all: 3\n",
"class 9 --> accuracy: 14.29, correct predictions: 1, all: 7\n",
"total correct: 32, total samples: 91\n",
" \n",
"-----------------------\n",
"\n",
"fold 1\n",
"\n",
"0.0000,0.3571,0.8846,0.2000,0.1818,0.5000,0.2500,0.0000,0.7143\n",
"class 1 --> accuracy: 0.00, correct predictions: 0, all: 2\n",
"class 2 --> accuracy: 35.71, correct predictions: 5, all: 14\n",
"class 3 --> accuracy: 88.46, correct predictions: 23, all: 26\n",
"class 4 --> accuracy: 20.00, correct predictions: 1, all: 5\n",
"class 5 --> accuracy: 18.18, correct predictions: 2, all: 11\n",
"class 6 --> accuracy: 50.00, correct predictions: 3, all: 6\n",
"class 7 --> accuracy: 25.00, correct predictions: 4, all: 16\n",
"class 8 --> accuracy: 0.00, correct predictions: 0, all: 3\n",
"class 9 --> accuracy: 71.43, correct predictions: 5, all: 7\n",
"total correct: 43, total samples: 90\n",
" \n",
"-------------------------------------------------\n",
"\n",
"fold 2\n",
"\n",
"0.0000,0.2857,0.7308,0.2000,0.2727,0.5000,0.4000,0.0000,0.0000\n",
"class 1 --> accuracy: 0.00, correct predictions: 0, all: 2\n",
"class 2 --> accuracy: 28.57, correct predictions: 4, all: 14\n",
"class 3 --> accuracy: 73.08, correct predictions: 19, all: 26\n",
"class 4 --> accuracy: 20.00, correct predictions: 1, all: 5\n",
"class 5 --> accuracy: 27.27, correct predictions: 3, all: 11\n",
"class 6 --> accuracy: 50.00, correct predictions: 3, all: 6\n",
"class 7 --> accuracy: 40.00, correct predictions: 6, all: 15\n",
"class 8 --> accuracy: 0.00, correct predictions: 0, all: 3\n",
"class 9 --> accuracy: 0.00, correct predictions: 0, all: 7\n",
"total correct: 36, total samples: 89\n",
" \n",
"------------------------------------------------------------------------------------------\n",
"\n",
"fold 3\n",
"\n",
"0.0000,0.2143,0.8077,0.8000,0.3000,0.5000,0.6667,0.0000,0.3333\n",
"class 1 --> accuracy: 0.00, correct predictions: 0, all: 2\n",
"class 2 --> accuracy: 21.43, correct predictions: 3, all: 14\n",
"class 3 --> accuracy: 80.77, correct predictions: 21, all: 26\n",
"class 4 --> accuracy: 80.00, correct predictions: 4, all: 5\n",
"class 5 --> accuracy: 30.00, correct predictions: 3, all: 10\n",
"class 6 --> accuracy: 50.00, correct predictions: 3, all: 6\n",
"class 7 --> accuracy: 66.67, correct predictions: 10, all: 15\n",
"class 8 --> accuracy: 0.00, correct predictions: 0, all: 3\n",
"class 9 --> accuracy: 33.33, correct predictions: 2, all: 6\n",
"total correct: 46, total samples: 87\n",
" \n",
"--------------------------------------------------------------------------------------------------------------------------------\n",
"\n",
"fold 4\n",
"\n",
"0.0000,0.2857,0.7692,0.2500,0.4000,0.1667,0.2000,0.0000,0.5000\n",
"class 1 --> accuracy: 0.00, correct predictions: 0, all: 2\n",
"class 2 --> accuracy: 28.57, correct predictions: 4, all: 14\n",
"class 3 --> accuracy: 76.92, correct predictions: 20, all: 26\n",
"class 4 --> accuracy: 25.00, correct predictions: 1, all: 4\n",
"class 5 --> accuracy: 40.00, correct predictions: 4, all: 10\n",
"class 6 --> accuracy: 16.67, correct predictions: 1, all: 6\n",
"class 7 --> accuracy: 20.00, correct predictions: 3, all: 15\n",
"class 8 --> accuracy: 0.00, correct predictions: 0, all: 3\n",
"class 9 --> accuracy: 50.00, correct predictions: 3, all: 6\n",
"total correct: 36, total samples: 86\n",
" \n",
"-----------------------------------------------------------------------------------------------------------------------------------------------------------\n",
"fold 5\n",
"\n",
"0.0000,0.5000,0.7692,0.5000,0.1000,0.5000,0.6667,0.0000,0.1667\n",
"class 1 --> accuracy: 0.00, correct predictions: 0, all: 2\n",
"class 2 --> accuracy: 50.00, correct predictions: 7, all: 14\n",
"class 3 --> accuracy: 76.92, correct predictions: 20, all: 26\n",
"class 4 --> accuracy: 50.00, correct predictions: 2, all: 4\n",
"class 5 --> accuracy: 10.00, correct predictions: 1, all: 10\n",
"class 6 --> accuracy: 50.00, correct predictions: 3, all: 6\n",
"class 7 --> accuracy: 66.67, correct predictions: 10, all: 15\n",
"class 8 --> accuracy: 0.00, correct predictions: 0, all: 3\n",
"class 9 --> accuracy: 16.67, correct predictions: 1, all: 6\n",
"total correct: 44, total samples: 86\n",
" \n",
"---------------------------------------------------------------------------------------------------------------\n",
"\n",
"fold 6\n",
"\n",
"0.0000,0.6154,0.8077,0.2500,0.2000,0.3333,0.8000,0.0000,0.1667\n",
"class 1 --> accuracy: 0.00, correct predictions: 0, all: 2\n",
"class 2 --> accuracy: 61.54, correct predictions: 8, all: 13\n",
"class 3 --> accuracy: 80.77, correct predictions: 21, all: 26\n",
"class 4 --> accuracy: 25.00, correct predictions: 1, all: 4\n",
"class 5 --> accuracy: 20.00, correct predictions: 2, all: 10\n",
"class 6 --> accuracy: 33.33, correct predictions: 2, all: 6\n",
"class 7 --> accuracy: 80.00, correct predictions: 12, all: 15\n",
"class 8 --> accuracy: 0.00, correct predictions: 0, all: 3\n",
"class 9 --> accuracy: 16.67, correct predictions: 1, all: 6\n",
"total correct: 47, total samples: 85\n",
" \n",
"--------------------------------------------------------------------------------------------------------------------------------------------------------------\n",
"\n",
"fold 7\n",
"\n",
"0.0000,0.3077,0.8846,0.2500,0.1000,0.2000,0.5333,0.0000,0.0000\n",
"class 1 --> accuracy: 0.00, correct predictions: 0, all: 2\n",
"class 2 --> accuracy: 30.77, correct predictions: 4, all: 13\n",
"class 3 --> accuracy: 88.46, correct predictions: 23, all: 26\n",
"class 4 --> accuracy: 25.00, correct predictions: 1, all: 4\n",
"class 5 --> accuracy: 10.00, correct predictions: 1, all: 10\n",
"class 6 --> accuracy: 20.00, correct predictions: 1, all: 5\n",
"class 7 --> accuracy: 53.33, correct predictions: 8, all: 15\n",
"class 8 --> accuracy: 0.00, correct predictions: 0, all: 3\n",
"class 9 --> accuracy: 0.00, correct predictions: 0, all: 6\n",
"total correct: 38, total samples: 84\n",
" \n",
"--------------------------------------------------------------------------------\n",
"\n",
"fold 8\n",
"\n",
"0.0000,0.3846,0.7692,0.0000,0.1000,0.4000,0.6667,0.0000,0.3333\n",
"class 1 --> accuracy: 0.00, correct predictions: 0, all: 2\n",
"class 2 --> accuracy: 38.46, correct predictions: 5, all: 13\n",
"class 3 --> accuracy: 76.92, correct predictions: 20, all: 26\n",
"class 4 --> accuracy: 0.00, correct predictions: 0, all: 4\n",
"class 5 --> accuracy: 10.00, correct predictions: 1, all: 10\n",
"class 6 --> accuracy: 40.00, correct predictions: 2, all: 5\n",
"class 7 --> accuracy: 66.67, correct predictions: 10, all: 15\n",
"class 8 --> accuracy: 0.00, correct predictions: 0, all: 2\n",
"class 9 --> accuracy: 33.33, correct predictions: 2, all: 6\n",
"total correct: 40, total samples: 83\n",
" \n",
"----------------------------------------------------------------------------------------------------------------------\n",
"\n",
"fold 9\n",
"\n",
"0.0000,0.0000,0.7692,0.0000,0.0000,0.2000,0.6667,0.0000,0.3333\n",
"class 1 --> accuracy: 0.00, correct predictions: 0, all: 2\n",
"class 2 --> accuracy: 0.00, correct predictions: 0, all: 13\n",
"class 3 --> accuracy: 76.92, correct predictions: 20, all: 26\n",
"class 4 --> accuracy: 0.00, correct predictions: 0, all: 4\n",
"class 5 --> accuracy: 0.00, correct predictions: 0, all: 10\n",
"class 6 --> accuracy: 20.00, correct predictions: 1, all: 5\n",
"class 7 --> accuracy: 66.67, correct predictions: 10, all: 15\n",
"class 8 --> accuracy: 0.00, correct predictions: 0, all: 2\n",
"class 9 --> accuracy: 33.33, correct predictions: 2, all: 6\n",
"total correct: 33, total samples: 83\n",
" \n",
"-----------------------------------------------------------------------------------------------------------------------------------------------------------------\n",
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 126,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"macro2 37.500\n",
"macro3 82.308\n",
"macro5 28.155\n",
"macro7 51.974\n",
"total acc normal 57.296\n",
"micro2 37.637\n",
"micro3 82.309\n",
"micro5 28.363\n",
"micro7 52.042\n"
]
}
],
"source": [
"total_corrects_normal_loss = 36 + 36 + 35 + 39 + 37 + 39 + 44 + 36+ 38 + 33\n",
"totals = 67 + 67 + 66 + 65 + 65 + 65 + 64 + 64 + 64 + 64\n",
"\n",
"\n",
"\n",
"c2_corr_normal_loss = 5 + 6 + 4 + 3 + 5 + 6 + 8 + 4 + 6 + 4\n",
"c2_tot = 14 + 14 + 14 + 14 + 14 + 14 + 13 + 13 +13 + 13\n",
"\n",
"c3_corr_normal_loss = 19 + 22 + 21 + 23 + 22 + 21 + 23 + 21 + 21 + 21\n",
"c3_tot = 26 + 26 + 26 + 26 + 26 + 26 + 26 + 26 + 26 + 26\n",
"\n",
"\n",
"c5_corr_normal_loss = 2 +3 + 2 + 4 + 6 + 3 + 3 +2 +2 +2\n",
"c5_tot = 11 + 11 + 11 + 10 + 10 + 10 + 10 + 10 + 10 + 10\n",
"\n",
"\n",
"c7_corr_normal_loss = 10 + 5 + 8 + 9 + 4 + 9 + 10 + 9 + 9 + 6\n",
"c7_tot = 16 + 16 + 15 +15 + 15 + 15 + 15 + 15 + 15 + 15\n",
"\n",
"\n",
"acc2_normal = c2_corr_normal_loss / c2_tot\n",
"acc3_normal = c3_corr_normal_loss / c3_tot\n",
"acc5_normal = c5_corr_normal_loss / c5_tot\n",
"acc7_normal = c7_corr_normal_loss / c7_tot\n",
"\n",
"\n",
"\n",
"acc_normal = total_corrects_normal_loss / totals\n",
"\n",
"\n",
"print('macro2 {:.3f}'.format(acc2_normal*100))\n",
"print('macro3 {:.3f}'.format(acc3_normal*100))\n",
"print('macro5 {:.3f}'.format(acc5_normal*100))\n",
"print('macro7 {:.3f}'.format(acc7_normal*100))\n",
"\n",
"\n",
"\n",
"print('total acc normal {:.3f}'.format(acc_normal*100))\n",
"\n",
"#micro acc\n",
"micro2 = (35.71 + 42.86 + 28.57 + 21.43 + 35.71 + 42.86 + 61.54 + 30.77 + 46.15 + 30.77)/10\n",
"micro3 = (73.08 + 84.62 + 80.77 + 88.46 + 84.62 + 80.77 + 88.46 + 80.77 + 80.77 + 80.77)/10\n",
"micro5 = (18.18 + 27.27 + 18.18 + 40.00 + 60.00 + 30.00 + 30.00 + 20.00 + 20.00 + 20.00)/10 \n",
"micro7 = (62.50 + 31.25 + 53.33 + 60.00 + 26.67 + 60.00 + 66.67 + 60.00 + 60.00 + 40.00)/10\n",
"\n",
"print('micro2 {:.3f}'.format(micro2))\n",
"print('micro3 {:.3f}'.format(micro3))\n",
"print('micro5 {:.3f}'.format(micro5))\n",
"print('micro7 {:.3f}'.format(micro7))\n",
"\n"
]
},
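{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Alternative bookkeeping, offered as a sketch rather than the original computation:\n",
"# keep the per-fold counts in lists so both the pooled accuracy and the per-fold\n",
"# average come out of one helper instead of long hand-typed sums. The counts reuse\n",
"# the class-3 numbers summed by hand in c3_corr_normal_loss / c3_tot above; note the\n",
"# second figure averages exact per-fold fractions, so it can differ in the last digit\n",
"# from micro3, which averaged the already-rounded percentages.\n",
"def pooled_and_averaged(corrects, totals):\n",
"    pooled = 100.0 * sum(corrects) / sum(totals)\n",
"    averaged = sum(100.0 * c / t for c, t in zip(corrects, totals)) / len(corrects)\n",
"    return pooled, averaged\n",
"\n",
"c3_corrects = [19, 22, 21, 23, 22, 21, 23, 21, 21, 21]\n",
"c3_totals = [26] * 10\n",
"print('class 3 pooled {:.3f}, per-fold average {:.3f}'.format(*pooled_and_averaged(c3_corrects, c3_totals)))"
]
},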
{
"cell_type": "code",
"execution_count": 147,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"acc class 1 normal 0.000 and weighted 14.286\n",
"acc class 2 normal 33.088 and weighted 27.941\n",
"acc class 3 normal 76.923 and weighted 65.385\n",
"acc class 4 normal 25.000 and weighted 31.818\n",
"acc class 5 normal 18.447 and weighted 7.767\n",
"acc class 6 normal 42.105 and weighted 47.368\n",
"acc class 7 normal 50.000 and weighted 42.763\n",
"acc class 8 normal 0.000 and weighted 0.000\n",
"acc class 9 normal 30.159 and weighted 30.159\n",
"total acc normal 45.602, and weighted 39.468\n"
]
}
],
"source": [
"total_corrects_normal_loss = 33 + 43 + 36 + 46 + 36 + 44 + 45 + 38 + 40 + 33\n",
"total_corects_weighted_loss = 36 + 33 + 28 + 39 + 30 + 36 + 42 + 35 + 34 + 28\n",
"totals = 91 + 90 + 89 + 87 + 86 + 86 + 85 + 84 + 83 + 83\n",
"\n",
"c1_corr_normal_loss = 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0\n",
"c1_corr_weighted_loss = 3 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0\n",
"c1_tot = 3 + 2 + 2 + 2 + 2 + 2 + 2 + 2 + 2 + 2\n",
"\n",
"c2_corr_normal_loss = 5 + 5 + 4 + 3 + 4 + 7 + 8 + 4 + 5 + 0\n",
"c2_corr_weighted_loss = 3 + 8 + 3 + 1 + 3 + 4 + 7 + 4 + 5 + 0\n",
"c2_tot = 14 + 14 + 14 + 14 + 14 + 14 + 13 + 13 + 13 + 13\n",
"\n",
"c3_corr_normal_loss = 17 + 23 + 19 + 21 + 20 + 20 + 17 + 23 + 20 + 20\n",
"c3_corr_weighted_loss = 15 + 16 + 13 + 20 + 20 + 15 + 19 + 19 + 16 + 17\n",
"c3_tot = 26 + 26 + 26 + 26 + 26 + 26 + 26 + 26 + 26 + 26\n",
"\n",
"c4_corr_normal_loss = 0 + 1 + 1 + 4 + 1 + 2 + 1 + 1 + 0 + 0 \n",
"c4_corr_weighted_loss = 0 + 2 + 1 + 4 + 1 + 2 + 1 + 1 + 2 + 0\n",
"c4_tot = 5 + 5 + 5 + 5 + 4 + 4 + 4 + 4 + 4 + 4\n",
"\n",
"c5_corr_normal_loss = 1 + 2 + 3 + 3 + 4 + 1 + 3 + 1 + 1 + 0 \n",
"c5_corr_weighted_loss = 1 + 0 + 2 + 2 + 0 + 1 + 1 + 1 + 0 + 0\n",
"c5_tot = 11 + 11 + 11 + 10 + 10 + 10 + 10 + 10 + 10 + 10\n",
"\n",
"c6_corr_normal_loss = 4 + 3 + 3 + 3 + 1 + 3 + 3 + 1 + 2 + 1\n",
"c6_corr_weighted_loss = 4 + 4 + 3 + 2 + 1 + 5 + 4 + 1 + 2 + 1\n",
"c6_tot = 6 + 6 + 6 + 6 + 6 + 6 + 6 + 5 + 5 + 5\n",
"\n",
"c7_corr_normal_loss = 5 + 4 + 6 + 10 + 3 + 10 + 10 + 8 + 10 + 10\n",
"c7_corr_weighted_loss = 9 + 1 + 5 + 7 + 2 + 9 + 8 + 8 + 8 + 8\n",
"c7_tot = 16 + 16 + 15 + 15 + 15 + 15 + 15 + 15 + 15 + 15\n",
"\n",
"c8_corr_normal_loss = 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0\n",
"c8_corr_weighted_loss = 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0\n",
"c8_tot = 3 + 3 + 3 + 3 + 3 + 3 + 3 + 3 + 2 + 2\n",
"\n",
"c9_corr_normal_loss = 1 + 5 + 0 + 2 + 3 + 1 + 3 + 0 + 2 + 2\n",
"c9_corr_weighted_loss = 4 + 2 + 1 + 3 + 3 + 0 + 2 + 1 + 1 + 2\n",
"c9_tot = 7 + 7 + 7 + 6 + 6 + 6 + 6 + 6 + 6 + 6\n",
"\n",
"acc1_normal = c1_corr_normal_loss / c1_tot\n",
"acc2_normal = c2_corr_normal_loss / c2_tot\n",
"acc3_normal = c3_corr_normal_loss / c3_tot\n",
"acc4_normal = c4_corr_normal_loss / c4_tot\n",
"acc5_normal = c5_corr_normal_loss / c5_tot\n",
"acc6_normal = c6_corr_normal_loss / c6_tot\n",
"acc7_normal = c7_corr_normal_loss / c7_tot\n",
"acc8_normal = c8_corr_normal_loss / c8_tot\n",
"acc9_normal = c9_corr_normal_loss / c9_tot\n",
"\n",
"\n",
"acc1_weighted = c1_corr_weighted_loss / c1_tot\n",
"acc2_weighted = c2_corr_weighted_loss / c2_tot\n",
"acc3_weighted = c3_corr_weighted_loss / c3_tot\n",
"acc4_weighted = c4_corr_weighted_loss / c4_tot\n",
"acc5_weighted = c5_corr_weighted_loss / c5_tot\n",
"acc6_weighted = c6_corr_weighted_loss / c6_tot\n",
"acc7_weighted = c7_corr_weighted_loss / c7_tot\n",
"acc8_weighted = c8_corr_weighted_loss / c8_tot\n",
"acc9_weighted = c9_corr_weighted_loss / c9_tot\n",
"\n",
"\n",
"acc_normal = total_corrects_normal_loss / totals\n",
"acc_weighted = total_corects_weighted_loss / totals\n",
"\n",
"print('acc class 1 normal {:.3f} and weighted {:.3f}'.format(acc1_normal*100, acc1_weighted*100))\n",
"print('acc class 2 normal {:.3f} and weighted {:.3f}'.format(acc2_normal*100, acc2_weighted*100))\n",
"print('acc class 3 normal {:.3f} and weighted {:.3f}'.format(acc3_normal*100, acc3_weighted*100))\n",
"print('acc class 4 normal {:.3f} and weighted {:.3f}'.format(acc4_normal*100, acc4_weighted*100))\n",
"print('acc class 5 normal {:.3f} and weighted {:.3f}'.format(acc5_normal*100, acc5_weighted*100))\n",
"print('acc class 6 normal {:.3f} and weighted {:.3f}'.format(acc6_normal*100, acc6_weighted*100))\n",
"print('acc class 7 normal {:.3f} and weighted {:.3f}'.format(acc7_normal*100, acc7_weighted*100))\n",
"print('acc class 8 normal {:.3f} and weighted {:.3f}'.format(acc8_normal*100, acc8_weighted*100))\n",
"print('acc class 9 normal {:.3f} and weighted {:.3f}'.format(acc9_normal*100, acc9_weighted*100))\n",
"\n",
"\n",
"print('total acc normal {:.3f}, and weighted {:.3f}'.format(acc_normal*100, acc_weighted*100))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
}