@mjamroz
Last active January 19, 2020 12:32
#!/usr/bin/env python3
from mxnet.gluon import utils
from gluoncv.model_zoo import get_model
from mxnet import nd, cpu, gpu, io, metric
from multiprocessing import cpu_count
import argparse
classes = ['Cl1', 'lass2', 'ass3', 'ss4']
parser = argparse.ArgumentParser(description='Batch prediction')
parser.add_argument('--num-gpus', type=int, default=0,
                    help='number of GPUs to use.')
parser.add_argument('-j', '--num-data-workers', dest='num_workers', default=cpu_count(), type=int,
                    help='number of preprocessing workers')
parser.add_argument('--batch-size', type=int, default=32,
                    help='batch size per device (CPU/GPU).')
parser.add_argument('--model-name', type=str, default='resnet18_v1',
                    help='gluoncv model_zoo architecture to use.')
parser.add_argument('--model', type=str, default='model',
                    help='path to the trained model parameters (.params) file.')
# off by default; pass --one-class to display only the top prediction per sample
parser.add_argument('--one-class', default=False, action='store_true',
                    help='display the probability of the best class only')
opt = parser.parse_args()
num_gpus = opt.num_gpus
batch_size = opt.batch_size * max(1, num_gpus)  # total batch size across all devices
context = [gpu(i) for i in range(num_gpus)] if num_gpus > 0 else [cpu()]

# ImageNet-style input size and per-channel normalization constants
input_size = 224
mean_rgb = [123.68, 116.779, 103.939]
std_rgb = [58.393, 57.12, 57.375]

# confusion-matrix counters (positive class: 'Cl1') and top-1 accuracy metric
tp, fn, fp, tn = 0, 0, 0, 0
acc_top1 = metric.Accuracy()
def roc(suppose, predicted):
    """Update the binary confusion-matrix counters, treating 'Cl1' as the positive class."""
    global tp, fn, fp, tn
    if suppose == 'Cl1':
        if suppose == predicted:
            tp += 1
        else:
            fn += 1
    else:
        if predicted == 'Cl1':
            fp += 1
        else:
            tn += 1
def batch_fn(batch, ctx):
    """Split a DataBatch's data and labels across the available devices."""
    data = utils.split_and_load(batch.data[0], ctx_list=ctx, batch_axis=0)
    label = utils.split_and_load(batch.label[0], ctx_list=ctx, batch_axis=0)
    return data, label
# build the network architecture and load the trained weights
net = get_model(opt.model_name, ctx=context, classes=4, pretrained=False)
net.load_parameters(opt.model, ctx=context)
test_data = io.ImageRecordIter(
    path_imgrec="tests/test_crims.rec",
    path_imgidx="tests/test_crims.idx",
    preprocess_threads=opt.num_workers,
    shuffle=False,
    batch_size=batch_size,
    rand_crop=False,
    resize=input_size,
    data_shape=(3, input_size, input_size),
    mean_r=mean_rgb[0],
    mean_g=mean_rgb[1],
    mean_b=mean_rgb[2],
    std_r=std_rgb[0],
    std_g=std_rgb[1],
    std_b=std_rgb[2],
)
for i, batch in enumerate(test_data):
    data, labels = batch_fn(batch, context)
    preds = [net(X) for X in data]
    acc_top1.update(labels, preds)
    for pred, label in zip(preds, labels):
        sm = nd.softmax(pred)
        for s in range(pred.shape[0]):
            # class indices sorted by descending score for sample s
            ind = nd.topk(pred, k=len(classes))[s].astype('int')
            class_true = label[s].asscalar().astype('int')
            class_pred = ind[0].asscalar()
            out = "%12s pred as " % (classes[class_true])
            _classes = 1 if opt.one_class else len(classes)
            for j in range(_classes):
                out += '%12s %.3f' % (classes[ind[j].asscalar()], sm[s][ind[j]].asscalar())
            print(out)
            roc(classes[class_true], classes[class_pred])

# overall accuracy from the confusion-matrix counters and from the mxnet metric
total = float(tp + tn + fp + fn)
print(opt.model, "ACC", (tp + tn) / total, acc_top1.get()[1])
mjamroz commented Dec 27, 2019

Example script for batch prediction on CPU or GPU using MXNet Gluon and the GluonCV model zoo.
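For example, assuming the script is saved as predict.py and the trained weights live in model.params (both file names are only placeholders), an invocation might look like: python3 predict.py --model-name resnet18_v1 --model model.params --num-gpus 1 --batch-size 64. Note that the record/index file paths (tests/test_crims.rec and .idx) are hardcoded in the script.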
