Skip to content

Instantly share code, notes, and snippets.

@Melihemin
Created December 19, 2021 08:59
Show Gist options
  • Save Melihemin/f4830ae352292ae7b3a38642d22527fb to your computer and use it in GitHub Desktop.
Save Melihemin/f4830ae352292ae7b3a38642d22527fb to your computer and use it in GitHub Desktop.
Display the source blob
Display the rendered blob
Raw
{"metadata":{"kernelspec":{"language":"python","display_name":"Python 3","name":"python3"},"language_info":{"name":"python","version":"3.7.6","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"}},"nbformat_minor":4,"nbformat":4,"cells":[{"cell_type":"markdown","source":"## In this tutorial, I will show how to code a license plate recognizer for Indian license plates using deep learning and some image processing.\n### Find the detailed explanation of the project in this blog: https://towardsdatascience.com/ai-based-indian-license-plate-detector-de9d48ca8951?source=friends_link&sk=a2cbd70e630f6dc3d030e3bae34d98ef","metadata":{}},{"cell_type":"code","source":"import matplotlib.pyplot as plt\nimport numpy as np\nimport cv2\nimport tensorflow as tf\nfrom sklearn.metrics import f1_score \nfrom tensorflow.keras import optimizers\nfrom tensorflow.keras.models import Sequential\nfrom tensorflow.keras.preprocessing.image import ImageDataGenerator\nfrom tensorflow.keras.layers import Dense, Flatten, MaxPooling2D, Dropout, Conv2D","metadata":{"id":"ycRjhI25UC-P","execution":{"iopub.status.busy":"2021-12-19T08:23:46.853943Z","iopub.execute_input":"2021-12-19T08:23:46.854302Z","iopub.status.idle":"2021-12-19T08:23:46.860324Z","shell.execute_reply.started":"2021-12-19T08:23:46.854272Z","shell.execute_reply":"2021-12-19T08:23:46.859196Z"},"trusted":true},"execution_count":10,"outputs":[]},{"cell_type":"code","source":"# Loads the data required for detecting the license plates from cascade classifier.\nplate_cascade = cv2.CascadeClassifier('../input/ai-indian-license-plate-recognition-data/indian_license_plate.xml')\n# add the path to 'india_license_plate.xml' file.","metadata":{"id":"fMDZHcIuGJLe","execution":{"iopub.status.busy":"2021-12-19T08:23:47.028729Z","iopub.execute_input":"2021-12-19T08:23:47.029056Z","iopub.status.idle":"2021-12-19T08:23:47.039103Z","shell.execute_reply.started":"2021-12-19T08:23:47.029026Z","shell.execute_reply":"2021-12-19T08:23:47.038324Z"},"trusted":true},"execution_count":11,"outputs":[]},{"cell_type":"code","source":"def detect_plate(img, text=''): # the function detects and perfors blurring on the number plate.\n plate_img = img.copy()\n roi = img.copy()\n plate_rect = plate_cascade.detectMultiScale(plate_img, scaleFactor = 1.2, minNeighbors = 7) # detects numberplates and returns the coordinates and dimensions of detected license plate's contours.\n for (x,y,w,h) in plate_rect:\n roi_ = roi[y:y+h, x:x+w, :] # extracting the Region of Interest of license plate for blurring.\n plate = roi[y:y+h, x:x+w, :]\n cv2.rectangle(plate_img, (x+2,y), (x+w-3, y+h-5), (51,181,155), 3) # finally representing the detected contours by drawing rectangles around the edges.\n if text!='':\n plate_img = cv2.putText(plate_img, text, (x-w//2,y-h//2), \n cv2.FONT_HERSHEY_COMPLEX_SMALL , 0.5, (51,181,155), 1, cv2.LINE_AA)\n \n return plate_img, plate # returning the processed image.","metadata":{"id":"r6BZ2WY8GJHM","execution":{"iopub.status.busy":"2021-12-19T08:23:47.207448Z","iopub.execute_input":"2021-12-19T08:23:47.207749Z","iopub.status.idle":"2021-12-19T08:23:47.217998Z","shell.execute_reply.started":"2021-12-19T08:23:47.207721Z","shell.execute_reply":"2021-12-19T08:23:47.216029Z"},"trusted":true},"execution_count":12,"outputs":[]},{"cell_type":"code","source":"# Testing the above function\ndef display(img_, title=''):\n img = cv2.cvtColor(img_, cv2.COLOR_BGR2RGB)\n fig = plt.figure(figsize=(10,6))\n ax = plt.subplot(111)\n ax.imshow(img)\n plt.axis('off')\n plt.title(title)\n plt.show()\n\n#img = cv2.imread('../input/ai-indian-license-plate-recognition-data/car.jpg')\nimg = cv2.imread('../input/carslicance/x5_896093345gw8.jpg')\ndisplay(img, 'input image')","metadata":{"id":"RJ8ScvVJGgH_","outputId":"a9bc10bf-339e-422d-980f-d97120e6ac64","execution":{"iopub.status.busy":"2021-12-19T08:55:29.259161Z","iopub.execute_input":"2021-12-19T08:55:29.259488Z","iopub.status.idle":"2021-12-19T08:55:29.432087Z","shell.execute_reply.started":"2021-12-19T08:55:29.259458Z","shell.execute_reply":"2021-12-19T08:55:29.431344Z"},"trusted":true},"execution_count":73,"outputs":[]},{"cell_type":"code","source":"# Getting plate prom the processed image\noutput_img, plate = detect_plate(img)","metadata":{"id":"TIMcAMmUGgFB","execution":{"iopub.status.busy":"2021-12-19T08:55:30.019960Z","iopub.execute_input":"2021-12-19T08:55:30.020310Z","iopub.status.idle":"2021-12-19T08:55:30.054487Z","shell.execute_reply.started":"2021-12-19T08:55:30.020273Z","shell.execute_reply":"2021-12-19T08:55:30.053454Z"},"trusted":true},"execution_count":74,"outputs":[]},{"cell_type":"code","source":"display(output_img, 'detected license plate in the input image')","metadata":{"id":"zCfOMO__HEUf","outputId":"10a225cc-e32b-45ca-dc61-ddfea614b562","execution":{"iopub.status.busy":"2021-12-19T08:55:31.683321Z","iopub.execute_input":"2021-12-19T08:55:31.683665Z","iopub.status.idle":"2021-12-19T08:55:31.840025Z","shell.execute_reply.started":"2021-12-19T08:55:31.683631Z","shell.execute_reply":"2021-12-19T08:55:31.839232Z"},"trusted":true},"execution_count":75,"outputs":[]},{"cell_type":"code","source":"display(plate, 'extracted license plate from the image')","metadata":{"id":"kGk622P-HERv","outputId":"e5fac915-27e2-45da-8a1a-a2a5226049a4","execution":{"iopub.status.busy":"2021-12-19T08:55:33.424947Z","iopub.execute_input":"2021-12-19T08:55:33.425314Z","iopub.status.idle":"2021-12-19T08:55:33.489497Z","shell.execute_reply.started":"2021-12-19T08:55:33.425282Z","shell.execute_reply":"2021-12-19T08:55:33.488424Z"},"trusted":true},"execution_count":76,"outputs":[]},{"cell_type":"code","source":"# Match contours to license plate or character template\ndef find_contours(dimensions, img) :\n\n # Find all contours in the image\n cntrs, _ = cv2.findContours(img.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n\n # Retrieve potential dimensions\n lower_width = dimensions[0]\n upper_width = dimensions[1]\n lower_height = dimensions[2]\n upper_height = dimensions[3]\n \n # Check largest 5 or 15 contours for license plate or character respectively\n cntrs = sorted(cntrs, key=cv2.contourArea, reverse=True)[:15]\n \n ii = cv2.imread('contour.jpg')\n \n x_cntr_list = []\n target_contours = []\n img_res = []\n for cntr in cntrs :\n # detects contour in binary image and returns the coordinates of rectangle enclosing it\n intX, intY, intWidth, intHeight = cv2.boundingRect(cntr)\n \n # checking the dimensions of the contour to filter out the characters by contour's size\n if intWidth > lower_width and intWidth < upper_width and intHeight > lower_height and intHeight < upper_height :\n x_cntr_list.append(intX) #stores the x coordinate of the character's contour, to used later for indexing the contours\n\n char_copy = np.zeros((44,24))\n # extracting each character using the enclosing rectangle's coordinates.\n char = img[intY:intY+intHeight, intX:intX+intWidth]\n char = cv2.resize(char, (20, 40))\n \n cv2.rectangle(ii, (intX,intY), (intWidth+intX, intY+intHeight), (50,21,200), 2)\n plt.imshow(ii, cmap='gray')\n\n # Make result formatted for classification: invert colors\n char = cv2.subtract(255, char)\n\n # Resize the image to 24x44 with black border\n char_copy[2:42, 2:22] = char\n char_copy[0:2, :] = 0\n char_copy[:, 0:2] = 0\n char_copy[42:44, :] = 0\n char_copy[:, 22:24] = 0\n\n img_res.append(char_copy) # List that stores the character's binary image (unsorted)\n \n # Return characters on ascending order with respect to the x-coordinate (most-left character first)\n \n plt.show()\n # arbitrary function that stores sorted list of character indeces\n indices = sorted(range(len(x_cntr_list)), key=lambda k: x_cntr_list[k])\n img_res_copy = []\n for idx in indices:\n img_res_copy.append(img_res[idx])# stores character images according to their index\n img_res = np.array(img_res_copy)\n\n return img_res","metadata":{"id":"MzopHrMvUC-Z","execution":{"iopub.status.busy":"2021-12-19T08:55:35.677493Z","iopub.execute_input":"2021-12-19T08:55:35.677814Z","iopub.status.idle":"2021-12-19T08:55:35.693537Z","shell.execute_reply.started":"2021-12-19T08:55:35.677784Z","shell.execute_reply":"2021-12-19T08:55:35.692594Z"},"trusted":true},"execution_count":77,"outputs":[]},{"cell_type":"code","source":"# Find characters in the resulting images\ndef segment_characters(image) :\n\n # Preprocess cropped license plate image\n img_lp = cv2.resize(image, (333, 75))\n img_gray_lp = cv2.cvtColor(img_lp, cv2.COLOR_BGR2GRAY)\n _, img_binary_lp = cv2.threshold(img_gray_lp, 200, 255, cv2.THRESH_BINARY+cv2.THRESH_OTSU)\n img_binary_lp = cv2.erode(img_binary_lp, (3,3))\n img_binary_lp = cv2.dilate(img_binary_lp, (3,3))\n\n LP_WIDTH = img_binary_lp.shape[0]\n LP_HEIGHT = img_binary_lp.shape[1]\n\n # Make borders white\n img_binary_lp[0:3,:] = 255\n img_binary_lp[:,0:3] = 255\n img_binary_lp[72:75,:] = 255\n img_binary_lp[:,330:333] = 255\n\n # Estimations of character contours sizes of cropped license plates\n dimensions = [LP_WIDTH/6,\n LP_WIDTH/2,\n LP_HEIGHT/10,\n 2*LP_HEIGHT/3]\n plt.imshow(img_binary_lp, cmap='gray')\n plt.show()\n cv2.imwrite('contour.jpg',img_binary_lp)\n\n # Get contours within cropped license plate\n char_list = find_contours(dimensions, img_binary_lp)\n\n return char_list","metadata":{"id":"h23diSmEUC-e","execution":{"iopub.status.busy":"2021-12-19T08:55:37.455558Z","iopub.execute_input":"2021-12-19T08:55:37.455875Z","iopub.status.idle":"2021-12-19T08:55:37.467509Z","shell.execute_reply.started":"2021-12-19T08:55:37.455845Z","shell.execute_reply":"2021-12-19T08:55:37.466408Z"},"trusted":true},"execution_count":78,"outputs":[]},{"cell_type":"code","source":"# Let's see the segmented characters\nchar = segment_characters(plate)","metadata":{"id":"OGhFmSnYUC-j","outputId":"e9aea986-3422-4173-b4d8-11582f80d2c2","execution":{"iopub.status.busy":"2021-12-19T08:55:38.870945Z","iopub.execute_input":"2021-12-19T08:55:38.871463Z","iopub.status.idle":"2021-12-19T08:55:39.159973Z","shell.execute_reply.started":"2021-12-19T08:55:38.871338Z","shell.execute_reply":"2021-12-19T08:55:39.159012Z"},"trusted":true},"execution_count":79,"outputs":[]},{"cell_type":"code","source":"for i in range(8):\n plt.subplot(1, 8, i+1)\n plt.imshow(char[i], cmap='gray')\n plt.axis('off')","metadata":{"id":"rZoiyrDaUC-p","outputId":"e9210536-ff7e-4214-ce8a-ff1274656036","execution":{"iopub.status.busy":"2021-12-19T08:55:44.108615Z","iopub.execute_input":"2021-12-19T08:55:44.108988Z","iopub.status.idle":"2021-12-19T08:55:44.356324Z","shell.execute_reply.started":"2021-12-19T08:55:44.108955Z","shell.execute_reply":"2021-12-19T08:55:44.355334Z"},"trusted":true},"execution_count":80,"outputs":[]},{"cell_type":"markdown","source":"### Model for characters","metadata":{"id":"QXhqHfXLUC-9"}},{"cell_type":"code","source":"import tensorflow.keras.backend as K\ntrain_datagen = ImageDataGenerator(rescale=1./255, width_shift_range=0.1, height_shift_range=0.1)\npath = '../input/ai-indian-license-plate-recognition-data/data/data'\ntrain_generator = train_datagen.flow_from_directory(\n path+'/train', # this is the target directory\n target_size=(28,28), # all images will be resized to 28x28\n batch_size=1,\n class_mode='sparse')\n\nvalidation_generator = train_datagen.flow_from_directory(\n path+'/val', # this is the target directory\n target_size=(28,28), # all images will be resized to 28x28 batch_size=1,\n class_mode='sparse')","metadata":{"id":"BhrsmfX9UC_p","outputId":"55577343-eb1d-4980-99c0-59ecf8eefd99","execution":{"iopub.status.busy":"2021-12-19T08:26:15.267489Z","iopub.execute_input":"2021-12-19T08:26:15.267886Z","iopub.status.idle":"2021-12-19T08:26:15.860173Z","shell.execute_reply.started":"2021-12-19T08:26:15.267855Z","shell.execute_reply":"2021-12-19T08:26:15.858677Z"},"trusted":true},"execution_count":26,"outputs":[]},{"cell_type":"code","source":"# Metrics for checking the model performance while training\ndef f1score(y, y_pred):\n return f1_score(y, tf.math.argmax(y_pred, axis=1), average='micro') \n\ndef custom_f1score(y, y_pred):\n return tf.py_function(f1score, (y, y_pred), tf.double)","metadata":{"id":"WXdiO1Kq9kPI","execution":{"iopub.status.busy":"2021-12-19T08:26:18.476103Z","iopub.execute_input":"2021-12-19T08:26:18.476478Z","iopub.status.idle":"2021-12-19T08:26:18.482407Z","shell.execute_reply.started":"2021-12-19T08:26:18.476448Z","shell.execute_reply":"2021-12-19T08:26:18.481323Z"},"trusted":true},"execution_count":27,"outputs":[]},{"cell_type":"code","source":"K.clear_session()\nmodel = Sequential()\nmodel.add(Conv2D(16, (22,22), input_shape=(28, 28, 3), activation='relu', padding='same'))\nmodel.add(Conv2D(32, (16,16), input_shape=(28, 28, 3), activation='relu', padding='same'))\nmodel.add(Conv2D(64, (8,8), input_shape=(28, 28, 3), activation='relu', padding='same'))\nmodel.add(Conv2D(64, (4,4), input_shape=(28, 28, 3), activation='relu', padding='same'))\nmodel.add(MaxPooling2D(pool_size=(4, 4)))\nmodel.add(Dropout(0.4))\nmodel.add(Flatten())\nmodel.add(Dense(128, activation='relu'))\nmodel.add(Dense(36, activation='softmax'))\n\nmodel.compile(loss='sparse_categorical_crossentropy', optimizer=optimizers.Adam(lr=0.0001), metrics=[custom_f1score])","metadata":{"id":"8IjCdBYrp4EK","execution":{"iopub.status.busy":"2021-12-19T08:26:19.497853Z","iopub.execute_input":"2021-12-19T08:26:19.498200Z","iopub.status.idle":"2021-12-19T08:26:21.734727Z","shell.execute_reply.started":"2021-12-19T08:26:19.498165Z","shell.execute_reply":"2021-12-19T08:26:21.733829Z"},"trusted":true},"execution_count":28,"outputs":[]},{"cell_type":"code","source":"model.summary()","metadata":{"execution":{"iopub.status.busy":"2021-12-19T08:26:22.630079Z","iopub.execute_input":"2021-12-19T08:26:22.630446Z","iopub.status.idle":"2021-12-19T08:26:22.638947Z","shell.execute_reply.started":"2021-12-19T08:26:22.630416Z","shell.execute_reply":"2021-12-19T08:26:22.637376Z"},"trusted":true},"execution_count":29,"outputs":[]},{"cell_type":"code","source":"class stop_training_callback(tf.keras.callbacks.Callback):\n def on_epoch_end(self, epoch, logs={}):\n if(logs.get('val_custom_f1score') > 0.99):\n self.model.stop_training = True","metadata":{"id":"w5aaqsHABUwx","execution":{"iopub.status.busy":"2021-12-19T08:26:23.717679Z","iopub.execute_input":"2021-12-19T08:26:23.718008Z","iopub.status.idle":"2021-12-19T08:26:23.724901Z","shell.execute_reply.started":"2021-12-19T08:26:23.717976Z","shell.execute_reply":"2021-12-19T08:26:23.722439Z"},"trusted":true},"execution_count":30,"outputs":[]},{"cell_type":"code","source":"batch_size = 1\ncallbacks = [stop_training_callback()]\nmodel.fit_generator(\n train_generator,\n steps_per_epoch = train_generator.samples // batch_size,\n validation_data = validation_generator, \n epochs = 80, verbose=1, callbacks=callbacks)","metadata":{"id":"KPAtDd_Jp4BP","outputId":"77203b1e-c29d-408a-8ae4-8707569449a8","execution":{"iopub.status.busy":"2021-12-19T08:26:24.657890Z","iopub.execute_input":"2021-12-19T08:26:24.658234Z","iopub.status.idle":"2021-12-19T08:28:30.202700Z","shell.execute_reply.started":"2021-12-19T08:26:24.658201Z","shell.execute_reply":"2021-12-19T08:28:30.201731Z"},"trusted":true},"execution_count":31,"outputs":[]},{"cell_type":"code","source":"model.save('license.h5')","metadata":{"execution":{"iopub.status.busy":"2021-12-19T08:28:30.204941Z","iopub.execute_input":"2021-12-19T08:28:30.205350Z","iopub.status.idle":"2021-12-19T08:28:30.263897Z","shell.execute_reply.started":"2021-12-19T08:28:30.205298Z","shell.execute_reply":"2021-12-19T08:28:30.263151Z"},"trusted":true},"execution_count":32,"outputs":[]},{"cell_type":"code","source":"# Predicting the output\ndef fix_dimension(img): \n new_img = np.zeros((28,28,3))\n for i in range(3):\n new_img[:,:,i] = img\n return new_img\n \ndef show_results():\n dic = {}\n characters = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'\n for i,c in enumerate(characters):\n dic[i] = c\n\n output = []\n for i,ch in enumerate(char): #iterating over the characters\n img_ = cv2.resize(ch, (28,28), interpolation=cv2.INTER_AREA)\n img = fix_dimension(img_)\n img = img.reshape(1,28,28,3) #preparing image for the model\n y_ = model.predict_classes(img)[0] #predicting the class\n character = dic[y_] #\n output.append(character) #storing the result in a list\n \n plate_number = ''.join(output)\n \n return plate_number\n","metadata":{"id":"3PICNwtZUDAD","outputId":"dadfd56e-ecd7-419e-938a-4275f18261d6","execution":{"iopub.status.busy":"2021-12-19T08:55:57.634985Z","iopub.execute_input":"2021-12-19T08:55:57.635346Z","iopub.status.idle":"2021-12-19T08:55:57.644471Z","shell.execute_reply.started":"2021-12-19T08:55:57.635313Z","shell.execute_reply":"2021-12-19T08:55:57.643512Z"},"trusted":true},"execution_count":81,"outputs":[]},{"cell_type":"code","source":"# Segmented characters and their predicted value.\nplt.figure(figsize=(10,6))\nfor i,ch in enumerate(char):\n img = cv2.resize(ch, (28,28), interpolation=cv2.INTER_AREA)\n plt.subplot(3,4,i+1)\n plt.imshow(img,cmap='gray')\n plt.title(f'predicted: {show_results()[i]}')\n plt.axis('off')\nplt.show()","metadata":{"id":"urZpH4YFUDAI","outputId":"e0ac44da-d228-4e4f-a609-0eef4b9614da","execution":{"iopub.status.busy":"2021-12-19T08:56:02.964880Z","iopub.execute_input":"2021-12-19T08:56:02.965220Z","iopub.status.idle":"2021-12-19T08:56:05.152719Z","shell.execute_reply.started":"2021-12-19T08:56:02.965184Z","shell.execute_reply":"2021-12-19T08:56:05.151984Z"},"trusted":true},"execution_count":82,"outputs":[]},{"cell_type":"code","source":"plate_number = show_results()\ndisplay(output_img, plate_number)","metadata":{"id":"uBboEZgAUDAT","execution":{"iopub.status.busy":"2021-12-19T08:56:12.615232Z","iopub.execute_input":"2021-12-19T08:56:12.615572Z","iopub.status.idle":"2021-12-19T08:56:13.026550Z","shell.execute_reply.started":"2021-12-19T08:56:12.615540Z","shell.execute_reply":"2021-12-19T08:56:13.025661Z"},"trusted":true},"execution_count":84,"outputs":[]},{"cell_type":"code","source":"","metadata":{},"execution_count":null,"outputs":[]}]}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment