A dashboard application built using Django and Chart.js.
- Uploaded CSV files must be in the following format:
Assay | January | November | December | Year | AssayID | MachineID |
---|---|---|---|---|---|---|
Sick panel (MS/MS) | 279 | 219 | 220 | 2020 | SICKPANEL_20 | FI-MSMS |
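As a rough sketch (not the app's actual code), a file in this layout can be loaded with pandas and reshaped into the labels/datasets structure Chart.js expects. The column names come from the table above; the function name and the comma delimiter are assumptions:

```python
import pandas as pd

def csv_to_chart_payload(path):
    """Hypothetical helper: turn an uploaded CSV into a Chart.js-style payload."""
    df = pd.read_csv(path)  # assumes a comma-separated file with the headers shown above
    month_columns = ["January", "November", "December"]
    return {
        "labels": df["Assay"].tolist(),
        "datasets": [
            {"label": month, "data": df[month].tolist()} for month in month_columns
        ],
    }
```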
# Force http to https
server {
    server_name genelookup.research.cgm.genetics.kfshrc.edu.sa www.genelookup.research.cgm.genetics.kfshrc.edu.sa;
    return 301 https://genelookup.research.cgm.genetics.kfshrc.edu.sa$request_uri;
}

# NGINX server block
server {
    server_name genelookup.research.cgm.genetics.kfshrc.edu.sa www.genelookup.research.cgm.genetics.kfshrc.edu.sa;
<div class="gmail_quote"> | |
<div dir="auto"> | |
<div> | |
<div class="gmail_quote"> | |
<div class="adM"><br></div><u></u> | |
<div style="background-color:transparent;margin:0;padding:0"> | |
<table border="0" cellpadding="0" cellspacing="0" role="presentation" | |
style="background-color:transparent" width="100%"> | |
<tbody> | |
<tr> |
import requests
import os
import pandas as pd
import traceback
import time
import ntpath
import warnings

# Silence pandas PerformanceWarning noise
warnings.simplefilter(action='ignore', category=pd.errors.PerformanceWarning)
import cv2

def recognize_plate(img, coords):
    plate_num = ""
    # separate plate from image, with a small margin around the detection box
    xmin, ymin, xmax, ymax = coords
    box = img[int(ymin)-5:int(ymax)+5, int(xmin)-5:int(xmax)+5]
    # grayscale and resize
    grayimg = cv2.cvtColor(box, cv2.COLOR_RGB2GRAY)
    grayimg = cv2.resize(grayimg, None, fx=3, fy=3, interpolation=cv2.INTER_CUBIC)
    # threshold the image using Otsu's method to preprocess for tesseract
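The snippet stops at the preprocessing comment. A minimal sketch of the remaining steps, assuming pytesseract is the OCR backend implied by that comment (the `--psm 7` setting and the character filtering are assumptions, not taken from the original):

```python
import cv2
import pytesseract

def ocr_plate_crop(grayimg):
    """Hypothetical continuation: binarize with Otsu's method, then OCR with Tesseract."""
    # Otsu's method picks the binarization threshold automatically
    _, thresh = cv2.threshold(grayimg, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)
    # --psm 7 treats the crop as a single line of text (assumed setting)
    text = pytesseract.image_to_string(thresh, config='--psm 7')
    # keep only alphanumeric characters for the plate string
    return "".join(ch for ch in text if ch.isalnum())
```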
import cv2
import mediapipe as mp
import numpy as np
import math

# MediaPipe Hands detector and drawing utilities
mphands = mp.solutions.hands
hands = mphands.Hands()
mpDraw = mp.solutions.drawing_utils

# list for landmark coordinates collected per frame
lmlist = []
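A short sketch of how this setup is typically driven from a webcam loop; the loop itself is not part of the original snippet and is shown only as an assumed usage pattern:

```python
import cv2
import mediapipe as mp

mphands = mp.solutions.hands
hands = mphands.Hands()
mpDraw = mp.solutions.drawing_utils

cap = cv2.VideoCapture(0)
while True:
    ok, img = cap.read()
    if not ok:
        break
    # MediaPipe expects RGB input; OpenCV captures BGR
    results = hands.process(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
    lmlist = []
    if results.multi_hand_landmarks:
        for handLms in results.multi_hand_landmarks:
            h, w, _ = img.shape
            # convert normalized landmark coordinates to pixel positions
            lmlist = [(int(lm.x * w), int(lm.y * h)) for lm in handLms.landmark]
            mpDraw.draw_landmarks(img, handLms, mphands.HAND_CONNECTIONS)
    cv2.imshow("Hands", img)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
cap.release()
cv2.destroyAllWindows()
```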
{
"image": "/9j/4AAQSkZJRgABAQEAlgCWAAD/2wCEAAwJCQsJCAwLCgsODQwPEx8UExEREyYbHRcfLSgwLywoLCsyOEg9MjVENissPlU/REpMUFFQMDxYXldOXkhPUE0BDQ4OExATJRQUJU0zLDNNTU1NTU1NTU1NTU1NTU1NTU1NTU1NTU1NTU1NTU1NTU1NTU1NTU1NTU1NTU1NTU1NTf/CABEIAUQCBAMBIQACEQEDEQH/xAAwAAACAwEBAQAAAAAAAAAAAAAAAQIDBAUGBwEBAQEBAAAAAAAAAAAAAAAAAAECA//aAAwDAQACEAMQAAAA+iLUQgUyBA0QMUBgMEAAg1GJgCAAAYgbUAAEAGArRFUBS2Uky6ZCFtrITlz6hxqDTO5L3a2MUAQhGgYgk1QAIEIAwBQGxABIaAwaWUJ53oY51Y4u0W8j0UmMt289rN1WcmeTTlOvo3u9XQBx5nzfsM8tb32ELkzPSqasxpvi1glMQ6VDusFWcaqFjpFrusE5ncb1G6aYvP0soTNz850xQxydWet87Ljmq5Ovq6dMBIVyV9i3QtbQPi5nB9bnnsOnVoq4GcEcZqnZbUujoXXMrmZqKdtm/iXVlkkM6bOld7pb3j2TUy3n3XLkgodZk2W2xgzSTtoRYKCRKsjKCCXl9SZkXTQoWEpYyRQK7yhxg6EjqrWXURCQnROMO2bmVz7rkkkZQqTzXqrbVByM5olMa429J62hIvVsM1BRy84x9HON93ToAQ1bXmpFOHOZV2aHGToGYNF+tSajKzUkpMe/O5lvOvuRySV0RmaNFsnCLVCSMktLoUyirIystXHxjn9bPPpPp2EKNlqlC2HOxirRc21y3y1rHDOK3HZOnQlJWtJmWPc3MOFfnkCMVCCJMUlQgDLXGBoSWLpKBJMVoTRihanIgbE7aHMq5Zq0cgU5Lj6jcy3naKnBGyMgiTQpkWSgSIjKkiUYJEJU4QldRYlUoxOczCRUrqSFIi0shMzEjISHEzdKamW |
void getRecognisedText() async {
  // final inputImage = InputImage.fromFilePath(image.path);
  String getPrettyJSONString(jsonObject) {
    var encoder = new JsonEncoder.withIndent(" ");
    return encoder.convert(jsonObject);
  }

  textScanning = true;
  setState(() {});

  const url = 'https://ocrapi-5l4bm6okaa-uc.a.run.app/api';
import cv2
import time

def Facedetect():
    face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_alt2.xml')
    cap = cv2.VideoCapture(0)
    while True:
        # Capture frame-by-frame
        ret, frame = cap.read()
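The loop is cut off after the capture step. A sketch of how the rest of Facedetect might look, following the usual detect/draw/display pattern (the cascade parameters, window name, and 'q' exit key are assumptions):

```python
import cv2

def facedetect_sketch():
    """Hypothetical completion of the loop above."""
    face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_alt2.xml')
    cap = cv2.VideoCapture(0)
    while True:
        ret, frame = cap.read()
        if not ret:
            break
        # Haar cascades operate on grayscale frames
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5)
        # draw a bounding box around each detected face
        for (x, y, w, h) in faces:
            cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
        cv2.imshow('Face detection', frame)
        # exit on 'q'
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    cap.release()
    cv2.destroyAllWindows()
```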