Created January 30, 2018 09:59 — save lokeshsoni/dc48d90eb6917f00916e71a6b7b98f98 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import warnings | |
warnings.filterwarnings("ignore") | |
import os | |
from shutil import copy | |
from flask import jsonify | |
from time import time | |
import numpy as np | |
from flask import Flask, request, render_template, send_from_directory | |
from keras.models import load_model | |
from keras.preprocessing.image import load_img, img_to_array | |
from keras.applications.resnet50 import ResNet50 as validate | |
from keras.preprocessing import image | |
from keras.applications.resnet50 import preprocess_input, decode_predictions | |
from helper_functions import * | |
# Flask application instance; all routes below are registered on it.
app = Flask(__name__)
# Directory where incoming uploads are saved before classification.
UPLOAD = "uploads"
# Every classified image is copied here, into a per-label subdirectory
# (tempered_broken/, screen_broken/, ..., invalid/).
predicted_images = "feedback/predictions/"
# The /feedback endpoint copies user-corrected images here.
feedback_images = "feedback/actual/"
@app.route('/')
def index():
    """Render the landing page of the classifier UI."""
    page_title = 'Broken Screen Classification'
    return render_template("index.html", title=page_title)
@app.route('/prediction-cli', methods=['POST'])
def prediction_cli():
    """Accept an image upload (form field 'image') and return the
    prediction as JSON — intended for CLI/API clients.

    The file is persisted under UPLOAD with a timestamp-derived name so
    concurrent uploads do not clobber each other.
    """
    file_remote = request.files['image']
    # Sample the clock exactly once.  The original called time() twice to
    # build the name, which could straddle a tick and mix two timestamps;
    # it also left this variable unused.
    time_stamp = time()
    # Trust only the extension of the client-supplied filename, and strip
    # any directory components from it first (path-traversal hardening).
    extension = os.path.basename(file_remote.filename).split(".")[-1]
    # e.g. 1517304000.123456 -> "1517304000123456.jpg"
    filename = "{:.6f}".format(time_stamp).replace(".", "") + "." + extension
    file_local = os.path.join(UPLOAD, filename)
    file_remote.save(file_local)
    return jsonify(predict(file_local, "json"))
@app.route('/upload', methods=['POST'])
def upload_image():
    """Accept a browser image upload (form field 'image') and render the
    HTML prediction page for it.
    """
    file_remote = request.files['image']
    # Single clock sample; the original called time() three times (once
    # unused, twice to build the filename from two different readings).
    time_stamp = time()
    # Keep only the extension; never reuse the rest of the client name.
    extension = os.path.basename(file_remote.filename).split(".")[-1]
    filename = "{:.6f}".format(time_stamp).replace(".", "") + "." + extension
    file_local = os.path.join(UPLOAD, filename)
    file_remote.save(file_local)
    return predict(file_local, "html")
@app.route('/feedback', methods=['GET'])
def feedback():
    """Record user feedback by copying the image into
    feedback/actual/<result>.

    Query args:
        filename: path of the previously uploaded image.
        result:   the user-asserted true label (subdirectory name).

    Bug fix: the original returned None, which Flask rejects with a 500
    on every request; a small JSON acknowledgement is returned instead.
    """
    file_name = request.args.get('filename')
    result = request.args.get('result')
    print(file_name)
    print(str(result))
    # SECURITY(review): file_name comes straight from the query string
    # and is passed to copy() unchecked — a crafted value can read any
    # file the server can.  Restrict it to the uploads directory.
    copy(file_name, feedback_images + str(result))
    return jsonify({'status': 'ok'})
@app.route('/' + UPLOAD + '/<path:path>')
def serve_files(path):
    """Serve a previously uploaded image back to the browser."""
    upload_dir = UPLOAD
    return send_from_directory(upload_dir, path)
def predict(file_local, type="json"):
    """Classify an image and report the result.

    Pipeline (the three models are module-level globals loaded in the
    __main__ block):
      1. validate_image   — is this a usable phone image at all?
      2. predict_tempered — tempered(-glass) vs plain screen
      3. predict_broken   — broken vs non-broken screen

    Args:
        file_local: path of the saved upload.
        type: "json" returns {'isValid': bool, 'prediction': str|None};
              anything else renders predict.html.
              (Parameter name shadows the builtin but is kept for
              backward compatibility with existing callers.)

    Side effect: copies the image into the matching subdirectory of
    predicted_images.
    """
    predicted_label = None
    if validate_image(file_local, validate_model):
        # The original spelled out four identical branches; the label is
        # just the cross product of the two binary model outcomes.
        state = "BROKEN" if predict_broken(file_local, broken_model) == "broken" else "NON_BROKEN"
        kind = "TEMPERED" if predict_tempered(file_local, tempered_model) == "tempered" else "SCREEN"
        prediction = "VALID image found " + kind + "-" + state
        predicted_label = (kind + "_" + state).lower()
        copy(file_local, predicted_images + predicted_label + "/")
    else:
        prediction = "INVALID image found"
        copy(file_local, predicted_images + "invalid/")
    if type == "json":
        # isValid is False exactly when validation rejected the image.
        result = {'isValid': True, "prediction": None}
        if predicted_label is None:
            result['isValid'] = False
        else:
            result['prediction'] = predicted_label
        return result
    else:
        return render_template("predict.html", file_local=file_local, prediction=prediction)
if __name__ == "__main__":
    # Load all three Keras models up-front so the request handlers
    # (which read these module-level globals) never pay the load cost
    # per request.  NOTE(review): under a WSGI server this block never
    # runs and the globals are undefined — dev-server use only.
    print("loading validate model")
    # ResNet50 (imported above as `validate`), ImageNet weights; used by
    # validate_image to decide whether an upload is a valid image.
    validate_model = validate(weights='imagenet')
    print("loaded validate model successfully")
    print("loading broken model")
    # Deferred import: utils presumably pulls in heavy dependencies, so
    # it is only imported when actually serving — TODO confirm.
    from utils import broken_model
    # The name `broken_model` is deliberately rebound twice: imported
    # class -> instance -> underlying Keras model.  Order-sensitive.
    broken_model = broken_model()
    broken_model = broken_model.model
    print("loaded broken model successfully")
    print("loading tempered model")
    # Same import-then-rebind pattern as broken_model above.
    from utils import tempered_model
    tempered_model = tempered_model()
    tempered_model = tempered_model.model
    print("loaded tempered model successfully")
    print("started server")
    # 0.0.0.0 exposes the Flask dev server on all interfaces.
    app.run(debug=False, host = '0.0.0.0')
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment