@humamfauzi
Created May 18, 2021 13:37
Example of connecting machine learning with an API in Python
# --- Flask API server: exposes a scikit-learn iris model and a Keras MNIST model ---
from flask import Flask, jsonify, request
from gevent.pywsgi import WSGIServer
from keras.models import model_from_json
import numpy as np
import json
import joblib
import base64

app = Flask(__name__)
current_model = "dummy"  # placeholder until a scikit-learn model is loaded via /update
keras_model = None       # set once a Keras model is loaded via /update/keras
@app.route("/", methods=["GET", "POST"])
def checkPages():
print(request.args.get('asdf'))
print()
return jsonify({"success": True}), 200
@app.route("/type", methods=["GET"])
def pickType():
x_cols = [
'sepal_length',
'sepal_width',
'petal_length',
'petal_width'
]
x_val = np.array([
request.args.get(x) for x in x_cols
], dtype=np.float64)
if None in x_val:
return jsonify({"success": False}), 400
if current_model == "dummy":
return jsonify({"success": False}), 400
[ result ] = current_model.predict([x_val])
[ probability ] = current_model.predict_proba([x_val])
return jsonify({
"success": True,
"payload": {
"prediction": numberToName(result),
"probability": {
numberToName(index): value for index, value in enumerate(probability)
}
}
}), 200
@app.route("/update", methods=['PUT'])
def updateSav():
global current_model
newdir = json.loads(request.data)
current_model_directory = newdir['dir']
current_model = joblib.load(current_model_directory)
return jsonify({"success": True}), 200
def numberToName(number):
    if number == 0:
        return 'setosa'
    elif number == 1:
        return 'versicolor'
    else:
        return 'virginica'
@app.route("/update/keras", methods=["PUT"])
def updateKeras():
global keras_model
newdir = json.loads(request.data)
try:
keras_model = loadModelAndWeights(newdir["dir"], newdir["weight_dir"])
keras_model.compile(loss='binary_crossentropy', optimizer='rmsprop', metrics=['accuracy'])
return jsonify({"success": True}), 200
except:
return jsonify({"success": False}), 400
@app.route("/mnist", methods=["GET"])
def checkMnist():
base64_img = request.args.get("image")
image = base64toArray(base64_img)
[ pred ] = keras_model.predict(image.reshape(1,28,28,1))
result = {
"success": True,
"payload": {
"prediction": str(np.argmax(pred, axis=-1)),
"probability": {
str(index): float(value) for index, value in enumerate(pred)
}
}
}
return jsonify(result), 200
def loadModelAndWeights(model_dir, weights_dir):
    # Rebuild the architecture from its JSON description, then load the trained weights
    loaded_model = None
    with open(model_dir, 'r') as json_raw:
        loaded_model = model_from_json(json_raw.read())
    loaded_model.load_weights(weights_dir)
    return loaded_model


def base64toArray(base64Code):
    # The client is expected to send the raw float32 pixel buffer, base64-encoded
    decoded = base64.b64decode(base64Code)
    return np.frombuffer(decoded, dtype=np.float32)
if __name__ == '__main__':
    http_server = WSGIServer(('', 3030), app)
    http_server.serve_forever()
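
A minimal client sketch for the iris endpoint (not part of the original gist): it assumes the server above is running locally on port 3030, that a scikit-learn model has already been loaded through /update, and that the requests package is installed.

# Hypothetical client for GET /type -- assumes the Flask server above is on localhost:3030
import requests

resp = requests.get("http://localhost:3030/type", params={
    "sepal_length": 5.1,
    "sepal_width": 3.5,
    "petal_length": 1.4,
    "petal_width": 0.2,
})
print(resp.json())  # {"success": true, "payload": {"prediction": ..., "probability": {...}}}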
# --- Separate training script: fits a logistic regression on the iris dataset and
# saves it with joblib so the API can load it through /update ---
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
import joblib


def trainIris():
    X, y = load_iris(return_X_y=True)
    model = LogisticRegression(random_state=0).fit(X, y)
    fileName = "log_reg_model.sav"
    joblib.dump(model, fileName)
    return


if __name__ == '__main__':
    trainIris()
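
After running trainIris(), the saved file can be handed to the running API. This is a sketch, assuming the server is listening on localhost:3030 and requests is installed.

# Hypothetical: point /update at the file written by trainIris()
import requests

requests.put("http://localhost:3030/update", json={"dir": "log_reg_model.sav"})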
# --- Separate training script: trains a small CNN on MNIST and saves the architecture
# (JSON) and weights (HDF5) so the API can load them through /update/keras ---
from keras.models import Sequential
from keras.layers import Dense
from keras.models import model_from_json
from keras.datasets import mnist
from keras.utils import to_categorical
from tensorflow.keras import layers
from keras import Input
import numpy as np
import os


def saveModel(model, model_dir, weight_dir):
    # Serialize the architecture to JSON and the weights to HDF5
    model_json = model.to_json()
    with open(model_dir, "w") as json_file:
        json_file.write(model_json)
    model.save_weights(weight_dir)
    return
def loadModelAndWeights(model_dir, weights_dir):
    # Use the arguments rather than hard-coded paths so any saved model can be loaded
    loaded_model = None
    with open(model_dir, 'r') as json_raw:
        loaded_model = model_from_json(json_raw.read())
    loaded_model.load_weights(weights_dir)
    return loaded_model
if __name__ == '__main__':
    # fix random seed for reproducibility
    np.random.seed(7)
    num_classes = 10
    input_shape = (28, 28, 1)
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    # Scale images to the [0, 1] range
    x_train = x_train.astype("float32") / 255
    x_test = x_test.astype("float32") / 255
    # Make sure images have shape (28, 28, 1)
    x_train = np.expand_dims(x_train, -1)
    x_test = np.expand_dims(x_test, -1)
    y_train = to_categorical(y_train, num_classes)
    y_test = to_categorical(y_test, num_classes)
    # create model
    model = Sequential(
        [
            Input(shape=input_shape),
            layers.Conv2D(32, kernel_size=(3, 3), activation="relu"),
            layers.MaxPooling2D(pool_size=(2, 2)),
            layers.Conv2D(64, kernel_size=(3, 3), activation="relu"),
            layers.MaxPooling2D(pool_size=(2, 2)),
            layers.Flatten(),
            layers.Dropout(0.5),
            layers.Dense(num_classes, activation="softmax"),
        ]
    )
    # Compile model
    batch_size = 128
    epochs = 15
    model.compile(loss="categorical_crossentropy", optimizer="adam", metrics=["accuracy"])
    model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs, validation_split=0.1)
    score = model.evaluate(x_test, y_test, verbose=0)
    print("Test loss:", score[0])
    print("Test accuracy:", score[1])
    saveModel(model, "keras_model.json", "keras_model.h5")
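
Once this script has produced keras_model.json and keras_model.h5, the MNIST endpoint can be exercised end to end. The sketch below is not part of the original gist; it assumes the Flask server is running on localhost:3030 and requests is installed, and it encodes a test digit the way base64toArray expects: a base64-encoded raw float32 buffer of the scaled 28x28 image.

# Hypothetical end-to-end client for the Keras endpoints
import base64
import requests
import numpy as np
from keras.datasets import mnist

BASE = "http://localhost:3030"

# Load the saved architecture and weights into the API
requests.put(BASE + "/update/keras",
             json={"dir": "keras_model.json", "weight_dir": "keras_model.h5"})

# Encode one MNIST test digit as a raw float32 pixel buffer, then base64
(_, _), (x_test, _) = mnist.load_data()
pixels = (x_test[0].astype("float32") / 255).tobytes()
encoded = base64.b64encode(pixels).decode("ascii")

# The image travels as a query parameter, matching the gist's GET design
resp = requests.get(BASE + "/mnist", params={"image": encoded})
print(resp.json())

Passing a few kilobytes of base64 in a query string is fine for a demo, though a request body would be the more usual choice for image payloads.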