Skip to content

Instantly share code, notes, and snippets.

View amansrivastava17's full-sized avatar
🎯
Your I-Can is more important than your IQ

Aman Srivastava amansrivastava17

🎯
Your I-Can is more important than your IQ
View GitHub Profile
public class MLModel {
// NOTE(review): this snippet is truncated by the page scrape — the method body,
// the if-block opened below, and all closing braces are missing; it will not
// compile as-is. Left byte-identical; only comments added.
// Per-class output buffer sized from an external Constant class (not visible here).
float[] probabilities = new float[Constant.MODEL_NUM_CLASS];
public void configureHostedModelSource() {
// [START mlkit_cloud_model_source]
// Download conditions for the Firebase-hosted custom model;
// requireWifi() restricts the model download to Wi-Fi connections.
FirebaseModelDownloadConditions.Builder conditionsBuilder =
new FirebaseModelDownloadConditions.Builder().requireWifi();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
// Enable advanced conditions on Android Nougat and newer.
// NOTE(review): the fields below appear to belong to a separate `Constant`
// holder class whose declaration is outside this snippet (it is referenced as
// Constant.MODEL_NUM_CLASS above). They are public mutable statics — consider
// making them `public static final` once the full class is in view.
// model name given to custom model stored on MLKit
public static String REMOTE_MODEL_NAME = "mlmodel";
// model name given to model stored locally (can be the same as on MLkit)
public static String LOCAL_MODEL_NAME = "mlmodel";
// file for word dict with word to index map
public static String WORD_DICT_FILE = "word_index.json";
// file for model stored locally inside assets
public static String LOCAL_MODEL_FILE = "model.tflite";
// input shape to your model (max value of index in word_index.json file)
public static Integer MODEL_INPUT_SHAPE = 30;
// NOTE(review): Gradle fragments below are spliced from several files by the
// page scrape — `aaptOptions {` is never closed and the block nesting does not
// balance. Left byte-identical; only comments added.
android {
...
}
buildTypes {
release {
...
}
}
// Keep .tflite files uncompressed in the APK so the interpreter can
// memory-map the model file directly from assets.
aaptOptions {
noCompress "tflite"
// Firebase ML model-interpreter dependency used to run the custom TFLite model.
dependencies {
// ...
implementation 'com.google.firebase:firebase-ml-model-interpreter:21.0.0'
}
// Google services plugin wires in google-services.json for Firebase.
apply plugin: 'com.google.gms.google-services'
def convert_model_to_tflite(keras_model_path):
    """Convert a saved Keras model into a post-training-quantized TFLite model.

    Args:
        keras_model_path: Path to the Keras ``.h5`` model file.

    Side effects:
        Writes the converted flatbuffer to ``model.tflite`` in the current
        working directory.
    """
    # Silence TF's verbose logging during conversion (TF 1.x API).
    tf.logging.set_verbosity(tf.logging.ERROR)
    converter = tf.contrib.lite.TFLiteConverter.from_keras_model_file(keras_model_path)
    # Post-training quantization shrinks the model for on-device use.
    converter.post_training_quantize = True
    tflite_buffer = converter.convert()
    # BUG FIX: the original `open(...).write(...)` never closed the file
    # handle; a context manager guarantees the file is flushed and closed.
    with open('model.tflite', 'wb') as out_file:
        out_file.write(tflite_buffer)
    print('TFLite model created.')
def convert_model_to_tflite(keras_model_path):
    """Convert the Keras model at ``keras_model_path`` to a quantized TFLite
    flatbuffer written to ``model.tflite``.

    NOTE(review): this is a verbatim duplicate of an earlier definition in this
    file and silently shadows it — consider deleting one copy.
    """
    # Quiet down TF 1.x logging while converting.
    tf.logging.set_verbosity(tf.logging.ERROR)
    converter = tf.contrib.lite.TFLiteConverter.from_keras_model_file(keras_model_path)
    # Apply post-training quantization to reduce model size.
    converter.post_training_quantize = True
    tflite_buffer = converter.convert()
    # BUG FIX: use a context manager instead of the original leaked
    # `open(...).write(...)` so the handle is closed even on error.
    with open('model.tflite', 'wb') as out_file:
        out_file.write(tflite_buffer)
    print('TFLite model created.')
# NOTE(review): several defects in this snippet, left byte-identical because
# the trailing `for` loop's body is truncated by the scrape and cannot be
# safely reconstructed:
#   - the `def` line is missing its trailing colon (SyntaxError as written);
#   - the classifier is loaded from the hard-coded 'models/models.h5',
#     ignoring the `model_path` parameter;
#   - r'.,:?{}' is a regex *sequence* ('.' matches any char, '?' quantifies
#     ':'), not the character class [.,:?{}] the tokenizer filters suggest
#     was intended;
#   - `sentences` is assigned but never used — `sentence.split()` below
#     tokenizes the raw, un-cleaned input instead;
#   - the `for index, each in enumerate(p):` loop has no visible body.
def test(sentence, model_path, word_index_path)
classifier = models.load_model( 'models/models.h5' )
tokenizer = tf.keras.preprocessing.text.Tokenizer(filters='.,:?{} ')
sentences = re.sub(r'.,:?{}', ' ', sentence)
# Restore the training-time vocabulary so indices match the saved model.
with open(word_index_path, 'r') as f:
tokenizer.word_index = json.loads(f.read())
tokenized_messages = tokenizer.texts_to_matrix(sentence.split())
# First (only) row of predictions: one probability per class.
p = list(classifier.predict(tokenized_messages)[0])
for index, each in enumerate(p):
from sklearn.preprocessing import LabelEncoder
import tensorflow as tf
from keras.layers import Dense, Input, Dropout
from tensorflow.python.keras import models, optimizers, losses, activations
from keras.layers.normalization import BatchNormalization
from keras.callbacks import EarlyStopping, ModelCheckpoint
from sklearn.model_selection import train_test_split
LE = LabelEncoder()
def train_and_eval(sentences, label):
    """Build a word→index vocabulary from ``sentences`` and persist it.

    Args:
        sentences: Iterable of raw text strings.
        label: Class labels for the sentences. Currently unused in the visible
            code — presumably consumed by training code elsewhere in the gist;
            TODO confirm. Kept for interface compatibility.

    Side effects:
        Writes the vocabulary mapping to ``word_index.json`` in the current
        working directory.
    """
    # converting categorical label
    # BUG FIX: the original pattern r'.,:?{}' was a regex *sequence*
    # ('.' = any char, '?' quantified ':'); the character class [.,:?{}]
    # matches each punctuation mark individually, mirroring the Keras
    # tokenizer filters used elsewhere in this gist.
    sentences = [re.sub(r'[.,:?{}]', ' ', sentence) for sentence in sentences]
    corpus = " ".join(sentences)
    # BUG FIX: the original tokenized `doc.split()`, but `doc` was never
    # defined (NameError at runtime); the joined corpus is what should be
    # split into the vocabulary.
    words = set(corpus.split())
    word_index = {word: index for index, word in enumerate(words)}
    with open('word_index.json', 'w') as file:
        json.dump(word_index, file)
import csv

# Load the training data: column 0 holds the sentence text, column 1 the label.
sentences, labels = [], []
# newline='' is the csv-module-documented way to open CSV files so quoted
# fields containing embedded newlines are parsed correctly.
with open('data.csv', 'r', newline='') as f:
    for row in csv.reader(f):
        # Skip blank or short rows instead of raising IndexError
        # (the original crashed on any row with fewer than two columns).
        if len(row) < 2:
            continue
        sentences.append(row[0])
        labels.append(row[1])