Sushrut Ashtikar (novasush)

novasush / push.js
Created June 17, 2019 06:07 — forked from davideast/push.js
Send push notifications with node-apn and Firebase
var apn = require("apn");
var Firebase = require("firebase");
var service = new apn.Connection({ production: false }); // true for production; newer node-apn versions use apn.Provider
// Create a reference to the push notification queue
var pushRef = new Firebase("https://<your-firebase>.firebaseio.com/notificationQueue");
// Listen for items added to the queue and push each one out over APNs
pushRef.on("child_added", function(snapshot) {
  // Assumed queue item shape: { deviceToken: "...", message: "..." }
  var item = snapshot.val();
  var note = new apn.Notification();
  note.alert = item.message;
  service.pushNotification(note, item.deviceToken);
});

novasush / TensorflowFromBrowser.html
Created December 31, 2019 12:34
Training and predicting an equation asynchronously in the Chrome browser using TensorFlow.js
<!DOCTYPE html>
<html>
<head>
    <title>Training a model on browser</title>
    <script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@latest"></script>
    <script lang="js">
        async function doTraining(model){
            const history =
                await model.fit(xs, ys,
                    // Assumed completion of the truncated fit() call;
                    // the epoch count and callback are illustrative only.
                    { epochs: 500,
                      callbacks: { onEpochEnd: (epoch, logs) =>
                          console.log("Epoch: " + epoch + " Loss: " + logs.loss) } });
        }

novasush / Rock_Paper_Scissor.ipynb
Last active March 6, 2020 15:42
A simple Python-based rock-paper-scissors game
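The notebook itself does not render in the gist preview, but given the description, a minimal sketch of one rock-paper-scissors round in plain Python (names and structure are assumptions, not taken from the actual notebook) could look like this:

import random

CHOICES = ("rock", "paper", "scissors")
BEATS = {"rock": "scissors", "paper": "rock", "scissors": "paper"}  # key beats value

def play_round(player_choice):
    """Play one round against a random computer choice and report the outcome."""
    computer_choice = random.choice(CHOICES)
    if player_choice == computer_choice:
        outcome = "draw"
    elif BEATS[player_choice] == computer_choice:
        outcome = "player wins"
    else:
        outcome = "computer wins"
    return computer_choice, outcome

if __name__ == "__main__":
    player = input("rock, paper or scissors? ").strip().lower()
    if player in CHOICES:
        computer, outcome = play_round(player)
        print(f"Computer chose {computer}: {outcome}")
    else:
        print("Invalid choice, please enter rock, paper or scissors.")
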
novasush / setup.py
Last active March 7, 2020 17:43
Setup for a parallelized tf.data input pipeline on the cats_vs_dogs dataset
import multiprocessing
import tensorflow as tf
import tensorflow_datasets as tfds
from os import getcwd

def create_model():
    input_layer = tf.keras.layers.Input(shape=(224, 224, 3))
    base_model = tf.keras.applications.MobileNetV2(input_tensor=input_layer,
                                                   weights='imagenet',
                                                   include_top=False)
    base_model.trainable = False
    x = tf.keras.layers.GlobalAveragePooling2D()(base_model.output)
    x = tf.keras.layers.Dense(2, activation='softmax')(x)
    model = tf.keras.models.Model(inputs=input_layer, outputs=x)
    return model

dataset_name = 'cats_vs_dogs'
filePath = f"{getcwd()}/../tmp2"
dataset, info = tfds.load(name=dataset_name, split=tfds.Split.TRAIN, with_info=True, data_dir=filePath)
print(info.version)

def preprocess(features):
    # Resize and normalize the image, keeping its label alongside it
    image = features['image']
    image = tf.image.resize(image, (224, 224))
    image = image / 255.0
    return image, features['label']

file_pattern = f'{getcwd()}/../tmp2/{dataset_name}/{info.version}/{dataset_name}-train.tfrecord*'
files = tf.data.Dataset.list_files(file_pattern)

# Parallelize the extraction of the stored TFRecords of
# the cats_vs_dogs dataset by using the interleave operation with
# cycle_length = 4 and the number of parallel calls set to tf.data.experimental.AUTOTUNE.
train_dataset = files.interleave(tf.data.TFRecordDataset,
                                 cycle_length=4,
                                 num_parallel_calls=tf.data.experimental.AUTOTUNE)

def read_tfrecord(serialized_example):
    # Create the feature description dictionary
    feature_description = {
        'image': tf.io.FixedLenFeature((), tf.string, ""),
        'label': tf.io.FixedLenFeature((), tf.int64, -1),
    }
    # Parse the serialized_example and decode the image
    example = tf.io.parse_single_example(serialized_example, feature_description)
    image = tf.io.decode_jpeg(example['image'], channels=3)
    # Assumed completion: resize and normalize to match preprocess(),
    # then return the image together with its label
    image = tf.image.resize(image, (224, 224))
    image = tf.cast(image, tf.float32) / 255.0
    return image, example['label']

# Get the number of CPU cores.
cores = multiprocessing.cpu_count()
print(cores)

# Parallelize the transformation of the train_dataset by using
# the map operation with the number of parallel calls set to
# the number of CPU cores.
train_dataset = train_dataset.map(read_tfrecord, num_parallel_calls=cores)
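
The gist stops at the parallel map. As a rough sketch of how the resulting dataset could be batched, prefetched, and fed to the model returned by create_model() (the batch size, optimizer, and epoch count below are assumptions, not part of the original gist):

# Assumed continuation: shuffle, batch, and prefetch the parallelized dataset,
# then train the MobileNetV2-based classifier returned by create_model().
BATCH_SIZE = 32  # illustrative value

train_dataset = (train_dataset
                 .shuffle(1024)
                 .batch(BATCH_SIZE)
                 .prefetch(tf.data.experimental.AUTOTUNE))

model = create_model()
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',  # labels are integer class ids
              metrics=['accuracy'])
model.fit(train_dataset, epochs=5)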