Skip to content

Instantly share code, notes, and snippets.

View emuccino's full-sized avatar

Eric Muccino emuccino

  • Mindboard
View GitHub Profile
@emuccino
emuccino / untitled0.ipynb
Created August 26, 2020 03:31
Untitled0.ipynb
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
import matplotlib.pyplot as plt


def show_image_compression(samples):
    """Show how input samples are encoded as they are offloaded up the device chain.

    NOTE(review): the gist preview is truncated — the plotting code implied by
    the matplotlib import is not visible; only the sample index is printed here.
    Assumes the module-level `device_models` dict (keys 'end', 'edge') is
    defined elsewhere in the gist — confirm against the full source.
    """
    # get encoded samples offloaded to edge
    _, offload_end = device_models['end'].predict(samples)
    # get encoded samples offloaded to cloud
    _, offload_edge = device_models['edge'].predict(offload_end)
    # show sample representations at each device level
    for i in range(len(samples)):
        print(i)
import numpy as np
from scipy.special import softmax


def get_results(confidence_threshold):
    """Evaluate early-exit predictions at the given confidence threshold.

    NOTE(review): the gist preview is truncated — only the accumulator setup is
    visible here; the per-sample prediction/exit logic follows in the full gist.

    Parameters
    ----------
    confidence_threshold : float
        Minimum confidence required to exit at a given device level.
    """
    print('confidence threshold:', confidence_threshold)
    # accumulators filled by the (truncated) evaluation loop
    predictions = []
    confidence = []
    exit_level = []
from tensorflow.keras.callbacks import EarlyStopping

# stop training once validation loss has not improved for 5 epochs,
# restoring the best-seen weights afterwards
early_stop = EarlyStopping(
    monitor='val_loss', min_delta=0, patience=5, verbose=0, mode='auto',
    baseline=None, restore_best_weights=True)

# train the stacked model with one labelled output head per device
# NOTE(review): the gist preview cuts the fit() argument list after
# `verbose=1,`; the callbacks argument is inferred from `early_stop` defined
# above, and any further arguments (e.g. validation data) are not visible —
# confirm against the full gist.
model.fit(x_train, {device: y_train for device in device_names},
          batch_size=64,
          epochs=1000,
          verbose=1,
          callbacks=[early_stop])
outputs = {}
# complete model input
inputs = Input(shape=x_train.shape[1:])
net = {'offload': inputs}
# stack all 3 device models together: each device model consumes the previous
# level's 'offload' tensor and contributes its own named output head.
# NOTE(review): this relies on each device model returning a dict-like result
# containing 'offload' and '<device>_outputs' keys — confirm against the
# compile_device_model definition in the full gist.
for device in device_names:
    net = device_models[device](net['offload'])
    outputs[device] = net[device + '_outputs']
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Conv2D, Dense, GlobalAvgPool2D, GlobalMaxPool2D, Concatenate, BatchNormalization
from tensorflow.keras.losses import SparseCategoricalCrossentropy


def compile_device_model(input_shape=None, n_filters=None, name=None, offload=False):
    """Build and compile one device-level model.

    NOTE(review): the gist preview is truncated — only the input setup is
    visible; the convolutional body, output heads, and compile call follow in
    the full gist.

    Parameters
    ----------
    input_shape : tuple, optional
        Shape of the model's input tensor (excluding batch dimension).
    n_filters : int, optional
        Presumably the convolution width used by the truncated body — confirm.
    name : str, optional
        Device name used for the model / its output heads.
    offload : bool
        Presumably whether this device produces an 'offload' tensor for the
        next level — confirm against the full gist.
    """
    outputs = {}
    inputs = Input(shape=input_shape)
    net = inputs
from tensorflow.keras.datasets import mnist

# load mnist data
(x_train, y_train), (x_test, y_test) = mnist.load_data()

# add a trailing channel axis and scale pixel values into [0, 1]
x_train = x_train.reshape(*x_train.shape, 1) / x_train.max()
x_test = x_test.reshape(*x_test.shape, 1) / x_test.max()
@emuccino
emuccino / loan_data_classifier.py
Last active April 13, 2020 08:28
Train classifiers
# function for building classifier (very similar to the discriminator)
def compile_classifier():
    """Build the loan-data classifier.

    NOTE(review): the gist preview is truncated — only the start of the
    numeric-input loop is visible; the string-feature inputs and the network
    body follow in the full gist. Relies on the module-level `numeric_data`
    collection and the Keras `Input` layer being in scope — confirm.
    """
    inputs = {}
    numeric_nets = []
    string_nets = []
    # one scalar input per numeric feature, named after the feature
    for name in numeric_data:
        numeric_input = Input(shape=(1,), name=name)
import itertools
import matplotlib.pyplot as plt

# Train the GAN: presumably alternates discriminator/generator updates for
# n_epochs, plotting every n_plot and evaluating every n_eval steps — the
# gist preview is truncated and the epoch loop body is not visible here,
# so confirm against the full gist.
def train_gan(n_epochs,n_batch,n_plot,n_eval):
# discriminator/generator training logs
disc_loss_hist = []
gen_loss_hist = []
# NOTE(review): loop body truncated by the gist preview
for epoch in range(n_epochs):
@emuccino
emuccino / loan_data_get_data.py
Last active April 13, 2020 09:34
Batch generating functions
from tensorflow.keras.utils import to_categorical
# function for generating latent samples for synthetic data for generator training
# NOTE(review): the definition continues past this preview — only the input
# dict construction is visible here. Relies on module-level `np` and
# `latent_dim` being defined elsewhere in the gist.
def generate_latent_samples(n):
# generate latent vectors with balanced one-hot targets:
# first n//2 rows get class 0, the next n//2 rows get class 1
x = {'latent': np.random.normal(size=(n, latent_dim)),
'target': to_categorical(np.hstack([np.array([_x for _ in range(n//2)]) for _x in range(2)]),2)}
# outputs indicating positive discrimination (target value)