Eduardo Perez Denadai (edunuke)
  • dataquantix
  • Panamá
edunuke / ResNeXt_gan.py
Created September 24, 2018 13:51 — forked from mjdietzx/ResNeXt_gan.py
Keras/TensorFlow implementation of a GAN architecture in which the generator and discriminator networks are ResNeXt.
from keras import layers
from keras import models
import tensorflow as tf
#
# generator input params
#
rand_dim = (1, 1, 2048)  # dimension of the generator's input tensor (Gaussian noise)
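The preview above only shows the generator's input parameters. For context, here is a minimal sketch of the grouped-branch residual unit that "ResNeXt" refers to; the cardinality, filter counts, and function name are illustrative assumptions, not values taken from the forked gist.
from keras import layers

def resnext_block(x, cardinality=8, bottleneck=4, filters=256):
    # Illustrative ResNeXt unit: split into `cardinality` narrow branches,
    # transform each, concatenate, project back, and add the shortcut.
    # Assumes x already has `filters` channels (otherwise add a 1x1 projection).
    shortcut = x
    branches = []
    for _ in range(cardinality):
        b = layers.Conv2D(bottleneck, (1, 1), padding='same', activation='relu')(x)
        b = layers.Conv2D(bottleneck, (3, 3), padding='same', activation='relu')(b)
        branches.append(b)
    merged = layers.concatenate(branches)
    merged = layers.Conv2D(filters, (1, 1), padding='same')(merged)
    merged = layers.BatchNormalization()(merged)
    return layers.Activation('relu')(layers.add([shortcut, merged]))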
import keras

# Domain embedding (user side)
user_input = keras.layers.Input(shape=(100,), name='Domain_1')
user_vec = keras.layers.Flatten()(keras.layers.Embedding(11, 9, name='Domain_1_embed')(user_input))
user_vec = keras.layers.Dropout(0.5)(user_vec)

# Domain embedding (product side)
products_input = keras.layers.Input(shape=(100,), name='Domain_3')
products_vec = keras.layers.Flatten()(keras.layers.Embedding(50, 20, name='Domain_3_embed')(products_input))
products_vec = keras.layers.Dropout(0.5)(products_vec)
import keras

# Domain representation learned with a dense layer (user side)
user_input = keras.layers.Input(shape=(10,), name='Domain_1')
user_vec = keras.layers.Dropout(0.8)(keras.layers.Dense(1200, activation='relu')(user_input))

# Domain representation learned with a dense layer (product side)
products_input = keras.layers.Input(shape=(10,), name='Domain_2')
products_vec = keras.layers.Dropout(0.8)(keras.layers.Dense(1200, activation='relu')(products_input))
import keras

# Domain embedding (user side)
user_input = keras.layers.Input(shape=(10,), name='Domain_1')
user_vec = keras.layers.Flatten()(keras.layers.Embedding(11, 9, name='Domain_1_embed')(user_input))
user_vec = keras.layers.Dropout(0.5)(user_vec)

# Domain embedding (product side)
products_input = keras.layers.Input(shape=(10,), name='Domain_2')
products_vec = keras.layers.Flatten()(keras.layers.Embedding(50, 20, name='Domain_2_embed')(products_input))
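These previews stop at the two input branches. Below is a hedged sketch of how such branches are typically joined into a trainable model; the concatenation head, output layer, and loss are illustrative assumptions, not taken from the gists.
# Illustrative merge of the two domain branches (reuses the variables defined above).
merged = keras.layers.concatenate([user_vec, products_vec], name='domain_concat')
hidden = keras.layers.Dense(64, activation='relu')(merged)
output = keras.layers.Dense(1, activation='sigmoid', name='match_score')(hidden)

model = keras.models.Model(inputs=[user_input, products_input], outputs=output)
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
model.summary()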
# Freeze the old model's weights (set all of its layers as non-trainable)
for m in model_1.layers:
    m.trainable = False

# Add the new branch on top of the old model's 'layer_13' output
mixed = keras.layers.add([model_1.get_layer(name='layer_13').output, nn2], name='concat')
result_mix_1 = keras.layers.Dense(1, activation='relu', name='result_mixed')(mixed)
input_shape = (500,)

# Old (pre-trained) model
input_1 = keras.layers.Input(shape=input_shape, name='input_1')
nn = keras.layers.Dense(input_shape[0]*3, activation='relu', name='layer_11')(input_1)
nn = keras.layers.Dropout(0.75)(nn)
nn = keras.layers.BatchNormalization()(nn)
nn = keras.layers.Dense(input_shape[0]*2, activation='relu', name='layer_12')(nn)
nn = keras.layers.Dropout(0.75)(nn)
nn = keras.layers.BatchNormalization()(nn)
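The previews reference model_1, nn2, and model_mix_2 without showing how they are wired together. Here is a hedged sketch of that assembly; the new branch, its layer sizes, and the compile settings are assumptions, and model_1 stands for the pre-trained model that the gist builds but the preview truncates.
# Sketch only: the second input branch (nn2) must produce the same shape as
# model_1's 'layer_13' output for the add() to work.
input_2 = keras.layers.Input(shape=(500,), name='input_2')
nn2 = keras.layers.Dense(1000, activation='relu', name='layer_21')(input_2)

mixed = keras.layers.add([model_1.get_layer(name='layer_13').output, nn2], name='concat')
result_mix_1 = keras.layers.Dense(1, activation='relu', name='result_mixed')(mixed)

model_mix_2 = keras.models.Model(inputs=[model_1.input, input_2], outputs=result_mix_1)
model_mix_2.compile(optimizer='adam', loss='mse')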
n_epoch = 1000  # example only
n_batch = 3000  # example only
X = []  # set up your feature space dimensions
y = []  # set up your target dimension

# Method 1 for alternating training of the added/transferred layer weights
for i in range(n_epoch):
    model_mix_2.fit(X, y, epochs=1, batch_size=n_batch, verbose=1, shuffle=False)
    if not i % 2:
        model_mix_2.get_layer(name='layer_11').trainable = True
    else:
        model_mix_2.get_layer(name='layer_11').trainable = False
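A caveat the preview does not show: in Keras, changing a layer's trainable attribute has no effect on an already-compiled model until it is recompiled, so each toggle above should be followed by a compile call. A minimal illustration (optimizer and loss are assumptions):
layer = model_mix_2.get_layer(name='layer_11')
layer.trainable = not layer.trainable               # flip between frozen and trainable
model_mix_2.compile(optimizer='adam', loss='mse')   # recompile so the change takes effect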
n_epoch = 1000  # example only
n_batch = 3000  # example only
X = []  # set to appropriate feature space dimensions
y = []  # set the target

# Method 2 for alternating training: use a callback
class alternate_trainable(keras.callbacks.Callback):
    def __init__(self):
        self.counter = 0
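The gist preview cuts off after __init__. A hedged sketch of how the callback might be completed follows, mirroring the alternation and recompile logic above; the toggled layer name, optimizer, and loss are assumptions.
class alternate_trainable(keras.callbacks.Callback):
    """Sketch: flip 'layer_11' between frozen and trainable every other epoch."""
    def __init__(self):
        super(alternate_trainable, self).__init__()
        self.counter = 0

    def on_epoch_end(self, epoch, logs=None):
        self.counter += 1
        layer = self.model.get_layer(name='layer_11')
        layer.trainable = (self.counter % 2 == 0)
        # Recompile so the new trainable setting is picked up on the next epoch.
        self.model.compile(optimizer='adam', loss='mse')

# Usage: model_mix_2.fit(X, y, epochs=n_epoch, batch_size=n_batch, callbacks=[alternate_trainable()])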
import pandas as pd
import seaborn as sns
import keras
import shap

# Load the diamonds dataset that ships with seaborn
df = sns.load_dataset(name='diamonds')
print(df.head())
print(df.describe())
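The imports suggest this gist goes on to explain a Keras model on the diamonds data with SHAP, but the preview stops here. A hedged sketch of one way that might look; the encoding, model size, and use of KernelExplainer are assumptions, not taken from the gist.
import numpy as np

# One-hot encode the categorical columns and split features from the target.
data = pd.get_dummies(df, columns=['cut', 'color', 'clarity'])
X = data.drop(columns=['price']).astype('float32').values
y = data['price'].values

# A small illustrative regressor.
model = keras.models.Sequential([
    keras.layers.Dense(64, activation='relu', input_shape=(X.shape[1],)),
    keras.layers.Dense(1)
])
model.compile(optimizer='adam', loss='mse')
model.fit(X, y, epochs=5, batch_size=256, verbose=0)

# Explain a handful of predictions with a model-agnostic SHAP explainer.
background = X[np.random.choice(X.shape[0], 100, replace=False)]
explainer = shap.KernelExplainer(model.predict, background)
shap_values = explainer.shap_values(X[:10])  # per-feature attributions for the first ten rows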