Skip to content

Instantly share code, notes, and snippets.

View dvgodoy's full-sized avatar

Daniel Voigt Godoy dvgodoy

View GitHub Profile
@dvgodoy
dvgodoy / network.py
Last active April 2, 2018 14:40
Simple network to demonstrate effects of different activation functions
# --- Gist snippet 1: data generation -------------------------------------
# Builds a toy dataset and a ReplayData callback that records training
# state to an HDF5 file for later visualization with deepreplay.
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
from keras.initializers import glorot_normal, normal
# ======================= #
# Data generation process #
# ======================= #
from deepreplay.callbacks import ReplayData
from deepreplay.datasets.parabola import load_data
# Parabola-shaped toy dataset; exact shapes/dtypes are not visible in this
# preview -- TODO confirm against deepreplay.datasets.parabola.
X, y = load_data()
# Keras callback that logs per-epoch training state into group 'part1' of
# 'hyperparms_in_action.h5' (the filename spelling is the gist's own and is
# reused verbatim by the Replay snippet below).
replaydata = ReplayData(X, y, filename='hyperparms_in_action.h5', group_name='part1')
# --- Gist snippet 2: model definition (truncated preview) -----------------
# These four imports duplicate the ones above -- this is where a second
# snippet of the scraped gist page begins; kept byte-identical.
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
from keras.initializers import glorot_normal, normal
model = Sequential()
# NOTE(review): this model.add(...) call is cut off by the gist preview --
# the closing parentheses (and presumably further layers / compile step)
# are missing, so this fragment is not runnable as shown. The visible
# arguments define a 2-in, 2-unit sigmoid hidden layer with Glorot-normal
# (seed=42) weight initialization.
model.add(Dense(input_dim=2,
units=2,
activation='sigmoid',
kernel_initializer=glorot_normal(seed=42),
# --- Gist snippet: single feature-space visualization ---------------------
from deepreplay.replay import Replay
# Re-open the training history that the ReplayData callback recorded into
# the HDF5 file (same filename/group as in the data-generation snippet).
replay = Replay(replay_filename='hyperparms_in_action.h5', group_name='part1')
import matplotlib.pyplot as plt
fig, ax = plt.subplots(1, 1, figsize=(5, 5))
# Build the feature-space view for the layer named 'hidden' -- presumably
# the sigmoid layer defined in the model snippet; verify against the gist.
fs = replay.build_feature_space(ax, layer_name='hidden')
# Plot 60th epoch and save it as PNG
fs.plot(epoch=60).savefig('feature_space_epoch60.png', dpi=120)
# Animate and save it as MP4
fs.animate().save('feature_space_animation.mp4', dpi=120, fps=5)
# --- Gist snippet: composite 2x4 dashboard figure -------------------------
fig = plt.figure(figsize=(12, 6))
# Left half (2x2 cells) shows the feature space; the right half hosts the
# per-class probability histograms and loss panels.
ax_fs = plt.subplot2grid((2, 4), (0, 0), colspan=2, rowspan=2)
ax_ph_neg = plt.subplot2grid((2, 4), (0, 2))  # probability histogram, negative class
ax_ph_pos = plt.subplot2grid((2, 4), (1, 2))  # probability histogram, positive class
# NOTE(review): ax_lm is created but never used in this preview -- possibly
# consumed by a build_loss_and_metric(...) call trimmed from the snippet.
ax_lm = plt.subplot2grid((2, 4), (0, 3))
ax_lh = plt.subplot2grid((2, 4), (1, 3))
# Rebind the panel builders onto the composite axes ('replay' comes from
# the previous snippet; 'fs' is deliberately rebound here).
fs = replay.build_feature_space(ax_fs, layer_name='hidden')
ph = replay.build_probability_histogram(ax_ph_neg, ax_ph_pos)
lh = replay.build_loss_histogram(ax_lh)
# --- Gist snippet: model-building helper (truncated preview) --------------
from keras.models import Sequential
from keras.layers import Dense
# NOTE(review): the scrape stripped all indentation from this function and
# the body is cut off right after `model = Sequential()`, so it is not
# runnable as shown. Visible intent: normalize `units` to a per-layer list,
# asserting its length matches n_layers when a list is passed, then start
# assembling a Sequential model (continuation not visible here).
def build_model(n_layers, input_dim, units, activation, initializer):
if isinstance(units, list):
assert len(units) == n_layers
else:
units = [units] * n_layers
model = Sequential()
# --- Gist snippet: comparing weight-initializer distributions -------------
from deepreplay.datasets.ball import load_data
# 10-dimensional "ball" toy dataset. NOTE(review): this load_data shadows
# the parabola load_data imported in the first snippet -- harmless only
# because these are separate gist files concatenated by the page scrape.
X, y = load_data(n_dims=10)
import numpy as np
from keras import backend as K
from keras.initializers import VarianceScaling
# Square layer for the comparison: 100 inputs, 100 outputs.
fan_in = fan_out = 100
# std = sqrt(1 / fan_in) -- the fan-in scaling used for the two manual draws.
stddev = np.sqrt(1. / fan_in)
# Draw a 100x100 sample from each scheme (plain normal, truncated normal,
# and Keras' VarianceScaling with mode='fan_in') and flatten each to a 1-D
# array, presumably for side-by-side histogram plotting -- the plotting
# code is not part of this preview.
normal_values = K.eval(K.random_normal(shape=(fan_in, fan_out), stddev=stddev)).ravel()
truncated_values = K.eval(K.truncated_normal(shape=(fan_in, fan_out), stddev=stddev)).ravel()
var_scaling_values = K.eval(VarianceScaling(mode='fan_in')(shape=(fan_in, fan_out))).ravel()