Skip to content

Instantly share code, notes, and snippets.

Akash Srivastava akashgit

Block or report user

Report or block akashgit

Hide content and notifications from this user.

Learn more about blocking users

Contact Support about this user’s behavior.

Learn more about reporting abuse

Report abuse
View GitHub Profile
View GAN_1000.py
def highdim_syn_data(batch_size, num_components, num_features,**kwargs):
shape=(num_features)
shape_cat=(batch_size,num_components)
cat = ds.Categorical(tf.zeros(num_components, dtype=float32))
mus = [-1*tf.ones(shape, dtype=float32),-.5*tf.ones(shape, dtype=float32),
0*tf.ones(shape, dtype=float32),.5*tf.ones(shape, dtype=float32),
-2*tf.ones(shape, dtype=float32),-2.5*tf.ones(shape, dtype=float32),
10*tf.ones(shape, dtype=float32),.25*tf.ones(shape, dtype=float32),
-13*tf.ones(shape, dtype=float32),-5.5*tf.ones(shape, dtype=float32)]
View GAN_GRID.py
def grid(batch_size, num_components, num_features,**kwargs):
shape=(batch_size,num_features)
shape_cat=(batch_size,num_components)
cat = ds.Categorical(logits=np.log(0.04*np.ones(shape_cat, dtype=float32)))
mus = np.array([np.array([i, j])*np.ones(shape, dtype=float32) for i, j in itertools.product(range(-4, 5, 2),
range(-4, 5, 2))],dtype=float32)
s = 0.05*np.ones(shape, dtype=float32)
sigmas = [s for i in range(num_components)]
components = list((ds.MultivariateNormalDiag(mu, sigma, **kwargs)
for (mu, sigma) in zip(mus, sigmas)))
View GAN_RING.py
def ring(batchsize, num_cluster=8, scale=1, std=.01,**kwargs):
    """Sampler for the classic 2-D "ring of Gaussians" toy dataset.

    For each of the `batchsize` samples, one of `num_cluster` equally
    spaced modes on a circle of radius `scale` is chosen uniformly at
    random, and a `ds.Normal` centered at that mode is returned.

    NOTE(review): relies on module-level `tf`, `np`, `ds`, and a bare
    `float32` name imported elsewhere in the gist; `**kwargs` is accepted
    but unused here. The second `ds.Normal` argument is `(std**2)`, i.e.
    a variance-like value passed where `Normal` expects a scale (stddev) —
    presumably intentional for this toy setup; confirm against the gist.
    """
    pi = tf.constant(np.pi)
    # One random mode index per batch row.
    mode_idx = tf.random_uniform(
        [batchsize], minval=0, maxval=num_cluster, dtype=tf.int32)
    # Angular spacing between adjacent modes; the -pi/2 offset rotates the
    # ring so mode 0 sits at the bottom of the circle.
    spacing = pi * 2 / num_cluster
    theta = (spacing * tf.cast(mode_idx, dtype=float32)) - (pi / 2)
    # Build the (x, y) mean for every sample as two stacked columns.
    xs = tf.expand_dims(scale * tf.cos(theta), 1)
    ys = tf.expand_dims(scale * tf.sin(theta), 1)
    centers = tf.concat([xs, ys], 1)
    return ds.Normal(centers, (std ** 2) * tf.ones_like(centers))
@akashgit
akashgit / plot_softmax.py
Last active Mar 14, 2017
Effect of input normalisation on the softmax function and its gradients.
View plot_softmax.py
import autograd.numpy as np
from autograd import grad,elementwise_grad
def softmax(z):
    """Numerically stable softmax over ALL elements of `z`.

    Subtracting ``max(z)`` before exponentiating prevents overflow for
    large inputs (e.g. z ~ 1000 would make ``np.exp`` return inf/nan in
    the original form) and cancels exactly in the ratio, so the result
    is mathematically unchanged.

    Note: this normalizes over the whole array, not per-row — matching
    the original gist's behavior for its 2-element demo inputs.
    """
    shifted = np.exp(z - np.max(z))
    return shifted / np.sum(shifted)
# Build a 2-D grid of input pairs (z1, z2) so the softmax surface and its
# gradients can be evaluated and plotted over [-10, 10]^2.
nb_of_zs = 200  # number of grid points per axis
zs = np.linspace(-10, 10, num=nb_of_zs)  # 1-D range of input values
zs_1, zs_2 = np.meshgrid(zs, zs)  # coordinate matrices for every (z1, z2) pair
y = np.zeros((nb_of_zs, nb_of_zs, 2))  # output buffer: softmax([z1, z2]) per cell — presumably filled by a loop past this excerpt; confirm in full gist
You can’t perform that action at this time.