# Parameters for a stack of affine bijector layers: d-dimensional data,
# rank-r perturbation of a lower-triangular scale.
# (tf, tfd and tfb are imported in the setup snippet below.)
d, r = 2, 2
DTYPE = tf.float32
bijectors = []
num_layers = 6
for i in range(num_layers):
    with tf.variable_scope('bijector_%d' % i):
        V = tf.get_variable('V', [d, r], dtype=DTYPE)        # factor loading
        shift = tf.get_variable('shift', [d], dtype=DTYPE)   # affine shift
        L = tf.get_variable('L', [d * (d + 1) // 2],
                            dtype=DTYPE)                      # lower triangular
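        # A sketch (not necessarily the original code) of how these parameters
        # can be assembled: a structured Affine bijector followed by the
        # LeakyReLU bijector defined in the next snippet.
        bijectors.append(tfb.Affine(
            scale_tril=tfd.fill_triangular(L),
            scale_perturb_factor=V,
            shift=shift))
        bijectors.append(LeakyReLU(alpha=0.5))

# Chain the layers into a single bijector (tfb.Chain applies them in reverse
# order) and push the base distribution through it.
mlp_bijector = tfb.Chain(list(reversed(bijectors)), name='mlp_bijector')
dist = tfd.TransformedDistribution(distribution=base_dist, bijector=mlp_bijector)
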
# The LeakyReLU bijector's Jacobian is quite easy to interpret: multiplying
# the negative inputs by alpha causes a contraction in volume.
class LeakyReLU(tfb.Bijector):
    def __init__(self, alpha=0.5, validate_args=False, name="leaky_relu"):
        super(LeakyReLU, self).__init__(
            event_ndims=1, validate_args=validate_args, name=name)
        self.alpha = alpha

    def _forward(self, x):
        return tf.where(tf.greater_equal(x, 0), x, self.alpha * x)
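    # Sketch of the remaining bijector methods (omitted in the snippet above):
    # the inverse and its log-det-Jacobian, which TransformedDistribution needs
    # in order to evaluate log_prob. Assumes the event is the last axis.
    def _inverse(self, y):
        return tf.where(tf.greater_equal(y, 0), y, 1. / self.alpha * y)

    def _inverse_log_det_jacobian(self, y):
        I = tf.ones_like(y)
        J_inv = tf.where(tf.greater_equal(y, 0), I, 1. / self.alpha * I)
        # The transform is elementwise, so log|det J| is the sum of the
        # per-dimension log-derivatives over the event dimension.
        return tf.reduce_sum(tf.log(tf.abs(J_inv)), axis=-1)
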
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf

tfd = tf.contrib.distributions
tfb = tfd.bijectors

# Isotropic 2-D Gaussian base distribution for the flow.
base_dist = tfd.MultivariateNormalDiag(loc=tf.zeros([2], tf.float32))

# Target distribution to fit: x2 ~ Normal(0, scale=4),
# x1 | x2 ~ Normal(0.25 * x2^2, scale=1), a curved, banana-like density.
batch_size = 512
x2_dist = tfd.Normal(loc=0., scale=4.)
x2_samples = x2_dist.sample(batch_size)
x1 = tfd.Normal(loc=.25 * tf.square(x2_samples),
                scale=tf.ones(batch_size, dtype=tf.float32))
x1_samples = x1.sample()
x_samples = tf.stack([x1_samples, x2_samples], axis=1)
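# A sketch (not shown in the original snippets) of fitting the flow: `dist` is
# the TransformedDistribution built from the bijector chain in the first
# snippet, trained by maximum likelihood on samples of the target distribution.
loss = -tf.reduce_mean(dist.log_prob(x_samples))
train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)

sess = tf.Session()
sess.run(tf.global_variables_initializer())
for i in range(int(2e4)):  # number of steps is an arbitrary choice
    _, np_loss = sess.run([train_op, loss])
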
ericjang / blender_pointcloud_text_2d.py
Generate 2D points randomly distributed on a flat text mesh in Blender.
# Run this from Blender's Python interpreter.
# First create a Text object, then use <SPACE> (operator search) to convert it
# from Text to a Mesh object, and select it.
import bpy
import bpy_extras.mesh_utils
import pickle

obj = bpy.context.object   # the actively selected object
me = obj.data              # its mesh data
me.calc_tessface()         # recalculate tessfaces
# sample random points on the tessellated faces (10 per face)
points = bpy_extras.mesh_utils.face_random_points(10, me.tessfaces)
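# A sketch (the rest of the original script is not shown) of what the pickle
# import suggests: since the text mesh is flat, keep only the x/y coordinates
# and dump them for use outside Blender. The output path is an arbitrary example.
pts_2d = [(p.x, p.y) for p in points]
with open('/tmp/text_points.pkl', 'wb') as f:
    pickle.dump(pts_2d, f)
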
ericjang / currentCPI.py
Python script that returns a pandas dataframe containing global Consumer Price Indices
import requests
from bs4 import BeautifulSoup
import pandas as pd
import re
import sys


def currentCPI():
    url = 'http://www.tradingeconomics.com/country-list/consumer-price-index-(cpi)'
    r = requests.get(url)
    soup = BeautifulSoup(r.text, 'html.parser')
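    # The rest of the function is cut off above; a minimal sketch, assuming the
    # per-country CPI values sit in the first HTML table on the page and that a
    # parser backend (lxml or html5lib) is available for pandas.read_html:
    table = soup.find('table')
    df = pd.read_html(str(table))[0]   # one row per country
    return df
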
# Loss and training ops: minimize the negative ELBO (elbo is defined below).
loss = tf.reduce_mean(-elbo)
lr = tf.constant(0.001)
train_op = tf.train.AdamOptimizer(learning_rate=lr).minimize(
    loss, var_list=slim.get_model_variables())
init_op = tf.global_variables_initializer()
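# A sketch of the training loop (not shown in these snippets). It assumes the
# tensors defined in the surrounding snippets (x, tau, data, BATCH_SIZE,
# NUM_ITERS, tau0) plus numpy imported as np; the annealing constants below
# are illustrative, not the original values.
ANNEAL_RATE = 3e-5
MIN_TEMP = 0.5
sess = tf.Session()
sess.run(init_op)
for i in range(1, NUM_ITERS):
    np_x, _ = data.next_batch(BATCH_SIZE)
    _, np_loss = sess.run([train_op, loss], {x: np_x, tau: tau0})
    if i % 1000 == 0:
        # gradually anneal the Gumbel-Softmax temperature toward MIN_TEMP
        tau0 = np.maximum(tau0 * np.exp(-ANNEAL_RATE * i), MIN_TEMP)
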
# Get the MNIST data and set hyperparameters.
# (These snippets assume: import tensorflow as tf, slim = tf.contrib.slim,
#  Bernoulli = tf.contrib.distributions.Bernoulli.)
from tensorflow.examples.tutorials.mnist import input_data
data = input_data.read_data_sets('/tmp/', one_hot=True).train
BATCH_SIZE = 100
NUM_ITERS = 50000
tau0 = 1.0  # initial temperature
# Loss terms: analytic KL between q(y|x) and a uniform categorical prior over
# K classes, summed over the N categorical variables, then subtracted from the
# reconstruction term. (q_y, log_q_y and the input placeholder x come from the
# encoder, which is not shown here.)
kl_tmp = tf.reshape(q_y * (log_q_y - tf.log(1.0 / K)), [-1, N, K])
KL = tf.reduce_sum(kl_tmp, [1, 2])
elbo = tf.reduce_sum(p_x.log_prob(x), 1) - KL
# Temperature of the Gumbel-Softmax relaxation.
tau = tf.Variable(5.0, name="temperature")
# Sample the latent code and reshape back to (batch_size, N, K);
# set hard=True for the straight-through (ST) Gumbel-Softmax estimator.
# (logits_y are the unnormalized log-probs produced by the encoder, not shown.)
y = tf.reshape(gumbel_softmax(logits_y, tau, hard=False), [-1, N, K])
# Generative model p(x|y), i.e. the decoder.
net = slim.stack(slim.flatten(y), slim.fully_connected, [256, 512])
logits_x = slim.fully_connected(net, 784, activation_fn=None)
# Bernoulli likelihood over the 784 MNIST pixels (shape=(batch_size, 784)).
p_x = Bernoulli(logits=logits_x)
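
The code above calls gumbel_softmax, which is not defined in these snippets. A minimal sketch of the sampler (the standard Gumbel-Softmax / Concrete relaxation, with a straight-through variant when hard=True) could look like this:

def sample_gumbel(shape, eps=1e-20):
    # Sample from Gumbel(0, 1) via inverse transform of uniform noise.
    U = tf.random_uniform(shape, minval=0, maxval=1)
    return -tf.log(-tf.log(U + eps) + eps)

def gumbel_softmax_sample(logits, temperature):
    # Draw a soft sample from the Gumbel-Softmax (Concrete) distribution.
    y = logits + sample_gumbel(tf.shape(logits))
    return tf.nn.softmax(y / temperature)

def gumbel_softmax(logits, temperature, hard=False):
    # With hard=True, return one-hot samples in the forward pass but
    # backpropagate through the soft sample (straight-through estimator).
    y = gumbel_softmax_sample(logits, temperature)
    if hard:
        y_hard = tf.cast(tf.equal(y, tf.reduce_max(y, axis=-1, keep_dims=True)), y.dtype)
        y = tf.stop_gradient(y_hard - y) + y
    return y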