Yoel Zeldes (yoel-zeldes), working on my blog at anotherdatum.com
# create a mesh of points which will be used for inference
resolution = 1000
vs = np.linspace(x_v.min(), x_v.max(), resolution)
ts = np.linspace(x_t.min(), x_t.max(), resolution)
vs, ts = np.meshgrid(vs, ts)
vs = np.ravel(vs)
ts = np.ravel(ts)
zs, probs = sess.run([z, prob], {visual: vs, textual: ts})
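One way to look at the result is to reshape the flattened predictions back into the grid and draw them as a heatmap. The following is an assumed sketch (the gist preview stops before any plotting), assuming prob yields one value per grid point:

# Assumed visualization sketch, not part of the original gist:
# reshape the flattened probabilities back into the resolution x resolution grid
# and draw them as a heatmap over the (visual, textual) plane.
import matplotlib.pyplot as plt

grid = probs.reshape(resolution, resolution)
plt.imshow(grid,
           origin='lower',
           extent=[x_v.min(), x_v.max(), x_t.min(), x_t.max()],
           aspect='auto')
plt.xlabel('visual')
plt.ylabel('textual')
plt.colorbar()
plt.show()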
def plot_evaluations(evaluation, cmap, title, labels):
    ...  # body truncated in the gist preview
import numpy as np
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
import matplotlib.pyplot as plt
np.random.seed(42)
tf.set_random_seed(42)
mnist = input_data.read_data_sets('MNIST_data')
def plot_samples(samples, num_epochs):
    IMAGE_WIDTH = 0.7
    epochs = np.linspace(0, len(samples) - 1, num_epochs).astype(int)
    plt.figure(figsize=(IMAGE_WIDTH * NUM_DIGITS,
                        len(epochs) * IMAGE_WIDTH))
    for epoch_index, epoch in enumerate(epochs):
        for digit, image in enumerate(samples[epoch]):
            plt.subplot(len(epochs),
                        NUM_DIGITS,
                        epoch_index * NUM_DIGITS + digit + 1)
            # ...
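The preview cuts the inner loop off here; presumably each sampled digit is drawn into its subplot. A hedged sketch of what the elided body might look like inside the inner loop (an assumption, not the original code):

            # Assumed continuation: show the sampled digit as a 28x28
            # grayscale image without axis ticks.
            plt.imshow(image.reshape(28, 28), cmap='Greys_r')
            plt.axis('off')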
def _eval_tensor_if_needed(self, path):
    """
    Given a path to a tensor file, evaluate the tensor and cache the result in self._tensor_values.
    """
    if self._session is None:
        return None
    if path not in self._tensor_values:
        # strip the leading '/' so the file path matches the tensor name in the graph
        self._tensor_values[path] = self._session.run(self._graph.get_tensor_by_name(path[1:]))
    return self._tensor_values[path]
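A hedged example of how such a helper could be used from a fusepy read handler on the TfFs class below (assumed, not shown in the gist):

# Hypothetical read() handler sketch, assuming the fusepy Operations
# interface read(self, path, size, offset, fh): serve the evaluated
# tensor's text representation as the file's contents.
def read(self, path, size, offset, fh):
    value = self._eval_tensor_if_needed(path)
    data = str(value).encode('utf-8')
    return data[offset:offset + size]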
class TfFs(fuse.Operations):
    def __init__(self, mount_point, model_path):
        self._graph, self._session = _load_model(model_path)
        self._files = {}
        self._bin_scripts = {}
        self._tensor_values = {}
        now = time()
        self._files['/'] = _create_dir(now)
        self._files['/bin'] = _create_dir(now)
        self._populate_bin(mount_point)
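The class would typically be mounted with fusepy's FUSE wrapper. A minimal sketch, where the mount point and model path are placeholder assumptions:

# Hypothetical mounting code, not part of the gist; assumes the fusepy package.
from fuse import FUSE

if __name__ == '__main__':
    mount_point = '/tmp/tf'            # assumed mount point
    model_path = '/path/to/model'      # assumed directory containing a .meta file
    FUSE(TfFs(mount_point, model_path), mount_point, foreground=True)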
def _load_model(model_path):
    """
    Load a tensorflow model from the given path.
    It's assumed the path is either a directory containing a .meta file, or the .meta file itself.
    If there's also a file containing the weights with the same name as the .meta file
    (without the .meta extension), it'll be loaded as well.
    """
    if os.path.isdir(model_path):
        meta_filename = [filename for filename in os.listdir(model_path) if filename.endswith('.meta')]
        assert len(meta_filename) == 1, 'expecting to get a .meta file or a directory containing a .meta file'
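The preview stops here. A minimal sketch of how such a loader could continue, assuming the standard tf.train.import_meta_graph / Saver.restore workflow and a checkpoint named like the .meta file; this continuation is an assumption, not the gist's code:

# Assumed continuation, not the original implementation.
import os
import tensorflow as tf

def _load_model_sketch(model_path):
    # resolve the .meta file, whether a directory or a file path was given
    if os.path.isdir(model_path):
        meta_names = [f for f in os.listdir(model_path) if f.endswith('.meta')]
        assert len(meta_names) == 1, 'expecting exactly one .meta file'
        meta_path = os.path.join(model_path, meta_names[0])
    else:
        meta_path = model_path

    graph = tf.Graph()
    session = tf.Session(graph=graph)
    with graph.as_default():
        saver = tf.train.import_meta_graph(meta_path)
        # the weights checkpoint shares the .meta file's name, minus the extension
        checkpoint_prefix = meta_path[:-len('.meta')]
        if os.path.exists(checkpoint_prefix + '.index'):
            saver.restore(session, checkpoint_prefix)
    return graph, session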
#!/bin/sh
while read local_ref local_sha remote_ref remote_sha
do
    if [ "$remote_ref" = "refs/heads/source" ]
    then
        echo 'pushing output folder (production version) to master...'
        pelican content -o output -s publishconf.py
        echo anotherdatum.com > output/CNAME
        ghp-import output
        git push --no-verify git@github.com:yoel-zeldes/yoel-zeldes.github.io.git gh-pages:master
    fi
done
params = {
    'encoder_layers': [128],               # the encoder will be implemented using a simple feed forward network
    'decoder_layers': [128],               # and so will the decoder (a CNN would be better, but I want to keep the code simple)
    'digit_classification_layers': [128],  # this is for the conditioning. I'll explain it later on
    'activation': tf.nn.sigmoid,           # the activation function used by all sub-networks
    'decoder_std': 0.5,                    # the standard deviation of P(x|z) discussed in the first post
    'z_dim': 10,                           # the dimension of the latent space
    'digit_classification_weight': 10.0,   # this is for the conditioning. I'll explain it later on
    'epochs': 20,
    'batch_size': 100,
}

mnist = input_data.read_data_sets('MNIST_data')
input_size = 28 * 28
num_digits = 10
def encoder(x, layers):
    for layer in layers:
        x = tf.layers.dense(x,
                            layer,
                            activation=params['activation'])
    mu = tf.layers.dense(x, params['z_dim'])
    # the variance is kept positive via exp, with a small epsilon for numerical stability
    var = 1e-5 + tf.exp(tf.layers.dense(x, params['z_dim']))
    return mu, var
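Downstream, a VAE typically samples the latent code from these outputs with the reparameterization trick; a minimal sketch of how encoder's mu and var could be wired up (the placeholder and wiring here are assumptions, not the gist's code):

# Assumed usage sketch of encoder(): sample z with the reparameterization trick
# so the sampling node stays differentiable with respect to mu and var.
x = tf.placeholder(tf.float32, shape=[None, input_size])
mu, var = encoder(x, params['encoder_layers'])
eps = tf.random_normal(tf.shape(mu))   # noise ~ N(0, I)
z = mu + tf.sqrt(var) * eps            # z ~ N(mu, diag(var)), element-wise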