Yoel Zeldes (yoel-zeldes), working on my blog at anotherdatum.com

import numpy as np

model.init_sims(replace=True)  # normalize the word embeddings to have length 1

def neighbors_fnct(node, n_neighbors, dilute_factor):
    # take the n_neighbors * dilute_factor nearest words, keep every dilute_factor-th of them
    return [neighbor for neighbor, _ in model.similar_by_word(
        node, n_neighbors * dilute_factor)][0:-1:dilute_factor]

def euclidean_dist(n1, n2):
    # Euclidean distance between the two words' embedding vectors
    return np.linalg.norm(model.get_vector(n1) - model.get_vector(n2))
from gensim.models import KeyedVectors

model = KeyedVectors.load_word2vec_format(
    fname=word2vec_file_path,
    binary=True,
    limit=100000
)
print(morph('tooth', 'light'))
print(morph('John', 'perfect'))
print(morph('pillow', 'car'))
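The morph function these calls rely on is defined in the full gist, not in this preview. As a hedged illustration only, the helpers above could drive a greedy word-to-word morph like the sketch below; greedy_morph, its parameters, and the greedy strategy are assumptions, not the gist's actual implementation.

def greedy_morph(start, end, n_neighbors=100, dilute_factor=10, max_steps=20):
    # hedged sketch: at each step move to the neighbor closest to the target word
    path = [start]
    current = start
    for _ in range(max_steps):
        if current == end:
            break
        candidates = neighbors_fnct(current, n_neighbors, dilute_factor)
        current = min(candidates, key=lambda w: euclidean_dist(w, end))
        path.append(current)
    return path

print(greedy_morph('tooth', 'light'))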
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
np.random.seed(41)
tf.set_random_seed(41)
%matplotlib inline
n = 400            # number of samples to generate
p_c = 0.5          # class prior (inferred from the name)
p_m = 0.5          # probability of a noisy modality (inferred from the name)
mu_v_0 = 1.0       # visual modality mean for class 0
mu_v_1 = 8.0       # visual modality mean for class 1
mu_v_noise = 17.0  # visual modality mean when it carries only noise
mu_t_0 = 13.0      # textual modality mean for class 0
mu_t_1 = 19.0      # textual modality mean for class 1
mu_t_noise = 10.0  # textual modality mean when it carries only noise
NUM_CLASSES = 2
HIDDEN_STATE_DIM = 1 # using 1 as dimensionality makes it easy to plot z, as we'll do later on
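The training and plotting code below uses arrays x_v, x_t and class labels c that are generated elsewhere in the gist. The following is a hedged sketch of how such synthetic data could be produced from the constants above; the exact sampling scheme is an assumption inferred from the constant names.

c = np.random.binomial(1, p_c, size=n)  # class label per sample
x_v = np.random.normal(np.where(c == 0, mu_v_0, mu_v_1), 1.0)  # visual modality
x_t = np.random.normal(np.where(c == 0, mu_t_0, mu_t_1), 1.0)  # textual modality
# with probability p_m, replace one of the two modalities with pure noise
noisy = np.random.binomial(1, p_m, size=n).astype(bool)
noise_is_visual = np.random.binomial(1, 0.5, size=n).astype(bool)
x_v = np.where(noisy & noise_is_visual,
               np.random.normal(mu_v_noise, 1.0, size=n), x_v)
x_t = np.where(noisy & ~noise_is_visual,
               np.random.normal(mu_t_noise, 1.0, size=n), x_t)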
visual = tf.placeholder(tf.float32, shape=[None])
textual = tf.placeholder(tf.float32, shape=[None])
target = tf.placeholder(tf.int32, shape=[None])

h_v = tf.layers.dense(tf.reshape(visual, [-1, 1]),
                      HIDDEN_STATE_DIM,
                      activation=tf.nn.tanh)
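Only the visual branch appears in this preview, while the mesh evaluation further down runs the tensors z and prob. Here is a hedged sketch of the missing pieces, with a symmetric textual branch, a fused hidden state and a softmax classifier; the exact fusion and classifier are assumptions, not necessarily the gist's architecture.

h_t = tf.layers.dense(tf.reshape(textual, [-1, 1]),
                      HIDDEN_STATE_DIM,
                      activation=tf.nn.tanh)
z = tf.layers.dense(tf.concat([h_v, h_t], axis=1),
                    HIDDEN_STATE_DIM,
                    activation=tf.nn.tanh)  # fused hidden state of both modalities
logits = tf.layers.dense(z, NUM_CLASSES)
prob = tf.nn.softmax(logits)[:, 1]  # predicted probability of class 1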
sess = tf.Session()

def train(train_op, loss):
    sess.run(tf.global_variables_initializer())
    losses = []
    for epoch in range(100):
        _, l = sess.run([train_op, loss], {visual: x_v,
                                           textual: x_t,
                                           target: c})
        losses.append(l)
    return losses
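train expects a loss and train_op that are defined elsewhere in the gist. Below is a hedged sketch of one plausible definition, reusing the logits from the sketch above; the cross-entropy objective and the Adam optimizer with this learning rate are assumptions.

loss = tf.reduce_mean(
    tf.nn.sparse_softmax_cross_entropy_with_logits(labels=target, logits=logits))
train_op = tf.train.AdamOptimizer(0.01).minimize(loss)

losses = train(train_op, loss)
plt.plot(losses)  # inspect convergence of the training loss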
# create a mesh of points which will be used for inference
resolution = 1000
vs = np.linspace(x_v.min(), x_v.max(), resolution)
ts = np.linspace(x_t.min(), x_t.max(), resolution)
vs, ts = np.meshgrid(vs, ts)
vs = np.ravel(vs)
ts = np.ravel(ts)
zs, probs = sess.run([z, prob], {visual: vs, textual: ts})
def plot_evaluations(evaluation, cmap, title, labels):
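    # NOTE: the gist preview cuts off here; the body below is a hedged sketch
    # of a plausible implementation (the contour plot, colorbar and axis
    # labels are assumptions, not the gist's actual code).
    plt.title(title)
    plt.contourf(vs.reshape(resolution, resolution),
                 ts.reshape(resolution, resolution),
                 evaluation.reshape(resolution, resolution),
                 cmap=cmap)
    cbar = plt.colorbar(ticks=[evaluation.min(), evaluation.max()])
    cbar.ax.set_yticklabels(labels)
    plt.xlabel('visual modality')
    plt.ylabel('textual modality')
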
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
np.random.seed(42)
tf.set_random_seed(42)
%matplotlib inline
BATCHS_IN_EPOCH = 100
BATCH_SIZE = 10
EPOCHS = 200 # the stream is infinite, so one epoch is defined as BATCHS_IN_EPOCH * BATCH_SIZE samples
GENERATOR_TRAINING_FACTOR = 10 # for every discriminator training step we'll train the generator 10 times
LEARNING_RATE = 0.0007
TEMPERATURE = 0.001 # we use a constant, but for harder problems we should anneal it
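The comment above notes that harder problems call for annealing the temperature rather than keeping it constant. Below is a hedged sketch of what an exponential annealing schedule could look like; the schedule, its constants and the placeholder-based feeding are assumptions, not the gist's code.

INITIAL_TEMPERATURE = 1.0
MIN_TEMPERATURE = 0.001
TEMPERATURE_DECAY = 0.97

temperature_ph = tf.placeholder(tf.float32, shape=[])  # fed with a decaying value each epoch

def annealed_temperature(epoch):
    # exponential decay, clipped at a small minimum value
    return max(MIN_TEMPERATURE, INITIAL_TEMPERATURE * TEMPERATURE_DECAY ** epoch)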