@arogozhnikov
Created August 3, 2015 01:49
Very minimal Gaussian-Bernoulli RBM using batch learning
import numpy
from scipy.special import expit


class GRBM(object):
    """Minimal Gaussian-Bernoulli RBM trained with full-batch updates.

    The last row/column of W holds the bias terms: data and hidden vectors
    are extended with a constant unit fixed to 1.
    """

    def __init__(self, n_vis, n_hid, std=0.1):
        self.std = std  # stored for reference; not used in this minimal version
        self.W = numpy.random.normal(loc=-1. / n_hid, scale=0.1, size=[n_vis + 1, n_hid + 1])

    def hidden_p(self, data):
        # probabilities of hidden units given (extended) visible data
        result = expit(data.dot(self.W))
        result[:, -1] = 1.  # bias unit is always on
        return result

    def sample_hiddens(self, data):
        # sample binary hidden states from their Bernoulli probabilities
        p = self.hidden_p(data)
        return numpy.random.random(size=p.shape) < p

    def sample_expected(self, hiddens):
        # expected (Gaussian mean) visible values given hidden states
        expected = hiddens.dot(self.W.T)
        expected[:, -1] = 1.  # bias unit is always on
        return expected

    def transform(self, data):
        # hidden activation probabilities for raw data, without the bias column
        return self.hidden_p(self.extend_data(data))[:, :-1]

    def extend_data(self, data):
        # append a constant column of ones that plays the role of the bias
        return numpy.concatenate([data, numpy.ones([len(data), 1])], axis=1)

    def train(self, data, epochs=1, rate=0.1):
        data = self.extend_data(data)
        for epoch in range(epochs):
            hiddens = self.sample_hiddens(data)
            expected = self.sample_expected(hiddens)
            print('mse =', numpy.mean((data - expected) ** 2.))
            # full-batch weight update driven by the reconstruction error
            self.W += (data - expected).T.dot(hiddens) * (rate / len(data))
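

# Minimal usage sketch (not part of the original gist): trains the RBM on
# synthetic Gaussian data and extracts hidden representations. The data shape,
# hyperparameters, and standardization step below are illustrative assumptions.
if __name__ == '__main__':
    numpy.random.seed(42)
    # 500 samples with 20 visible features; Gaussian-Bernoulli RBMs expect
    # roughly standardized inputs, so the synthetic data is z-scored.
    X = numpy.random.normal(size=[500, 20])
    X = (X - X.mean(axis=0)) / X.std(axis=0)

    rbm = GRBM(n_vis=20, n_hid=10)
    rbm.train(X, epochs=20, rate=0.01)

    # hidden-unit activation probabilities, shape [500, 10]
    hidden_repr = rbm.transform(X)
    print('hidden representation shape:', hidden_repr.shape)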