import theano
import theano.tensor as T
def sigmoid(x):
    # elementwise logistic sigmoid (defined here but not used below)
    return 1.0 / (1 + T.exp(-x))
x = T.vector('x')                    # input vector
W = T.matrix('W')                    # weight matrix
y = T.dot(x, W)                      # linear transform
sf = T.nnet.softmax(y)               # softmax class probabilities (row matrix)
true_dist = T.ivector('true_dist')   # target class indices
loss = T.mean(T.nnet.categorical_crossentropy(sf, true_dist))
v = T.scalar('v')  # evaluation point for the L-operator; must match the shape of loss, which is a scalar
# Jacobian of the scalar loss w.r.t. W, i.e. the gradient dloss/dW
gl = T.jacobian(loss, W)
f = theano.function([x, W, true_dist], [gl])
print(f([-1, 1], [[-1, 1], [1, 1]], [0]))
# f = theano.function([x, W], [sf])
# print(f([-1, 1], [[-1, 1], [1, 1]]))
# causes errors
# L-operator: left-multiply the Jacobian of loss w.r.t. W by v.
# Since loss is a scalar, the result is simply v * dloss/dW.
VJ = T.Lop(loss, W, v)
f2 = theano.function([x, W, true_dist, v], [VJ])
print(f2([-1, 1], [[-1, 1], [1, 1]], [0], 2.0))
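# Sanity-check sketch (assumption: Lop on a scalar loss equals scaling its gradient);
# `g_check` and `f3` are illustrative names not taken from the snippet above.
g_check = v * T.grad(loss, W)
f3 = theano.function([x, W, true_dist, v], [g_check])
print(f3([-1, 1], [[-1, 1], [1, 1]], [0], 2.0))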