View gist:dfec9e3c9ed684a5c283
# Rasterize the PDF to PNG at 300 DPI with ImageMagick. The backslashes
# escape the literal brackets and spaces in the filename for the shell.
convert -density 300 \[TensorFlow\]\ Sequence-to-Sequence\ Models.pdf TensorFlow/output.png
View compress_pdf.md

# Compress a PDF with Ghostscript: rewrite input.pdf as output.pdf using the
# /screen preset (lowest quality / smallest size; images downsampled to ~72 dpi).
# -dNOPAUSE -dBATCH -dQUIET make the run fully non-interactive and silent.
gs -sDEVICE=pdfwrite -dCompatibilityLevel=1.4 -dPDFSETTINGS=/screen -dNOPAUSE -dQUIET -dBATCH -sOutputFile=output.pdf input.pdf

View invert_alexnet_conv5_deploy.prototxt
# Data-input layer of a CaffeNet deploy net (fragment — the preview is cut
# off before the layer is closed, so braces below are intentionally unbalanced).
name: "CaffeNet"
layers {
name: "data"
type: DATA
top: "data"
data_param {
# NOTE(review): site-specific path to an ILSVRC2012 validation LevelDB —
# must be adjusted for any other machine.
source: "/misc/lmbraid10/dosovits/Datasets/ILSVRC2012/all/val_leveldb"
backend: LEVELDB
batch_size: 16
# 227x227 crops — the standard AlexNet/CaffeNet input size.
crop_size: 227
View 128x128_train.prototxt
# Data-input layer of a 128x128 training net (fragment — the preview is cut
# off before the layer is closed, so braces below are intentionally unbalanced).
name: "CaffeNet"
layers {
name: "data"
type: DATA
# Emits both images and labels, so this is a training/eval definition.
top: "data"
top: "label"
data_param {
# Placeholder to be substituted with the local dataset root.
source: "@YOUR_PATH_TO_DATA@/chairs_128x128_reduced/data-lmdb"
batch_size: 64
# 1/256: scales 8-bit pixel values into [0, 1).
scale: 0.00390625
View incremental_history_search.txt
An extremely handy tool: incremental history searching
In a terminal, enter:
gedit ~/.inputrc
Then copy and paste the following lines, and save the file:
"\e[A": history-search-backward
"\e[B": history-search-forward
View cuda library path
# Prepend the CUDA runtime libraries to the dynamic linker's search path
# so CUDA-linked binaries can resolve libcudart & friends at run time.
export LD_LIBRARY_PATH=/usr/local/cuda/lib64:$LD_LIBRARY_PATH
View Pull from Recent Matconvnet
# Register the upstream MatConvNet repository as remote "matconvnet",
# tracking only its 'dag' branch, then merge that branch into the
# current checkout.
git remote add -t dag matconvnet https://github.com/vlfeat/matconvnet
git pull matconvnet dag
View git_submodules.md

# Push the superproject; any submodule commits it references that are not
# yet on the submodules' remotes are pushed first automatically.
git push --recurse-submodules=on-demand

View matconvnet_compile.txt
% From the MatConvNet root: put the MATLAB bindings on the path, then build
% the library with GPU support, compiling CUDA kernels via nvcc and linking
% against a cuDNN installation unpacked under local/.
addpath matlab
vl_compilenn('enableGPU', 1, 'cudaRoot', '/usr/local/cuda', 'cudaMethod', 'nvcc', 'enableCudnn', 1, 'cudnnRoot', 'local/');
View neural11lines.py
X = np.array([ [0,0,1],[0,1,1],[1,0,1],[1,1,1] ])
y = np.array([[0,1,1,0]]).T
syn0 = 2*np.random.random((3,4)) - 1
syn1 = 2*np.random.random((4,1)) - 1
for j in xrange(60000):
l1 = 1/(1+np.exp(-(np.dot(X,syn0))))
l2 = 1/(1+np.exp(-(np.dot(l1,syn1))))
l2_delta = (y - l2)*(l2*(1-l2))
l1_delta = l2_delta.dot(syn1.T) * (l1 * (1-l1))
syn1 += l1.T.dot(l2_delta)