
Eben Olson (ebenolson)

  • Presco Engineering
  • Woodbridge, CT
@ebenolson
ebenolson / prebuilt_startup.sh
Last active November 10, 2015 00:26
gpu demo box setup.sh
#!/bin/bash
# update the tutorial materials
cd ~/pydata2015
git pull
# activate the py27 conda environment
source ~/miniconda2/bin/activate py27
# register with landing page
curl --form ip="`curl http://169.254.169.254/latest/meta-data/public-ipv4`" http://pydata.ebenolson.com/addip
# start notebook server
ipython notebook --ip="*"
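For clarity, the nested curl above does two things: it asks the EC2 instance metadata service for the box's public IP, then POSTs it as the form field "ip" to the landing page. The same step in Python (endpoint and field name taken from the script; everything else is a sketch, not part of the gist):

import requests

# fetch this instance's public IP from the EC2 metadata service
public_ip = requests.get(
    'http://169.254.169.254/latest/meta-data/public-ipv4').text
# register it with the landing page
requests.post('http://pydata.ebenolson.com/addip', data={'ip': public_ip})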
@ebenolson
ebenolson / preseed.cfg
Created October 14, 2015 18:00
packer ubuntu 14.04 esxi
choose-mirror-bin mirror/http/proxy string
d-i base-installer/kernel/override-image string linux-server
d-i clock-setup/utc boolean true
d-i clock-setup/utc-auto boolean true
d-i finish-install/reboot_in_progress note
d-i grub-installer/only_debian boolean true
d-i grub-installer/with_other_os boolean true
d-i partman-auto-lvm/guided_size string max
d-i partman-auto/choose_recipe select atomic
d-i partman-auto/method string lvm
Subtensor{::, ::, int64:int64:} [@A] ''
|InplaceDimShuffle{0,1,3} [@B] ''
| |ConvOp{('imshp', (1, 1, 8)),('kshp', (1, 3)),('nkern', 1),('bsize', 1),('dx', 1),('dy', 1),('out_mode', 'full'),('unroll_batch', None),('unroll_kern', None),('unroll_patch', True),('imshp_logical', (1, 1, 8)),('kshp_logical', (1, 3)),('kshp_logical_top_aligned', True)} [@C] ''
| |InplaceDimShuffle{0,1,x,2} [@D] ''
| | |Subtensor{::, ::, int64:int64:} [@E] ''
| | |InplaceDimShuffle{0,1,3} [@F] ''
| | | |ConvOp{('imshp', (1, 1, 8)),('kshp', (1, 3)),('nkern', 1),('bsize', 1),('dx', 1),('dy', 1),('out_mode', 'full'),('unroll_batch', None),('unroll_kern', None),('unroll_patch', True),('imshp_logical', (1, 1, 8)),('kshp_logical', (1, 3)),('kshp_logical_top_aligned', True)} [@G] ''
| | | |InplaceDimShuffle{0,1,x,2} [@H] ''
| | | | |Subtensor{::, ::, int64:int64:} [@I] ''
import numpy as np
import theano
import theano.tensor as T
X = T.tensor3()
W = theano.shared(np.ones((1,1,1,3)).astype('float32'))
W_shape = (1, 1, 1, 3)
input = X
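The preview stops here. As a speculative sketch (not the gist's actual code), an expression of the following kind reproduces the nested DimShuffle/ConvOp/Subtensor pattern in the debugprint above: a 1D "same" convolution built from conv2d by inserting a dummy height axis, convolving in 'full' mode, then dropping the axis and trimming the padding, applied twice. Shapes and the double application are assumptions for illustration.

# lift the 3D input to 4D so it can go through a 2D convolution
X4 = input.dimshuffle(0, 1, 'x', 2)              # (batch, channels, 1, length)
conv = T.nnet.conv2d(X4, W, border_mode='full')
out = conv[:, :, 0, :][:, :, 1:-1]               # drop dummy axis, trim 'full' padding
# feed the result through the same 1D convolution a second time
out4 = out.dimshuffle(0, 1, 'x', 2)
out = T.nnet.conv2d(out4, W, border_mode='full')[:, :, 0, :][:, :, 1:-1]
theano.printing.debugprint(out)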
@ebenolson
ebenolson / bvlc_googlenet.py
Created July 16, 2015 15:31
BVLC GoogleNet translated to Lasagne
from lasagne.layers import InputLayer
from lasagne.layers import DenseLayer
from lasagne.layers import ConcatLayer
from lasagne.layers import NonlinearityLayer
from lasagne.layers import GlobalPoolLayer
from lasagne.layers.dnn import Conv2DDNNLayer as ConvLayer
from lasagne.layers.dnn import MaxPool2DDNNLayer as PoolLayerDNN
from lasagne.layers import MaxPool2DLayer as PoolLayer
from lasagne.layers import LocalResponseNormalization2DLayer as LRNLayer
from lasagne.nonlinearities import softmax, linear
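The preview shows only the imports. Purely as an illustrative sketch (the filter counts, pad values, and the inception_block helper are assumptions, not the gist's code), these layer classes combine into an inception-style block roughly like this: parallel 1x1, 3x3, and 5x5 convolutions plus a pooled branch, concatenated along the channel axis.

def inception_block(incoming, n1x1=64, n3x3=96, n5x5=16, n_pool=32):
    # four parallel branches over the same input
    branch_1x1 = ConvLayer(incoming, n1x1, 1)
    branch_3x3 = ConvLayer(incoming, n3x3, 3, pad=1)
    branch_5x5 = ConvLayer(incoming, n5x5, 5, pad=2)
    branch_pool = ConvLayer(PoolLayerDNN(incoming, 3, stride=1, pad=1), n_pool, 1)
    # concatenate along the channel axis (axis=1 by default)
    return ConcatLayer([branch_1x1, branch_3x3, branch_5x5, branch_pool])

# usage sketch: a small stem followed by one block (input size is an assumption)
net = InputLayer((None, 3, 224, 224))
net = ConvLayer(net, 64, 7, stride=2, pad=3)
net = PoolLayer(net, 3, stride=2, ignore_border=False)
net = inception_block(net)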
import numpy as np
import gc
import memory_profiler
import theano
import theano.tensor as T
from theano.sandbox.cuda.dnn import dnn_conv
X = T.tensor4()
W = T.tensor4()
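A speculative continuation (not the gist's actual benchmark): compile a cuDNN convolution over the two symbolic inputs and watch host memory with memory_profiler. The shapes and repeat count are assumptions, and a CUDA/cuDNN-enabled Theano install is required for dnn_conv.

@memory_profiler.profile
def run_once():
    out = dnn_conv(X, W)
    f = theano.function([X, W], out)
    x = np.random.rand(16, 3, 64, 64).astype('float32')
    w = np.random.rand(32, 3, 3, 3).astype('float32')
    f(x, w)
    del f
    gc.collect()

if __name__ == '__main__':
    for _ in range(3):   # repeated calls make any leak show up in the report
        run_once()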
@ebenolson
ebenolson / model.prototxt
Last active August 29, 2015 14:24 — forked from mavenlin/readme.md
Network in Network CIFAR10
name: "CIFAR10_full"
input: "data"
input_shape {
dim: 1
dim: 3
dim: 32
dim: 32
}
layers {
name: "conv1"
require 'optim'
cj = require('cjson')
a = torch.Tensor({0.1,0.2,0.3})
x0 = torch.Tensor({1, 1, 1})
function f(x)
return torch.sum(torch.cmul(a, torch.pow(x, 2)))
end
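For clarity, the same objective in NumPy: f(x) = sum_i a_i * x_i**2, whose gradient is 2 * a * x. The Lua snippet above presumably hands f and this gradient to an optimizer from the optim package (an assumption); the Python sketch below just checks the math with plain gradient descent.

import numpy as np

a = np.array([0.1, 0.2, 0.3])
x = np.array([1.0, 1.0, 1.0])

def f(x):
    return np.sum(a * x ** 2)

def grad(x):
    return 2 * a * x

# a few gradient-descent steps toward the minimum at x = 0
for _ in range(100):
    x = x - 0.1 * grad(x)
print(f(x))  # approaches 0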
# Usage: experiment.sh SOURCE_FILE LOG_COMMIT_MESSAGE
if [ $# -eq 0 ]
then
echo "Usage: experiment.sh SOURCE_FILE [LOG_COMMIT_MESSAGE]"
exit 1
fi
if [ $# -eq 2 ]
then
from lasagne.layers import Layer
class HighwayLayer(Layer):
def __init__(self, incoming, layer_class, gate_nonlinearity=None,
**kwargs):
super(HighwayLayer, self).__init__(incoming)
self.H_layer = layer_class(incoming, **kwargs)
if gate_nonlinearity:
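The preview cuts off inside the constructor. For orientation, here is a minimal self-contained sketch of the highway idea the class wraps (Srivastava et al., 2015): a transform path H and a sigmoid gate T are computed from the same input and combined as t * H(x) + (1 - t) * x. The class below is an illustrative assumption restricted to DenseLayer, not the gist's HighwayLayer.

from lasagne.layers import Layer, DenseLayer
from lasagne.nonlinearities import sigmoid


class SimpleHighwayDense(Layer):
    """Illustrative highway layer: y = t * H(x) + (1 - t) * x.

    The input dimensionality must equal num_units so the carry path lines up.
    """
    def __init__(self, incoming, num_units):
        super(SimpleHighwayDense, self).__init__(incoming)
        self.H = DenseLayer(incoming, num_units)                    # transform path
        self.T_gate = DenseLayer(incoming, num_units,
                                 nonlinearity=sigmoid)              # gate path

    def get_params(self, **tags):
        # expose the sub-layers' parameters so they get trained
        return self.H.get_params(**tags) + self.T_gate.get_params(**tags)

    def get_output_shape_for(self, input_shape):
        return self.H.get_output_shape_for(input_shape)

    def get_output_for(self, input, **kwargs):
        h = self.H.get_output_for(input, **kwargs)
        t = self.T_gate.get_output_for(input, **kwargs)
        return t * h + (1 - t) * input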