"""
debug tools for theano
Ishaan Gulrajani
"""
import numpy as np
import theano
from theano import gof
class DebugOp(gof.Op):
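The preview above cuts off at the class definition. As a minimal sketch only (the body below is an assumption, not the gist's actual code), a Theano debugging Op is usually written as an identity Op whose perform method prints statistics of whatever flows through it:

import theano
import theano.tensor as T
from theano import gof

class PrintStatsOp(gof.Op):
    # hypothetical identity Op: prints statistics of its input every time
    # the compiled function evaluates this node, then passes the value through
    __props__ = ('message',)

    def __init__(self, message=''):
        self.message = message

    def make_node(self, x):
        x = T.as_tensor_variable(x)
        return gof.Apply(self, [x], [x.type()])

    def perform(self, node, inputs, output_storage):
        x, = inputs
        print(self.message, 'shape:', x.shape,
              'min/mean/max:', x.min(), x.mean(), x.max())
        output_storage[0][0] = x

# usage: wrap any intermediate tensor, e.g. y = PrintStatsOp('after layer 1')(x)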
igul222 / gist:765668b05b6cf20e0ebb522959d52a99
Created May 18, 2016 14:24
1D masked convolutions, a la Pixel RNN
def Conv1D(name, input_dim, output_dim, filter_size, inputs, mask_type=None, he_init=False):
    """
    inputs.shape: (batch size, input_dim, 1, width)
    mask_type: None, 'a', 'b'
    output.shape: (batch size, output_dim, 1, width)
    """
    if mask_type is not None:
        mask = numpy.ones(
            (output_dim, input_dim, 1, filter_size),
_baseslice = slice

class Slice(object):
    def __init__(self, _tensor):
        self.tensor = _tensor

    def __getitem__(self, slice_spec):
        if not isinstance(slice_spec, (list, tuple)):
            slice_spec = [slice_spec]
        indices = []
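The Conv1D preview above cuts off where the filter mask is built. For reference, a PixelRNN-style 1D mask over a (output_dim, input_dim, 1, filter_size) filter is typically completed as below; the dtype, the helper name make_conv1d_mask, and the exact cutoff logic are assumptions, not necessarily the gist's code:

import numpy
import theano

def make_conv1d_mask(output_dim, input_dim, filter_size, mask_type):
    # filter layout matches the docstring above: (output_dim, input_dim, 1, width)
    mask = numpy.ones(
        (output_dim, input_dim, 1, filter_size),
        dtype=theano.config.floatX
    )
    center = filter_size // 2
    # both mask types hide taps strictly to the right of the current position
    mask[:, :, :, center + 1:] = 0.
    # type 'a' also hides the center tap, so the first layer cannot see the
    # value it is predicting; type 'b' keeps it for later layers
    if mask_type == 'a':
        mask[:, :, :, center] = 0.
    return mask

The mask is multiplied elementwise into the filter weights before the convolution, which is what makes the convolution causal along the width axis.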
igul222 / gist:9da6e9094b94e7db55c2
Created January 13, 2016 22:39
Language model training with batchnorm
train summary epoch:0 n_inputs:21888 cost:1.72562180904
dev summary epoch:0 n_inputs:1152 cost:1.67490259806
train summary epoch:1 n_inputs:21888 cost:1.49495576557
dev summary epoch:1 n_inputs:1152 cost:1.57830005222
train summary epoch:2 n_inputs:21888 cost:1.4162766306
dev summary epoch:2 n_inputs:1152 cost:1.52844831679
train summary epoch:3 n_inputs:21888 cost:1.36915802537
dev summary epoch:3 n_inputs:1152 cost:1.50779215495
train summary epoch:4 n_inputs:21888 cost:1.33636599535
dev summary epoch:4 n_inputs:1152 cost:1.4916501575
igul222 / gist:802604105d1cebe9705c
Created January 13, 2016 22:38
Big language model training without batchnorm
Epoch 0 7132.9565351009s train cost 1.6981946635 correct 1.6981946635 dev cost 1.6462526276 correct 1.6462526276
Epoch 1 7147.1999368668s train cost 1.4829269145 correct 1.4829269145 dev cost 1.5761406715 correct 1.5761406715
Epoch 2 7138.0306470394s train cost 1.4255136971 correct 1.4255136971 dev cost 1.5523425980 correct 1.5523425980
Epoch 3 7127.6019258499s train cost 1.3985411839 correct 1.3985411839 dev cost 1.5382623590 correct 1.5382623590
Epoch 4 7125.7831079960s train cost 1.3843282114 correct 1.3843282114 dev cost 1.5298263626 correct 1.5298263626
Epoch 5 7124.6437451839s train cost 1.3768820110 correct 1.3768820110 dev cost 1.5249164810 correct 1.5249164810
Epoch 6 7126.9552760124s train cost 1.3730651317 correct 1.3730651317 dev cost 1.5251269255 correct 1.5251269255
Epoch 7 7125.7269859314s train cost 1.3717070357 correct 1.3717070357 dev cost 1.5228043980 correct 1.5228043980
Epoch 8 7125.5283191204s train cost 1.3718353560 correct 1.3718353560 dev cost 1.5230966063 correct 1.5230966063
Epoch 9
igul222 / ctc.py
Last active March 22, 2016 09:36
Theano CTC implementation with performance and numerical stability optimizations
# Author: Ishaan Gulrajani
# License: BSD 3-clause
import numpy
import theano
import theano.tensor as T
# log(0) = -infinity, but this leads to
# NaN errors in log_add and elsewhere,
# so we'll just use a large negative value.
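The comment describes the standard fix for log-domain arithmetic: replace log(0) with a large but finite negative constant and add probabilities with the log-sum-exp trick so the exponentials never overflow. A minimal sketch, assuming Theano tensors (only the name log_add comes from the comment above; the constant and the body are illustrative):

import theano.tensor as T

LOG_ZERO = -1e30  # finite stand-in for log(0); the gist's actual constant may differ

def log_add(a, b):
    # log(exp(a) + exp(b)), computed stably by factoring out the larger term
    m = T.maximum(a, b)
    return m + T.log1p(T.exp(T.minimum(a, b) - m))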
# Balanced binary tree of height 2. Boring, I know...
node_count = 7
edges = [
    (0, 1),
    (0, 2),
    (1, 3),
    (1, 4),
    (2, 5),
    (2, 6)
]

class Node(object):
    def __init__(self, _children):
        self.memo = None
        self.children = _children

dest = Node([])
source = Node([
    dest,                  # source -> dest
    Node([dest]),          # source -> intermediate -> dest
    Node([Node([dest])]),  # source -> a -> b -> dest
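The memo field above suggests memoized recursion over this graph. Purely as an illustration of how it might be used (count_paths is a hypothetical helper, not from the gist):

def count_paths(node):
    # number of distinct paths from `node` down to a childless node,
    # cached in the memo field so each node is expanded only once
    if node.memo is not None:
        return node.memo
    if not node.children:
        node.memo = 1
    else:
        node.memo = sum(count_paths(child) for child in node.children)
    return node.memo

If the source list above closes with just those three children, count_paths(source) returns 3, one per commented route.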
Moves =
  start: () ->
    {type: 'start'}

class Players
  constructor: (@game) ->
  @teams: [
    active: true
    players: []

class Game
  @XMax: 25
  @YMax: 15
  @DotsPerPlayer: 2
  @FunctionAnimationSpeed: 0.005 # graph units per ms
  @DotRadius: 1
  @TurnTime: 60000 # ms
  @ObstacleCount: 10
  @ObstacleRadiusMin: 0
  @ObstacleRadiusMax: 5