Skip to content

Instantly share code, notes, and snippets.

View skaae's full-sized avatar

Søren Kaae Sønderby skaae

View GitHub Profile
class GruDenseLayer(lasagne.layers.Layer):
    """Skeleton of a GRU-style dense layer for Lasagne.

    The original paste lost all indentation; structure is restored here.

    NOTE(review): this is a gist *preview* and appears truncated — the gate
    weight/bias parameters are accepted but never used in the visible part of
    ``__init__``. Presumably the full source registers them via
    ``self.add_param``; confirm against the complete gist before relying on it.

    Parameters
    ----------
    incoming : Layer instance or shape tuple
        The layer feeding into this layer.
    num_units : int
        Number of hidden units (unused in the visible portion).
    b_resetgate, b_updategate, b_hidden_update : initializer or None
        Bias initializers for the three GRU gates (``None`` = no bias).
    W_resetgate, W_updategate, W_hidden_update : initializer
        Weight initializers for the three GRU gates.
    """

    def __init__(self, incoming, num_units,
                 b_resetgate=None,
                 b_updategate=None,
                 b_hidden_update=None,
                 W_resetgate=init.GlorotUniform(),
                 W_updategate=init.GlorotUniform(),
                 W_hidden_update=init.GlorotUniform(),
                 **kwargs):
        # Let the Lasagne base class record the incoming layer/shape.
        super(GruDenseLayer, self).__init__(incoming, **kwargs)
@skaae
skaae / ex5.py
Created September 23, 2015 07:59
# Copy lines 2-5 of the user's .bashrc into junk.txt.
with open("/home/lpp/.bashrc", 'r') as f:
    lines = f.readlines()

with open('junk.txt', 'w') as f:
    # BUG FIX: the original line was missing both closing parentheses:
    #   f.write("".join(lines[1:5]
    # lines[1:5] selects the 2nd through 5th lines (0-based slicing).
    f.write("".join(lines[1:5]))
####### OR #######
import numpy as np
import theano
import theano.tensor as T
import lasagne.nonlinearities as nonlinearities
import lasagne.init as init
from lasagne.utils import unroll_scan
from lasagne.layers import *
import lasagne.layers.helper as helper
@skaae
skaae / MyConfusionMatrix.Lua
Created August 8, 2015 16:19
confusion matrix for torch
--[[ A Confusion Matrix class
Example:
conf = optim.ConfusionMatrix( {'cat','dog','person'} ) -- new matrix
conf:zero() -- reset matrix
for i = 1,N do
conf:add( neuralnet:forward(sample), label ) -- accumulate errors
end
print(conf) -- print matrix
Function profiling
==================
Message: /home/soren/Documents/experiments/TRANSFORMER_NET/grutranstest.py:271
Time in 5 calls to Function.__call__: 1.034791e+01s
Time in Function.fn.__call__: 1.034693e+01s (99.991%)
Time in thunks: 1.022032e+01s (98.767%)
Total compile time: 4.257923e+01s
Number of Apply nodes: 1224
Theano Optimizer time: 1.321189e+01s
Theano validate time: 8.471053e-01s
class ElemwiseMergeLayer(MergeLayer):
"""
This layer performs an elementwise merge of its input layers.
It requires all input layers to have the same output shape.
Parameters
-----------
incomings : a list of :class:`Layer` instances or tuples
the layers feeding into this layer, or expected input shapes,
with all incoming shapes being equal
import scipy.io
import lasagne
import theano
import theano.tensor as T
import numpy as np
import time
import logging
logger = logging.getLogger('')
def get_output_for(self, input, mask=None, **kwargs):
"""
Compute this layer's output function given a symbolic input variable
Parameters
----------
input : theano.TensorType
Symbolic input variable.
mask : theano.TensorType
Theano variable denoting whether each time step in each

lasagne.updates

lasagne.updates

The update functions implement different methods to control the learning rate for use with stochastic gradient descent.

Update functions take a loss expression or a list of gradient expressions and a list of parameters as input and return an ordered dictionary of updates:

import numpy as np
class ConfusionMatrix:
"""
Simple confusion matrix class
row is the true class, column is the predicted class
"""
def __init__(self, n_classes, class_names=None):
self.n_classes = n_classes
if class_names is None:
self.class_names = map(str, range(n_classes))