function issue_814
% demo for @segman
% fix the random seed to get a repeatable result
rng(1) ;
sample = rand(21, 21, 3, 'single') ;
label = [ 1 1 1 1 1 ;
          1 1 1 1 1 ;
          1 2 2 2 1 ;
          2 2 2 2 1 ;
          3 3 3 3 3 ] ;
net = init() ;
net.initParams() ;
net.eval({'data', sample, 'label', label}) ;
loss = net.vars(net.getVarIndex('loss')).value ;
fprintf('loss value is %.2f\n', loss) ;
% printed result: loss value is 49.09
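% Sanity check on the spatial geometry (a minimal sketch added here, not
% part of the original demo): each 5x5 convolution with pad 0 and stride 1
% shrinks each spatial dimension by 4, so the 21x21 input reaches 5x5 after
% the four conv layers, matching the 5x5 label map; the 1x1 layers fc5/fc6
% leave the spatial size unchanged.
sz = 21 ;
for t = 1:4
  sz = sz - (5 - 1) ;  % valid 5x5 conv: out = in - kernel + 1
end
assert(isequal([sz sz], size(label))) ;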
% ------------------------
function net = init()
% ------------------------
net = dagnn.DagNN() ;
numClasses = 3 ;
net = addConvLayer(net, 'x1', 'data', [5 5 3 32]) ;
net = addRelu(net, 'x1x', 'x1') ;
net = addConvLayer(net, 'x2', 'x1x', [5 5 32 64]) ;
net = addRelu(net, 'x2x', 'x2') ;
net = addConvLayer(net, 'x3', 'x2x', [5 5 64 96]) ;
net = addRelu(net, 'x3x', 'x3') ;
net = addConvLayer(net, 'x4', 'x3x', [5 5 96 128]) ;
net = addRelu(net, 'x4x', 'x4') ;
net = addConvLayer(net, 'fc5', 'x4x', [1 1 128 128]) ;
net = addDropout(net, 'fc5d', 'fc5') ;
net = addConvLayer(net, 'fc6', 'fc5d', [1 1 128 numClasses]) ;
net = addDropout(net, 'fc6d', 'fc6') ;
net = addLoss(net, 'loss', {'fc6d', 'label'}, 'softmaxlog') ;
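% Aside (not in the original gist): the DropOut layers are active in
% DagNN's default 'normal' mode, so they inject noise into the loss
% computed above. To evaluate deterministically, one could switch the
% network to test mode before calling eval:
%   net.mode = 'test' ;
%   net.eval({'data', sample, 'label', label}) ;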
% ----------------------------------------------------------
function net = addConvLayer(net, layerName, prev, kernelSize)
% ----------------------------------------------------------
if strcmp(prev, 'data')
  inputs = 'data' ;
else
  inputs = net.layers(net.getLayerIndex(prev)).outputs ;
end
layer = dagnn.Conv('size', kernelSize, 'pad', 0, 'stride', 1, 'hasBias', true) ;
params = {sprintf('%s_1f', layerName), sprintf('%s_1b', layerName)} ;
outputs = layerName ;
net.addLayer(layerName, layer, inputs, outputs, params) ;
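% Note on the parameter names above: they follow a '<layer>_1f' (filters)
% and '<layer>_1b' (biases) convention, and net.initParams() sizes and
% initializes them from the Conv block's 'size' property.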
% --------------------------------------------
function net = addRelu(net, layerName, prev)
% --------------------------------------------
layer = dagnn.ReLU() ;
inputs = net.layers(net.getLayerIndex(prev)).outputs ;
outputs = layerName ;
params = {} ;
net.addLayer(layerName, layer, inputs, outputs, params) ;
% -----------------------------------------------
function net = addDropout(net, layerName, prev)
% -----------------------------------------------
layer = dagnn.DropOut('rate', 0.5) ;
inputs = net.layers(net.getLayerIndex(prev)).outputs ;
outputs = layerName ;
params = {} ;
net.addLayer(layerName, layer, inputs, outputs, params) ;
% --------------------------------------------------------
function net = addLoss(net, layerName, inputs, lossType)
% --------------------------------------------------------
layer = dagnn.Loss('loss', lossType) ;
outputs = layerName ;
params = {} ;
net.addLayer(layerName, layer, inputs, outputs, params) ;
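% Usage sketch (an illustration, not part of the original gist): to obtain
% parameter gradients as well as the loss, pass the derivative of the
% output variable to eval from the main function, e.g.
%   net.eval({'data', sample, 'label', label}, {'loss', 1}) ;
%   dzdw = net.params(net.getParamIndex('x1_1f')).der ;
% where 'x1_1f' is the filter parameter created by addConvLayer above.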