@Wolfy42
Forked from fdomig/ci_4_3.m
Created June 9, 2011 11:27
CI 4_3
%%%
% Feed-forward neural network trained with the back-propagation algorithm.
%
% @param X ... all possible inputs (one training sample per row)
% @param Y ... all expected outputs
% @param T ... the network topology (adjacency matrix, see below)
% @param l ... the iteration limit for the error minimisation
% @returns W ... the trained weight matrix
% @returns Ev ... the error dynamic vector (total error per iteration)
%%
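%
% Topology convention (as used by ForwardProp/PropFunc below):
% T(i,j) == 1 means there is an edge from node i to node j; the input
% nodes occupy the first indices and the output node(s) come last.
% A minimal hypothetical example, a single input node 1 feeding a single
% output node 2, would be T = [0 1; 0 0].
%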
function [W, Ev] = ci_4_3(X, Y, T, l)
  n = length(T);   % total number of nodes in the topology
  W = randn(n,n);  % random initial weights
  nn = 0.9;        % learning rate
  Ev = [];         % total error per iteration
  out = [];        % network output per training sample
  for iterations = 1:l
    E = 0;
    for i = 1:length(Y)
      % forward pass
      y_i = ForwardProp(W, T, X(i,:));
      out(i) = y_i(n);
      E = E + ErrorFunction(y_i(n), Y(i));

      % backward pass: output delta first, then hidden deltas
      w = [];
      k(n) = deltaOutput(y_i(n), Y(i));
      w(:,n) = k(n)*y_i;
      for j = n-size(Y,2):-1:1
        k(j) = deltaHidden(T, W, k, y_i(j), j);
        w(:,j) = k(j)*y_i;
      end;

      % gradient descent step
      W = W - nn*w;
    end;
    Ev = [Ev, E];
    if (mod(iterations, 100) == 0)
      doPlot(Ev, Y, out, iterations);
    end;
  end;
  function y = ForwardProp(W, T, X)
    % seed the activation vector with the inputs; hidden/output start at 0
    y = [X, zeros(1,length(T)-length(X))];
    for i = length(X)+1:length(T)
      y(i) = ActivationFunc(PropFunc(W(:,i), y, T(:,i)), 1);
    end;
  end;
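  % Note: nodes are evaluated in increasing index order, which is valid
  % because every edge in the topologies below points from a lower to a
  % higher node index; e.g. for XOR, hidden nodes 3..6 are computed from
  % inputs 1..2, and then the output node 7 from nodes 3..6.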
  function p = PropFunc(w, y, t)
    p = w' * (t'.*y)';
  end;
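  % Note: PropFunc computes p = sum_j w(j)*t(j)*y(j); the topology column
  % t masks out activations of nodes not wired into the node at hand.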
  function x = ActivationFunc(s, lambda)
    x = 1/(1+exp(-lambda*s));
  end;
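  % Note: the logistic activation; a larger lambda steepens the
  % transition, and ForwardProp above always passes lambda = 1.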
  function e = ErrorFunction(y_i, y)
    e = (y_i - y)^2;
  end;
  function f = F_(y)
    f = y*(1-y);
  end;
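  % Note: F_ is the derivative of the logistic function expressed through
  % its output: for y = 1/(1+exp(-s)), dy/ds = y*(1-y). That is why F_
  % takes the node's activation rather than the raw weighted sum.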
  function k = deltaOutput(y, d)
    k = 2*(y-d)*F_(y);
  end;
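  % Note: 2*(y-d) is the derivative of the squared error (y-d)^2 with
  % respect to y, and F_(y) chains it through the sigmoid, so k is the
  % error derivative at the output node's weighted sum.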
  function k = deltaHidden(T, W, k, y, n)
    s = 0;  % accumulated error signal from all successor nodes
    for j = 1:length(T)
      if (T(n,j) == 1)
        s = s + W(n,j)*k(j);
      end;
    end;
    k = F_(y)*s;
  end;
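  % Note: this implements delta_n = F_(y_n) * sum_j T(n,j)*W(n,j)*delta_j,
  % i.e. the error signal propagated back along the outgoing edges of
  % node n.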
  function doPlot(Ev, Y, out, iterations)
    semilogy(Ev);
    drawnow();
    % show targets vs. outputs for the first four training samples
    printf("\nIteration %d\n", iterations);
    printf("\t%d -> %f\n", Y(1), out(1));
    printf("\t%d -> %f\n", Y(2), out(2));
    printf("\t%d -> %f\n", Y(3), out(3));
    printf("\t%d -> %f\n", Y(4), out(4));
    fflush(stdout);
  end
end;
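
% Usage sketch: the driver script below discards the return values; to
% inspect a trained net one could instead write, e.g. (the names W_xor
% and Ev_xor are illustrative, not part of the original):
%   [W_xor, Ev_xor] = ci_4_3(X, Y, T, l);
%   semilogy(Ev_xor);   % error curve over the training iterations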
l = 1000; % iteration limit
%---------Solving XOR------------
X = [ 0 0 ;
      0 1 ;
      1 0 ;
      1 1 ];
Y = [0; 1; 1; 0];
%---------1 Hidden Layer, 4 Hidden Neurons
% (1)--|-----(3)----|
%      |-----(4)----|
%      |            |---(7)
%      |-----(5)----|
% (2)--|-----(6)----|
T = [0 0 1 1 1 1 0;
     0 0 1 1 1 1 0;
     0 0 0 0 0 0 1;
     0 0 0 0 0 0 1;
     0 0 0 0 0 0 1;
     0 0 0 0 0 0 1;
     0 0 0 0 0 0 0];
ci_4_3(X,Y,T,l);
%---------1 Hidden Layer, 3 Hidden Neurons
% (1)--|-----(3)----|
%      |-----(4)----|---(6)
% (2)--|-----(5)----|
T = [0 0 1 1 1 0;
     0 0 1 1 1 0;
     0 0 0 0 0 1;
     0 0 0 0 0 1;
     0 0 0 0 0 1;
     0 0 0 0 0 0];
ci_4_3(X,Y,T,l);
%---------1 Hidden Layer, 2 Hidden Neurons
% (1)--|-----(3)----|
%      |            |---(5)
% (2)--|-----(4)----|
T = [0 0 1 1 0;
     0 0 1 1 0;
     0 0 0 0 1;
     0 0 0 0 1;
     0 0 0 0 0];
ci_4_3(X,Y,T,l);
%---------Minimal Version NN51
% (1)--|-----------|
%      |           |
% (2)--|-----(4)---|---(5)
%      |           |
% (3)--|-----------|
% the constant third input provides a bias for the minimal network
X = [ 0 0 1;
      0 1 1;
      1 0 1;
      1 1 1 ];
T = [0 0 0 1 1;
     0 0 0 1 1;
     0 0 0 1 1;
     0 0 0 0 1;
     0 0 0 0 0];
ci_4_3(X,Y,T,l);
%---------Solving Classification------------
%---------2 Hidden Layers, 5 Hidden Neurons per Layer
load K40M2-NN.dat
X = K40M2_NN;
% first 20 samples belong to class 1, the remaining 20 to class 0;
% Y must be a column vector so that size(Y,2) equals the number of
% output nodes (see the back-propagation loop above)
Y = [ones(20,1); zeros(20,1)];
% (1)--|----(3)----|----( 8)----|
%      |----(4)----|----( 9)----|
%      |----(5)----|----(10)----|--(13)
%      |----(6)----|----(11)----|
% (2)--|----(7)----|----(12)----|
T = [0 0 1 1 1 1 1 0 0 0 0 0 0;
     0 0 1 1 1 1 1 0 0 0 0 0 0;
     0 0 0 0 0 0 0 1 1 1 1 1 0;
     0 0 0 0 0 0 0 1 1 1 1 1 0;
     0 0 0 0 0 0 0 1 1 1 1 1 0;
     0 0 0 0 0 0 0 1 1 1 1 1 0;
     0 0 0 0 0 0 0 1 1 1 1 1 0;
     0 0 0 0 0 0 0 0 0 0 0 0 1;
     0 0 0 0 0 0 0 0 0 0 0 0 1;
     0 0 0 0 0 0 0 0 0 0 0 0 1;
     0 0 0 0 0 0 0 0 0 0 0 0 1;
     0 0 0 0 0 0 0 0 0 0 0 0 1;
     0 0 0 0 0 0 0 0 0 0 0 0 0];
ci_4_3(X,Y,T,l);
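
% Evaluation sketch (hypothetical; ForwardProp is nested inside ci_4_3,
% so this assumes it were exposed as a standalone function): one could
% classify a sample x by thresholding the output node at 0.5, e.g.
%   y = ForwardProp(Wc, T, x);     % Wc: weights returned by ci_4_3
%   label = y(length(T)) > 0.5;    % 1 -> class 1, 0 -> class 0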