-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathbackprop_out.m
More file actions
47 lines (35 loc) · 1.05 KB
/
backprop_out.m
File metadata and controls
47 lines (35 loc) · 1.05 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
function OUTPUTS = backprop_out(IN, Wt1, Wt2, hidnoise, beta)
% usage OUTPUTS = backprop_out(IN, Wt1, Wt2, hidnoise, beta)
%
% Feedforward pass of a neural net with one hidden layer: produces the
% network's output row for every input row in IN.
%
% IN       - (datarows x inelem) matrix; each row is one input pattern
% Wt1      - hidden-layer weight matrix, one row per hidden node; sized
%            to accept [input pattern; zero block; bias] (see below)
% Wt2      - output-layer weight matrix, one row per output node, with a
%            bias column appended
% hidnoise - optional variance of zero-mean Gaussian noise added to the
%            hidden-layer outputs (default 0.0 = noiseless)
% beta     - optional gain passed through to activation() (default 1.0)
%
% OUTPUTS  - (datarows x noutputs) matrix; row p is the network output
%            for input row p
if nargin < 4, hidnoise = 0.0; end
if nargin < 5, beta = 1.0; end
% dimensions of the data set and the network
[datarows, inelem] = size(IN);
nhidnodes = size(Wt1, 1);
noutputs  = size(Wt2, 1);
% zero block appended to every input vector, sized to the hidden layer
% (NOTE(review): presumably context/blank slots for a recurrent variant
% of this net -- confirm against the training code)
hidzeros = zeros(nhidnodes, 1);
% preallocate rather than growing OUTPUTS each iteration, which would
% copy the whole matrix every pass (accidental O(n^2))
OUTPUTS = zeros(datarows, noutputs);
for p = 1:datarows
    % input pattern as a column vector, padded with the zero block
    A = [IN(p, 1:inelem)'; hidzeros];
    % layer 1: weighted sum of input plus bias node, then activation
    B1 = Wt1 * [A; 1];
    O1 = activation(B1, beta, 0);
    if hidnoise > 0
        % simulate noisy transmission: add zero-mean Gaussian noise
        % with variance hidnoise to the hidden-layer outputs
        O1 = O1 + sqrt(hidnoise) * randn(nhidnodes, 1);
    end
    % layer 2: weighted sum of hidden outputs plus bias node
    B2 = Wt2 * [O1; 1];
    O2 = activation(B2, beta, 0);
    OUTPUTS(p, :) = O2';
end