Skip to content

Commit c114fb2

Browse files
ADD: Neural network lib
1 parent 67bf8df commit c114fb2

File tree

1 file changed

+267
-0
lines changed

1 file changed

+267
-0
lines changed

data_control/uneuralnetwork.pas

+267
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,267 @@
1+
(******************************************************************************)
2+
(* uneuralnetwork.pas ??.??.2022 *)
3+
(* *)
4+
(* Version : 0.01 *)
5+
(* *)
6+
(* Author : Uwe Schächterle (Corpsman) *)
7+
(* *)
8+
(* Support : www.Corpsman.de *)
9+
(* *)
10+
(* Description : Implementation of a multi layer neural network with bias *)
11+
(* neurons. *)
12+
(* *)
13+
(* License : See the file license.md, located under: *)
14+
(* https://github.com/PascalCorpsman/Software_Licenses/blob/main/license.md *)
15+
(* for details about the license. *)
16+
(* *)
17+
(* It is not allowed to change or remove this text from any *)
18+
(* source file of the project. *)
19+
(* *)
20+
(* Warranty : There is no warranty, neither in correctness of the *)
21+
(* implementation, nor anything other that could happen *)
22+
(* or go wrong, use at your own risk. *)
23+
(* *)
24+
(* Known Issues: none *)
25+
(* *)
26+
(* History : 0.01 - Initial version *)
27+
(* *)
28+
(******************************************************************************)
29+
Unit uneuralnetwork;
30+
31+
{$MODE objfpc}{$H+}
32+
33+
Interface
34+
35+
Uses
36+
Classes, SysUtils, uvectormath;
37+
38+
Type

  { TNeuralNetwork }

  (* Multi layer feed forward neural network with bias neurons and
     sigmoid activation, trained via back propagation. *)
  TNeuralNetwork = Class
  private
    // -- Everything below is persisted by SaveToFile / LoadFromFile
    flayers: Array Of TMatrixNxM; // one weight matrix per layer transition
    fBias: Array Of TMatrixNxM; // one bias column matrix per layer transition
    finputDim, fOutputDim: integer; // input/output sizes, cached for parameter checks
    fRecentAverageError: Single; // sliding average of the output error, updated by Train()
    // -- Working variables (not persisted)
    fModified: Boolean; // set to true by Train() once the weights changed
  public
    LearnRate: Single; // gradient step size used by Train(); initialized to 0.1 by Create()
    Property Modified: Boolean read fModified;
    Constructor Create(Layers: Array Of Integer); // e.g. [2,4,1] -> 2 inputs, 4 hidden, 1 output
    Function Predict(input: TVectorN): TVectorN;
    Procedure Train(input, targets: TVectorN);
    Function getRecentAverageError(): Single;
    Function SaveToFile(Const Filename: String): Boolean;
    Function LoadFromFile(Const Filename: String): Boolean;
    Function Info(): String;
  End;
62+
63+
Implementation
64+
65+
(* Logistic activation function 1 / (1 + e^-x), mapping any real x
   into (0..1).
   BUGFIX: for very negative x the original exp(-x) overflows the
   floating point range and raises EOverflow; clamp to the limit value
   0 instead (the mathematically correct limit of the sigmoid). *)
Function Sigmoid(x: Single): Single;
Begin
  If x <= -700 Then Begin
    // exp(700) already exceeds the double range used by exp()
    result := 0;
  End
  Else Begin
    result := 1 / (1 + exp(-x));
  End;
End;
69+
70+
(* Derivative of the sigmoid, expressed in terms of the activation value.
   The argument x is expected to ALREADY be a sigmoid output s(z)
   (see Train(), which applies this to the stored activations), so the
   usual s'(z) = s(z) * (1 - s(z)) collapses to x * (1 - x). *)
Function DerivSigmoid(x: Single): Single;
Begin
  result := x * (1 - x);
End;
78+
79+
{ TNeuralNetwork }
80+
81+
(* Creates the network from a layer-size list, e.g. [2,4,1] gives
   2 inputs, one hidden layer of 4 neurons and 1 output.
   Raises an Exception when fewer than 2 layers are requested.
   BUGFIX: corrected the typo "at leas" in the error message. *)
Constructor TNeuralNetwork.Create(Layers: Array Of Integer);
Var
  i: Integer;
Begin
  fModified := false;
  LearnRate := 0.1; // Placeholder default; the controlling application sets the real value
  If length(Layers) < 2 Then Begin
    Raise Exception.Create('Error, you have to select at least 2 layers.');
  End;
  // Pessimistic start value: assume every output neuron is wrong.
  fRecentAverageError := Layers[high(Layers)];
  // A net of n layers needs n-1 weight/bias transitions
  setlength(flayers, high(Layers));
  setlength(fBias, high(Layers));
  // Create and randomize all transition matrices
  For i := 0 To high(Layers) - 1 Do Begin
    flayers[i] := ZeroNxM(Layers[i], Layers[i + 1]);
    fBias[i] := ZeroNxM(1, Layers[i + 1]);
    RandomizeNxM(flayers[i]);
    RandomizeNxM(fBias[i]);
  End;
  // Cache the in/out dimensions for the checks in Predict()/Train()
  finputDim := Layers[0];
  fOutputDim := Layers[high(Layers)];
End;
105+
106+
(* Runs one feed forward pass through all layers and returns the
   network output. Raises an Exception when the input size does not
   match the input dimension the net was created with. *)
Function TNeuralNetwork.Predict(input: TVectorN): TVectorN;
Var
  activation: TMatrixNxM;
  layer: Integer;
Begin
  If length(input) <> finputDim Then Begin
    Raise exception.Create('Error, input has invalid size.');
  End;
  // Convert the input vector into a 1-column matrix
  activation := VNToNxM(input);
  // Feed forward: weights * activation + bias, squashed by the sigmoid
  For layer := 0 To high(flayers) Do Begin
    activation := flayers[layer] * activation;
    activation := activation + fBias[layer];
    MapMatrix(activation, @Sigmoid);
  End;
  // Convert the final activation back into a plain vector
  result := NxMToVN(activation);
End;
125+
126+
(* One online training step (stochastic gradient descent):
   feed forward, then back propagate the error and adjust weights and
   biases by LearnRate. Also updates the sliding average error.
   Raises an Exception when input or targets have the wrong size.
   BUGFIX: the error is now propagated to the previous layer BEFORE the
   current layer's weights are adjusted. The original propagated through
   the already-updated weights, which deviates from the back propagation
   definition (the gradient must be computed with the weights that
   produced the forward pass). *)
Procedure TNeuralNetwork.Train(input, targets: TVectorN);
Var
  v: Array Of TMatrixNxM; // v[i] = activation of layer i; v[0] = input
  i: Integer;
  delta, g, e: TMatrixNxM;
Begin
  If length(input) <> finputDim Then Begin
    Raise exception.Create('Error, input has invalid size.');
  End;
  If length(targets) <> fOutputDim Then Begin
    Raise exception.Create('Error, target has invalid size.');
  End;
  fModified := true;
  // 1. Feed forward, keeping every layer activation for back propagation
  setlength(v, length(flayers) + 1);
  // Input conversion
  v[0] := VNToNxM(input);
  For i := 0 To high(flayers) Do Begin
    v[i + 1] := flayers[i] * v[i];
    v[i + 1] := v[i + 1] + fBias[i];
    MapMatrix(v[i + 1], @Sigmoid);
  End;
  // The network output now stands in v[length(flayers)]
  // 2. Back propagation
  // Error of the output layer
  e := VNToNxM(targets) - v[length(flayers)];
  fRecentAverageError := (fRecentAverageError + LenVN(targets - NxMToVN(v[length(flayers)]))) / 2; // Sliding average
  // Propagate through the layers, output to input
  For i := high(flayers) Downto 0 Do Begin
    // Gradient: LearnRate * (error hadamard sigmoid'(activation))
    g := MapMatrix2(v[i + 1], @DerivSigmoid);
    g := HadamardNxM(g, e);
    g := LearnRate * g;

    delta := g * TransposeMatrix(v[i]);

    // Calculate the error for the next (previous) layer with the
    // NOT yet adjusted weights
    If i <> 0 Then Begin
      // i = 0 would calculate the error of the raw input, which is never
      // used => skip it to preserve computation time
      e := TransposeMatrix(flayers[i]) * e;
    End;

    // Adjust weights and bias
    flayers[i] := flayers[i] + delta;
    fBias[i] := fBias[i] + g;
  End;
End;
175+
176+
(* Returns the sliding average of the output error, as accumulated by
   every Train() call. *)
Function TNeuralNetwork.getRecentAverageError(): Single;
Begin
  Result := fRecentAverageError;
End;
180+
181+
(* Serializes dimensions, sliding error, weight and bias matrices to
   Filename. Returns true on success.
   BUGFIX: the original initialized result to false and never set it to
   true, so it always reported failure. Additionally the stream is now
   freed in a Finally block, so it cannot leak when a Write raises. *)
Function TNeuralNetwork.SaveToFile(Const Filename: String): Boolean;
Var
  fs: TFileStream;
  ui: uint32;
Begin
  result := false;
  fs := TFileStream.Create(Filename, fmCreate Or fmOpenWrite);
  Try
    // Store the dimensions and the sliding error
    fs.Write(finputDim, sizeof(finputDim));
    fs.Write(fOutputDim, sizeof(fOutputDim));
    fs.Write(fRecentAverageError, sizeof(fRecentAverageError));
    // Store the weight matrices, count first
    ui := length(flayers);
    fs.Write(ui, sizeof(ui));
    For ui := 0 To high(flayers) Do Begin
      SaveMNxMToStream(fs, flayers[ui]);
    End;
    // Store the bias matrices, count first
    ui := length(fBias);
    fs.Write(ui, sizeof(ui));
    For ui := 0 To high(fBias) Do Begin
      SaveMNxMToStream(fs, fBias[ui]);
    End;
    result := true;
  Finally
    fs.free;
  End;
End;
205+
206+
(* Loads a network previously written by SaveToFile into this instance.
   The instance must have been created with the same input/output
   dimensions as the stored net, otherwise an Exception is raised.
   Returns true on success.
   BUGFIX: in the bias/layer mismatch branch the original raised the
   exception BEFORE `result := false; fs.free;`, leaving those
   statements dead and leaking the stream; it also lacked an exit, so
   (without the raise) execution would have continued on a freed stream.
   The stream is now released in a Finally block on every path. *)
Function TNeuralNetwork.LoadFromFile(Const Filename: String): Boolean;
Var
  fs: TFileStream;
  ui: uint32;
  fiDim, fODim: integer;
Begin
  result := false;
  fs := TFileStream.Create(Filename, fmOpenRead);
  Try
    // Read and validate the stored interface dimensions
    fiDim := 0;
    fODim := 0;
    fs.read(fiDim, sizeof(finputDim));
    fs.read(fODim, sizeof(fOutputDim));
    If (finputDim <> fiDim) Or (fOutputDim <> fODim) Then Begin
      Raise exception.create('Error, the defined net has a different interface, than the net to load.');
    End;
    fs.Read(fRecentAverageError, sizeof(fRecentAverageError));
    // Load the weight matrices, count first
    ui := 0;
    fs.Read(ui, sizeof(ui));
    setlength(flayers, ui);
    For ui := 0 To high(flayers) Do Begin
      flayers[ui] := LoadMNxMFromStream(fs);
    End;
    // Load the bias matrices, count first
    ui := 0;
    fs.Read(ui, sizeof(ui));
    setlength(fBias, ui);
    If length(fBias) <> length(flayers) Then Begin
      Raise exception.create('Error, bias and layer dim is different, file invalid.');
    End;
    For ui := 0 To high(fBias) Do Begin
      fBias[ui] := LoadMNxMFromStream(fs);
    End;
    result := true;
  Finally
    fs.free;
  End;
End;
247+
248+
(* Human readable summary: the layer sizes as a list plus the total
   number of trainable parameters (all weight and bias entries). *)
Function TNeuralNetwork.Info(): String;
Var
  layer: Integer;
  paramCount: uint64;
Begin
  result := 'Layers: [';
  paramCount := 0;
  For layer := 0 To high(flayers) Do Begin
    // One entry per layer; the output layer size is appended after the loop
    result := result + inttostr(length(flayers[layer])) + ', ';
    // Every weight and every bias entry is a trainable parameter
    paramCount := paramCount + length(flayers[layer]) * length(flayers[layer, 0]);
    paramCount := paramCount + length(fBias[layer]) * length(fBias[layer, 0]);
  End;
  result := result + inttostr(length(flayers[high(flayers), 0])) + ']' + LineEnding;
  result := result + 'Trainable params: ' + IntToStr(paramCount);
End;
265+
266+
End.
267+

0 commit comments

Comments
 (0)