
Commit 3a6c4aa

Added some doc

1 parent dba993f commit 3a6c4aa

File tree

3 files changed, +123 -32 lines changed

Doxyfile

Lines changed: 2 additions & 2 deletions
@@ -949,7 +949,7 @@ WARN_LOGFILE =
 # spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
 # Note: If this tag is empty the current directory is searched.
 
-INPUT = ./src ./ml_library_include README.md
+INPUT = ./src ./ml_library_include README.md ./tests ./examples
 
 # This tag can be used to specify the character encoding of the source files
 # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
@@ -1104,7 +1104,7 @@ EXAMPLE_RECURSIVE = NO
 # that contain images that are to be included in the documentation (see the
 # \image command).
 
-IMAGE_PATH =
+IMAGE_PATH = ./images
 
 # The INPUT_FILTER tag can be used to specify a program that doxygen should
 # invoke to filter for each input file. Doxygen will invoke the filter program
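
With IMAGE_PATH set to ./images, Doxygen comments can reference the newly added logo by bare filename via the \image command mentioned above. A minimal sketch of a comment block that would embed it (hypothetical, not part of this commit):

/**
 * @mainpage ML Library
 *
 * \image html ML_logo.jpg "ML library logo"
 */
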

images/ML_logo.jpg

2.33 MB

src/neural_network/NN.cpp

Lines changed: 121 additions & 30 deletions
@@ -5,21 +5,33 @@
 
 // ********** TrainingData Class Implementation ********** //
 
-TrainingData::TrainingData(const string filename) {
+/**
+ * @brief Constructor that opens the file containing training data.
+ * Ensures the file is ready for reading data sequentially.
+ */
+TrainingData::TrainingData(const std::string filename) {
     _trainingDataFile.open(filename.c_str());
 }
 
-void TrainingData::getTopology(vector<unsigned>& topology) {
-    string line, label;
+/**
+ * @brief Parses the topology of the neural network.
+ *
+ * This method reads a line from the training data file that starts with the keyword "topology:"
+ * followed by integers representing the number of neurons in each layer. For instance, a topology
+ * line of "topology: 3 2 1" describes a network with 3 neurons in the first layer, 2 in the second,
+ * and 1 in the final layer.
+ */
+void TrainingData::getTopology(std::vector<unsigned>& topology) {
+    std::string line, label;
     if (!getline(_trainingDataFile, line)) {
-        cerr << "Error reading from the file." << endl;
+        std::cerr << "Error reading from the file." << std::endl;
         abort();
     }
 
-    stringstream ss(line);
+    std::stringstream ss(line);
     ss >> label;
     if (label != "topology:") {
-        cerr << "Invalid format. Expected 'topology:'." << endl;
+        std::cerr << "Invalid format. Expected 'topology:'." << std::endl;
         abort();
     }
 
@@ -28,18 +40,25 @@ void TrainingData::getTopology(vector<unsigned>& topology) {
         topology.push_back(numLayers);
     }
 
+    // Ensure that a valid topology was provided, otherwise halt execution.
     if (topology.empty()) {
-        cerr << "No topology data found." << endl;
+        std::cerr << "No topology data found." << std::endl;
         abort();
     }
 }
 
-unsigned TrainingData::getNextInputs(vector<double>& inputVals) {
+/**
+ * @brief Retrieves the next set of input values.
+ *
+ * Each line with the "in:" prefix contains input values for a training pass.
+ * This function parses those values and stores them in inputVals.
+ */
+unsigned TrainingData::getNextInputs(std::vector<double>& inputVals) {
     inputVals.clear();
-    string line;
+    std::string line;
     getline(_trainingDataFile, line);
-    stringstream ss(line);
-    string label;
+    std::stringstream ss(line);
+    std::string label;
     ss >> label;
     if (label.compare("in:") == 0) {
         double oneVal;
@@ -50,12 +69,18 @@ unsigned TrainingData::getNextInputs(vector<double>& inputVals) {
     return inputVals.size();
 }
 
-unsigned TrainingData::getTargetOutputs(vector<double>& targetOutputsVals) {
+/**
+ * @brief Retrieves the target output values.
+ *
+ * Each line with the "out:" prefix contains the expected output values for a training pass.
+ * This function parses those values and stores them in targetOutputsVals.
+ */
+unsigned TrainingData::getTargetOutputs(std::vector<double>& targetOutputsVals) {
     targetOutputsVals.clear();
-    string line;
+    std::string line;
     getline(_trainingDataFile, line);
-    stringstream ss(line);
-    string label;
+    std::stringstream ss(line);
+    std::string label;
     ss >> label;
     if (label.compare("out:") == 0) {
         double oneVal;
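
Taken together, getTopology, getNextInputs, and getTargetOutputs define the expected training file layout: one "topology:" header line followed by alternating "in:"/"out:" lines. A minimal example file (hypothetical XOR-style data, not taken from the repository):

topology: 2 4 1
in: 0.0 0.0
out: 0.0
in: 1.0 0.0
out: 1.0
in: 0.0 1.0
out: 1.0
in: 1.0 1.0
out: 0.0
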
@@ -68,73 +93,139 @@ unsigned TrainingData::getTargetOutputs(vector<double>& targetOutputsVals) {
 
 // ********** Neuron Class Implementation ********** //
 
+/**
+ * @brief Static variables for the learning rate (eta) and momentum (alpha).
+ *
+ * - eta controls the rate at which the network adjusts weights during backpropagation.
+ * - alpha applies momentum to reduce oscillations during training.
+ */
 double Neuron::eta = 0.15;
 double Neuron::alpha = 0.5;
 
+/**
+ * @brief Neuron constructor that initializes output weights with random values.
+ *
+ * Each neuron is connected to the neurons in the next layer, and these connections
+ * are initialized with random weights in the range [0, 1]. Random initialization
+ * is essential for neural networks to avoid symmetry during training.
+ */
 Neuron::Neuron(unsigned numOutputs, unsigned myIdx) : _myIdx(myIdx), _outputVal(0.0) {
     for (unsigned c = 0; c < numOutputs; ++c) {
         _outputWeights.push_back(Connection());
         _outputWeights.back().weight = randomWeight();
     }
 }
 
+/**
+ * @brief Generates a random weight for initializing connections.
+ *
+ * Random weights are critical for breaking symmetry in the network.
+ * Without randomness, all neurons in a layer would learn identical features.
+ */
 double Neuron::randomWeight() {
     return rand() / double(RAND_MAX);
 }
 
+/**
+ * @brief Hyperbolic tangent (tanh) transfer function.
+ *
+ * The tanh function is commonly used as an activation function. It outputs
+ * values between -1 and 1, providing a non-linear transformation that enables
+ * the network to approximate complex functions.
+ */
 double Neuron::transferFunction(double x) {
     return tanh(x);
 }
 
+/**
+ * @brief Derivative of the tanh transfer function, used during backpropagation.
+ *
+ * Because x here is the neuron's output (already tanh of the input sum s), the
+ * derivative 1 - tanh(s)^2 reduces to 1 - x^2, making the gradient cheap to compute.
+ */
 double Neuron::transferFunctionDerivative(double x) {
     return 1.0 - x * x;
 }
 
-void Neuron::feedForward(const vector<Neuron>& prevLayer) {
+/**
+ * @brief Computes the neuron's output by summing inputs from the previous layer.
+ *
+ * Each previous-layer output is weighted by its connection and summed; the sum
+ * is then passed through the transfer function to produce the final output.
+ */
+void Neuron::feedForward(const std::vector<Neuron>& prevLayer) {
     double sum = 0.0;
     for (unsigned n = 0; n < prevLayer.size(); ++n) {
         sum += prevLayer[n].getOutputVal() * prevLayer[n]._outputWeights[_myIdx].weight;
     }
     _outputVal = Neuron::transferFunction(sum);
 }
 
-// (Other Neuron methods like updateInputWeights, sumDOW, calcHiddenGradients, and calcOutputGradients go here...)
-
 // ********** NN Class Implementation ********** //
 
+/**
+ * @brief Smoothing factor for the recent average error calculation.
+ *
+ * `_recentAverageSmoothFactor` smooths out fluctuations in the recent average
+ * error, providing a more stable view of error trends during training.
+ */
 double NN::_recentAverageSmoothFactor = 100.0;
 
-NN::NN(const vector<unsigned>& topology) {
+/**
+ * @brief Neural network constructor that builds layers and initializes neurons.
+ *
+ * Each layer is constructed according to the topology provided. The constructor also
+ * creates bias neurons, which always output 1.0. Bias neurons help the network learn
+ * patterns that require a constant offset.
+ */
+NN::NN(const std::vector<unsigned>& topology) {
     unsigned numLayers = topology.size();
     for (unsigned layerNum = 0; layerNum < numLayers; ++layerNum) {
-        _layers.push_back(vector<Neuron>());
+        _layers.push_back(std::vector<Neuron>());
         unsigned numOutputs = (layerNum == topology.size() - 1) ? 0 : topology[layerNum + 1];
         for (unsigned neuronNum = 0; neuronNum <= topology[layerNum]; ++neuronNum) {
             _layers.back().push_back(Neuron(numOutputs, neuronNum));
-            _layers.back().back().setOutputVal(1.0);
+            _layers.back().back().setOutputVal(1.0); // Bias neuron output (the extra last neuron in each layer)
         }
     }
 }
 
-// (Other NN methods like feedForward, backProp, and getResults go here...)
-
 // Utility functions
 
-void showVectorVals(string label, vector<double>& v) {
-    cout << label << " ";
+
+/**
+ * @brief Utility function to print vector values with a label.
+ *
+ * Primarily used for debugging, this function outputs each value in a vector
+ * with an associated label.
+ */
+void showVectorVals(std::string label, std::vector<double>& v) {
+    std::cout << label << " ";
     for (unsigned i = 0; i < v.size(); ++i) {
-        cout << v[i] << " ";
+        std::cout << v[i] << " ";
     }
-    cout << endl;
+    std::cout << std::endl;
 }
 
-string printVectorVals(string label, vector<double>& v) {
-    string res = label + " ";
+/**
+ * @brief Converts vector values to a formatted string for easier inspection.
+ *
+ * Useful for debugging and logging, this function formats vector values
+ * into a labeled string for printing or saving to a file.
+ */
+std::string printVectorVals(std::string label, std::vector<double>& v) {
+    std::string res = label + " ";
     for (const auto& val : v) {
-        res += to_string(val) + " ";
+        res += std::to_string(val) + " ";
     }
     return res + "\n";
 }
 
+/**
+ * @brief Saves a string to a file.
+ *
+ * This utility function opens a file in write mode and saves the content.
+ * It's used to log results or intermediate outputs for analysis.
+ */
 void saveStringToFile(const std::string& filename, const std::string& content) {
     std::ofstream outputFile(filename);
     if (outputFile.is_open()) {
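
For orientation, the documented pieces compose into the usual read-and-train loop. A minimal sketch with its assumptions flagged: the header name NN.h and the path trainingData.txt are hypothetical, and NN::feedForward, NN::backProp, and NN::getResults are taken from the placeholder comments this diff removes rather than from code shown here:

#include <cstdlib>   // std::srand
#include <ctime>     // std::time
#include <vector>
#include "NN.h"      // header name assumed

int main() {
    std::srand(static_cast<unsigned>(std::time(nullptr))); // seed rand() behind Neuron::randomWeight()

    TrainingData trainData("trainingData.txt"); // hypothetical path
    std::vector<unsigned> topology;
    trainData.getTopology(topology);            // e.g. parses "topology: 2 4 1"
    NN myNet(topology);

    std::vector<double> inputVals, targetVals, resultVals;
    // getNextInputs() returns 0 once no further "in:" line can be parsed.
    while (trainData.getNextInputs(inputVals) > 0) {
        trainData.getTargetOutputs(targetVals);
        myNet.feedForward(inputVals);   // forward pass through all layers
        myNet.getResults(resultVals);   // collect outputs of the final layer
        showVectorVals("Outputs:", resultVals);
        myNet.backProp(targetVals);     // weight update driven by eta and alpha
    }
    return 0;
}

On the error-reporting side, a smoothing factor like 100.0 conventionally enters a running average of the form recentAverageError = (recentAverageError * k + error) / (k + 1.0) with k = _recentAverageSmoothFactor, though that computation lives in backProp, outside this diff.
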
