-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathNetwork.cpp
More file actions
126 lines (103 loc) · 4.28 KB
/
Network.cpp
File metadata and controls
126 lines (103 loc) · 4.28 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
/********************************************************************
* Kodi Neumiller *
* kneumiller *
* CSCI 446 *
* Assignment 5: Machine Learning *
* *
* An A.I. based off of a neural network pattern. *
* The program will take in a file with each row having some       *
* number of inputs and one output.                                *
* *
* To compile: g++ -o MachineLearning MachineLearning.cpp *
* To run: ./MachineLearning *
********************************************************************/
#include <cassert>
#include <cmath>
#include <iostream>
#include <vector>
#include "Network.h"
/// Builds the network described by `topology`: one layer per entry, where
/// topology[i] is the number of "real" nodes in layer i. Each layer also
/// receives one extra bias node (hence the inclusive inner loop).
Network::Network (vector<int> topology) {
int numLayers = topology.size();
cout << "topology size: " << topology.size() << endl;
/*Create new layers and populate them with nodes*/
for (int layerIdx = 0; layerIdx < numLayers; layerIdx++) {
layers.push_back(vector<Node>());
//The last (output) layer drives nothing; every other layer fans out to
//one connection per node of the layer to its right.
int fanOut = (layerIdx == numLayers - 1) ? 0 : topology[layerIdx + 1];
//`<=` on purpose: the extra iteration appends the bias node.
for (int nodeIdx = 0; nodeIdx <= topology[layerIdx]; nodeIdx++) {
layers.back().push_back(Node(fanOut, layerIdx));
cout << "New node - number of outputs: " << fanOut << endl;
}
}
}
/// Backpropagation pass: computes the RMS error against `targetVals`,
/// updates the smoothed running average error, then pushes gradients and
/// weight updates back through every layer.
/// (Name kept as declared in Network.h; conventional spelling is "backPropagate".)
void Network::backPropogate(vector<double> targetVals) {
//References, not copies: the original code copied each layer vector, so
//every gradient and weight update landed on a throwaway and the real
//network never changed.
vector<Node>& outputLayer = layers.back();
error = 0.0;
//Sum squared deltas over the real output nodes only. The last node is the
//bias (see the constructor's `<=` loop); it has no entry in targetVals,
//so including it read past the end of the vector.
for (int i = 0; i < (int)outputLayer.size() - 1; i++) {
double delta = targetVals[i] - outputLayer[i].getOutputVal();
error += delta * delta;
}
error /= outputLayer.size() - 1; //Find the average of the error^2
error = sqrt(error); //RMS (Root-Mean-Squared)
//Running average of the error. The original parenthesization divided only
//`error` by (factor + 1), which let the average grow without bound.
averageError = (averageError * averageSmoothingFactor + error)
/ (averageSmoothingFactor + 1);
//Gradient for the output layer (bias node excluded).
for (int i = 0; i < (int)outputLayer.size() - 1; i++) {
outputLayer[i].outputGradient(targetVals[i]);
}
//Gradients for the hidden layers, right-most hidden layer first
//(layer 0 is the input layer and is skipped).
for (int i = layers.size() - 2; i > 0; i--) {
vector<Node>& hiddenLayer = layers[i]; //Current layer
vector<Node>& nextLayer = layers[i + 1];
for (int j = 0; j < (int)hiddenLayer.size(); j++) {
hiddenLayer[j].hiddenGradient(nextLayer);
}
}
//Update connection weights, walking from the output layer back to the
//first hidden layer; each layer pulls from the layer to its left.
for (int i = layers.size() - 1; i > 0; i--) {
vector<Node>& currentLayer = layers[i];
vector<Node>& previousLayer = layers[i - 1];
for (int j = 0; j < (int)currentLayer.size(); j++) {
currentLayer[j].updateInputWeights(previousLayer);
}
}
}
/// Forward pass: loads `inputVals` into the input layer, then feeds each
/// subsequent layer from the one before it.
void Network::feedForward(const vector<double> inputVals) {
//Make sure that the number of given neurons is the same as the number of neurons in the vector array
cout << "Input Vals size: " << inputVals.size() << endl;
cout << "Number of neurons: " << layers[0].size() << endl;
//NOTE(review): the constructor appends a bias node to every layer, so
//layers[0].size() is numInputs + 1. As written, callers must supply a
//value for the bias slot too — confirm whether `layers[0].size() - 1`
//was intended here.
assert(inputVals.size() == layers[0].size());
//Give the input nodes the input values
for (int i = 0; i < (int)inputVals.size(); i++) {
layers[0][i].setOutputVal(inputVals[i]);
}
//Forward propagate - move through each layer then through each node.
//Bind the previous layer by reference: the original copied the whole
//layer vector on every iteration, which was pure overhead.
for (int i = 1; i < (int)layers.size(); i++) {
vector<Node>& previousLayer = layers[i - 1];
for (int j = 0; j < (int)layers[i].size(); j++) {
layers[i][j].nodeFeedForward(previousLayer);
}
}
}
//Copies the output-layer values (bias node excluded) into `results`.
//BUG(review): `results` is passed BY VALUE — this function clears and fills
//a local copy, so the caller never receives anything. The parameter should
//be `vector<double>& results`; that fix requires the matching change to the
//declaration in Network.h, so it is flagged here rather than applied.
void Network::getResults(vector <double> results) {
results.clear();
//`- 1` skips the trailing bias node appended by the constructor.
for (int i = 0; i < layers.back().size() - 1; i++) {
results.push_back(layers.back()[i].getOutputVal());
}
}
//Accessor for the smoothed running-average error maintained by backPropogate().
double Network::getAverageError() {
return averageError;
}