-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathNeuralNetwork.cpp
More file actions
166 lines (147 loc) · 4.67 KB
/
NeuralNetwork.cpp
File metadata and controls
166 lines (147 loc) · 4.67 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
#include "NeuralNetwork.h"
#include "config/config.h"

#include <cstdlib>
#include <ctime>
#include <iostream>
#include <random>
#include <vector>

using namespace std;
/**
 * @brief Builds a network from a list of layer sizes.
 * @param layerSizes element 0 is the number of inputs; every further
 *        element is the neuron count of one layer, the last one being
 *        the output layer.
 *
 * With fewer than two entries no layers are created. All counters are
 * zero-initialised first so a degenerate layerSizes (empty or a single
 * entry) no longer leaves the members with indeterminate values.
 */
NeuralNetwork::NeuralNetwork(std::vector<unsigned int> layerSizes)
{
    m_inputNum = 0;
    m_outputNum = 0;
    m_hiddenLayerNum = 0;
    if (!layerSizes.empty())
    {
        m_inputNum = layerSizes.front();
    }
    if (layerSizes.size() > 1)
    {
        m_outputNum = layerSizes.back();
        m_hiddenLayerNum = layerSizes.size() - 2;
        // Layer i reads its inputs from layer i-1, so each NeuronLayer is
        // built with (own neuron count, previous layer's neuron count).
        for (unsigned int i = 1; i < layerSizes.size(); i++)
        {
            NeuronLayer nL(layerSizes.at(i), layerSizes.at(i - 1));
            m_layers.push_back(nL);
        }
    }
}
/**
 * @brief Rebuilds a network from previously exported weights.
 * @param inputsNum number of network inputs.
 * @param neuronWeights one entry per layer; each entry holds the weight
 *        vectors of that layer's neurons. Must be non-empty (at() throws
 *        std::out_of_range otherwise).
 */
NeuralNetwork::NeuralNetwork(int inputsNum, const std::vector<std::vector<std::vector<double> > > &neuronWeights) :
    m_inputNum(inputsNum), m_outputNum(neuronWeights.at(neuronWeights.size()-1).size()), m_hiddenLayerNum(neuronWeights.size() - 1)
{
    // Every weight matrix becomes one NeuronLayer, in the same order.
    for (const auto& layerWeights : neuronWeights)
    {
        m_layers.push_back(layerWeights);
    }
}
/**
 * @brief Crossover constructor: builds a child network from two parents.
 *
 * Uniform crossover: each neuron of the child is taken with probability
 * 1/2 from the mother and 1/2 from the father.
 *
 * @param father first parent; its topology defines the child's topology.
 * @param mother second parent.
 * @param addRandomChange when true, the child's weights are mutated
 *        afterwards via randomiseWeight().
 *
 * Mismatched parents are reported on stderr (as before), but the copy
 * loop now bounds every index so a malformed mother can no longer cause
 * an out-of-range access: missing mother neurons fall back to the
 * father's copy.
 */
NeuralNetwork::NeuralNetwork(const NeuralNetwork& father, const NeuralNetwork& mother, bool addRandomChange)
{
    // Diagnostics only -- construction continues with the father's topology.
    if (father.getInputNum() != mother.getInputNum()
        || father.getLayers().size() != mother.getLayers().size())
    {
        cerr << "error, mother and father NN of different sizes" << endl;
        cerr << "error in numbers of layers" << endl;
        cerr << father.getLayers().size() << " " << mother.getLayers().size() << endl;
    }
    for (unsigned int i = 0; i < father.getLayers().size(); i++)
    {
        // Guard the mother's side before comparing per-layer sizes.
        if (i < mother.getLayers().size()
            && father.getLayers()[i].getNeurons().size() != mother.getLayers()[i].getNeurons().size())
        {
            cerr << "error, mother and father NN of different sizes" << endl;
            cerr << "error in layers " << i << endl;
            cerr << father.getLayers()[i].getNeurons().size() << " " << mother.getLayers()[i].getNeurons().size() << endl;
        }
    }
    m_inputNum = father.getInputNum();
    m_outputNum = father.getOutputNum();
    m_hiddenLayerNum = father.getHiddenLayerNum();

    // Seeded once, reused across all crossover constructions.
    static mt19937 generator(random_device{}());
    bernoulli_distribution coinFlip(0.5);

    vector<Neuron> childNeurons;
    for (unsigned int i = 0; i < father.m_layers.size(); i++)
    {
        const auto& fatherNeurons = father.m_layers[i].getNeurons();
        const bool motherHasLayer = i < mother.m_layers.size();
        childNeurons.clear();
        for (unsigned int j = 0; j < fatherNeurons.size(); j++)
        {
            const bool motherHasNeuron = motherHasLayer
                && j < mother.m_layers[i].getNeurons().size();
            // 1/2 chance to take the mother's neuron -- but only when it
            // actually exists; otherwise keep the father's.
            if (motherHasNeuron && coinFlip(generator))
            {
                childNeurons.push_back(mother.m_layers[i].getNeurons()[j]);
            }
            else
            {
                childNeurons.push_back(fatherNeurons[j]);
            }
        }
        m_layers.push_back(childNeurons);
    }
    if (addRandomChange)
    {
        randomiseWeight();
    }
}
/**
 * @brief Feeds the inputs forward through the network.
 * @param inputs input values. NOTE: overwritten in place during
 *        propagation -- after the call the caller's vector holds the last
 *        layer's outputs. This matches the existing declared interface.
 * @return the output layer's values, or an empty vector when
 *         inputs.size() does not match the network's input count.
 *
 * Removed dead code from the DISABLE_NEURAL_NETWORK path: it reseeded the
 * global C RNG on every call (srand(time(0))) and computed a
 * random_direction that was never used (its only uses were commented out).
 */
vector<double> NeuralNetwork::run(std::vector<double> &inputs)
{
    vector<double> outputs; //stores the resultant outputs from each layer
#ifdef DISABLE_NEURAL_NETWORK
    // Debug stub: bypass the network and emit a fixed command.
    // NOTE(review): assumes even-indexed inputs are perception values and
    // that the value 20 marks a detection -- confirm against the caller.
    bool percepted = false;
    for (unsigned int i = 0; i < inputs.size(); i++)
    {
        if (i % 2 == 0 && inputs[i] == 20)
        {
            percepted = true;
            break;
        }
    }
    // Both branches currently emit the same command; the split is kept so
    // the detection case can be tuned independently later.
    if (percepted)
    {
        outputs.push_back(5.0); // move speed 5%
        outputs.push_back(0);   // no turn
    }
    else
    {
        outputs.push_back(5.0); // move speed 5%
        outputs.push_back(0);   // no turn
    }
#else
    //first check that we have the correct amount of inputs
    if (inputs.size() != m_inputNum)
    {
        //just return an empty vector if incorrect.
        return outputs;
    }
    // Feed forward: each layer consumes the previous layer's outputs.
    // Bound by m_layers.size() (robust even if the counters and the layer
    // vector ever disagree; equal to m_hiddenLayerNum+1 when consistent).
    for (unsigned int computingLayer = 0; computingLayer < m_layers.size(); computingLayer++)
    {
        outputs = m_layers[computingLayer].run(inputs);
        inputs = outputs; // intentional in-place update of the caller's vector
    }
#endif
    return outputs;
}
void NeuralNetwork::randomiseWeight()
{
for(unsigned i=0 ; i<m_layers.size() ; i++)
{
m_layers[i].randomise();
}
}
// TODO: implement a real learning step; for now this only reports the
// score on stderr. ('\n' + flush is equivalent to std::endl.)
void NeuralNetwork::improve(int score)
{
    std::cerr << "improve method does nothing " << score << '\n' << std::flush;
}
void NeuralNetwork::printNetwork() const
{
for(unsigned int i = 0; i < m_layers.size(); i++)
{
std::cout << "layer " << (i+1) << "\n" << std::endl;
m_layers[i].printLayer();
}
}