-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathLayer.cpp
More file actions
103 lines (90 loc) · 2.35 KB
/
Layer.cpp
File metadata and controls
103 lines (90 loc) · 2.35 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
#include "Layer.h"

#include <cstdlib>   // malloc / free (used throughout this file)
#include <cstring>   // memcpy (used by the ctors and setParams)

#include <iostream>

using namespace std;
// Release the three heap buffers owned by this layer.
// free() on a null pointer is a defined no-op, so no guards are needed.
// NOTE(review): the header is not visible here — if copy construction /
// assignment are not deleted in Layer.h, copying a Layer would double-free
// these buffers; confirm the class forbids copies.
Layer::~Layer()
{
    free(_weights);
    free(_biases);
    free(output_arr);
    _weights = nullptr;
    _biases = nullptr;
    output_arr = nullptr;
}
// Identity activation: the base layer applies no nonlinearity.
// Derived layer types are expected to override this.
float Layer::activation_func(float out)
{
    return out;
}
//return a pointer to the array of activations
//this pointer is only good untill the next time someone tries to activate the network
//so copy the data if needed elsewhere
// Compute this layer's activations for the given input vector
// (input_arr must hold at least `inputs` floats).
// Returns a pointer to an internal buffer that is overwritten by the
// next call to activate() — copy the data if it is needed elsewhere.
float * Layer::activate(float * input_arr)
{
    for (int out_idx = 0; out_idx < outputs; out_idx++)
    {
        // Row of weights feeding this output (weights are output-major).
        const float * row = _weights + inputs * out_idx;
        output_arr[out_idx] = _biases[out_idx];
        for (int in_idx = 0; in_idx < inputs; in_idx++)
        {
            output_arr[out_idx] += row[in_idx] * input_arr[in_idx];
        }
        output_arr[out_idx] = activation_func(output_arr[out_idx]);
    }
    return output_arr;
}
// Print the weight matrix transposed (input-major) to stdout in a
// bracketed, list-of-lists layout: one inner list per input column.
void Layer::printRotatedWeights()
{
    cout << "[";
    for (int col = 0; col < inputs; col++)
    {
        cout << "[" << endl;
        for (int row = 0; row < outputs; row++)
        {
            // _weights is output-major: row*inputs + col picks the weight
            // from input `col` to output `row`.
            cout << _weights[row * inputs + col] << " ,";
        }
        cout << "]" << endl;
    }
    cout << "]" << endl;
}
// Allocate storage for a layer of the given dimensions; the weight and
// bias contents are left uninitialized for the caller to fill in.
// (The misspelled name is kept — it is part of the interface in Layer.h.)
// NOTE(review): malloc results are not checked; a failed allocation will
// crash on first use rather than here.
void Layer::init_unitialized(int layer_inputs, int layer_outputs)
{
    inputs = layer_inputs;
    outputs = layer_outputs;
    output_arr = static_cast<float*>(malloc(sizeof(float) * layer_outputs));
    _biases = static_cast<float*>(malloc(sizeof(float) * layer_outputs));
    _weights = static_cast<float*>(malloc(sizeof(float) * layer_outputs * layer_inputs));
}
// Construct a layer from a row-per-output weight matrix — weights[i]
// points at the `layer_inputs` weights feeding output i — plus a bias
// array of `layer_outputs` floats. Both are copied into owned storage.
Layer::Layer(int layer_inputs, int layer_outputs, float ** weights, float * biases)
{
    init_unitialized(layer_inputs, layer_outputs);
    setParams(weights, biases);
}
// Construct a layer from a flat, output-major weight array of
// layer_inputs*layer_outputs floats and a bias array of layer_outputs
// floats. Both are copied into owned storage.
Layer::Layer(int layer_inputs, int layer_outputs, float * weights, float * biases)
{
    init_unitialized(layer_inputs, layer_outputs);
    memcpy(_weights, weights, sizeof(float) * layer_inputs * layer_outputs);
    setBiases(biases);
}
// Copy `outputs` bias values from the caller's array into owned storage.
void Layer::setBiases(float * biases)
{
    for (int i = 0; i < outputs; i++)
    {
        _biases[i] = biases[i];
    }
}
//convert a weights matrix of the form where w_ij is the connection between the i-th input and the j-th ouput
// Import a transposed (input-major) weight matrix, where
// rotWeights[outputs*j + i] holds the weight between input j and output i,
// storing it in this layer's internal output-major layout.
void Layer::weightsFromRotated(float * rotWeights)
{
    for (int row = 0; row < outputs; row++)
    {
        for (int col = 0; col < inputs; col++)
        {
            _weights[inputs * row + col] = rotWeights[outputs * col + row];
        }
    }
}
// Copy the biases and a row-per-output weight matrix (weights[i] is the
// `inputs`-long row feeding output i) into this layer's owned storage.
void Layer::setParams(float ** weights, float * biases)
{
    setBiases(biases);
    float * dst = _weights;  // walk the flat matrix one row at a time
    for (int row = 0; row < outputs; row++, dst += inputs)
    {
        memcpy(dst, weights[row], sizeof(float) * inputs);
    }
}