-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathNeuron.cpp
More file actions
107 lines (79 loc) · 2.26 KB
/
Neuron.cpp
File metadata and controls
107 lines (79 loc) · 2.26 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
#include "Header.h"
// Training hyperparameters shared by all neurons (declared static in Header.h).
double Neuron::learningRate = 0.15; // eta: step size applied to each weight update
double Neuron::momentum = 0.9;      // alpha: fraction of the previous delta carried over
// NOTE: a stray ']' token after the momentum initializer was removed — it was a
// syntax error that prevented this translation unit from compiling.
// Adjusts the weights on every link feeding into this neuron.
// Each previous-layer neuron owns the link that points at us (indexed by
// m_myIndex); its delta is the gradient-scaled input plus a momentum term
// built from the previous update.
void Neuron::updateInputWeights(Layer &prevLayer)
{
    for (unsigned i = 0; i < prevLayer.size(); ++i) {
        Neuron &src = prevLayer[i];
        Link &link = src.m_outputWeights[m_myIndex];

        const double previousDelta = link.deltaWeight;
        // Individual input magnified by the gradient and the train rate,
        // plus a fraction (momentum) of the previous delta weight.
        const double delta =
            learningRate * src.getOutputVal() * m_gradient
            + momentum * previousDelta;

        link.deltaWeight = delta;
        link.weight += delta;
    }
}
// Sum of (outgoing weight * downstream gradient) over the next layer's
// neurons, excluding the last element (presumably the bias neuron, which
// contributes no error — TODO confirm against net construction).
// Used to propagate error backwards into hidden layers.
//
// Bug fix: the original loop condition was `n < nextLayer.size() - 1`.
// Layer::size() returns an unsigned type, so an empty next layer made
// `size() - 1` wrap around to a huge value and the loop indexed far out of
// bounds. `i + 1 < size()` is equivalent for non-empty layers and safely
// does zero iterations when the layer is empty.
double Neuron::sumDOW(const Layer &nextLayer) const
{
    double sum = 0.0;
    for (unsigned i = 0; i + 1 < nextLayer.size(); ++i) {
        sum += m_outputWeights[i].weight * nextLayer[i].m_gradient;
    }
    return sum;
}
// Hidden-layer error signal: the weighted sum of downstream gradients,
// scaled by the activation derivative evaluated at this neuron's output.
void Neuron::calcHiddenGradients(const Layer &nextLayer)
{
    const double downstreamError = sumDOW(nextLayer);
    m_gradient = downstreamError * Neuron::transferFunctionDerivative(m_outputVal);
}
void Neuron::calcOutputGradients(double targetVal)
{
double delta = targetVal - m_outputVal;
m_gradient = delta * Neuron::transferFunctionDerivative(m_outputVal);
}
// Logistic sigmoid activation: maps any real input into the open
// interval (0, 1).
double Neuron::transferFunction(double x)
{
    return 1.0 / (1.0 + exp(-x));
}
// Derivative of the logistic sigmoid, expressed in terms of the
// *already-activated* output value y = sigmoid(sum): dy/dsum = y * (1 - y).
//
// Bug fix: the old body returned transferFunction(x) * (1 - transferFunction(x)),
// i.e. it applied the sigmoid a second time. Every caller in this file
// (calcOutputGradients, calcHiddenGradients) passes m_outputVal, which
// feedForward has already pushed through transferFunction — so the old code
// computed the derivative at the wrong point and produced incorrect
// gradients. For an already-activated value the correct form is x * (1 - x).
double Neuron::transferFunctionDerivative(double x)
{
    return x * (1.0 - x);
}
void Neuron::feedForward(const Layer &prevLayer)
{
double sum = 0.0;
for (unsigned n = 0; n < prevLayer.size(); ++n) {
sum += prevLayer[n].getOutputVal() *
prevLayer[n].m_outputWeights[m_myIndex].weight;
}
m_outputVal = Neuron::transferFunction(sum);
}
// Builds a neuron with `numOutputs` randomly-weighted links to the next
// layer; `myIndex` is this neuron's position within its own layer (used to
// find our link inside each previous-layer neuron's weight vector).
Neuron::Neuron(unsigned numOutputs, unsigned myIndex)
{
    m_myIndex = myIndex;
    for (unsigned c = 0; c < numOutputs; ++c) {
        Link link = Link();           // value-initialized, as in the original
        link.weight = randomWeight(); // randomWeight() supplied by Header.h
        m_outputWeights.push_back(link);
    }
}
// Returns a snapshot of this neuron's outgoing link weights, in link order
// (e.g. for serializing a trained network).
//
// Fixes: the loop index was a signed `int` compared against the unsigned
// vector size (signed/unsigned mismatch, overflow for very large vectors);
// also reserves the result up front so it allocates exactly once.
vector<double> Neuron::getForwardWeights()
{
    vector<double> weights;
    weights.reserve(m_outputWeights.size());
    for (vector<Link>::size_type i = 0; i < m_outputWeights.size(); ++i)
    {
        weights.push_back(m_outputWeights[i].weight);
    }
    return weights;
}
// Reconstructs a neuron from previously saved forward weights (the inverse
// of getForwardWeights, e.g. when loading a trained network); `id` is this
// neuron's index within its layer.
//
// Fixes: the loop index was a signed `int` compared against the unsigned
// vector size (signed/unsigned mismatch); the weight vector is now reserved
// up front so it allocates exactly once.
Neuron::Neuron(vector<double> forwardWeights, int id)
{
    this->m_myIndex = id;
    this->m_outputWeights.reserve(forwardWeights.size());
    for (vector<double>::size_type i = 0; i < forwardWeights.size(); ++i)
    {
        this->m_outputWeights.push_back(Link());
        this->m_outputWeights.back().weight = forwardWeights[i];
    }
}