Fixed gradient computation

This commit is contained in:
mandlm 2015-10-17 21:02:10 +02:00
parent 26a51ce476
commit de06daaad3
2 changed files with 3 additions and 3 deletions

View File

@@ -87,12 +87,12 @@ void Net::backProp(const std::vector<double> &targetValues)
 	}
 	// calculate hidden neuron gradients
-	for (auto it = end() - 1; it != begin(); --it)
+	for (auto it = end() - 1; (it - 1) != begin(); --it)
 	{
 		Layer &hiddenLayer = *(it - 1);
 		Layer &nextLayer = *it;
-		for (auto neuron : hiddenLayer)
+		for (Neuron &neuron : hiddenLayer)
 		{
 			neuron.calcHiddenGradients(nextLayer);
 		}

View File

@@ -65,7 +65,7 @@ double Neuron::sumDOW(const Layer & nextLayer) const
 {
 	double sum = 0;
-	for (size_t i = 0; i < nextLayer.size() - 1; ++i)
+	for (size_t i = 0; i < outputWeights.size(); ++i)
 	{
 		sum += outputWeights[i] * nextLayer[i].getGradient();
 	}