Neuro/Neuron.cpp

#include "Neuron.h"
#include <cmath>
#include <random>
2015-10-16 21:23:27 +00:00
#include "Layer.h"
2015-03-24 12:45:38 +00:00

Neuron::Neuron(double value)
    : outputValue(value)
    , gradient(0)
{
}

void Neuron::setOutputValue(double value)
{
    outputValue = value;
}

double Neuron::transferFunction(double inputValue)
{
    return std::tanh(inputValue);
}

double Neuron::transferFunctionDerivative(double inputValue)
{
    // Derivative of tanh expressed through the output value:
    // d/dx tanh(x) = 1 - tanh(x)^2, so callers pass the neuron's output.
    return 1.0 - (inputValue * inputValue);
}

void Neuron::feedForward(double inputValue)
{
    outputValue = transferFunction(inputValue);
}

double Neuron::getWeightedOutputValue(size_t outputNeuron) const
{
    if (outputNeuron < outputWeights.size())
    {
        return outputValue * outputWeights[outputNeuron];
    }
    return 0.0;
}

void Neuron::createRandomOutputWeights(size_t numberOfWeights)
{
    outputWeights.clear();
    for (size_t i = 0; i < numberOfWeights; ++i)
    {
        // Uniform pseudo-random weight in [0, 1].
        outputWeights.push_back(std::rand() / static_cast<double>(RAND_MAX));
    }
}

void Neuron::createOutputWeights(std::list<double> weights)
{
    outputWeights.clear();
    for (const double &weight : weights)
    {
        outputWeights.push_back(weight);
    }
}

void Neuron::createOutputWeights(size_t numberOfWeights, double weight)
{
    outputWeights.clear();
    for (size_t i = 0; i < numberOfWeights; ++i)
    {
        outputWeights.push_back(weight);
    }
}

double Neuron::getOutputValue() const
{
    return outputValue;
}

void Neuron::calcOutputGradients(double targetValue)
{
    // Output-layer gradient: error times the activation derivative,
    // evaluated at the neuron's own output.
    double delta = targetValue - outputValue;
    gradient = delta * transferFunctionDerivative(outputValue);
}

double Neuron::sumDOW(const Layer &nextLayer) const
{
    // Sum of derivatives of weights: each outgoing weight multiplied by the
    // gradient of the neuron it feeds in the next layer.
    double sum = 0.0;
    for (size_t i = 0; i < outputWeights.size(); ++i)
    {
        sum += outputWeights[i] * nextLayer[i].getGradient();
    }
    return sum;
}

void Neuron::calcHiddenGradients(const Layer &nextLayer)
{
    double dow = sumDOW(nextLayer);
    gradient = dow * transferFunctionDerivative(outputValue);
}

double Neuron::getGradient() const
{
    return gradient;
}

double Neuron::getOutputWeight(size_t index) const
{
    return outputWeights.at(index);
}

void Neuron::setOutputWeight(size_t index, double value)
{
    outputWeights.at(index) = value;
}

size_t Neuron::getNumOutputWeights() const
{
    return outputWeights.size();
}