#include "Layer.h"

#include <stdexcept>

// Construct a layer of numNeurons default-constructed neurons.
Layer::Layer(size_t numNeurons)
{
    for (size_t i = 0; i < numNeurons; ++i)
    {
        push_back(Neuron());
    }
}

// Assign one output value to each non-bias neuron; the bias neuron keeps
// its constant output.
void Layer::setOutputValues(const std::vector<double> &outputValues)
{
    if (sizeWithoutBiasNeuron() != outputValues.size())
    {
        // std::exception has no string constructor in standard C++;
        // std::runtime_error carries the message portably.
        throw std::runtime_error("The number of output values has to match the layer size");
    }

    auto neuronIt = begin();
    for (const double &value : outputValues)
    {
        (neuronIt++)->setOutputValue(value);
    }
}

// Compute new output values for every non-bias neuron from the weighted
// sums of the input layer's outputs.
void Layer::feedForward(const Layer &inputLayer)
{
    for (size_t neuronNumber = 0; neuronNumber < sizeWithoutBiasNeuron(); ++neuronNumber)
    {
        at(neuronNumber).feedForward(inputLayer.getWeightedSum(neuronNumber));
    }
}

// Sum of this layer's outputs, each weighted by its connection to the
// given neuron of the next layer; the bias neuron contributes as well.
double Layer::getWeightedSum(int outputNeuron) const
{
    double sum = 0.0;

    for (const Neuron &neuron : *this)
    {
        sum += neuron.getWeightedOutputValue(outputNeuron);
    }

    return sum;
}

// Create randomly weighted connections from every neuron in this layer
// (including the bias neuron) to every non-bias neuron of the next layer.
void Layer::connectTo(const Layer &nextLayer)
{
    for (Neuron &neuron : *this)
    {
        neuron.createRandomOutputWeights(nextLayer.sizeWithoutBiasNeuron());
    }
}

// Gradient-descent step: nudge each incoming weight by
// trainingRate * sourceOutput * targetGradient (the delta rule).
void Layer::updateInputWeights(Layer &prevLayer)
{
    static const double trainingRate = 0.3;

    for (size_t targetLayerIndex = 0; targetLayerIndex < sizeWithoutBiasNeuron(); ++targetLayerIndex)
    {
        const Neuron &targetNeuron = at(targetLayerIndex);

        for (size_t sourceLayerIndex = 0; sourceLayerIndex < prevLayer.size(); ++sourceLayerIndex)
        {
            Neuron &sourceNeuron = prevLayer.at(sourceLayerIndex);

            sourceNeuron.setOutputWeight(targetLayerIndex,
                                         sourceNeuron.getOutputWeight(targetLayerIndex) +
                                         sourceNeuron.getOutputValue() * targetNeuron.getGradient() * trainingRate);
        }
    }
}

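// Worked example of the update above (illustrative numbers, not taken from
// the project): with trainingRate = 0.3, a source output of 0.5 and a
// target gradient of 0.2, an existing weight of 0.8 becomes
// 0.8 + 0.5 * 0.2 * 0.3 = 0.83.
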
// Append a bias neuron with a constant output of 1.0.
void Layer::addBiasNeuron()
{
    push_back(Neuron(1.0));
    m_hasBiasNeuron = true;
}

bool Layer::hasBiasNeuron() const
{
    return m_hasBiasNeuron;
}

// Number of neurons excluding the bias neuron, if one was added.
size_t Layer::sizeWithoutBiasNeuron() const
{
    if (m_hasBiasNeuron)
    {
        return size() - 1;
    }
    else
    {
        return size();
    }
}
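
// Minimal usage sketch (hypothetical; assumes Layer exposes the
// std::vector<Neuron> interface used above, e.g. by deriving from it):
//
//     Layer input(2);
//     input.addBiasNeuron();             // constant 1.0 bias input
//     Layer hidden(3);
//     hidden.addBiasNeuron();
//     Layer output(1);
//
//     input.connectTo(hidden);           // random input -> hidden weights
//     hidden.connectTo(output);          // random hidden -> output weights
//
//     input.setOutputValues({0.0, 1.0});
//     hidden.feedForward(input);
//     output.feedForward(hidden);
//
//     // after gradients have been computed, backpropagate:
//     output.updateInputWeights(hidden);
//     hidden.updateInputWeights(input);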