Renamed a few things, started working on back-propagation

mandlm 2015-10-15 19:18:26 +02:00
parent 2f556d1b92
commit 7ba16e9e9d
7 changed files with 23 additions and 18 deletions


@@ -18,8 +18,7 @@ void Layer::setOutputValues(const std::vector<double> & outputValues)
 	auto neuronIt = begin();
 	for (const double &value : outputValues)
 	{
-		neuronIt->setOutputValue(value);
-		neuronIt++;
+		(neuronIt++)->setOutputValue(value);
 	}
 }
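In the hunk above, the two removed statements and the single added one are equivalent; post-increment returns the iterator's current position and only then advances it:

neuronIt->setOutputValue(value);     // use the neuron the iterator points at
neuronIt++;                          // then step to the next neuron
// collapses into the single statement
(neuronIt++)->setOutputValue(value); // same effect: assign first, advance after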
@@ -48,6 +47,6 @@ void Layer::connectTo(const Layer & nextLayer)
 {
 	for (Neuron &neuron : *this)
 	{
-		neuron.createOutputWeights(nextLayer.size());
+		neuron.createRandomOutputWeights(nextLayer.size());
 	}
 }
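Read together with Neuron::createRandomOutputWeights further down, connectTo leaves every neuron of this layer (the bias neuron included) with nextLayer.size() random outgoing weights, so consecutive layers are fully connected.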


@@ -17,7 +17,8 @@ Net::Net(std::initializer_list<unsigned int> layerSizes)
 		Layer &currentLayer = *layerIt;
 		const Layer &nextLayer = *(layerIt + 1);
-		currentLayer.push_back(Neuron(1.0));
+		Neuron biasNeuron(1.0);
+		currentLayer.push_back(biasNeuron);
 		currentLayer.connectTo(nextLayer);
 	}
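Naming the temporary does not change behaviour; the point of the extra neuron is the bias term. An explanatory restatement of the surrounding context, assuming Neuron(1.0) fixes the neuron's output at 1.0:

Neuron biasNeuron(1.0);              // constant-output neuron, value always 1.0
currentLayer.push_back(biasNeuron);  // appended after the regular neurons
currentLayer.connectTo(nextLayer);   // its outgoing weights become the bias terms
                                     // of the next layer and are learned like any
                                     // other weight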
@@ -43,7 +44,7 @@ void Net::feedForward(const std::vector<double> &inputValues)
 	}
 }
-std::vector<double> Net::getResult()
+std::vector<double> Net::getOutput()
 {
 	std::vector<double> result;
@@ -65,7 +66,7 @@ void Net::backProp(const std::vector<double> &targetValues)
 		throw std::exception("The number of target values has to match the output layer size");
 	}
-	std::vector<double> resultValues = getResult();
+	std::vector<double> resultValues = getOutput();
 	double rmsError = 0.0;
 	for (unsigned int i = 0; i < resultValues.size(); ++i)
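The hunk is cut off right after the loop header. A sketch of how such an error loop commonly continues; rmsError, resultValues and targetValues come from the diff, but the loop body is an assumption rather than this commit's actual code:

for (unsigned int i = 0; i < resultValues.size(); ++i)
{
	double delta = targetValues[i] - resultValues[i]; // per-output error
	rmsError += delta * delta;                        // accumulate squared error
}
rmsError = std::sqrt(rmsError / resultValues.size()); // root mean square; needs <cmath>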

Net.h

@@ -10,6 +10,6 @@ public:
 	Net(std::initializer_list<unsigned int> layerSizes);
 	void feedForward(const std::vector<double> &inputValues);
-	std::vector<double> getResult();
+	std::vector<double> getOutput();
 	void backProp(const std::vector<double> &targetValues);
 };


@@ -9,18 +9,23 @@ int main()
 	{
 		std::cout << "Neuro running" << std::endl;
-		Net myNet({ 3, 4, 2 });
+		std::vector<double> inputValues = { 1.0, 4.0, 5.0 };
+		std::vector<double> targetValues = { 3.0 };
-		myNet.feedForward({ 1.0, 2.0, 3.0 });
+		Net myNet({ inputValues.size(), 4, targetValues.size() });
-		std::vector<double> result = myNet.getResult();
+		myNet.feedForward(inputValues);
+		std::vector<double> outputValues = myNet.getOutput();
 		std::cout << "Result: ";
-		for (double &value : result)
+		for (double &value : outputValues)
 		{
 			std::cout << value << " ";
 		}
 		std::cout << std::endl;
+		myNet.backProp(targetValues);
 	}
 	catch (std::exception &ex)
 	{
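With these vectors the net is constructed as { 3, 4, 1 }: three input neurons for inputValues, four hidden neurons, and one output neuron matching the single target value, so the layer sizes now follow the data instead of being hard-coded.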


@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8"?>
-<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+<Project DefaultTargets="Build" ToolsVersion="14.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
   <ItemGroup Label="ProjectConfigurations">
     <ProjectConfiguration Include="Debug|Win32">
       <Configuration>Debug</Configuration>
@@ -19,13 +19,13 @@
   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
     <ConfigurationType>Application</ConfigurationType>
     <UseDebugLibraries>true</UseDebugLibraries>
-    <PlatformToolset>v120</PlatformToolset>
+    <PlatformToolset>v140</PlatformToolset>
     <CharacterSet>Unicode</CharacterSet>
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
     <ConfigurationType>Application</ConfigurationType>
     <UseDebugLibraries>false</UseDebugLibraries>
-    <PlatformToolset>v120</PlatformToolset>
+    <PlatformToolset>v140</PlatformToolset>
     <WholeProgramOptimization>true</WholeProgramOptimization>
     <CharacterSet>Unicode</CharacterSet>
   </PropertyGroup>


@@ -25,7 +25,7 @@ double Neuron::transferFunctionDerivative(double inputValue)
 void Neuron::feedForward(double inputValue)
 {
-	outputValue = Neuron::transferFunction(inputValue);
+	outputValue = transferFunction(inputValue);
 }
 double Neuron::getWeightedOutputValue(unsigned int outputNeuron) const
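The definitions of transferFunction and transferFunctionDerivative are not part of this diff. For illustration only, a common choice for such a pair is tanh; this is an assumption, not code from the commit:

// hypothetical implementation, not taken from this commit
double Neuron::transferFunction(double inputValue)
{
	return std::tanh(inputValue); // squashes the weighted sum into (-1, 1); needs <cmath>
}

double Neuron::transferFunctionDerivative(double inputValue)
{
	double t = std::tanh(inputValue);
	return 1.0 - t * t; // d/dx tanh(x) = 1 - tanh^2(x), used during back-propagation
}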
@@ -38,11 +38,11 @@ double Neuron::getWeightedOutputValue(unsigned int outputNeuron) const
 	return 0.0;
 }
-void Neuron::createOutputWeights(unsigned int number)
+void Neuron::createRandomOutputWeights(unsigned int numberOfWeights)
 {
 	outputWeights.clear();
-	for (unsigned int i = 0; i < number; ++i)
+	for (unsigned int i = 0; i < numberOfWeights; ++i)
 	{
 		outputWeights.push_back(std::rand() / (double)RAND_MAX);
 	}
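std::rand() / (double)RAND_MAX produces a weight in [0.0, 1.0] and relies on <cstdlib>. For comparison only (not what this commit does), the same kind of initialisation with the <random> facilities might look like:

std::mt19937 engine{ std::random_device{}() };           // seeded Mersenne Twister
std::uniform_real_distribution<double> weight(0.0, 1.0); // uniform in [0, 1)
outputWeights.push_back(weight(engine));                 // one random outgoing weight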


@@ -16,6 +16,6 @@ public:
 	static double transferFunctionDerivative(double inputValue);
 	void feedForward(double inputValue);
 	double getWeightedOutputValue(unsigned int outputNeuron) const;
-	void createOutputWeights(unsigned int number);
+	void createRandomOutputWeights(unsigned int numberOfWeights);
 	double getOutputValue() const;
 };