Added simple (de-)serialization of (trained) nets

mandlm 2015-10-25 17:40:22 +01:00
parent 249bd22b67
commit 99ef63e019
10 changed files with 153 additions and 9 deletions
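In short: a trained Net can now be written to a plain-text file with Net::save() and rebuilt either via Net::load() or the new filename constructor (which simply calls load()). A minimal usage sketch, mirroring the change to main() further down; the 2-3-1 topology is only an assumed example:

#include "Net.h"

int main()
{
    Net trainedNet{ 2, 3, 1 };      // construct (and normally train) a net
    trainedNet.save("mynet.nnet");  // write layer sizes, bias flags and weights as text

    Net copyNet("mynet.nnet");      // reconstruct an equivalent net from the file
    return 0;
}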

Layer.cpp

@@ -46,7 +46,7 @@ void Layer::connectTo(const Layer & nextLayer)
{
for (Neuron &neuron : *this)
{
neuron.createOutputWeights(nextLayer.sizeWithoutBiasNeuron(), 1.0);
neuron.createRandomOutputWeights(nextLayer.sizeWithoutBiasNeuron());
}
}
@@ -72,12 +72,17 @@ void Layer::updateInputWeights(Layer & prevLayer)
void Layer::addBiasNeuron()
{
push_back(Neuron(1.0));
hasBiasNeuron = true;
m_hasBiasNeuron = true;
}
bool Layer::hasBiasNeuron() const
{
return m_hasBiasNeuron;
}
size_t Layer::sizeWithoutBiasNeuron() const
{
if (hasBiasNeuron)
if (m_hasBiasNeuron)
{
return size() - 1;
}

Layer.h

@@ -7,7 +7,7 @@
class Layer : public std::vector < Neuron >
{
private:
bool hasBiasNeuron = false;
bool m_hasBiasNeuron = false;
public:
Layer(size_t numNeurons);
@@ -21,5 +21,6 @@ public:
void addBiasNeuron();
bool hasBiasNeuron() const;
size_t sizeWithoutBiasNeuron() const;
};

Net.cpp

@@ -1,5 +1,9 @@
#include "Net.h"
#include <string>
#include <iostream>
#include <fstream>
#include <stdexcept>
#include <list>
Net::Net(std::initializer_list<size_t> layerSizes)
{
if (layerSizes.size() < 2)
@@ -23,6 +27,11 @@ Net::Net(std::initializer_list<size_t> layerSizes)
}
}
Net::Net(const std::string &filename)
{
load(filename);
}
void Net::feedForward(const std::vector<double> &inputValues)
{
Layer &inputLayer = front();
@@ -105,3 +114,87 @@ void Net::backProp(const std::vector<double> &targetValues)
currentLayer.updateInputWeights(prevLayer);
}
}
void Net::save(const std::string &filename)
{
std::ofstream outFile;
outFile.open(filename);
if (!outFile.is_open())
{
throw std::exception("unable to open output file");
}
outFile << size() << std::endl;
for (const Layer &layer : *this)
{
outFile << layer.size() << std::endl;
outFile << layer.hasBiasNeuron() << std::endl;
for (const Neuron &neuron : layer)
{
size_t numOutputWeights = neuron.getNumOutputWeights();
outFile << numOutputWeights << std::endl;
for (size_t outputWeightIndex = 0; outputWeightIndex < numOutputWeights; ++outputWeightIndex)
{
outFile << neuron.getOutputWeight(outputWeightIndex) << std::endl;
}
}
}
outFile.close();
}
void Net::load(const std::string &filename)
{
std::ifstream inFile;
inFile.open(filename, std::ios::binary);
if (!inFile.is_open())
{
throw std::exception("unable to open input file");
}
clear();
std::string buffer;
getline(inFile, buffer);
size_t numLayers = std::stol(buffer);
for (size_t layerIndex = 0; layerIndex < numLayers; ++layerIndex)
{
getline(inFile, buffer);
size_t numNeurons = std::stol(buffer);
getline(inFile, buffer);
bool hasBiasNeuron = std::stol(buffer) != 0;
size_t numNeuronsWithoutBiasNeuron = hasBiasNeuron ? numNeurons - 1 : numNeurons;
Layer newLayer(numNeuronsWithoutBiasNeuron);
if (hasBiasNeuron)
{
newLayer.addBiasNeuron();
}
for (size_t neuronIndex = 0; neuronIndex < numNeurons; ++neuronIndex)
{
getline(inFile, buffer);
size_t numWeights = std::stol(buffer);
std::list<double> outputWeights;
for (size_t weightIndex = 0; weightIndex < numWeights; ++weightIndex)
{
getline(inFile, buffer);
outputWeights.push_back(std::stod(buffer));
}
newLayer.at(neuronIndex).createOutputWeights(outputWeights);
}
push_back(newLayer);
}
inFile.close();
}
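A caveat worth noting (not addressed in this commit): save() writes the weights with the stream's default precision of six significant digits, so a reloaded net will generally match the original only to that precision. If exact round-trips are wanted, one possible refinement is to raise the output precision before writing; a sketch under that assumption (the helper name is invented for illustration):

#include <fstream>
#include <iomanip>
#include <limits>

// Sketch: max_digits10 significant digits are enough for a double printed as
// text to parse back to exactly the same value.
void useRoundTripPrecision(std::ofstream &outFile)
{
    outFile << std::setprecision(std::numeric_limits<double>::max_digits10);
}

Calling this on outFile at the start of Net::save() would let load() reproduce the stored weights bit-for-bit.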

Net.h

@@ -8,8 +8,12 @@ class Net : public std::vector < Layer >
{
public:
Net(std::initializer_list<size_t> layerSizes);
Net(const std::string &filename);
void feedForward(const std::vector<double> &inputValues);
std::vector<double> getOutput();
void backProp(const std::vector<double> &targetValues);
void save(const std::string &filename);
void load(const std::string &filename);
};

main.cpp

@@ -21,7 +21,7 @@ int main()
double batchMaxError = 0.0;
double batchMeanError = 0.0;
size_t numIterations = 1000000;
size_t numIterations = 100000;
for (size_t iteration = 0; iteration < numIterations; ++iteration)
{
std::vector<double> inputValues =
@@ -59,6 +59,9 @@ int main()
myNet.backProp(targetValues);
}
myNet.save("mynet.nnet");
Net copyNet("mynet.nnet");
}
catch (std::exception &ex)
{

Neuron.cpp

@@ -114,3 +114,8 @@ void Neuron::setOutputWeight(size_t index, double value)
outputWeights.at(index) = value;
}
size_t Neuron::getNumOutputWeights() const
{
return outputWeights.size();
}

Neuron.h

@@ -32,6 +32,7 @@ public:
double getOutputWeight(size_t index) const;
void setOutputWeight(size_t index, double value);
size_t getNumOutputWeights() const;
private:
static double transferFunction(double inputValue);

NetLearner.cpp

@@ -12,7 +12,7 @@ void NetLearner::run()
double batchMaxError = 0.0;
double batchMeanError = 0.0;
size_t numIterations = 1000000;
size_t numIterations = 100000;
for (size_t iteration = 0; iteration < numIterations; ++iteration)
{
std::vector<double> inputValues =
@@ -57,6 +57,8 @@ void NetLearner::run()
emit progress((double)iteration / (double)numIterations);
}
myNet.save("mynet.nnet");
}
catch (std::exception &ex)
{

NeuroUI.cpp

@@ -6,6 +6,8 @@ NeuroUI::NeuroUI(QWidget *parent) :
ui(new Ui::NeuroUI)
{
ui->setupUi(this);
ui->logView->addItem("Ready.");
}
NeuroUI::~NeuroUI()

mynet.nnet (new file)

@@ -0,0 +1,28 @@
3
3
1
3
1.04423
0.628599
0.480053
3
1.049
0.69511
0.462104
3
-2.3429
0.830251
0.596034
4
1
1
1.61567
1
0.42416
1
1.03857
1
0.732838
1
0
0
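For reference, the layout of mynet.nnet follows directly from Net::save() above:

numLayers
  per layer:  numNeurons (bias neuron included)
              hasBiasNeuron (0 or 1)
    per neuron:  numOutputWeights
                 one weight per line

Read that way, this file describes a three-layer net: an input layer of two neurons plus a bias neuron (each with three output weights), a hidden layer of three neurons plus a bias neuron (one output weight each), and a single output neuron with no outgoing weights and no bias.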