#include <iostream>
#include <math.h>
#include <time.h>
#include "bp.h"
using namespace std;
// Training data: the XOR truth table.
// Each row is {input A, input B, desired output}.
const int TrainingSet[4][3]={
{0,0,0},
{0,1,1},
{1,0,1},
{1,1,0},
};
// Default-construct a layer with every pointer member nulled.
// The original only nulled ParentLayer/ChildLayer; CleanUp() calls
// free() on the array members and tests Weights/BiasValues/BiasWeights
// against NULL, so all of them must start as NULL or destroying a layer
// that was never Initialize()d is undefined behavior.
NeuralNetworkLayer::NeuralNetworkLayer()
{
ParentLayer = NULL;
ChildLayer = NULL;
NeuronValues = NULL;
DesiredValues = NULL;
Errors = NULL;
Weights = NULL;
WeightChanges = NULL;
BiasValues = NULL;
BiasWeights = NULL;
}
// Allocate and zero this layer's storage and wire it to its neighbors.
//
// NumNodes - number of neurons in this layer.
// parent   - the preceding layer (NULL for the input layer).
// child    - the following layer (NULL for the output layer).
//
// Weight matrices (Weights/WeightChanges, NumberOfNodes x
// NumberOfChildNodes) and the bias arrays exist only on layers that
// have a child; they hold the connections *into* the child layer.
// NOTE: the caller must have set NumberOfChildNodes before calling.
void NeuralNetworkLayer::Initialize(int NumNodes,NeuralNetworkLayer* parent,NeuralNetworkLayer* child)
{
int i, j;
// Bug fix: the original ignored NumNodes entirely and relied on the
// caller having already assigned NumberOfNodes. Store it here so the
// method honors its own parameter (callers pass the same value, so
// this is backward-compatible).
NumberOfNodes = NumNodes;
NeuronValues = (double*) malloc(sizeof(double) * NumberOfNodes);
DesiredValues = (double*) malloc(sizeof(double) * NumberOfNodes);
Errors = (double*) malloc(sizeof(double) * NumberOfNodes);
if(parent != NULL)
{
ParentLayer = parent;
}
if(child != NULL)
{
ChildLayer = child;
Weights = (double**) malloc(sizeof(double*) *NumberOfNodes);
WeightChanges = (double**) malloc(sizeof(double*) *NumberOfNodes);
for(i = 0; i<NumberOfNodes; i++)
{
Weights[i] = (double*) malloc(sizeof(double) * NumberOfChildNodes);
WeightChanges[i] = (double*) malloc(sizeof(double) * NumberOfChildNodes);
}
// One bias per child node, shared by all neurons of this layer.
BiasValues = (double*) malloc(sizeof(double) *NumberOfChildNodes);
BiasWeights = (double*) malloc(sizeof(double) *NumberOfChildNodes);
} else {
Weights = NULL;
BiasValues = NULL;
BiasWeights = NULL;
WeightChanges = NULL;
}
// Zero everything so training starts from a known state
// (RandomizeWeights() overwrites the weights afterwards).
for(i=0; i<NumberOfNodes; i++)
{
NeuronValues[i] = 0;
DesiredValues[i] = 0;
Errors[i] = 0;
if(ChildLayer != NULL)
for(j=0; j<NumberOfChildNodes; j++)
{
Weights[i][j] = 0;
WeightChanges[i][j] = 0;
}
}
if(ChildLayer != NULL)
for(j=0; j<NumberOfChildNodes; j++)
{
BiasValues[j] = 0;
BiasWeights[j] = 0;
}
}
// Release all heap storage owned by this layer.
// Improvement over the original: every freed pointer is reset to NULL,
// making CleanUp() idempotent (a second call no longer double-frees).
void NeuralNetworkLayer::CleanUp(void)
{
int i;
free(NeuronValues);
NeuronValues = NULL;
free(DesiredValues);
DesiredValues = NULL;
free(Errors);
Errors = NULL;
// Weights/WeightChanges are allocated together, so one NULL test
// covers both matrices.
if(Weights != NULL)
{
for(i = 0; i<NumberOfNodes; i++)
{
free(Weights[i]);
free(WeightChanges[i]);
}
free(Weights);
free(WeightChanges);
Weights = NULL;
WeightChanges = NULL;
}
if(BiasValues != NULL) free(BiasValues);
BiasValues = NULL;
if(BiasWeights != NULL) free(BiasWeights);
BiasWeights = NULL;
}
// Initialize every connection weight and bias weight to a uniform
// random value in [-1.0, 1.0].
// Bug fix: the original called srand(time(NULL)) on every invocation.
// NeuralNetwork::Initialize() calls this twice within the same second,
// which reseeded the generator to the same state and produced
// correlated weights across layers. Seed exactly once per process.
// (Also removed the dead clamping: rand() % (max-min+1) + min is
// already guaranteed to lie in [min, max].)
void NeuralNetworkLayer::RandomizeWeights(void)
{
int i,j;
int min = 0;
int max = 200;
int number;
static bool seeded = false;
if(!seeded)
{
srand( (unsigned)time( NULL ) );
seeded = true;
}
for(i=0; i<NumberOfNodes; i++)
{
for(j=0; j<NumberOfChildNodes; j++)
{
number = rand() % (max - min + 1) + min;   // 0..200
Weights[i][j] = number / 100.0f - 1;       // maps to [-1.0, 1.0]
}
}
for(j=0; j<NumberOfChildNodes; j++)
{
number = rand() % (max - min + 1) + min;
BiasWeights[j] = number / 100.0f - 1;
}
}
// Forward pass for this layer: each neuron's value is the weighted sum
// of the parent layer's outputs (plus the bias term) squashed through
// the logistic sigmoid. The input layer (ParentLayer == NULL) keeps the
// values assigned via SetInput().
// Bug fix: the original stored the raw weighted sum with no activation.
// CalculateErrors() differentiates assuming sigmoid outputs
// (NeuronValues * (1 - NeuronValues)), so without the squashing the
// gradients are inconsistent and the purely linear network can never
// learn the XOR TrainingSet defined at the top of this file.
void NeuralNetworkLayer::CalculateNeuronValues(void)
{
int i,j;
double x;
if (ParentLayer != NULL)
{
for(j=0; j<NumberOfNodes; j++)
{
x = 0;
for(i=0; i<NumberOfParentNodes; i++)
{
x += ParentLayer->NeuronValues[i] *
ParentLayer->Weights[i][j];
}
x += ParentLayer->BiasValues[j] *
ParentLayer->BiasWeights[j];
// Logistic sigmoid activation: output in (0, 1).
NeuronValues[j] = 1.0 / (1.0 + exp(-x));
}
}
}
void NeuralNetworkLayer::CalculateErrors(void)
{
int i, j;
double sum;
if(ChildLayer == NULL)
{
for(i=0; i<NumberOfNodes; i++)
{
Errors[i] = (DesiredValues[i] - NeuronValues[i]) *
NeuronValues[i] *
(1.0f - NeuronValues[i]);
}
} else if(ParentLayer == NULL) {
for(i=0; i<NumberOfNodes; i++)
{
Errors[i] = 0.0f;
}
} else {
for(i=0; i<NumberOfNodes; i++)
{
sum = 0;
for(j=0; j<NumberOfChildNodes; j++)
{
sum += ChildLayer->Errors[j] * Weights[i][j];
}
Errors[i] = sum * NeuronValues[i] *
(1.0f - NeuronValues[i]);
}
}
}
// Gradient-descent update of the weights feeding the child layer:
// w += LearningRate * childDelta * sourceActivation, and likewise for
// the bias weights (using the stored bias values as the "activation").
void NeuralNetworkLayer::AdjustWeights(void)
{
if(ChildLayer == NULL)
return;  // output layer owns no outgoing weights
for(int node = 0; node < NumberOfNodes; node++)
{
for(int c = 0; c < NumberOfChildNodes; c++)
{
double delta = LearningRate * ChildLayer->Errors[c] * NeuronValues[node];
Weights[node][c] += delta;
}
}
for(int c = 0; c < NumberOfChildNodes; c++)
BiasWeights[c] += LearningRate * ChildLayer->Errors[c] * BiasValues[c];
}
// Intentionally empty stub — declared in bp.h but never given a body.
// Presumably intended for debug/serialization output; verify before use.
void NeuralNetworkLayer::OutData()
{
}
// Build a 3-layer (input -> hidden -> output) network.
// Each layer's node counts are assigned BEFORE calling
// NeuralNetworkLayer::Initialize, which relies on NumberOfChildNodes
// being set to size its weight matrices.
void NeuralNetwork::Initialize(int nNodesInput, int nNodesHidden,int nNodesOutput)
{
InputLayer.NumberOfNodes = nNodesInput; // number of input-layer nodes
InputLayer.NumberOfChildNodes = nNodesHidden;
InputLayer.NumberOfParentNodes = 0;
InputLayer.Initialize(nNodesInput, NULL, &HiddenLayer);
InputLayer.RandomizeWeights();
HiddenLayer.NumberOfNodes = nNodesHidden; // number of hidden-layer nodes
HiddenLayer.NumberOfChildNodes = nNodesOutput;
HiddenLayer.NumberOfParentNodes = nNodesInput;
HiddenLayer.Initialize(nNodesHidden,&InputLayer,&OutputLayer);
HiddenLayer.RandomizeWeights();
OutputLayer.NumberOfNodes = nNodesOutput; // number of output-layer nodes
OutputLayer.NumberOfChildNodes = 0;
OutputLayer.NumberOfParentNodes = nNodesHidden;
// Output layer has no child, so it allocates no weights to randomize.
OutputLayer.Initialize(nNodesOutput, &HiddenLayer, NULL);
}
// Release the heap storage of all three layers.
void NeuralNetwork::CleanUp()
{
InputLayer.CleanUp();
HiddenLayer.CleanUp();
OutputLayer.CleanUp();
}
// Assign one input-layer neuron's value; out-of-range indices are
// silently ignored rather than writing out of bounds.
void NeuralNetwork::SetInput(int i, double value)
{
if(i < 0 || i >= InputLayer.NumberOfNodes)
return;
InputLayer.NeuronValues[i] = value;
}
/*double NeuralNetwork::GetOutput(int i)
{
return OutputLayer.NeuronValues[i];
}
*/
// Assign the desired (target) value for one output-layer neuron;
// out-of-range indices are silently ignored.
void NeuralNetwork::SetDesiredOutput(int i, double value)
{
if(i < 0 || i >= OutputLayer.NumberOfNodes)
return;
OutputLayer.DesiredValues[i] = value;
}
// Forward pass: propagate activations input -> hidden -> output.
// (The input layer's call is effectively a no-op since it has no
// parent; see NeuralNetworkLayer::CalculateNeuronValues.)
void NeuralNetwork::FeedForward(void)
{
InputLayer.CalculateNeuronValues ();
HiddenLayer.CalculateNeuronValues ();
OutputLayer.CalculateNeuronValues ();
}
// Backward pass: compute deltas from output toward input, THEN apply
// the weight updates. Order matters — errors must be computed for a
// layer's children before AdjustWeights() reads ChildLayer->Errors.
void NeuralNetwork::BackPropagate(void)
{
OutputLayer.CalculateErrors();
HiddenLayer.CalculateErrors();
HiddenLayer.AdjustWeights();
InputLayer.AdjustWeights();
}
// Return the current sum-of-squared-errors over the output layer,
// halved (the conventional 1/2 * sum((actual - desired)^2) loss whose
// derivative backprop uses).
// Improvement: pow(x, 2) on a scalar is a general transcendental call;
// a plain multiplication is both clearer and cheaper.
double NeuralNetwork::CalculateError(void)
{
int i;
double error = 0;
for(i=0; i<OutputLayer.NumberOfNodes; i++)
{
double diff = OutputLayer.NeuronValues[i] -
OutputLayer.DesiredValues[i];
error += diff * diff;
}
error = error / 2;
return error;
}
void NeuralNetwork::SetLearningRate(double rate) //设定学习率
{
InputLayer.LearningRate = rate;
HiddenLayer.LearningRate = rate;
OutputLa