BP算法程序练习(doing)

目录

一、先定义需要用到的结构

1、输入层

2、隐藏层

3、输出层

二、BP算法 (doing)


一、先定义需要用到的结构

1、输入层

#pragma once
#ifndef INPUTLAYER_H
#define INPUTLAYER_H

#include"Header.h"

// One neuron of the input layer.
class InLayer {
public:
	double bias = 1;              // bias input (always 1); node 0 of the layer carries it

	double inputValue = 0;        // training feature value fed into this neuron
	std::vector<double> weight;   // weights from this input neuron to every hidden-layer neuron

};

#endif // !INPUTLAYER_H

2、隐藏层

#pragma once
#ifndef HIDDENLAYER_H
#define HIDDENLAYER_H

#include"Header.h"

// One neuron of the hidden layer.
class HiddenLayer {
public:
	double bias = 1;       // bias input (always 1); node 0 of the layer carries it

	double inputValue = 0;   // z = sum(weight * x)
	double outputValue = 0;  // a = sigmoid(z)

	std::vector<double> weight;   // weights from this hidden neuron to every next-layer neuron

	double delta = 0;        // error term (dE/dz) of this neuron
};

#endif // !HIDDENLAYER_H

3、输出层

#pragma once
#ifndef OUTPUTLAYER_H
#define OUTPUTLAYER_H   // was misspelled OPUTPUTLAYER_H, so the guard never took effect

// One neuron of the output layer.
class OutLayer {
public:

	double inputValue = 0;   // z = sum(weight * x)
	double outputValue = 0;  // a = sigmoid(z), i.e. the predicted output

	double label = 0;        // training label (ground truth)

	double delta = 0;        // error term (dE/dz) of this neuron

};

#endif // !OUTPUTLAYER_H

二、BP算法 (doing)

BPNet.h

#pragma once
#include"Header.h"
#include"InLayer.h"
#include"HiddenLayer.h"
#include"OutLayer.h"

#define InNodeNum 2		// 输入层节点数
#define HiddenNodeNum 4    // 隐藏层节点数 
#define OutNodeNum 1		// 输出层节点数
#define TotalLayer 3

// One training/testing sample: feature vector plus expected outputs.
typedef struct sample {
	std::vector<double> inputData;    // feature values, one per input node
	std::vector<double> outputData;   // label values, one per output node
}Sample;


class BPNet
{
public:
	BPNet() {};
	BPNet(std::string &filenamne);   //构造函数,初始化 权重
	void forwardPropagation();
	void BackPropagation();
	void train(std::vector sampleGroup);
	void prediction(std::vector testGroup);
	~BPNet(){}
private:
	double error; //误差率
	double LearnStep = 0.1;   // 学习率
	std::vector* inLayerNode;
	std::vector* hiddenLayerNode;   // 一个隐藏层,多个隐藏层就用矩阵arma::mat
	std::vector* outLayerNode;

private:
	void InputInitW(std::vector &inLayer);
	void HiddenInitW(std::vector& hiddenLayer);

	void ReadInputData();
};

BPNet.cpp

#include"Header.h"
#include "BPNet.h"
using namespace std;

// Pseudo-random initial weight in [-1.0, 1.0), in steps of 0.001.
inline double getRandom()
{
	const int bucket = rand() % 2000;   // 0 .. 1999
	return bucket / 1000.0 - 1.0;       // shift into [-1, 1)
}

// Logistic activation: maps any real z into (0, 1).
// Takes z by value (cheap scalar) instead of non-const reference, so it
// also accepts constants and temporaries.
inline double sigmoid(double z)
{
	return 1 / (1 + exp(-z));
}

void BPNet::InputInitW(std::vector& inLayerNode) {
	inLayerNode.resize(InNodeNum + 1);
	srand(time(NULL));
	for (int i = 0; i <= InNodeNum; i++) {  // 0 为偏置的权重;
		for (int j = 0; j < HiddenNodeNum; j++) {
			inLayerNode[i].weight.push_back(getRandom());

		}
	}
}
void BPNet::HiddenInitW(std::vector& hiddenLayerNode) {
	hiddenLayerNode.resize(HiddenNodeNum + 1);
	srand(time(NULL));
	for (int i = 0; i <= HiddenNodeNum; i++) {  // 0 为偏置的权重;
		for (int j = 0; j < OutNodeNum; j++) {
			hiddenLayerNode[i].weight.push_back(getRandom());

		}
	}

}

// Builds the three layers and initializes all weights, then loads the
// training data. Fixes in this version:
//  - the layer pointers were dereferenced while still uninitialized; they
//    must be allocated first,
//  - outLayerNode used reserve(), which leaves the vector empty — the
//    elements are indexed later, so resize() (via the sized ctor) is required,
//  - the RNG is seeded exactly once here instead of inside each InitW call.
BPNet::BPNet(std::string& filenamne)
{
	srand(time(NULL));

	inLayerNode = new std::vector<InLayer>(InNodeNum + 1);
	hiddenLayerNode = new std::vector<HiddenLayer>(HiddenNodeNum + 1);
	outLayerNode = new std::vector<OutLayer>(OutNodeNum + 1);

	InputInitW(*inLayerNode);
	HiddenInitW(*hiddenLayerNode);

	ReadInputData();
}

// Forward pass: input -> hidden -> output.
// Convention: layer nodes are 1-based (index 0 is the bias node), but each
// node's weight vector is 0-based, so the connection to target node i lives
// at weight[i - 1]. The original indexed weight[i] (out of bounds for the
// last target node) and used weight[0] for every bias contribution.
void BPNet::forwardPropagation()
{
	auto& inNode = *inLayerNode;
	auto& hiddenNode = *hiddenLayerNode;
	auto& outNode = *outLayerNode;

	// input layer to hidden layer
	for (int i = 1; i <= HiddenNodeNum; i++)
	{
		double z = 0.0;
		for (int j = 1; j <= InNodeNum; j++)
		{
			z += inNode[j].inputValue * inNode[j].weight[i - 1];
		}
		// bias contribution from node 0, using its weight toward hidden node i
		z += inNode[0].bias * inNode[0].weight[i - 1];

		hiddenNode[i].inputValue = z;
		hiddenNode[i].outputValue = sigmoid(z);
	}
	// hidden layer to out layer
	for (int i = 1; i <= OutNodeNum; i++)
	{
		double z = 0.0;
		for (int j = 1; j <= HiddenNodeNum; j++) {
			z += hiddenNode[j].outputValue * hiddenNode[j].weight[i - 1];
		}
		z += hiddenNode[0].bias * hiddenNode[0].weight[i - 1];

		outNode[i].inputValue = z;
		outNode[i].outputValue = sigmoid(z);
	}
}

// Backward pass: compute delta = dE/dz per neuron (E = 1/2 * (label - a)^2),
// then gradient-descent every weight: w -= LearnStep * delta * input.
// Fixes in this version:
//  - hidden deltas accumulate with += (the original overwrote with =),
//  - weight updates happen inside the inner loop (the original applied a
//    single update after the loop, indexing one past the end),
//  - the input-layer loop started at "int i = i" (uninitialized read) and
//    the inner loop missed the last hidden node (j < instead of j <=),
//  - weight indices use the 0-based weight[j - 1] convention that matches
//    forwardPropagation, and the bias node (index 0) is updated too.
void BPNet::BackPropagation()
{
	auto& outNode = *outLayerNode;
	auto& hiddenNode = *hiddenLayerNode;
	auto& inNode = *inLayerNode;

	// output-layer deltas: dE/dz = -(label - a) * a * (1 - a)
	for (int i = 1; i <= OutNodeNum; i++)
	{
		double a = outNode[i].outputValue;
		outNode[i].delta = -(outNode[i].label - a) * (1 - a) * a;
	}

	// hidden-layer deltas: back-propagate the output deltas
	for (int i = 1; i <= HiddenNodeNum; i++)
	{
		double sum = 0.0;
		for (int j = 1; j <= OutNodeNum; j++) {
			sum += outNode[j].delta * hiddenNode[i].weight[j - 1];
		}
		hiddenNode[i].delta = sum * hiddenNode[i].outputValue * (1 - hiddenNode[i].outputValue);
	}

	// update hidden -> output weights (node 0 feeds its bias value)
	for (int i = 0; i <= HiddenNodeNum; i++)
	{
		double activation = (i == 0) ? hiddenNode[0].bias : hiddenNode[i].outputValue;
		for (int j = 1; j <= OutNodeNum; j++) {
			hiddenNode[i].weight[j - 1] -= LearnStep * outNode[j].delta * activation;
		}
	}

	// update input -> hidden weights (node 0 feeds its bias value)
	for (int i = 0; i <= InNodeNum; i++)
	{
		double activation = (i == 0) ? inNode[0].bias : inNode[i].inputValue;
		for (int j = 1; j <= HiddenNodeNum; j++) {
			inNode[i].weight[j - 1] -= LearnStep * hiddenNode[j].delta * activation;
		}
	}
}



 

你可能感兴趣的:(Machine,Learning)