Neural Networks and Machine Learning Notes: C++ Implementation of the Rosenblatt Perceptron Convergence Algorithm

C++ Implementation of the Rosenblatt Perceptron Convergence Algorithm

Algorithm Overview

[Figure 1: overview of the Rosenblatt perceptron convergence algorithm]
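Restated briefly, in case the figure does not render (this is my own paraphrase of the standard form of the algorithm, not a transcription of the figure): with w(n) the weight vector, x(n) the input vector whose first component is the fixed bias input, d(n) the desired response, and η the learning-rate parameter,

$$ y(n) = \operatorname{sgn}\big(\mathbf{w}^{T}(n)\,\mathbf{x}(n)\big) $$

$$ \mathbf{w}(n+1) = \mathbf{w}(n) + \eta\,\big[d(n) - y(n)\big]\,\mathbf{x}(n) $$

The C++ code below follows this rule, except that the bias weight is kept fixed rather than updated.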

I implemented this in C++ myself. Both the training and test samples come from the double-moon classification model; for background on that model, see my earlier note:

https://blog.csdn.net/u013761036/article/details/90548819

Here is the implementation code:

#pragma once
#include "stdafx.h"
#include <iostream>
#include <cmath>
#include <cstdlib>
#include <cstdio>
using namespace std;
 
 
int gnM = 0;       //dimension of the training input space
int gnN = 0;       //number of synaptic weights
double gdU = 0.01; //learning-rate parameter
 
void RosenBlattInit(double *dX, int nM, double *dW, int nN, double dB, double dU) {
	//dX training input vector for this pass
	//nM dimension of the training input space
	//dW synaptic weight vector
	//nN number of synaptic weights; the Rosenblatt perceptron has a single neuron, so nN == nM
	//dB bias; ideally it would be adjusted dynamically (e.g. via annealing), but it is kept fixed here for now
	//dU learning-rate parameter
	if (nM > 0) {
		dX[0] = 1;//treat the bias as a permanently fixed synaptic input
	}
	for (int i = 0; i <= nN; i++) {
		if (i == 0) {
			dW[i] = dB;//fixed bias weight
		}
		else {
			dW[i] = 0.0;
		}
	}
	gnM = nM;
	gnN = nN;
	gdU = dU;
}
 
//hard limiter (signum) activation function
double Sgn(double dNumber) {
	return dNumber > 0 ? +1.0 : -1.0;
}
 
//Perceptron convergence algorithm - learning
void RosenBlattStudy(const double *dX, const double dD, double *dW) {
	//dX training input vector for this pass
	//dD desired response for this input
	//dW synaptic weights, updated in place
	double dY = 0;
	for (int i = 0; i <= gnM && i <= gnN; i++) {
		dY = dY + dX[i] * dW[i];
	}
	dY = Sgn(dY);
	if (dD == dY) {
		return;//response is correct, no weight adjustment needed
	}
	for (int i = 1; i <= gnM && i <= gnN; i++) {//start at 1: dW[0] is the fixed bias
		dW[i] = dW[i] + gdU * (dD - dY) * dX[i];
	}
}
 
//感知器收敛算法-泛化
//Perceptron convergence algorithm - generalization
double RosenBlattGeneralization(const double *dX, const double *dW) {
	//dX input vector to classify
	//dW trained synaptic weights
	//returns the classification result, i.e. which region the input belongs to
	double dY = 0;
	for (int i = 0; i <= gnM && i <= gnN; i++) {
		dY = dY + dX[i] * dW[i];
	}
	return Sgn(dY);
}
 
//Double-moon classification model: generate one random sample
/*  Slightly modified version of the model:
Region 1: upper half circle, centered at the origin (0, 0)
    (x - 0) * (x - 0) + (y - 0) * (y - 0) = 10 * 10
    x >= -10 && x <= 10
    y >= 0 && y <= 10
Region 2: lower half circle, centered at (10, -1)
    (x - 10) * (x - 10) + (y + 1) * (y + 1) = 10 * 10
    x >= 0 && x <= 20
    y >= -11 && y <= -1
*/
 
const double gRegionA = 1.0; //upper moon
const double gRegionB = -1.0;//lower moon
 
void Bimonthly(double *dX, double *dY, double *dResult) {
	//dX      x coordinate
	//dY      y coordinate
	//dResult which region the sample belongs to
	*dResult = rand() % 2 == 0 ? gRegionA : gRegionB;
	if (*dResult == gRegionA) {
		*dX = rand() % 20 - 10;//random x within the interval
		*dY = sqrt(10 * 10 - (*dX) * (*dX));//solve for y on the upper half circle
	}
	else {
		*dX = rand() % 20;
		*dY = -sqrt(10 * 10 - (*dX - 10) * (*dX - 10)) - 1;//lower half circle centered at (10, -1), y in [-11, -1]
	}
}
 
 
int main()
{
	//system("color 0b");
	double dX[2 + 1], dD, dW[2 + 1]; //input space dimension is 3: planar coordinates plus one bias input
	double dU = 0.1;
	double dB = 0;
	RosenBlattInit(dX, 2, dW, 2, dB, dU);//initialize the perceptron
	double dBimonthlyX, dBimonthlyY, dBimonthlyResult;
	int nLearningTimes = 1024 * 10;//run 10K training passes
	for (int nLearning = 0; nLearning <= nLearningTimes; nLearning++) {
		Bimonthly(&dBimonthlyX, &dBimonthlyY, &dBimonthlyResult);//generate a random double-moon sample
		dX[1] = dBimonthlyX;
		dX[2] = dBimonthlyY;
		dD = dBimonthlyResult;
		RosenBlattStudy(dX, dD, dW);
		//cout <<"Study:" << nLearning << " :X= " << dBimonthlyX << "Y= " << dBimonthlyY << " D=" << dBimonthlyResult<< "----W1= " << dW[1] << "  W2= " << dW[2] << endl;
	}
	//test the perceptron's generalization ability on 1K samples
	int nGeneralizationTimes = 1 * 1024;
	int nGeneralizationYes = 0, nGeneralizationNo = 0;
	double dBlattGeneralizationSuccessRate = 0;
	for (int nLearning = 1; nLearning <= nGeneralizationTimes; nLearning++) {
		Bimonthly(&dBimonthlyX, &dBimonthlyY, &dBimonthlyResult);//generate a random double-moon sample
		dX[1] = dBimonthlyX;
		dX[2] = dBimonthlyY;
		//cout << "Generalization: " << dBimonthlyX << "," << dBimonthlyY;
		if (dBimonthlyResult == RosenBlattGeneralization(dX, dW)) {
			nGeneralizationYes++;
			//cout << " Yes" << endl;
		}
		else {
			nGeneralizationNo++;
			//cout << " No" << endl;
		}
	}
	dBlattGeneralizationSuccessRate = nGeneralizationYes * 1.0 / (nGeneralizationNo + nGeneralizationYes) * 100;
	cout << "Study : " << nLearningTimes << "     Generalization : " << nGeneralizationTimes << "     SuccessRate:" << dBlattGeneralizationSuccessRate << "%" << endl;
    getchar();
	return 0;
}
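If you want to see what the perceptron actually learned, a small helper along these lines could be added to the program above (the name PrintDecisionBoundary is my own, not from the original post). Since dW[0] holds the fixed bias and dW[1], dW[2] the weights for x and y, the decision boundary is the line dW[0] + dW[1]*x + dW[2]*y = 0:

//Hypothetical helper: print the decision boundary implied by the learned weights.
//With dB fixed at 0 in this example, the boundary is a line through the origin.
void PrintDecisionBoundary(const double *dW) {
	if (dW[2] != 0.0) {
		cout << "Decision boundary: y = " << -dW[1] / dW[2] << " * x + " << -dW[0] / dW[2] << endl;
	}
	else {
		cout << "Decision boundary is vertical or degenerate (dW[2] == 0)" << endl;
	}
}

Calling PrintDecisionBoundary(dW) right after the training loop prints the separating line in slope-intercept form.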
 

Result:

After 10K training passes and 1K generalization tests, the success rate is 96%.
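One thing worth noting (my observation, not part of the original post): rand() is never seeded, so every run draws exactly the same pseudo-random sample sequence and reproduces the same success rate. To get fresh samples on each run, a minimal change would be to seed the generator at the top of main(), for example:

#include <cstdlib>
#include <ctime>

int main()
{
	srand(static_cast<unsigned>(time(nullptr)));//seed rand() once so each run sees different samples
	//... rest of main() as in the listing above ...
}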

