Reference article: https://www.cnblogs.com/Finley/p/5946000.html
Header file: BP_NN.h
#ifndef BP_NN_H
#define BP_NN_H
#include <iostream>
#include <vector>
#include <cmath>
#include <ctime> // time(), used to seed the RNG
#include <cstdlib> // rand(), srand()
#include <string>
#include <fstream>
#include <iomanip>
using namespace std;
class RandomNumber
{
public:
RandomNumber()
{
srand((unsigned)time(NULL)); // constructor: seed the RNG when the object is created
}
// uniform random integer in [begin, end]
int integer(int begin, int end)
{
return rand() % (end - begin + 1) + begin;
}
// uniform random double in [a, b), with 1/10000 resolution
double decimal(double a, double b)
{
return double(rand() % 10000) / 10000 * (b - a) + a;
}
// Box-Muller transform: one sample from N(mu, sigma^2)
double GaussianNoise(double mu, double sigma)
{
double u1 = decimal(0, 1);
if (u1 <= 0) u1 = 1e-7; // guard: decimal() can return 0, and log(0) is undefined
return sigma * sqrt(-2 * log(u1)) * cos(2 * 3.1415926 * decimal(0, 1)) + mu;
}
};
RandomNumber r;
// activation function
double sigmoid(double x)
{
return 1.0 / (1.0 + exp(-x));
}
// first derivative of the sigmoid, written in terms of the sigmoid output:
// if y = sigmoid(x), then dy/dx = y * (1 - y), so callers pass y, not x
double sigmoid_derivative(double x)
{
return x * (1 - x);
}
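This convention matters throughout back_propagate below: the deltas are computed from output_cells and hidden_cells, which already hold activated values. A quick standalone check of the identity (my own sketch, not part of the header):
// compare the analytic derivative, expressed via the sigmoid output,
// against a central finite difference
#include <cmath>
#include <cstdio>
int main()
{
double x = 0.7, h = 1e-6;
double y = 1.0 / (1.0 + exp(-x)); // sigmoid(x)
double analytic = y * (1 - y); // what sigmoid_derivative(y) computes
double numeric = (1.0 / (1.0 + exp(-(x + h))) - 1.0 / (1.0 + exp(-(x - h)))) / (2 * h);
printf("analytic=%.8f numeric=%.8f\n", analytic, numeric);
}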
class BPNeuralNetwork
{
public:
int input_n;
int hidden_n;
int output_n;
vector<double> input_cells;
vector<double> hidden_cells;
vector<double> output_cells;
vector<vector<double>> input_weights;
vector<vector<double>> hidden_weights;
vector<vector<double>> input_correction; // momentum memory for input->hidden weights
vector<vector<double>> hidden_correction; // momentum memory for hidden->output weights
// network interface
void setup(int ni, int nh, int no); // initialize topology and weights
vector<double> predict(vector<double> inputs); // forward pass: map inputs to outputs
double back_propagate(vector<double> input, vector<double> output, double learn, double correct); // one backprop step on one sample
double train(vector<vector<double>> cases, vector<vector<double>> labels, int limit, double learn, double correct); // training loop
};
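Before the implementations, a minimal usage sketch of this interface (toy XOR data and hyperparameters of my own choosing, purely illustrative):
// sketch: 2 inputs, 4 hidden neurons, 1 output, trained on XOR
BPNeuralNetwork net;
net.setup(2, 4, 1);
vector<vector<double>> cases = { {0,0}, {0,1}, {1,0}, {1,1} };
vector<vector<double>> labels = { {0}, {1}, {1}, {0} };
net.train(cases, labels, 10000, 0.5, 0.1); // limit, learn, correct (momentum)
vector<double> y = net.predict(cases[1]); // should approach 1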
void BPNeuralNetwork::setup(int ni, int nh, int no)
{
input_n = ni + 1; // one extra input cell, fixed at 1.0, serves as the hidden layer's bias
hidden_n= nh;
output_n=no;
//init cells
input_cells.resize(input_n,1);
hidden_cells.resize(hidden_n, 1);
output_cells.resize(output_n, 1);
//init weights
input_weights.resize(input_n, vector<double>(hidden_n));
hidden_weights.resize(hidden_n, vector<double>(output_n));
// randomly initialize the weights
for (int i = 0; i < input_n; i++)
{
for (int h = 0; h < hidden_n; h++)
{
input_weights[i][h] = r.decimal(-0.2, 0.2);
}
}
for (int h = 0; h < hidden_n; h++)
{
for (int o = 0; o < output_n; o++)
{
hidden_weights[h][o] = r.decimal(-2.0, 2.0);
}
}
// init correction (momentum) matrices, zero-filled
input_correction.resize(input_n, vector<double>(hidden_n));
hidden_correction.resize(hidden_n, vector<double>(output_n));
}
vector<double> BPNeuralNetwork::predict(vector<double> inputs)
{
// load the input layer (the last cell keeps the value 1.0 set in setup and acts as the bias)
for (int i = 0; i < input_n - 1; i++)
{
input_cells[i] = inputs[i];
}
//activate hidden layer
double total;
for (int h = 0; h< hidden_n; h++)
{
total = 0.0;
for (int i=0;i < input_n; i++)
{
total += input_cells[i] * input_weights[i][h];
}
hidden_cells[h] = sigmoid(total);
}
//activate output layer
for (int o = 0; o < output_n; o++)
{
total = 0.0;
for (int h = 0; h < hidden_n; h++)
{
total += hidden_cells[h] * hidden_weights[h][o];
}
output_cells[o] = sigmoid(total);
}
return output_cells;
}
double BPNeuralNetwork::back_propagate(vector<double> input, vector<double> output, double learn, double correct)
{
//feed forward
predict(input);
//get output layer error
vector<double> output_deltas(output_n, 1.0);
double error;
for (int o = 0; o < output_n; o++)
{
error = output[o] - output_cells[o];
output_deltas[o] = sigmoid_derivative(output_cells[o]) * error;
}
//get hidden layer error
vector<double> hidden_deltas(hidden_n, 1.0);
for (int h = 0; h < hidden_n; h++)
{
error = 0.0;
for (int o = 0; o < output_n; o++)
{
error += output_deltas[o] * hidden_weights[h][o];
}
hidden_deltas[h] = sigmoid_derivative(hidden_cells[h]) * error;
}
//update output weights
double change;
for (int h = 0; h < hidden_n; h++)
{
for (int o = 0; o < output_n; o++)
{
change = output_deltas[o] * hidden_cells[h];
hidden_weights[h][o] += learn * change + correct * hidden_correction[h][o];
hidden_correction[h][o] = change;
}
}
//update input weights
for (int i = 0; i < input_n; i++)
{
for (int h = 0; h < hidden_n; h++)
{
change = hidden_deltas[h] * input_cells[i];
input_weights[i][h] += learn * change + correct * input_correction[i][h];
input_correction[i][h] = change;
}
}
//get global error
error = 0.0;
for (int o = 0; o < output.size(); o++)
{
error += 0.5 * (output[o] - output_cells[o]) *(output[o] - output_cells[o]);
}
return error;
}
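The update rule used twice above is plain gradient descent with a momentum term: each weight moves by learn * change (the current gradient contribution, a delta times the upstream activation) plus correct * previous_change, and the correction matrices remember the last change. Isolated as a helper for clarity (my own illustrative sketch, not part of the class):
// one momentum-SGD step for a single weight; prev_change plays the role
// of input_correction / hidden_correction in the class above
double update_weight(double w, double delta, double activation,
double learn, double correct, double &prev_change)
{
double change = delta * activation; // gradient contribution
w += learn * change + correct * prev_change; // gradient step + momentum
prev_change = change; // remembered for the next iteration
return w;
}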
double BPNeuralNetwork::train(vector<vector<double>> cases, vector<vector<double>> labels, int limit, double learn, double correct)
{
double error = 100;
for (int j = 0; j < limit; j++)
{
error = 0.0;
for (int i = 0; i < cases.size(); i++)
{
error += back_propagate(cases[i], labels[i], learn, correct);
}
cout << "iteration " << j << " total error: " << error << endl;
}
return error;
}
// column-wise min-max scaling: rescales every feature to [0, 1]
vector<vector<double>> Data_normalization(vector<vector<double>> x)
{
double min, max;
for (int i = 0; i < x[0].size(); i++) // normalize one column (feature) at a time
{
min = x[0][i];
max = x[0][i];
for (int j = 0; j < x.size(); j++)
{
if (x[j][i] < min)
{
min = x[j][i];
}
if (x[j][i] > max)
{
max = x[j][i];
}
}
for (int j = 0; j < x.size(); j++)
{
x[j][i] = (x[j][i] - min) / (max - min);
}
}
return x;
}
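Data_normalization applies min-max scaling per column, x' = (x - min) / (max - min), so every feature lands in [0, 1]; note it divides by (max - min), so a constant column would divide by zero. A usage sketch with made-up values:
// hypothetical raw data with columns on very different scales
vector<vector<double>> raw = { {1.0, 200.0}, {3.0, 600.0}, {5.0, 1000.0} };
vector<vector<double>> scaled = Data_normalization(raw);
// scaled is { {0.0, 0.0}, {0.5, 0.5}, {1.0, 1.0} }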
const string test_function = "1"; // selects the test function below ("1" = Sphere, "2" = Rastrigin)
double function(vector<double> x)
{
double fx = 0;
int n = x.size();
//============================ test functions =============================
// 1. Sphere function, variables in [-100, 100]
if (test_function == "1")
{
for (int i = 0; i < n; i++)
{
fx = fx + pow(x[i], 2);
}
}
if (test_function=="2")
{
for (int i = 0; i < n; i++)
{
fx = fx + x[i] * x[i] - 10 * cos(2 * 3.1415926*x[i]) + 10;
}
}
return fx;
}
#endif
Main program: BP_NN.cpp
#include"BP_NN.h"
int main()
{
ofstream out("hidden_neuron_sensitivity.dat"); // output file for the hidden-layer sensitivity analysis (opened but not yet written to)
double rate,success_rate;
BPNeuralNetwork self;
int ni = 3, training_n = 10, no = 1, nh = 5; // 3 inputs, 5 hidden neurons, 1 output, 10 training samples
vector<vector<double>> training_x(training_n, vector<double>(ni));
vector<vector<double>> training_y(training_n, vector<double>(no));
for (int i = 0; i < training_n; i++)
{
for (int j = 0; j < ni; j++)
{
training_x[i][j] = r.decimal(0, 1);
}
}
for (int i = 0; i < training_n; i++)
{
for (int j = 0; j < no; j++)
{
training_y[i][j]= r.decimal(0, 1);
}
}
self.setup(ni, nh, no);
double error=self.train(training_x, training_y, 100000, 0.05, 0.1);
vector<double> predict_y;
int j = 0;
cout <<"样本训练值"<< "、" << "样本预测值" << "、" << "训练误差" << endl;
for (int i = 0; i < training_x.size(); i++)
{
predict_y = self.predict(training_x[i]);
rate = fabs((training_y[i][0] - predict_y[0]) / training_y[i][0]); // relative error
if (rate < 0.05) // count the sample as learned if within 5% of the target
{
j++;
}
cout << training_y[i][0] << ", " << predict_y[0] << ", " << rate << endl;
}
cout <<"训练精度:"<< fixed << setw(12) << setprecision(6) << error << " 样本训练成功率:" << setw(12) << setprecision(6) << double(j) / training_x.size() << endl;
}
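The test functions in the header go unused in main, which trains on random labels and therefore only memorizes noise. A more meaningful experiment (my own variation, reusing only pieces defined above) is to label the samples with function() and scale the targets into [0, 1] so the sigmoid output layer can represent them:
// sketch: approximate the Sphere function on [0,1]^3
int n = 50;
vector<vector<double>> X(n, vector<double>(3));
vector<vector<double>> Y(n, vector<double>(1));
for (int i = 0; i < n; i++)
{
for (int k = 0; k < 3; k++) X[i][k] = r.decimal(0, 1);
Y[i][0] = function(X[i]); // Sphere value, in [0, 3] here
}
Y = Data_normalization(Y); // scale targets into [0, 1]
BPNeuralNetwork net;
net.setup(3, 8, 1);
net.train(X, Y, 100000, 0.05, 0.1);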