C++ SVR Implementation (Draft)

import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection

# Read the first four comma-separated columns of each row.
x, y, z, w = [], [], [], []
with open('battery_data.txt') as fr:
    for line in fr:
        lineArr = line.strip().split(',')
        x.append(float(lineArr[0]))
        y.append(float(lineArr[1]))
        z.append(float(lineArr[2]))
        w.append(float(lineArr[3]))

fig = plt.figure()
ax1 = fig.add_subplot(111, projection='3d')
ax1.set_title('Scatter Plot')   # set the title
ax1.set_xlabel('X')             # set the X-axis label
ax1.set_ylabel('Y')             # set the Y-axis label
ax1.set_zlabel('Z')             # set the Z-axis label
sc = ax1.scatter(x, y, z, c=w, marker='>')  # scatter plot, colored by the fourth column
fig.colorbar(sc, label='w')     # color scale for the point colors
plt.show()                      # display the figure

[Figure 1: 3D scatter plot of the battery data]

Contents of battery_data.txt (five comma-separated values per row; the script above plots the first four):

12.5,12.45,12.47,5.7,4.1
12.46,12.42,12.44,5.7,4.12
12.42,12.39,12.41,5.7,4.13
12.35,12.32,12.34,5.7,4.15
12.32,12.29,12.34,5.7,4.16
12.29,12.27,12.28,5.7,4.2
12.25,12.23,12.24,5.7,4.22
12.19,12.17,12.18,5.7,4.24
12.15,12.12,12.13,5.7,4.31
12.12,12.09,12.16,5.7,4.34
12.1,12.07,12.08,5.7,4.36
12.07,12.05,12.06,5.7,4.41
12.05,12.03,12.04,5.7,4.45
12.02,11.99,12,5.7,4.51
11.99,11.96,11.97,5.7,4.6
11.97,11.94,11.95,5.7,4.71
11.91,11.89,11.9,5.7,4.78
11.87,11.85,11.86,5.7,4.89
11.85,11.83,11.84,5.7,4.96
11.81,11.78,11.79,5.7,5.06
11.78,11.76,11.77,5.7,5.14
11.76,11.73,11.74,5.7,5.16
11.73,11.7,11.71,5.7,5.54
11.71,11.68,11.69,5.7,5.62
11.68,11.65,11.66,5.7,5.65
11.64,11.61,11.62,5.7,5.86
11.61,11.58,11.59,5.7,5.92
11.58,11.55,11.56,5.7,6.42
11.54,11.5,11.52,5.7,6.72
11.5,11.44,11.47,5.7,6.96
11.44,11.37,11.41,5.7,7.42
11.37,11.27,11.32,5.7,7.57
11.32,11.19,11.24,5.7,8.12
11.27,11.11,11.24,5.7,8.72
11.17,10.62,10.89,5.7,9.21
12.67,12.63,12.65,10.9,4.08
12.59,12.54,12.56,10.9,4.1
12.54,12.5,12.52,10.9,4.13
12.48,12.44,12.46,10.9,4.14
12.44,12.4,12.42,10.9,4.16
12.38,12.35,12.37,10.9,4.18
12.28,12.25,12.27,10.9,4.21
12.27,12.24,12.26,10.9,4.24
12.23,12.2,12.22,10.9,4.26
12.19,12.16,12.17,10.9,4.28
12.17,12.13,12.15,10.9,4.3
12.14,12.11,12.12,10.9,4.31
12.11,12.08,12.09,10.9,4.36
12.08,12.04,12.06,10.9,4.41
12.04,12.02,12.03,10.9,4.47
12.02,11.98,12.01,10.9,4.42
11.99,11.95,11.97,10.9,4.6
11.95,11.91,11.93,10.9,4.71
11.92,11.89,11.91,10.9,4.78
11.81,11.78,11.79,10.9,5.08
11.79,11.75,11.77,10.9,5.42
11.75,11.71,11.73,10.9,5.52
11.71,11.66,11.68,10.9,5.66
11.66,11.6,11.63,10.9,5.92
11.63,11.57,11.61,10.9,6.23
11.6,11.53,11.58,10.9,6.42
11.58,11.51,11.54,10.9,6.52
11.55,11.48,11.52,10.9,6.68
11.45,11.38,11.42,10.9,6.82
11.42,11.35,11.38,10.9,6.94
11.4,11.33,11.36,10.9,7.05
11.37,11.3,11.34,10.9,7.37
11.33,11.26,11.3,10.9,7.76
11.2,11.11,11.16,10.9,8.11
11.05,10.17,11.52,10.9,9.53
12.8,12.02,12.1,9.7,4.4
12,11.95,11.97,9.7,4.55
11.98,11.93,11.96,9.7,4.6
11.96,11.91,11.94,9.7,4.66
11.94,11.89,11.92,9.7,4.75
11.92,11.87,11.9,9.7,4.78
11.9,11.85,11.87,9.7,4.83
11.87,11.82,11.84,9.7,4.88
11.86,11.81,11.83,9.7,4.9
11.85,11.8,11.82,9.7,4.92
11.83,11.78,11.81,9.7,4.94
11.81,11.76,11.78,9.7,4.98
11.8,11.75,11.78,9.7,5.01
11.79,11.74,11.76,9.7,5.12
11.78,11.73,11.75,9.7,5.18
11.77,11.72,11.74,9.7,5.27
11.76,11.71,11.73,9.7,5.34
11.75,11.7,11.72,9.7,5.35
11.74,11.69,11.72,9.7,5.38
11.73,11.67,11.71,9.7,5.48
11.71,11.64,11.68,9.7,5.62
11.69,11.63,11.66,9.7,5.68
11.67,11.61,11.64,9.7,5.74
11.65,11.59,11.62,9.7,5.78
11.64,11.58,11.61,9.7,5.86
11.63,11.57,11.6,9.7,6.18
11.61,11.54,11.57,9.7,6.23
11.58,11.49,11.56,9.7,6.29
11.56,11.46,11.51,9.7,6.35
11.54,11.43,11.49,9.7,6.78
11.51,11.4,11.45,9.7,7.01
11.49,11.36,11.42,9.7,7.17
11.43,11.26,11.34,9.7,7.72
11.36,11.12,11.24,9.7,8.18
11.26,10.19,10.81,9.7,9.11
12.65,12.6,12.62,7.7,4.12
12.38,12.32,12.34,7.7,4.23
12.16,12.12,12.14,7.7,4.33
12.14,12.12,12.13,7.7,4.35
12.13,12.1,12.12,7.7,4.5
12.1,12.08,12.09,7.7,4.53
12.08,12.05,12.07,7.7,4.54
12.05,12.02,12.04,7.7,4.58
12.02,11.99,12.01,7.7,4.68
11.99,11.96,11.97,7.7,4.8
11.96,11.93,11.94,7.7,4.82
11.93,11.9,11.92,7.7,4.87
11.9,11.87,11.89,7.7,4.91
11.87,11.84,11.86,7.7,5.05
11.84,11.81,11.83,7.7,5.1
11.81,11.78,11.8,7.7,5.23
11.78,11.75,11.77,7.7,5.37
11.75,11.71,11.73,7.7,5.52
11.71,11.68,11.69,7.7,5.65
11.68,11.64,11.66,7.7,5.8
11.64,11.6,11.62,7.7,6
11.59,11.54,11.56,7.7,6.1
11.57,11.53,11.54,7.7,6.3
11.54,11.5,11.52,7.7,6.46
11.52,11.46,11.49,7.7,6.52
11.5,11.42,11.46,7.7,6.65
11.48,11.4,11.44,7.7,6.68
11.46,11.37,11.41,7.7,6.98
11.42,11.31,11.36,7.7,7.03
11.4,11.27,11.32,7.7,7.16
11.37,11.23,11.3,7.7,7.24
11.35,11.2,11.28,7.7,7.45
11.31,11.1,11.21,7.7,7.75
11.25,10.79,11.1,7.7,8.12
11.1,10.45,10.71,7.7,8.57

I found a good blog post, and I'm not sure whether reposting is allowed, so for now I'll just link it (at the end of this article). I added a few kernel functions. The code is written following the formulas in Platt's "Sequential Minimal Optimization: A Fast Algorithm for Training Support Vector Machines" and in 统计学习方法 (Statistical Learning Methods). Note that the code below is an SMO-based SVM classifier (labels are +1/-1); despite the title, the SVR adaptation is still a draft.
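
For reference, the three kernels selected by choModel in the code below are:

$$K(x,z) = x \cdot z \quad \text{(choModel = 1, linear)}$$

$$K(x,z) = \exp\left(-\frac{\lVert x-z \rVert^2}{2\sigma^2}\right),\ \sigma = 10 \quad \text{(choModel = 2, RBF)}$$

$$K(x,z) = (x \cdot z)^p,\ p = 3 \quad \text{(choModel = 3, polynomial)}$$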

#include "pch.h"
#include "SVM.h"

using std::string;
using std::vector;
using std::pair;

double binaryPow(double a, long long int b) {
     
	double ans = 1;
	while (b > 0) {
     
		if (b & 1) {
     
			ans = ans * a;
		}
		a = a * a;
		b >>= 1;
	}
	return ans;
}

void SVM::getData(const string &filename) {
	// Load the data file into inData; each row is whitespace-separated,
	// with the features first and the label in the last column.
	std::vector<double> temData;
	double onepoint;
	std::string line;
	inData.clear();
	std::ifstream infile(filename);
	std::cout << "reading ..." << std::endl;
	while (std::getline(infile, line)) { // reading line by line avoids the !eof() pitfall
		if (line.empty())
			continue;
		temData.clear();
		std::stringstream stringin(line);
		while (stringin >> onepoint) {
			temData.push_back(onepoint);
		}
		indim = temData.size() - 1; // feature dimension = columns minus the label
		inData.push_back(temData);
	}
	std::cout << "total data is " << inData.size() << std::endl;
}


void SVM::createTrainTest() {
	// Shuffle, then split into train/test according to divRate.
	// std::random_shuffle was removed in C++17; std::shuffle (with <random>) is the portable replacement.
	std::mt19937 rng(std::random_device{}());
	std::shuffle(inData.begin(), inData.end(), rng);
	unsigned long size = inData.size();
	unsigned long trainSize = size * divRate;
	std::cout << "total data is " << size << " ,train data has " << trainSize << std::endl;
	for (unsigned long i = 0; i < size; ++i) {
		if (i < trainSize)
			trainData.push_back(inData[i]);
		else
			testData.push_back(inData[i]);
	}
	// Split each row into a feature vector (all but the last column) and its label (last column).
	for (const auto& data : trainData) {
		std::vector<double> trainf;
		trainf.assign(data.begin(), data.end() - 1);
		trainDataF.push_back(trainf);
		trainDataGT.push_back(*(data.end() - 1));
	}
	for (const auto& data : testData) {
		std::vector<double> testf;
		testf.assign(data.begin(), data.end() - 1);
		testDataF.push_back(testf);
		testDataGT.push_back(*(data.end() - 1));
	}
}


void SVM::SMO() {
	// Outer loop of Platt's SMO: alternate between sweeping every example and
	// sweeping only the non-bound ones (0 < alpha < C) until nothing changes.
	int numChanged = 0;
	int examineAll = 1;
	while (numChanged > 0 || examineAll) {
		numChanged = 0;
		if (examineAll) {
			for (int i = 0; i < (int)trainDataF.size(); ++i)
				numChanged += SMOExamineExample(i);
		}
		else {
			for (int i = 0; i < (int)trainDataF.size(); ++i) {
				if (alpha[i] != 0 && alpha[i] != C)
					numChanged += SMOExamineExample(i);
			}
		}
		if (examineAll == 1)
			examineAll = 0;
		else if (numChanged == 0)
			examineAll = 1;
	}
}



double SVM::kernel(vector<double> & x1, vector<double> & x2) {
	if (choModel == 1) {
		// linear kernel: x1 . x2
		return x1 * x2;
	}
	else if (choModel == 2) {
		// RBF kernel: exp(-||x1 - x2||^2 / (2 * sigma^2))
		double ret = (x1 - x2) * (x1 - x2);
		double sigma = 10;
		ret = exp(-1.0 * ret / (2 * sigma * sigma));
		return ret;
	}
	else {
		// polynomial kernel: (x1 . x2)^p; also the fallback, so every path returns a value
		int p = 3; // power
		return binaryPow(x1 * x2, p);
	}
}
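
The vector arithmetic above (x1 * x2 as a dot product, x1 - x2 element-wise) and the w update later on rely on operator overloads that are not shown here; they presumably live in SVM.h. A minimal sketch of what they might look like, assuming exactly those semantics:

// Sketch only: hypothetical operators matching how the code uses std::vector<double>.
#include <vector>
#include <cassert>

double operator*(const std::vector<double>& a, const std::vector<double>& b) {
	// dot product
	assert(a.size() == b.size());
	double s = 0;
	for (size_t i = 0; i < a.size(); ++i) s += a[i] * b[i];
	return s;
}

std::vector<double> operator-(const std::vector<double>& a, const std::vector<double>& b) {
	// element-wise difference
	assert(a.size() == b.size());
	std::vector<double> r(a.size());
	for (size_t i = 0; i < a.size(); ++i) r[i] = a[i] - b[i];
	return r;
}

std::vector<double> operator+(const std::vector<double>& a, const std::vector<double>& b) {
	// element-wise sum (used when accumulating w)
	assert(a.size() == b.size());
	std::vector<double> r(a.size());
	for (size_t i = 0; i < a.size(); ++i) r[i] = a[i] + b[i];
	return r;
}

std::vector<double> operator*(double k, const std::vector<double>& a) {
	// scalar times vector (used in the w update)
	std::vector<double> r(a.size());
	for (size_t i = 0; i < a.size(); ++i) r[i] = k * a[i];
	return r;
}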


double SVM::computeE(int& i) {
	// Prediction error for example i, see 统计学习方法 p.148, formula 7.117.
	double e = 0;
	for (int j = 0; j < (int)trainDataF.size(); ++j) {
		e += alpha[j] * trainDataGT[j] * kernel(trainDataF[j], trainDataF[i]);
	}
	e += b;
	e -= trainDataGT[i];
	return e;
}
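
In the book's notation, formula 7.117 is the error of the current decision function on training point $x_i$:

$$E_i = g(x_i) - y_i = \left(\sum_{j=1}^{N} \alpha_j y_j K(x_j, x_i) + b\right) - y_i$$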


pair<double, double> SVM::SMOComputeOB(int& i1, int& i2, double&L, double& H) {
     //formula 19
	double y1 = trainDataGT[i1];
	double y2 = trainDataGT[i2];
	double s = y1 * y2;
	double f1 = y1 * (E[i1] + b) - alpha[i1] * kernel(trainDataF[i1], trainDataF[i1]) -
		s * alpha[i2] * kernel(trainDataF[i1], trainDataF[i2]);
	double f2 = y2 * (E[i2] + b) - s * alpha[i1] * kernel(trainDataF[i1], trainDataF[i2]) -
		alpha[i2] * kernel(trainDataF[i2], trainDataF[i2]);
	double L1 = alpha[i1] + s * (alpha[i2] - L);
	double H1 = alpha[i1] + s * (alpha[i2] - H);
	double obL = L1 * f1 + L * f2 + 0.5 * L1 * L1 * kernel(trainDataF[i1], trainDataF[i1]) +
		0.5 * L * L * kernel(trainDataF[i2], trainDataF[i2]) +
		s * L * L1 * kernel(trainDataF[i1], trainDataF[i2]);
	double obH = H1 * f1 + H * f2 + 0.5 * H1 * H1 * kernel(trainDataF[i1], trainDataF[i1]) +
		0.5 * H * H * kernel(trainDataF[i2], trainDataF[i2]) +
		s * H * H1 * kernel(trainDataF[i1], trainDataF[i2]);
	return std::make_pair(obL, obH);
}
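
Written out, this evaluates Platt's equation 19 at the two clipping bounds, with $s = y_1 y_2$ and $K_{ij} = K(x_i, x_j)$:

$$f_1 = y_1 (E_1 + b) - \alpha_1 K_{11} - s \alpha_2 K_{12}, \qquad f_2 = y_2 (E_2 + b) - s \alpha_1 K_{12} - \alpha_2 K_{22}$$

$$L_1 = \alpha_1 + s(\alpha_2 - L), \qquad H_1 = \alpha_1 + s(\alpha_2 - H)$$

$$\Psi_L = L_1 f_1 + L f_2 + \tfrac{1}{2} L_1^2 K_{11} + \tfrac{1}{2} L^2 K_{22} + s L L_1 K_{12}$$

$$\Psi_H = H_1 f_1 + H f_2 + \tfrac{1}{2} H_1^2 K_{11} + \tfrac{1}{2} H^2 K_{22} + s H H_1 K_{12}$$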


int SVM::SMOTakeStep(int& i1, int& i2) {
	if (i1 == i2)
		return 0;
	double y1 = trainDataGT[i1];
	double y2 = trainDataGT[i2];
	double s = y1 * y2;
	double L, H;
	if (y1 != y2) {
		// formula 13: L = max(0, a2 - a1), H = min(C, C + a2 - a1)
		L = std::max(0.0, alpha[i2] - alpha[i1]);
		H = std::min(C, C + alpha[i2] - alpha[i1]);
	}
	else {
		// formula 14: L = max(0, a1 + a2 - C), H = min(C, a1 + a2)
		L = std::max(0.0, alpha[i1] + alpha[i2] - C);
		H = std::min(C, alpha[i1] + alpha[i2]);
	}
	if (L == H)
		return 0;
	double k11 = kernel(trainDataF[i1], trainDataF[i1]);
	double k12 = kernel(trainDataF[i1], trainDataF[i2]);
	double k22 = kernel(trainDataF[i2], trainDataF[i2]);
	double eta = k11 + k22 - 2 * k12; // formula 15: second derivative along the constraint line
	double a2;
	if (eta > 0) {
		// Unconstrained optimum, then clip to [L, H] (formula 16).
		a2 = alpha[i2] + y2 * (E[i1] - E[i2]) / eta;
		if (a2 < L)
			a2 = L;
		else if (a2 > H)
			a2 = H;
	}
	else {
		// Degenerate eta: evaluate the objective at both ends (formula 19).
		pair<double, double> ob = SMOComputeOB(i1, i2, L, H);
		double Lobj = ob.first;
		double Hobj = ob.second;
		if (Lobj < Hobj - eps)
			a2 = L;
		else if (Lobj > Hobj + eps)
			a2 = H;
		else
			a2 = alpha[i2];
	}
	if (std::abs(a2 - alpha[i2]) < eps * (a2 + alpha[i2] + eps))
		return 0; // change too small to count as progress
	double a1 = alpha[i1] + s * (alpha[i2] - a2); // formula 18
	// Threshold update: P148 7.115, not Platt's formula 20; take the midpoint of b1 and b2.
	double b1 = -E[i1] - y1 * (a1 - alpha[i1]) * k11 - y2 * (a2 - alpha[i2]) * k12 + b;
	double b2 = -E[i2] - y1 * (a1 - alpha[i1]) * k12 - y2 * (a2 - alpha[i2]) * k22 + b;
	b = (b1 + b2) / 2;
	// formula 22: weight vector update (only meaningful for the linear kernel).
	w = w + y1 * (a1 - alpha[i1]) * trainDataF[i1] + y2 * (a2 - alpha[i2]) *
		trainDataF[i2];
	alpha[i1] = a1;
	alpha[i2] = a2;
	// Refresh the cached errors for the two updated multipliers
	// (the other cached E values go stale until their own next update).
	E[i1] = computeE(i1);
	E[i2] = computeE(i2);
	return 1;
}
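
The heart of the step is the two-variable analytic update (Platt's formulas 15-16 and 18):

$$\eta = K_{11} + K_{22} - 2K_{12}$$

$$\alpha_2^{new} = \mathrm{clip}\left(\alpha_2 + \frac{y_2 (E_1 - E_2)}{\eta},\ [L, H]\right), \qquad \alpha_1^{new} = \alpha_1 + y_1 y_2 \left(\alpha_2 - \alpha_2^{new}\right)$$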

int SVM::SMOExamineExample(int i2) {
	double y2 = trainDataGT[i2];
	double alph2 = alpha[i2];
	double E2 = E[i2];
	double r2 = E2 * y2;
	// Proceed only if this example violates the KKT conditions by more than tol.
	if ((r2 < -tol && alph2 < C) || (r2 > tol && alph2 > 0)) {
		int alphNum = 0;
		for (auto& a : alpha) {
			if (a != 0 && a != C)
				alphNum++;
		}
		if (alphNum > 1) {
			// Second-choice heuristic: pick the i1 that maximizes |E1 - E2|.
			double dis = 0;
			int i1 = -1;
			for (int j = 0; j < (int)E.size(); ++j) {
				if (std::abs(E[j] - E[i2]) > dis) {
					i1 = j;
					dis = std::abs(E[j] - E[i2]);
				}
			}
			if (i1 >= 0 && SMOTakeStep(i1, i2))
				return 1;
		}
		// Otherwise try every non-bound multiplier ...
		for (int i = 0; i < (int)alpha.size(); ++i) {
			if (alpha[i] != 0 && alpha[i] != C) {
				int i1 = i;
				if (SMOTakeStep(i1, i2))
					return 1;
			}
		}
		// ... and finally the whole training set.
		for (int i = 0; i < (int)trainDataF.size(); ++i) {
			int i1 = i;
			if (SMOTakeStep(i1, i2))
				return 1;
		}
	}
	return 0;
}
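
The entry test is the KKT check: since $E_2 = g(x_2) - y_2$ and $y_2^2 = 1$, the quantity $r_2 = y_2 E_2 = y_2 g(x_2) - 1$ must satisfy

$$\begin{cases} r_2 \ge 0 & \alpha_2 = 0 \\ r_2 = 0 & 0 < \alpha_2 < C \\ r_2 \le 0 & \alpha_2 = C \end{cases}$$

and the condition flags any example violating this by more than tol.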

void SVM::initialize() {
	// Start from alpha = 0, w = 0, b = 0: the initial decision function is
	// identically zero, so each cached error starts at E_i = -y_i.
	b = 0;
	for (int i = 0; i < (int)trainDataF.size(); ++i) {
		alpha.push_back(0.0);
	}
	for (int i = 0; i < indim; ++i) {
		w.push_back(0.0);
	}
	for (int i = 0; i < (int)trainDataF.size(); ++i) {
		E.push_back(computeE(i));
	}
}

void SVM::train() {
	initialize();
	SMO();
}

double SVM::predict(const vector<double> &inputData) {
	if (choModel == 1) {
		// Linear model: sign of w . x + b.
		double p = w * inputData + b;
		return p > 0 ? 1.0 : -1.0;
	}
	else {
		// Kernel models (RBF / polynomial): sign of sum_i alpha_i * y_i * K(x, x_i) + b.
		// The decision threshold is 0 in both cases.
		double p = 0;
		vector<double> in = inputData; // kernel() takes non-const references, so copy
		for (int i = 0; i < (int)alpha.size(); i++) {
			p += alpha[i] * trainDataGT[i] * kernel(in, trainDataF[i]);
		}
		p += b;
		return p > 0 ? 1.0 : -1.0;
	}
}

void SVM::run() {
	getData("data.txt"); // expects rows of features followed by a +1/-1 label
	createTrainTest();
	train();
	cout << "w and b is: " << endl;
	for (auto& c : w)
		cout << c << " ";
	cout << b << endl;
	for (int i = 0; i < (int)testDataF.size(); ++i) {
		double pre = predict(testDataF[i]);
		cout << "the true class of this point is " << testDataGT[i];
		cout << ", the predict class of this point is " << pre << endl;
	}
}

int main() {
	Base* obj = new SVM(); // Base is assumed to be the interface class declared in SVM.h
	obj->run();
	delete obj; // requires Base to have a virtual destructor
	return 0;
}

The blog post mentioned above: https://www.cnblogs.com/bobxxxl/p/10397557.html
