最近邻(kNN, k-Nearest Neighbor)、贝叶斯(Bayes)、BP 人工神经网络(BPNN)算法实现鸢尾花(Iris)数据的分类

package machineLearning;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;

/**
 * k-nearest-neighbor classifier for the Iris data set.
 *
 * Uses cosine similarity between the 4-dimensional feature vectors and a
 * fixed k = 11. Expects a file "IrisData.txt" in the working directory with
 * 150 lines of "f1,f2,f3,f4,className" (50 consecutive rows per class).
 */
public class KNN {
	// 20 * 3 = 60 training samples: the first 20 rows of each class
	public String[] trainingSample = new String[60];
	// each row: 4 feature values + numeric class label (1/2/3) at index 4
	public double[][] training = new double[60][5];
	// 30 * 3 = 90 test samples: the remaining 30 rows of each class
	public String[] testSample = new String[90];
	public double[][] test = new double[90][5];
	// scratch table, per test sample: [similarity to a training row, its label]
	public double[][] cosValue = new double[60][2];

	/**
	 * Loads "IrisData.txt", splits it into training/test partitions,
	 * parses the rows into numeric vectors and runs the classifier.
	 */
	public void run() {
		int count = 0;
		int i = 0;
		int j = 0;
		String tempString;
		File file = new File("IrisData.txt");
		// try-with-resources closes both readers even if readLine() throws
		// (the original closed them only on the success path)
		try (FileReader fileReader = new FileReader(file);
				BufferedReader bufferedReader = new BufferedReader(fileReader)) {
			while ((tempString = bufferedReader.readLine()) != null) {
				// rows 0-19, 50-69 and 100-119 are training; the rest are test
				if ((count >= 0 && count < 20) || (count >= 50 && count < 70)
						|| (count >= 100 && count < 120)) {
					trainingSample[i++] = tempString;
				} else {
					testSample[j++] = tempString;
				}
				count++;
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
		// parse the CSV rows into numeric feature vectors
		for (i = 0; i < 60; i++) {
			training[i] = getValueByString(trainingSample[i]);
		}
		for (i = 0; i < 90; i++) {
			test[i] = getValueByString(testSample[i]);
			test[i][4] = 0; // clear the label: it will be predicted
		}
		KNNAlgorithm();
	}

	/**
	 * Classifies every test sample by majority vote among the k = 11 training
	 * samples with the highest cosine similarity, then prints the predictions
	 * and the overall accuracy. Stores each prediction in test[i][4].
	 */
	public void KNNAlgorithm() {
		for (int i = 0; i < 90; i++) {
			// similarity of test sample i to every training sample
			for (int j = 0; j < 60; j++) {
				cosValue[j][0] = getCosValue(training[j], test[i]);
				cosValue[j][1] = training[j][4];
			}
			// selection sort, descending by similarity
			// (inner loop starts at m + 1: comparing m with itself is pointless)
			for (int m = 0; m < 60; m++) {
				for (int n = m + 1; n < 60; n++) {
					if (cosValue[m][0] < cosValue[n][0]) {
						double[] temp = cosValue[m];
						cosValue[m] = cosValue[n];
						cosValue[n] = temp;
					}
				}
			}
			// count the class labels among the top k = 11 neighbors
			int a = 0; // votes for class 1, Iris-setosa
			int b = 0; // votes for class 2, Iris-versicolor
			int c = 0; // votes for class 3, Iris-virginica
			for (int k = 0; k < 11; k++) {
				if (cosValue[k][1] == 1) {
					a++;
				} else if (cosValue[k][1] == 2) {
					b++;
				} else if (cosValue[k][1] == 3) {
					c++;
				}
			}
			// majority vote decided once, after all k neighbors are counted
			// (the original re-evaluated it inside the loop on every iteration)
			if (a >= b && a >= c) {
				test[i][4] = 1;
			} else if (b >= a && b >= c) {
				test[i][4] = 2;
			} else {
				test[i][4] = 3;
			}
		}
		// print the predictions and count the misclassifications
		int error = 0;
		for (int i = 0; i < 90; i++) {
			if (i == 0) {
				System.out.println("The following is Iris-setosa");
			} else if (i == 30) {
				System.out.println("The following is Iris-versicolor");
			} else if (i == 60) {
				System.out.println("The following is Iris-virginica");
			}
			System.out.print(test[i][0] + " , ");
			System.out.print(test[i][1] + " , ");
			System.out.print(test[i][2] + " , ");
			System.out.print(test[i][3] + " , ");
			if (test[i][4] == 1) {
				System.out.println("Iris-setosa");
			} else if (test[i][4] == 2) {
				System.out.println("Iris-versicolor");
			} else if (test[i][4] == 3) {
				System.out.println("Iris-virginica");
			}
			// the true class follows from the row position: 0-29 setosa,
			// 30-59 versicolor, 60-89 virginica
			if (i < 30 && test[i][4] != 1) {
				error++;
			} else if (i >= 30 && i < 60 && test[i][4] != 2) {
				error++;
			} else if (i >= 60 && i < 90 && test[i][4] != 3) {
				error++;
			}
		}
		System.out.println("************The result of KNN************");
		System.out.println("The number of the test set is 90\r\nError is "+ error
				+"\r\nRight rate is " + (1 - error / (1.0 * 90)));
	}

	/**
	 * Cosine similarity of the first four components of two vectors.
	 *
	 * @param d1 first vector (only indices 0..3 are used)
	 * @param d2 second vector (only indices 0..3 are used)
	 * @return dot(d1, d2) / (|d1| * |d2|) over the first four components
	 */
	public double getCosValue(double[] d1, double[] d2) {
		double dot = 0;
		double norm1 = 0;
		double norm2 = 0;
		for (int i = 0; i < 4; i++) {
			dot += d1[i] * d2[i];
			norm1 += d1[i] * d1[i];
			norm2 += d2[i] * d2[i];
		}
		return dot / (Math.sqrt(norm1) * Math.sqrt(norm2));
	}

	/**
	 * Parses a CSV row "f1,f2,f3,f4,className" into a 5-element vector whose
	 * last element encodes the class: 1 = Iris-setosa, 2 = Iris-versicolor,
	 * 3 = Iris-virginica; an unknown name leaves the default 0.
	 */
	public double[] getValueByString(String sa) {
		double[] value = new double[5];
		String[] fields = sa.split(",");
		for (int i = 0; i < 4; i++) {
			value[i] = Double.parseDouble(fields[i]);
		}
		if (fields[4].equals("Iris-setosa")) {
			value[4] = 1;
		} else if (fields[4].equals("Iris-versicolor")) {
			value[4] = 2;
		} else if (fields[4].equals("Iris-virginica")) {
			value[4] = 3;
		}
		return value;
	}

}
package machineLearning;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;

/**
 * Naive Bayes classifier for the Iris data set.
 *
 * Models each feature of each class as an independent normal distribution
 * whose mean and variance are estimated from the training partition.
 * Expects a file "IrisData.txt" in the working directory with 150 lines of
 * "f1,f2,f3,f4,className" (50 consecutive rows per class).
 */
public class Bayes {

	// 20 * 3 = 60 training samples: the first 20 rows of each class
	public String[] trainingSample = new String[60];
	public double[][] training = new double[60][5];
	// 30 * 3 = 90 test samples: the remaining 30 rows of each class
	public String[] testSample = new String[90];
	public double[][] test = new double[90][5];
	// per feature: [0] = mean, [1] = variance, one table per class
	public double[][] averAndVar_1 = new double[4][2];
	public double[][] averAndVar_2 = new double[4][2];
	public double[][] averAndVar_3 = new double[4][2];

	/**
	 * Loads "IrisData.txt", splits it into training/test partitions,
	 * estimates the per-class distributions and runs the classifier.
	 */
	public void run() {
		int count = 0;
		int i = 0;
		int j = 0;
		String tempString;
		File file = new File("IrisData.txt");
		// try-with-resources closes both readers even if readLine() throws
		// (the original closed them only on the success path)
		try (FileReader fileReader = new FileReader(file);
				BufferedReader bufferedReader = new BufferedReader(fileReader)) {
			while ((tempString = bufferedReader.readLine()) != null) {
				// rows 0-19, 50-69 and 100-119 are training; the rest are test
				if ((count >= 0 && count < 20) || (count >= 50 && count < 70)
						|| (count >= 100 && count < 120)) {
					trainingSample[i++] = tempString;
				} else {
					testSample[j++] = tempString;
				}
				count++;
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
		// parse the CSV rows into numeric feature vectors
		for (i = 0; i < 60; i++) {
			training[i] = getValueByString(trainingSample[i]);
		}
		for (i = 0; i < 90; i++) {
			test[i] = getValueByString(testSample[i]);
			test[i][4] = 0; // clear the label: it will be predicted
		}
		// estimate the per-class normal distributions
		statistics();
		BayesAlgorithm();
	}

	/**
	 * Estimates mean and variance of each feature for each of the three
	 * classes from its 20 training rows. The original triplicated this code
	 * per class; it is now factored into one helper.
	 */
	public void statistics() {
		fillAverageAndVariance(0, 20, averAndVar_1);
		fillAverageAndVariance(20, 40, averAndVar_2);
		fillAverageAndVariance(40, 60, averAndVar_3);
	}

	/**
	 * Computes per-feature mean (column 0) and population variance (column 1)
	 * over training rows [from, to), writing the results into {@code out}.
	 * Assigning (rather than accumulating into the fields, as the original
	 * did) makes repeated calls idempotent.
	 */
	private void fillAverageAndVariance(int from, int to, double[][] out) {
		int n = to - from;
		for (int f = 0; f < 4; f++) {
			double sum = 0;
			for (int i = from; i < to; i++) {
				sum += training[i][f];
			}
			out[f][0] = sum / n;
			double squared = 0;
			for (int i = from; i < to; i++) {
				double d = training[i][f] - out[f][0];
				squared += d * d;
			}
			out[f][1] = squared / n;
		}
	}

	/**
	 * Normal (Gaussian) probability density at x1.
	 *
	 * @param aver mean of the distribution
	 * @param var  variance of the distribution (must be > 0)
	 * @param x1   point to evaluate
	 * @return the density value
	 */
	public double getNormalValue(double aver, double var, double x1) {
		double coefficient = 1.0 / Math.sqrt(2 * Math.PI * var);
		double exponent = -(x1 - aver) * (x1 - aver) / (2 * var);
		// Math.exp is the direct (and more accurate) form of Math.pow(Math.E, x)
		return coefficient * Math.exp(exponent);
	}

	/**
	 * Naive-Bayes likelihood of a sample under one class: the product of the
	 * per-feature normal densities (features assumed independent).
	 *
	 * @param distribution 4x2 table of [mean, variance] per feature
	 * @param sample       feature vector (indices 0..3 are used)
	 */
	public double getProbability(double[][] distribution, double[] sample) {
		// note: the original named this parameter "test1" and copied it into a
		// local "test" that shadowed the field of the same name
		double result = 1.0;
		for (int i = 0; i < 4; i++) {
			result *= getNormalValue(distribution[i][0], distribution[i][1], sample[i]);
		}
		return result;
	}

	/**
	 * Classifies every test sample by the class with the highest likelihood,
	 * printing each prediction and the overall accuracy.
	 */
	public void BayesAlgorithm() {
		double a; // likelihood under class 1, Iris-setosa
		double b; // likelihood under class 2, Iris-versicolor
		double c; // likelihood under class 3, Iris-virginica
		int error = 0;
		for (int i = 0; i < 90; i++) {
			a = getProbability(averAndVar_1, test[i]);
			b = getProbability(averAndVar_2, test[i]);
			c = getProbability(averAndVar_3, test[i]);

			if (i == 0) {
				System.out.println("The following is Iris-setosa");
			} else if (i == 30) {
				System.out.println("The following is Iris-versicolor");
			} else if (i == 60) {
				System.out.println("The following is Iris-virginica");
			}
			System.out.print(test[i][0] + " , ");
			System.out.print(test[i][1] + " , ");
			System.out.print(test[i][2] + " , ");
			System.out.print(test[i][3] + " , ");
			// the true class follows from the row position: 0-29 setosa,
			// 30-59 versicolor, 60-89 virginica
			if (a >= b && a >= c) {
				System.out.println("Iris-setosa");
				if (i >= 30) {
					error++;
				}
			} else if (b >= a && b >= c) {
				System.out.println("Iris-versicolor");
				if (i < 30 || i >= 60) {
					error++;
				}
			} else if (c >= a && c >= b) {
				System.out.println("Iris-virginica");
				if (i < 60) {
					error++;
				}
			}
		}
		System.out.println("************The result of Bayes ***********");
		System.out.println("The number of the test set is 90\r\nError is "
				+ error + "\r\nRight rate is " + (1 - error / (1.0 * 90)));
	}

	/**
	 * Parses a CSV row "f1,f2,f3,f4,className" into a 5-element vector whose
	 * last element encodes the class: 1 = Iris-setosa, 2 = Iris-versicolor,
	 * 3 = Iris-virginica; an unknown name leaves the default 0.
	 */
	public double[] getValueByString(String sa) {
		double[] value = new double[5];
		String[] fields = sa.split(",");
		for (int i = 0; i < 4; i++) {
			value[i] = Double.parseDouble(fields[i]);
		}
		if (fields[4].equals("Iris-setosa")) {
			value[4] = 1;
		} else if (fields[4].equals("Iris-versicolor")) {
			value[4] = 2;
		} else if (fields[4].equals("Iris-virginica")) {
			value[4] = 3;
		}
		return value;
	}

}


 
  


package machineLearning;

import java.io.File;
import java.io.FileNotFoundException;
import java.util.HashMap;
import java.util.Scanner;

public class BPNN {
	
	int inputnodenum=4;
	int hidennodenum=3;
	int outputnodenum=3;
	int maxstep=5000;
	int step;
	double theta=0.5;
	double lambda=1.0;
	double alpha=0.01;
	double eta=0.5;
	double v[][];
	double w[][];
	double inputdata[];
	double hidendata[];
	double outputdata[];
	double rightoutput[];
	double deltav[][][];
	double deltaw[][][];
	HashMap map;
	String [] classname;
	
	void init()
	{
		v=new double[inputnodenum][hidennodenum];
		w=new double[hidennodenum][outputnodenum];
		deltav=new double[maxstep+1][inputnodenum][hidennodenum];//0
		deltaw=new double[maxstep+1][hidennodenum][outputnodenum];
		inputdata=new double[inputnodenum];
		hidendata=new double[hidennodenum];
		outputdata=new double[outputnodenum];
		rightoutput=new double[outputnodenum];
		map=new HashMap();
		classname=new String[]{"Iris-setosa","Iris-versicolor","Iris-virginica"};
		for(int i=0;i<3;++i)
			map.put(classname[i], i);
		for(int i=0;i

package machineLearning;

import java.util.Scanner;

/**
 * Interactive entry point: shows a menu on stdout and repeatedly reads a
 * number from stdin, running the selected classifier
 * (1 = KNN, 2 = Bayes, 3 = BPNN, 4 = exit).
 */
public class BootStrap {

	public static void main(String[] args) {
		// one Scanner for the whole session; the original created a new
		// Scanner over System.in on every loop iteration
		Scanner in = new Scanner(System.in);
		while (true) {
			System.out.println("Please input number 1, 2, 3\r\n"
					+ "\"1 \"present KNN Algorithm\r\n"
					+ "\"2 \"present Bayes Algorithm\r\n"
					+ "\"3 \"present BPNN Algorithm\r\n"
					+ "\"4 \"Exit System\r\n");
			// guard against non-numeric input: the original's nextInt()
			// would throw InputMismatchException and crash the program
			if (!in.hasNextInt()) {
				System.out.println("Error Input for i = " + in.next());
				continue;
			}
			int i = in.nextInt();
			System.out.println("i = " + i);
			switch (i) {
			case 1: {
				KNN knn = new KNN();
				knn.run();
				break;
			}
			case 2: {
				Bayes bayes = new Bayes();
				bayes.run();
				break;
			}
			case 3: {
				BPNN bpnn = new BPNN();
				bpnn.run();
				break;
			}
			case 4: {
				// exit status 0: choosing "Exit" is normal termination,
				// not an error (the original exited with status 1)
				System.exit(0);
				break;
			}
			default : {
				System.out.println("Error Input for i = " + i);
				break;
			}
			}
		}
	}
}
实验使用的数据:IrisData.txt
5.1,3.5,1.4,0.2,Iris-setosa
4.9,3.0,1.4,0.2,Iris-setosa
4.7,3.2,1.3,0.2,Iris-setosa
4.6,3.1,1.5,0.2,Iris-setosa
5.0,3.6,1.4,0.2,Iris-setosa
5.4,3.9,1.7,0.4,Iris-setosa
4.6,3.4,1.4,0.3,Iris-setosa
5.0,3.4,1.5,0.2,Iris-setosa
4.4,2.9,1.4,0.2,Iris-setosa
4.9,3.1,1.5,0.1,Iris-setosa
5.4,3.7,1.5,0.2,Iris-setosa
4.8,3.4,1.6,0.2,Iris-setosa
4.8,3.0,1.4,0.1,Iris-setosa
4.3,3.0,1.1,0.1,Iris-setosa
5.8,4.0,1.2,0.2,Iris-setosa
5.7,4.4,1.5,0.4,Iris-setosa
5.4,3.9,1.3,0.4,Iris-setosa
5.1,3.5,1.4,0.3,Iris-setosa
5.7,3.8,1.7,0.3,Iris-setosa
5.1,3.8,1.5,0.3,Iris-setosa
5.4,3.4,1.7,0.2,Iris-setosa
5.1,3.7,1.5,0.4,Iris-setosa
4.6,3.6,1.0,0.2,Iris-setosa
5.1,3.3,1.7,0.5,Iris-setosa
4.8,3.4,1.9,0.2,Iris-setosa
5.0,3.0,1.6,0.2,Iris-setosa
5.0,3.4,1.6,0.4,Iris-setosa
5.2,3.5,1.5,0.2,Iris-setosa
5.2,3.4,1.4,0.2,Iris-setosa
4.7,3.2,1.6,0.2,Iris-setosa
4.8,3.1,1.6,0.2,Iris-setosa
5.4,3.4,1.5,0.4,Iris-setosa
5.2,4.1,1.5,0.1,Iris-setosa
5.5,4.2,1.4,0.2,Iris-setosa
4.9,3.1,1.5,0.1,Iris-setosa
5.0,3.2,1.2,0.2,Iris-setosa
5.5,3.5,1.3,0.2,Iris-setosa
4.9,3.1,1.5,0.1,Iris-setosa
4.4,3.0,1.3,0.2,Iris-setosa
5.1,3.4,1.5,0.2,Iris-setosa
5.0,3.5,1.3,0.3,Iris-setosa
4.5,2.3,1.3,0.3,Iris-setosa
4.4,3.2,1.3,0.2,Iris-setosa
5.0,3.5,1.6,0.6,Iris-setosa
5.1,3.8,1.9,0.4,Iris-setosa
4.8,3.0,1.4,0.3,Iris-setosa
5.1,3.8,1.6,0.2,Iris-setosa
4.6,3.2,1.4,0.2,Iris-setosa
5.3,3.7,1.5,0.2,Iris-setosa
5.0,3.3,1.4,0.2,Iris-setosa
7.0,3.2,4.7,1.4,Iris-versicolor
6.4,3.2,4.5,1.5,Iris-versicolor
6.9,3.1,4.9,1.5,Iris-versicolor
5.5,2.3,4.0,1.3,Iris-versicolor
6.5,2.8,4.6,1.5,Iris-versicolor
5.7,2.8,4.5,1.3,Iris-versicolor
6.3,3.3,4.7,1.6,Iris-versicolor
4.9,2.4,3.3,1.0,Iris-versicolor
6.6,2.9,4.6,1.3,Iris-versicolor
5.2,2.7,3.9,1.4,Iris-versicolor
5.0,2.0,3.5,1.0,Iris-versicolor
5.9,3.0,4.2,1.5,Iris-versicolor
6.0,2.2,4.0,1.0,Iris-versicolor
6.1,2.9,4.7,1.4,Iris-versicolor
5.6,2.9,3.6,1.3,Iris-versicolor
6.7,3.1,4.4,1.4,Iris-versicolor
5.6,3.0,4.5,1.5,Iris-versicolor
5.8,2.7,4.1,1.0,Iris-versicolor
6.2,2.2,4.5,1.5,Iris-versicolor
5.6,2.5,3.9,1.1,Iris-versicolor
5.9,3.2,4.8,1.8,Iris-versicolor
6.1,2.8,4.0,1.3,Iris-versicolor
6.3,2.5,4.9,1.5,Iris-versicolor
6.1,2.8,4.7,1.2,Iris-versicolor
6.4,2.9,4.3,1.3,Iris-versicolor
6.6,3.0,4.4,1.4,Iris-versicolor
6.8,2.8,4.8,1.4,Iris-versicolor
6.7,3.0,5.0,1.7,Iris-versicolor
6.0,2.9,4.5,1.5,Iris-versicolor
5.7,2.6,3.5,1.0,Iris-versicolor
5.5,2.4,3.8,1.1,Iris-versicolor
5.5,2.4,3.7,1.0,Iris-versicolor
5.8,2.7,3.9,1.2,Iris-versicolor
6.0,2.7,5.1,1.6,Iris-versicolor
5.4,3.0,4.5,1.5,Iris-versicolor
6.0,3.4,4.5,1.6,Iris-versicolor
6.7,3.1,4.7,1.5,Iris-versicolor
6.3,2.3,4.4,1.3,Iris-versicolor
5.6,3.0,4.1,1.3,Iris-versicolor
5.5,2.5,4.0,1.3,Iris-versicolor
5.5,2.6,4.4,1.2,Iris-versicolor
6.1,3.0,4.6,1.4,Iris-versicolor
5.8,2.6,4.0,1.2,Iris-versicolor
5.0,2.3,3.3,1.0,Iris-versicolor
5.6,2.7,4.2,1.3,Iris-versicolor
5.7,3.0,4.2,1.2,Iris-versicolor
5.7,2.9,4.2,1.3,Iris-versicolor
6.2,2.9,4.3,1.3,Iris-versicolor
5.1,2.5,3.0,1.1,Iris-versicolor
5.7,2.8,4.1,1.3,Iris-versicolor
6.3,3.3,6.0,2.5,Iris-virginica
5.8,2.7,5.1,1.9,Iris-virginica
7.1,3.0,5.9,2.1,Iris-virginica
6.3,2.9,5.6,1.8,Iris-virginica
6.5,3.0,5.8,2.2,Iris-virginica
7.6,3.0,6.6,2.1,Iris-virginica
4.9,2.5,4.5,1.7,Iris-virginica
7.3,2.9,6.3,1.8,Iris-virginica
6.7,2.5,5.8,1.8,Iris-virginica
7.2,3.6,6.1,2.5,Iris-virginica
6.5,3.2,5.1,2.0,Iris-virginica
6.4,2.7,5.3,1.9,Iris-virginica
6.8,3.0,5.5,2.1,Iris-virginica
5.7,2.5,5.0,2.0,Iris-virginica
5.8,2.8,5.1,2.4,Iris-virginica
6.4,3.2,5.3,2.3,Iris-virginica
6.5,3.0,5.5,1.8,Iris-virginica
7.7,3.8,6.7,2.2,Iris-virginica
7.7,2.6,6.9,2.3,Iris-virginica
6.0,2.2,5.0,1.5,Iris-virginica
6.9,3.2,5.7,2.3,Iris-virginica
5.6,2.8,4.9,2.0,Iris-virginica
7.7,2.8,6.7,2.0,Iris-virginica
6.3,2.7,4.9,1.8,Iris-virginica
6.7,3.3,5.7,2.1,Iris-virginica
7.2,3.2,6.0,1.8,Iris-virginica
6.2,2.8,4.8,1.8,Iris-virginica
6.1,3.0,4.9,1.8,Iris-virginica
6.4,2.8,5.6,2.1,Iris-virginica
7.2,3.0,5.8,1.6,Iris-virginica
7.4,2.8,6.1,1.9,Iris-virginica
7.9,3.8,6.4,2.0,Iris-virginica
6.4,2.8,5.6,2.2,Iris-virginica
6.3,2.8,5.1,1.5,Iris-virginica
6.1,2.6,5.6,1.4,Iris-virginica
7.7,3.0,6.1,2.3,Iris-virginica
6.3,3.4,5.6,2.4,Iris-virginica
6.4,3.1,5.5,1.8,Iris-virginica
6.0,3.0,4.8,1.8,Iris-virginica
6.9,3.1,5.4,2.1,Iris-virginica
6.7,3.1,5.6,2.4,Iris-virginica
6.9,3.1,5.1,2.3,Iris-virginica
5.8,2.7,5.1,1.9,Iris-virginica
6.8,3.2,5.9,2.3,Iris-virginica
6.7,3.3,5.7,2.5,Iris-virginica
6.7,3.0,5.2,2.3,Iris-virginica
6.3,2.5,5.0,1.9,Iris-virginica
6.5,3.0,5.2,2.0,Iris-virginica
6.2,3.4,5.4,2.3,Iris-virginica
5.9,3.0,5.1,1.8,Iris-virginica


 
  

你可能感兴趣的:(Java,机器学习)