300 Lines of Code a Day: Day 53 (kNN Classifier, Continued)

	/**
	 * **************************************************************
	 * Weighted voting. Each neighbor contributes a weighted vote to its class.
	 * 
	 * @param paraNeighbors      The indices of the nearest neighbors.
	 * @param paraWeightMeasure  The weighting scheme: 0 for inverse distance,
	 *                           1 for rank weight divided by distance,
	 *                           2 for plain majority voting.
	 * @return The predicted class index.
	 * ************************************************************* 
	 */
	public int weightedVoting(int[] paraNeighbors, int paraWeightMeasure) {
		double[] tempVotes = new double[dataset.numClasses()];
		switch (paraWeightMeasure) {
		case 0:
			// Inverse-distance weighting: closer neighbors get larger votes.
			for (int i = 0; i < paraNeighbors.length; i++) {
				tempVotes[(int) dataset.instance(paraNeighbors[i]).classValue()] += 1.0 / distanceAfterSorting[i];
			}//of for i
			break;
		case 1:
			// Rank weighting divided by distance: the i-th nearest neighbor adds (100 / (i + 1)) / distance.
			for (int i = 0; i < paraNeighbors.length; i++) {
				tempVotes[(int) dataset.instance(paraNeighbors[i]).classValue()] += (100.0 / (i + 1)) / distanceAfterSorting[i];
			}//of for i
			break;
		case 2:
			// Plain majority voting: every neighbor contributes one vote.
			for (int i = 0; i < paraNeighbors.length; i++) {
				tempVotes[(int) dataset.instance(paraNeighbors[i]).classValue()]++;
			}//of for i
			break;
		default:
			break;
		}//of switch

		System.out.println("tempVotes = " + Arrays.toString(tempVotes));
		int tempMaximalVotingIndex = 0;
		double tempMaximalVoting = 0;
		for (int i = 0; i < tempVotes.length; i++) {
			if (tempVotes[i] > tempMaximalVoting) {
				tempMaximalVoting = tempVotes[i];
				tempMaximalVotingIndex = i;
			}//of if
		}//of for i
		return tempMaximalVotingIndex;
	}//of weightedVoting

Built on top of the distance computation from the earlier days, three weighting schemes are supported in total: inverse-distance weighting (case 0), rank weight divided by distance (case 1), and plain majority voting (case 2).
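As a sanity check, here is a minimal standalone sketch (not part of KnnClassification; the neighbor classes and distances are made-up toy values) showing how the three schemes distribute votes for the same query:

	// A standalone demo of the three weighting schemes. The neighbor classes
	// {0, 1, 0} and sorted distances {0.2, 0.5, 1.0} are made-up toy values.
	public class WeightedVotingDemo {
		public static void main(String[] args) {
			int tempNumClasses = 2;
			int[] tempNeighborClasses = { 0, 1, 0 };    // class labels of the 3 nearest neighbors
			double[] tempDistances = { 0.2, 0.5, 1.0 }; // distances, already sorted ascending

			for (int tempScheme = 0; tempScheme <= 2; tempScheme++) {
				double[] tempVotes = new double[tempNumClasses];
				for (int i = 0; i < tempNeighborClasses.length; i++) {
					switch (tempScheme) {
					case 0: // inverse-distance weighting
						tempVotes[tempNeighborClasses[i]] += 1.0 / tempDistances[i];
						break;
					case 1: // rank weight divided by distance
						tempVotes[tempNeighborClasses[i]] += (100.0 / (i + 1)) / tempDistances[i];
						break;
					default: // plain majority voting
						tempVotes[tempNeighborClasses[i]]++;
					}//of switch
				}//of for i
				System.out.println("Scheme " + tempScheme + ": " + java.util.Arrays.toString(tempVotes));
			}//of for tempScheme
		}//of main
	}//of WeightedVotingDemo

All three schemes pick class 0 for this toy query; they differ only in how strongly the distance and rank of each neighbor influence its vote.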

	/**
	 * *****************************************************************
	 * Split the data for leave-one-out: hold out one instance for testing and
	 * use all remaining instances for training.
	 * 
	 * @param paraIndex  The index of the instance to hold out for testing.
	 * *****************************************************************
	 */
	public void leaveOneOutSplitTrainingTesting(int paraIndex) {
		int tempSize = dataset.numInstances();
		int tempTrainingSize = tempSize - 1;
		
		trainingSet = new int[tempTrainingSize];
		testingSet = new int[tempSize - tempTrainingSize];
		
		int tempTrainingIndex = 0;
		for (int i = 0; i < tempSize; i++) {
			if (i != paraIndex) {
				trainingSet[tempTrainingIndex++] = i;
			} else {
				testingSet[0] = i;
			}//of if
		}//of for i
	}//of leaveOneOutSplitTrainingTesting
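
Calling this method once per index makes every instance serve as the test set exactly once. The fragment below is a standalone sketch of the same splitting logic on a hypothetical dataset of 5 instances, holding out index 2:

	// A standalone sketch of the leave-one-out split on a hypothetical dataset
	// of 5 instances, with index 2 held out for testing.
	int tempSize = 5;
	int tempTestIndex = 2;
	int[] tempTrainingSet = new int[tempSize - 1];
	int[] tempTestingSet = new int[1];

	int tempTrainingIndex = 0;
	for (int i = 0; i < tempSize; i++) {
		if (i != tempTestIndex) {
			tempTrainingSet[tempTrainingIndex++] = i;
		} else {
			tempTestingSet[0] = i;
		}//of if
	}//of for i

	System.out.println(java.util.Arrays.toString(tempTrainingSet)); // [0, 1, 3, 4]
	System.out.println(java.util.Arrays.toString(tempTestingSet));  // [2]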
	public static void main(String args[]) {
		KnnClassification tempClassifier = new KnnClassification("E:/Datasets/iris.arff");
		// System.out.println(tempClassifier.dataset);
		// tempClassifier.splitTrainingTesting(0.5);
		int tempNumInstances = tempClassifier.dataset.numInstances();
		tempClassifier.predictions = new int[tempNumInstances];

		int tempNumNeighbors = 3;
		tempClassifier.setNumNeighbors(tempNumNeighbors);
		for (int i = 0; i < tempNumInstances; i++) {
			// Hold out instance i, train on the rest, and predict its class
			// with distance measure 1 and weight measure 2.
			tempClassifier.leaveOneOutSplitTrainingTesting(i);
			tempClassifier.predictions[i] = tempClassifier.predict(i, tempNumNeighbors, 1, 2);
		}//of for i

		System.out.println("The array of predictions: " + Arrays.toString(tempClassifier.predictions));
		System.out.println("The accuracy of the classifier is: " + tempClassifier.getAccuracy());
	}//of main
