k-Nearest Neighbors (kNN) Algorithm

Algorithm implementation: Python

knn.py

#!/usr/bin/python
# coding=utf-8
# code by dengzp@time:2020
#################################
# kNN implementation

from numpy import *
import operator


# Create a dataset of four samples belonging to two classes
def creatDatabase():
    group = array([[1.0, 0.9], [1.0, 1.0], [0.1, 0.2], [0.0, 0.1]])
    # Class labels for the four samples
    labels = ['A', 'A', 'B', 'B']
    return group, labels


def kNNClassify(newInput, dataSet, labels, k):
    numSamples = dataSet.shape[0]

    # Step 1: compute the Euclidean distance from newInput to every sample
    diff = tile(newInput, (numSamples, 1)) - dataSet
    squareDiff = diff ** 2
    squareDist = sum(squareDiff, axis=1)
    distance = squareDist ** 0.5

    # Step 2: sort the distances and tally the labels of the k nearest samples
    sortedDistIndices = argsort(distance)
    classCount = {}
    for i in range(k):
        voteLabel = labels[sortedDistIndices[i]]
        classCount[voteLabel] = classCount.get(voteLabel, 0) + 1

    # Step 3: return the label with the most votes among the k neighbors
    maxCount = 0
    maxIndex = None
    for key, value in classCount.items():
        if value > maxCount:
            maxCount = value
            maxIndex = key
    return maxIndex
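
As a side note, the same distance computation and majority vote can be written more compactly with NumPy broadcasting (instead of tile) and collections.Counter. The sketch below is only an equivalent rewrite for illustration; the function name knn_classify_alt is made up here and is not part of the original knn.py.

# Sketch: equivalent classifier using broadcasting and Counter (illustrative only)
from collections import Counter
import numpy as np

def knn_classify_alt(new_input, data_set, labels, k):
    # Broadcasting subtracts new_input from every row, so no tile() is needed
    dists = np.sqrt(((data_set - new_input) ** 2).sum(axis=1))
    # Indices of the k closest samples
    nearest = np.argsort(dists)[:k]
    # Majority vote over the neighbors' labels
    votes = Counter(labels[i] for i in nearest)
    return votes.most_common(1)[0][0]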

test.py

#!/usr/bin/python
# coding=utf-8

import knn
from numpy import *

dataSet, labels = knn.creatDatabase()

testX = array([1.2, 1.0])
k = 3

outputLabel = knn.kNNClassify(testX, dataSet, labels, k)
print("Your input data is :", testX, " and the classified to class: ", outputLabel)
