Implementing the KNN (k-Nearest Neighbors) Algorithm
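KNN classifies a query point by measuring its distance to every training sample, taking the k closest samples, and assigning the label that the majority of those neighbours carry. The distance used throughout this post is the plain Euclidean distance, d(a, b) = sqrt(Σ_j (a_j − b_j)²).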

#!/usr/bin/env python3
import matplotlib.pyplot as plt
import numpy as np
from math import sqrt
from collections import Counter
# 10 random x values (hard-coded here so the example is reproducible)
x = np.array([0.19233859, 0.18626021, 0.34556073, 0.39676747, 0.53881673,
              0.41919451, 0.6852195, 0.20445225, 0.87811744, 0.32738759]) * 10
X = x.reshape(10, 1)
# 10 random y values
y = np.array([0.62978754, 0.93461034, 0.56664329, 0.99947942, 0.59558408,
              0.84467021, 0.95667363, 0.41492526, 0.47581411, 0.12934113]) * 10
Y = y.reshape(10, 1)
data = np.hstack((X, Y))                            # 10 samples, 2 features each
target = np.array([0, 0, 1, 0, 0, 1, 1, 0, 1, 1])   # class label of each sample
result = np.array([4.823, 7.09])                    # the point we want to classify
# uncomment to visualise the training points and the query point
# plt.scatter(X, Y, c=target)
# plt.scatter(result[0], result[1], c='r')
# plt.show()
# Version 1: an explicit for loop
# distances = []
# for i in data:
#     distance = sqrt(np.sum((i - result) ** 2))
#     distances.append(distance)

# Version 2: a list comprehension
distances = [sqrt(np.sum((i - result) ** 2)) for i in data]
# the distance from every training sample to the query point
print(distances)
index = np.argsort(distances)          # sample indices sorted by distance
k = 3
print(index[:k])                       # the k nearest neighbours
top = [target[i] for i in index[:k]]   # their class labels
# majority vote
votes = Counter(top)
# most_common() sorts the (label, count) pairs by count in descending order
print(votes.most_common()[0][0])
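The steps above can be folded into one reusable function. A minimal sketch, reusing the imports and data already defined in this script; the name knn_predict and its parameter names are my own choice, not from the original post:

def knn_predict(data, target, query, k=3):
    # Euclidean distance from the query point to every training sample
    distances = [sqrt(np.sum((sample - query) ** 2)) for sample in data]
    # indices of the k closest samples
    nearest = np.argsort(distances)[:k]
    # majority vote over the labels of those neighbours
    votes = Counter(target[i] for i in nearest)
    return votes.most_common(1)[0][0]

print(knn_predict(data, target, result, k=3))  # should print the same label as above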

Using the scikit-learn framework

from sklearn.neighbors import KNeighborsClassifier
import numpy as np
# 10 random x values (hard-coded here, same data as above)
x = np.array([0.19233859, 0.18626021, 0.34556073, 0.39676747, 0.53881673,
              0.41919451, 0.6852195, 0.20445225, 0.87811744, 0.32738759]) * 10
X = x.reshape(10, 1)
# 10 random y values
y = np.array([0.62978754, 0.93461034, 0.56664329, 0.99947942, 0.59558408,
              0.84467021, 0.95667363, 0.41492526, 0.47581411, 0.12934113]) * 10
Y = y.reshape(10, 1)
data = np.hstack((X, Y))
target = np.array([0, 0, 1, 0, 0, 1, 1, 0, 1, 1])
result = np.array([4.823, 7.09])
kNN_classifier = KNeighborsClassifier(n_neighbors=3)
# train (fit) the classifier on the toy data
kNN_classifier.fit(data, target)

'''Prediction: predict() expects a 2-D array (one row per sample), so reshape the query point'''
print(kNN_classifier.predict(result.reshape(1,2))[0])
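To inspect which training points the fitted model actually used, KNeighborsClassifier also exposes a kneighbors method that returns the distances to, and indices of, the k nearest training samples. A short check against the manual version above:

# distances to, and row indices of, the 3 nearest training samples
dist, idx = kNN_classifier.kneighbors(result.reshape(1, 2))
print(dist)             # should match the hand-computed distances
print(idx)              # should match index[:k] from the manual version
print(target[idx[0]])   # the labels that take part in the vote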
