-
Notifications
You must be signed in to change notification settings - Fork 0
/
knnclassifier.py
51 lines (36 loc) · 1.27 KB
/
knnclassifier.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
import numpy as np
from scipy import stats

import innerproduct
import l2distance
# NOTE(review): the code below calls l2distance(...) as a function, but
# `import l2distance` binds the module object. Binding the function of the
# same name (standard layout of the companion l2distance.py) makes the call
# work; the module import above is kept for compatibility.
from l2distance import l2distance
def findknn(xTr,xTe,k):
    """
    function [indices,dists]=findknn(xTr,xTe,k);
    Finds the k nearest neighbors of xTe in xTr.

    Input:
    xTr = nxd input matrix with n row-vectors of dimensionality d
    xTe = mxd input matrix with m row-vectors of dimensionality d
    k = number of nearest neighbors to be found

    Output:
    I = kxm matrix, where I(i,j) is the i^th nearest neighbor of xTe(j,:)
    D = kxm matrix of the corresponding Euclidean distances
    """
    # Pairwise distances: D[i, j] = distance from xTe[i,:] to xTr[j,:] (m x n).
    D = l2distance(xTe, xTr)
    # Sort each row once and reuse that ordering for the distances.
    # The original code sorted every row twice (argsort + a second full
    # sort); take_along_axis reuses the argsort result, and also makes it
    # structurally impossible for dists to drift out of step with indices.
    order = np.argsort(D, axis=1)[:, :k]
    dists = np.take_along_axis(D, order, axis=1)
    # Transpose to the documented k x m layout (one test point per column).
    return order.T, dists.T
def knnclassifier(xTr,yTr,xTe,k):
    """
    function preds=knnclassifier(xTr,yTr,xTe,k);
    k-nn classifier

    Input:
    xTr = nxd input matrix with n row-vectors of dimensionality d
    yTr = labels of the n training points (flat n-vector or (n,1) column)
    xTe = mxd input matrix with m row-vectors of dimensionality d
    k = number of nearest neighbors to be found

    Output:
    preds = mx1 matrix of predicted labels; preds[i] is the predicted
            label of xTe[i,:]
    """
    m = xTe.shape[0]
    # indices is k x m: column j holds the k nearest training indices of xTe[j,:].
    indices, _ = findknn(xTr, xTe, k)
    # Flatten the labels so an (n,1) column vector is handled the same as a
    # flat n-vector; without this, yTr[indices] becomes 3-D and the vote is wrong.
    flat_labels = np.asarray(yTr).ravel()
    labels = flat_labels[indices]  # k x m matrix of neighbor labels
    # Majority vote down each column. Pass axis=0 explicitly and reshape
    # unconditionally: scipy.stats.mode changed its default output shape
    # (keepdims) across versions, so don't rely on the implicit defaults.
    preds = np.asarray(stats.mode(labels, axis=0)[0]).reshape((m, 1))
    return preds