-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathknnClassifier.m
More file actions
69 lines (68 loc) · 3.14 KB
/
knnClassifier.m
File metadata and controls
69 lines (68 loc) · 3.14 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
function [trainedClassifier, predictedClassesTest, F, ACC] = knnClassifier(features,group,sortedFeatureIndices,bestN,train,test,crossValidationMethod)
%KNNCLASSIFIER Classify samples with a 1-nearest-neighbour rule under
%K-fold or leave-one-out cross validation.
%   [trainedClassifier, predictedClassesTest, F, ACC] = ...
%       knnClassifier(features,group,sortedFeatureIndices,bestN,train,test,crossValidationMethod)
%
%   Inputs:
%     features             - feature matrix; transposed internally so that
%                            rows index samples and columns index features.
%     group                - (sampleNum x 1) cell array of class-label
%                            strings; exactly two distinct classes assumed.
%     sortedFeatureIndices - per-fold cell array of feature indices ranked
%                            by relevance (best first).
%     bestN                - per-fold cell array: how many top-ranked
%                            features to use in that fold.
%     train, test          - per-fold cell arrays of training / testing
%                            sample indices.
%     crossValidationMethod- 1 or 3: score each fold separately (F and ACC
%                            are per-fold vectors); 2: pool predictions
%                            across all folds (F and ACC are scalars).
%
%   Outputs:
%     trainedClassifier    - cell array of empty cells: kNN is a lazy
%                            learner, so no per-fold model object exists.
%     predictedClassesTest - per-fold cell array of predicted labels.
%     F                    - mean of the two per-class F-measures.
%     ACC                  - classification accuracy.
%
%   By Seyed Mostafa Kia, January, 2013.
trainedClassifier = [];
h = [];                                % so the final delete(h) is always safe
sampleNum = size(group,1);
features = features';                  % rows = samples, columns = features
% Determine the two class labels: the first sample's label plus the first
% label encountered that differs from it.
c{1} = group{1,1};
for i = 2 : sampleNum
    if ~strcmp(group{i,1},c{1})
        c{2} = group{i,1};
        break;
    end
end
switch crossValidationMethod
    case {1, 3}                        % per-fold scoring (bodies were identical)
        foldNum = length(train);
        trainedClassifier = cell(1,foldNum);
        predictedClassesTest = cell(foldNum,1);
        testResult = cell(1,foldNum);
        h = waitbar(0,'Please wait...');
        for i = 1 : foldNum
            % 1-NN, Euclidean distance, restricted to the fold's top
            % bestN{i} ranked features.
            predictedClassesTest{i,1} = knnclassify(features(test{i},sortedFeatureIndices{i}(1:bestN{i})), features(train{i},sortedFeatureIndices{i}(1:bestN{i})), group(train{i}), 1, 'euclidean');
            testResult{i} = strcmp(predictedClassesTest{i,1},group(test{i}));
            ACC(i) = sum(testResult{i})/size(testResult{i},1);
            [F1, F2] = Fmeasure (group(test{i}),predictedClassesTest{i},c);
            F(i) = (F1 + F2)/2;
            waitbar(i/foldNum);
        end
    case 2                             % pooled scoring across all folds
        foldNum = length(train);
        trainedClassifier = cell(1,foldNum);
        predictedClassesTest = cell(foldNum,1);
        testResult = cell(1,foldNum);
        h = waitbar(0,'Please wait...');
        for i = 1 : foldNum
            trainSet = features(train{i},sortedFeatureIndices{i}(1:bestN{i}));
            testSet = features(test{i},sortedFeatureIndices{i}(1:bestN{i}));
            % BUGFIX: removed leftover NaiveBayes.fit / .predict calls copied
            % from naiveBayesianClassifier — their prediction was immediately
            % overwritten by knnclassify below, so they only wasted work and
            % stored a NaiveBayes object in a kNN function's output.
            predictedClassesTest{i} = knnclassify(testSet, trainSet, group(train{i}), 1, 'euclidean');
            testResult{i} = strcmp(predictedClassesTest{i},group(test{i}));
            waitbar(i/foldNum);
        end
        ACC = sum(cell2mat(testResult))/size(testResult,2);
        [F1, F2] = Fmeasure (group,predictedClassesTest,c);
        F = (F1 + F2)/2;
end
% Close the progress bar only if one was actually created (previously this
% errored for an unrecognised crossValidationMethod).
if ~isempty(h) && ishandle(h)
    delete(h);
end