Nearest Neighbors Classification using MATLAB
close all; clc; clear;
rng(0);                     % fix the random seed so results are reproducible

nTraining = 50;
nTesting  = 50;

% Generate training data as in Problem 2.12: two Gaussian classes with
% means mu1, mu2 and a common spherical covariance sigmasSquared*eye(d).
%
mu1 = [ 1, 1 ].';
mu2 = [ 1.5, 1.5 ].';
sigmasSquared = 0.2;
d = size(mu1,1);

nFeats = nTraining;
X1 = mvnrnd( mu1, sigmasSquared*eye(d), nFeats );
X2 = mvnrnd( mu2, sigmasSquared*eye(d), nFeats );
if( 0 )   % set to 1 to visualize the training samples
  h1 = plot( X1(:,1), X1(:,2), '.b' ); hold on;
  h2 = plot( X2(:,1), X2(:,2), '.r' ); hold on;
  legend( [h1,h2], {'class 1', 'class 2'} );
end
X_train = [ X1; X2 ];
labels_train = [ ones(nFeats,1); 2*ones(nFeats,1) ];

% Generate nTesting new points from each class to classify:
%
nFeats = nTesting;
X1 = mvnrnd( mu1, sigmasSquared*eye(d), nFeats );
X2 = mvnrnd( mu2, sigmasSquared*eye(d), nFeats );
X_test = [ X1; X2 ];
labels_test = [ ones(nFeats,1); 2*ones(nFeats,1) ];

% Classify each vector in X_test with the kNN rule for k = 1..11
% (Nearest_Neighbor comes from the Duda/Hart/Stork classification toolbox):
%
addpath('../../../Duda_Hart_Stork/BookSupplements/ClassificationToolbox/Src');
for nni = 1:11
  test_targets = Nearest_Neighbor( X_train.', labels_train, X_test.', nni );
  P_NN_error   = sum( test_targets(:) ~= labels_test(:) ) / length(test_targets);
  fprintf('P_e %2dNN= %10.6f; \n', nni, P_NN_error);
end

% Calculate the optimal Bayes error rate (using the results from Problem 2.9):
%
addpath('../../../Duda_Hart_Stork/Code/Chapter2/ComputerExercises');
dm  = mahalanobis( mu1, mu2, sigmasSquared*eye(d) );
P_B = 1 - normcdf( 0.5 * dm );
fprintf('P_B= %10.6f\n', P_B);
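For reference, the last step uses the standard result for two equally likely Gaussian classes sharing the covariance sigmasSquared*eye(d): with d_m the Mahalanobis distance between mu1 and mu2, the Bayes error is P_B = 1 - Phi(d_m/2) = Phi(-d_m/2), where Phi is the standard normal CDF, which is what the normcdf call evaluates. A minimal check of the same computation without the toolbox's mahalanobis helper (assuming that helper returns the distance itself, not its square) could look like this:

% Equivalent Bayes-error computation without the toolbox mahalanobis helper
% (two equiprobable Gaussians with common covariance sigmasSquared*eye(d)):
dm_check  = sqrt( (mu2 - mu1).' * ((sigmasSquared*eye(d)) \ (mu2 - mu1)) );
P_B_check = normcdf( -0.5 * dm_check );   % same as 1 - normcdf(0.5*dm_check)
fprintf('P_B (check) = %10.6f\n', P_B_check);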
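If the Duda/Hart/Stork ClassificationToolbox is not on your path, a rough equivalent of the k-NN sweep can be written with fitcknn/predict from MATLAB's Statistics and Machine Learning Toolbox (which the script already needs for mvnrnd and normcdf). This is only a sketch under that assumption, not the toolbox's Nearest_Neighbor routine; note that fitcknn expects observations in rows, so X_train is passed untransposed:

% k-NN error sweep using the built-in classifier instead of Nearest_Neighbor
for nni = 1:11
  mdl  = fitcknn( X_train, labels_train, 'NumNeighbors', nni );
  pred = predict( mdl, X_test );            % predicted class labels (1 or 2)
  P_NN_error = mean( pred ~= labels_test );
  fprintf('P_e %2dNN= %10.6f (fitcknn)\n', nni, P_NN_error);
end

The error estimates may differ slightly from the toolbox run, mainly because of tie-breaking for even values of k, but they should be of the same order as the Bayes rate printed above.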