[LON-CAPA-cvs] cvs: modules /minaeibi c183_10way_9Class.m

minaeibi lon-capa-cvs@mail.lon-capa.org
Tue, 03 Sep 2002 13:17:25 -0000


This is a MIME encoded message

--minaeibi1031059045
Content-Type: text/plain

minaeibi		Tue Sep  3 09:17:25 2002 EDT

  Added files:                 
    /modules/minaeibi	c183_10way_9Class.m 
  Log:
  9-class MATLAB code for the non-tree classifiers
  
  
--minaeibi1031059045
Content-Type: text/plain
Content-Disposition: attachment; filename="minaeibi-20020903091725.txt"


Index: modules/minaeibi/c183_10way_9Class.m
+++ modules/minaeibi/c183_10way_9Class.m
% Compare the error rates of Bayes, 1-NN, k-NN, MLP, and Parzen classifiers
% on the PHY183 data set (9 classes, 10-fold cross-validation).
close all;
clear all;

folder  = 10;   % number of cross-validation folds
l_col   = 9;    % column of f183 that holds the class label
ClassNo = 9;    % number of classes

load f183.txt;  % loads the data matrix into the variable f183
dataf = f183;

% Row indices of the samples belonging to each of the nine classes.
k1 = find(dataf(:,l_col)==1);
k2 = find(dataf(:,l_col)==2);
k3 = find(dataf(:,l_col)==3);
k4 = find(dataf(:,l_col)==4);
k5 = find(dataf(:,l_col)==5);
k6 = find(dataf(:,l_col)==6);
k7 = find(dataf(:,l_col)==7);
k8 = find(dataf(:,l_col)==8);
k9 = find(dataf(:,l_col)==9);

% Range of feature columns used for classification.
First_Column  = 1;
Second_Column = 6;
Feature_no    = Second_Column - First_Column + 1;

k_knn = 3;      % neighbours for k-NN; alternative: floor(sqrt(Feature_no))

% Number of samples in each class.
nc1=size(k1,1);
nc2=size(k2,1);
nc3=size(k3,1);
nc4=size(k4,1);
nc5=size(k5,1);
nc6=size(k6,1);
nc7=size(k7,1);
nc8=size(k8,1);
nc9=size(k9,1);

% Reorder the data so rows are grouped by class (class 1 first, class 9
% last).  Indexing dataf with a whole index vector selects all of a class's
% rows at once; the original code appended one row per loop iteration,
% which reallocates and copies B on every pass (accidentally quadratic).
B = [dataf(k1,:); dataf(k2,:); dataf(k3,:);
     dataf(k4,:); dataf(k5,:); dataf(k6,:);
     dataf(k7,:); dataf(k8,:); dataf(k9,:)];

% Random permutation of each class's row positions within the grouped
% matrix B.  Class c occupies rows (nc1+...+nc_{c-1})+1 .. (nc1+...+nc_c),
% so every permutation is offset by the cumulative count of the preceding
% classes.
index1=randperm(nc1);
index2=randperm(nc2)+nc1;
index3=randperm(nc3)+nc1+nc2;
index4=randperm(nc4)+nc1+nc2+nc3;
index5=randperm(nc5)+nc1+nc2+nc3+nc4;
index6=randperm(nc6)+nc1+nc2+nc3+nc4+nc5;
index7=randperm(nc7)+nc1+nc2+nc3+nc4+nc5+nc6;
index8=randperm(nc8)+nc1+nc2+nc3+nc4+nc5+nc6+nc7;
% BUG FIX: this line used randperm(nc1).  Whenever nc1 ~= nc9 that yields a
% permutation of the wrong length for class 9, leaving some class-9 rows
% unused and/or indexing rows that belong to no fold slice.
index9=randperm(nc9)+nc1+nc2+nc3+nc4+nc5+nc6+nc7+nc8;

% Per-class split sizes for one fold: nnK samples of class K are held out
% for testing, the remaining nn_K stay in the training set.
nn1 = floor(nc1/folder);   nn_1 = nc1 - nn1;
nn2 = floor(nc2/folder);   nn_2 = nc2 - nn2;
nn3 = floor(nc3/folder);   nn_3 = nc3 - nn3;
nn4 = floor(nc4/folder);   nn_4 = nc4 - nn4;
nn5 = floor(nc5/folder);   nn_5 = nc5 - nn5;
nn6 = floor(nc6/folder);   nn_6 = nc6 - nn6;
nn7 = floor(nc7/folder);   nn_7 = nc7 - nn7;
nn8 = floor(nc8/folder);   nn_8 = nc8 - nn8;
nn9 = floor(nc9/folder);   nn_9 = nc9 - nn9;

% Total test and training set sizes over all nine classes.
nn_test  = nn1+nn2+nn3+nn4+nn5+nn6+nn7+nn8+nn9;
nn_train = nn_1+nn_2+nn_3+nn_4+nn_5+nn_6+nn_7+nn_8+nn_9;

% True class labels of the test and training rows, in class order.
lab_test  = [1*ones(1,nn1)  2*ones(1,nn2)  3*ones(1,nn3)  4*ones(1,nn4)  5*ones(1,nn5)  6*ones(1,nn6)  7*ones(1,nn7)  8*ones(1,nn8)  9*ones(1,nn9)];
lab_train = [1*ones(1,nn_1) 2*ones(1,nn_2) 3*ones(1,nn_3) 4*ones(1,nn_4) 5*ones(1,nn_5) 6*ones(1,nn_6) 7*ones(1,nn_7) 8*ones(1,nn_8) 9*ones(1,nn_9)];

% One-of-nine target encodings: row c of eye(ClassNo) is the indicator
% vector for class c, repeated once per sample of that class.
E = eye(ClassNo);

target_train = [repmat(E(1,:),nn_1,1);
                repmat(E(2,:),nn_2,1);
                repmat(E(3,:),nn_3,1);
                repmat(E(4,:),nn_4,1);
                repmat(E(5,:),nn_5,1);
                repmat(E(6,:),nn_6,1);
                repmat(E(7,:),nn_7,1);
                repmat(E(8,:),nn_8,1);
                repmat(E(9,:),nn_9,1)];

target_test = [repmat(E(1,:),nn1,1);
               repmat(E(2,:),nn2,1);
               repmat(E(3,:),nn3,1);
               repmat(E(4,:),nn4,1);
               repmat(E(5,:),nn5,1);
               repmat(E(6,:),nn6,1);
               repmat(E(7,:),nn7,1);
               repmat(E(8,:),nn8,1);
               repmat(E(9,:),nn9,1)];

% Select the feature columns used for classification.
Column_No = Second_Column-First_Column + 1;
data = B(:,First_Column:Second_Column);

% Normalize each feature column to zero mean and unit variance.
for k=1:Column_No
    data(:,k)=(data(:,k)-mean(data(:,k)))/std(data(:,k));
end

% Summed per-fold error rates of each classifier; averaged after the
% cross-validation loop.
round_err_bayes = 0;
round_err_1nn = 0;
round_err_knn = 0;
round_err_parzen = 0;
round_err_mlp = 0;
round_err_cmc = 0;
round_err_oracle = 0;
% Dead code removed: 'test' and 'train' were zero-preallocated here but are
% rebuilt by concatenation inside the loop, and 'index'
% (randperm(size(B,1))) was computed but never read.

% 10-fold cross-validation.  In each round, the round-th slice of every
% class's permuted indices (nnK samples per class) is held out for testing
% and the remainder is used for training; the classifiers are then scored
% on the held-out slice.
for round = 1:folder
    train1=[];train2=[];train3=[];train4=[];train5=[];train6=[];train7=[];train8=[];train9=[];
    test1=[];test2=[];test3=[];test4=[];test5=[];test6=[];test7=[];test8=[];test9=[];
    test_index1=[];test_index2=[];test_index3=[];test_index4=[];test_index5=[];test_index6=[];test_index7=[];test_index8=[];test_index9=[];
    %Randomly separate each class to training and testing set.
    % Test slice of class c: elements (round-1)*nnc+1 .. round*nnc of indexc.
    % test_indexC keeps the absolute row number so the true label can be
    % looked up in B later.
    for i=1:nn1
        idx=index1(i+(round-1)*nn1);
        test1(i,:)=data(idx,:);
        test_index1(i)=idx;
    end
    for i=1:nn2
        idx=index2(i+(round-1)*nn2);
        test2(i,:)=data(idx,:);
        test_index2(i)=idx;
    end
    for i=1:nn3
        idx=index3(i+(round-1)*nn3);
        test3(i,:)=data(idx,:);
        test_index3(i)=idx;
    end
    for i=1:nn4
        idx=index4(i+(round-1)*nn4);
        test4(i,:)=data(idx,:);
        test_index4(i)=idx;
    end
    for i=1:nn5
        idx=index5(i+(round-1)*nn5);
        test5(i,:)=data(idx,:);
        test_index5(i)=idx;
    end
    for i=1:nn6
        idx=index6(i+(round-1)*nn6);
        test6(i,:)=data(idx,:);
        test_index6(i)=idx;
    end
    for i=1:nn7
        idx=index7(i+(round-1)*nn7);
        test7(i,:)=data(idx,:);
        test_index7(i)=idx;
    end
    for i=1:nn8
        idx=index8(i+(round-1)*nn8);
        test8(i,:)=data(idx,:);
        test_index8(i)=idx;
    end
    for i=1:nn9
        idx=index9(i+(round-1)*nn9);
        test9(i,:)=data(idx,:);
        test_index9(i)=idx;
    end
    
    % Training set of class c: everything before and after this round's
    % test slice.  NOTE(review): the train_index* arrays are written but
    % never read, and they are not cleared between rounds, so stale entries
    % persist; harmless because nothing uses them.
    for i=1:(round-1)*nn1
        idx=index1(i);
        train1(i,:)=data(idx,:);
        train_index1(i)=idx;
    end        
    for i=(round*nn1)+1:nc1
        idx=index1(i);
        train1=[train1;data(idx,:)];
        train_index1(i)=idx;
    end        
    
    for i=1:(round-1)*nn2
        idx=index2(i);
        train2(i,:)=data(idx,:);
        train_index2(i)=idx;
    end        
    for i=(round*nn2)+1:(nc2)
        idx=index2(i);
        train2=[train2;data(idx,:)];
        train_index2(i)=idx;
    end        

    for i=1:(round-1)*nn3
        idx=index3(i);
        train3(i,:)=data(idx,:);
        train_index3(i)=idx;
    end        
    for i=(round*nn3)+1:(nc3)
        idx=index3(i);
        train3=[train3;data(idx,:)];
        train_index3(i)=idx;
    end        

    for i=1:(round-1)*nn4
        idx=index4(i);
        train4(i,:)=data(idx,:);
        train_index4(i)=idx;
    end        
    for i=(round*nn4)+1:(nc4)
        idx=index4(i);
        train4=[train4;data(idx,:)];
        train_index4(i)=idx;
    end        

    for i=1:(round-1)*nn5
        idx=index5(i);
        train5(i,:)=data(idx,:);
        train_index5(i)=idx;
    end        
    for i=(round*nn5)+1:(nc5)
        idx=index5(i);
        train5=[train5;data(idx,:)];
        train_index5(i)=idx;
    end        

    for i=1:(round-1)*nn6
        idx=index6(i);
        train6(i,:)=data(idx,:);
        train_index6(i)=idx;
    end        
    for i=(round*nn6)+1:(nc6)
        idx=index6(i);
        train6=[train6;data(idx,:)];
        train_index6(i)=idx;
    end        

    for i=1:(round-1)*nn7
        idx=index7(i);
        train7(i,:)=data(idx,:);
        train_index7(i)=idx;
    end        
    for i=(round*nn7)+1:(nc7)
        idx=index7(i);
        train7=[train7;data(idx,:)];
        train_index7(i)=idx;
    end        
    
    for i=1:(round-1)*nn8
        idx=index8(i);
        train8(i,:)=data(idx,:);
        train_index8(i)=idx;
    end        
    for i=(round*nn8)+1:(nc8)
        idx=index8(i);
        train8=[train8;data(idx,:)];
        train_index8(i)=idx;
    end        
    
    for i=1:(round-1)*nn9
        idx=index9(i);
        train9(i,:)=data(idx,:);
        train_index9(i)=idx;
    end        
    for i=(round*nn9)+1:(nc9)
        idx=index9(i);
        train9=[train9;data(idx,:)];
        train_index9(i)=idx;
    end        
    
    % Stack the per-class pieces; rows stay grouped by class, matching
    % lab_train/lab_test and target_train/target_test built earlier.
    train=[train1;train2;train3;train4;train5;train6;train7;train8;train9];
    test=[test1;test2;test3;test4;test5;test6;test7;test8;test9];
    
    % Absolute row index (into B) of every test sample, concatenated in the
    % same class order as 'test'.
    test_index(1:nn1)=test_index1(1:nn1);
    test_index(nn1+1:nn1+nn2)=test_index2(1:nn2);
    test_index(nn1+nn2+1:nn1+nn2+nn3)=test_index3(1:nn3);
    test_index(nn1+nn2+nn3+1:nn1+nn2+nn3+nn4)=test_index4(1:nn4);
    test_index(nn1+nn2+nn3+nn4+1:nn1+nn2+nn3+nn4+nn5)=test_index5(1:nn5);
    test_index(nn1+nn2+nn3+nn4+nn5+1:nn1+nn2+nn3+nn4+nn5+nn6)=test_index6(1:nn6);
    test_index(nn1+nn2+nn3+nn4+nn5+nn6+1:nn1+nn2+nn3+nn4+nn5+nn6+nn7)=test_index7(1:nn7);
    test_index(nn1+nn2+nn3+nn4+nn5+nn6+nn7+1:nn1+nn2+nn3+nn4+nn5+nn6+nn7+nn8)=test_index8(1:nn8);
    test_index(nn1+nn2+nn3+nn4+nn5+nn6+nn7+nn8+1:nn1+nn2+nn3+nn4+nn5+nn6+nn7+nn8+nn9)=test_index9(1:nn9);
    
    %Calculate Sample mean and sample covariance.
    % Per-class Gaussian parameter estimates for the plug-in Bayes rule.
    m_1 = mean(train1); v_1 = cov(train1);
    m_2 = mean(train2); v_2 = cov(train2);
    m_3 = mean(train3); v_3 = cov(train3);
    m_4 = mean(train4); v_4 = cov(train4);
    m_5 = mean(train5); v_5 = cov(train5);
    m_6 = mean(train6); v_6 = cov(train6);
    m_7 = mean(train7); v_7 = cov(train7);
    m_8 = mean(train8); v_8 = cov(train8);
    m_9 = mean(train9); v_9 = cov(train9);
    
    %Calculate discriminant functions for every testing samples and count error classifications.
%    k_knn=2;
    % Per-round misclassification counters.
    error_bayes = 0;
    error_1nn = 0;
    error_knn = 0;
    error_mlp = 0;
    error_parzen = 0;
    error_oracle = 0;
    error_cmc = 0;
    
    %Get the 1nn classification result in eachClass
    % knn, parzen_classify, and flagmax are project-local functions (not
    % shown in this file).
    [eachClass1, nearestSampleIndex, knnmat] = knn([train lab_train'], [test lab_test'], 1);
  
    %Get the knn classification result in eachClass
    [eachClass, nearestSampleIndex, knnmat] = knn([train lab_train'], [test lab_test'], k_knn);
    clear nearestSampleIndex; clear knnmat;

    %Get the Parzen Window classification result in eachClass
    
    %[m,s,p,sig]=fit_sphere(train, target);
    % sigma=trainparzen(train,target,max(sig),1,max(sig)/8);
    class=flagmax(parzen_classify(test,train,target_train,.1));
    
    %Get the MLP classification result in eachClass
    % NOTE(review): the MLP classifier is disabled (commented out).  As a
    % consequence error_mlp stays 0 for every round, flag(5) below is never
    % set, and the oracle counter can never be incremented.
    %[w,bias,error]=trainmlp(train,target_train,[3 3],0.01);
    %out = flagmax(mlp(test, w, bias));
    %clear w; clear bias; clear error;
    
    for i = 1:size(test,1)
        % Bayes decision rule
        % Quadratic (Gaussian plug-in) discriminant score for each class.
        % NOTE(review): the prior term uses the TEST subset proportions
        % (size(testK,1)/size(test,1)); presumably the training-set class
        % proportions were intended -- confirm before changing.
        x = test(i,:);
        g(1) = (x-m_1)*(-0.5)*inv(v_1)*(x-m_1)'-0.5*log(det(v_1))+log(size(test1,1)/size(test,1));
        g(2) = (x-m_2)*(-0.5)*inv(v_2)*(x-m_2)'-0.5*log(det(v_2))+log(size(test2,1)/size(test,1));            
        g(3) = (x-m_3)*(-0.5)*inv(v_3)*(x-m_3)'-0.5*log(det(v_3))+log(size(test3,1)/size(test,1));
        g(4) = (x-m_4)*(-0.5)*inv(v_4)*(x-m_4)'-0.5*log(det(v_4))+log(size(test4,1)/size(test,1));
        g(5) = (x-m_5)*(-0.5)*inv(v_5)*(x-m_5)'-0.5*log(det(v_5))+log(size(test5,1)/size(test,1));
        g(6) = (x-m_6)*(-0.5)*inv(v_6)*(x-m_6)'-0.5*log(det(v_6))+log(size(test6,1)/size(test,1));
        g(7) = (x-m_7)*(-0.5)*inv(v_7)*(x-m_7)'-0.5*log(det(v_7))+log(size(test7,1)/size(test,1));
        g(8) = (x-m_8)*(-0.5)*inv(v_8)*(x-m_8)'-0.5*log(det(v_8))+log(size(test8,1)/size(test,1));
        g(9) = (x-m_9)*(-0.5)*inv(v_9)*(x-m_9)'-0.5*log(det(v_9))+log(size(test9,1)/size(test,1));
        [C,I] = max([g(1) g(2) g(3) g(4) g(5) g(6) g(7) g(8) g(9)]);
        
        % flag(k)=1 records that classifier k misclassified sample i
        % (1=Bayes, 2=1NN, 3=kNN, 4=Parzen, 5=MLP -- currently disabled).
        flag(1)=0;flag(2)=0;flag(3)=0;flag(4)=0;flag(5)=0;
        % Calculate error for Bayes
        if I~=B(test_index(i),l_col)
            error_bayes = error_bayes + 1;
            flag(1)=1;
        end   %if
            
        % Calculate error for 1NN
        if (eachClass1(i))~=B(test_index(i),l_col)
            error_1nn = error_1nn + 1;
            flag(2)=1;
        end   %if            
        
        % Calculate error for kNN
        if (eachClass(i))~=B(test_index(i),l_col)
            error_knn = error_knn + 1;
            flag(3)=1;
        end   %if            
        
        % Calculate error for parzen
        % Error when the predicted one-hot row differs from the target row.
        if (sum(target_test(i,:)==class(i,:))~=size(class,2))
            error_parzen = error_parzen + 1;
            flag(4)=1;
        end   %if            
        
        % Calculate error for MLP
     %   if (sum(target_test(i,:)==out(i,:))~=size(class,2))
     %       error_mlp = error_mlp + 1;
     %       flag(5)=1;
     %   end   %if
    
        % CMC (committee) errs when more than 3 classifiers are wrong, i.e.
        % all four active ones; the oracle errs only when all five are
        % wrong.  NOTE(review): with the MLP disabled s_flag cannot reach 5,
        % so error_oracle always stays 0.
        s_flag=sum(flag);
        if(s_flag>3)
            error_cmc=error_cmc+1;
            if(s_flag==5)
                error_oracle=error_oracle+1;
            end %if
        end %if
        
    end   %for
    
    % Calculate error rate for plug-in
    error_rate_bayes(round) = error_bayes/size(test,1); round_err_bayes = round_err_bayes + error_rate_bayes(round);
    
    % Calculate error rate for 1nn
    error_rate_1nn(round) = error_1nn/size(test,1);round_err_1nn = round_err_1nn + error_rate_1nn(round);

    % Calculate error rate for knn
    error_rate_knn(round) = error_knn/size(test,1);round_err_knn = round_err_knn + error_rate_knn(round);

    % Calculate error rate for parzen
    error_rate_parzen(round) = error_parzen/size(test,1); round_err_parzen = round_err_parzen + error_rate_parzen(round);

    % Calculate error rate for mlp
    error_rate_mlp(round) = error_mlp/size(test,1); round_err_mlp = round_err_mlp + error_rate_mlp(round);

    % Calculate error rate for cmc
    error_rate_cmc(round) = error_cmc/size(test,1); round_err_cmc = round_err_cmc + error_rate_cmc(round);

    % Calculate error rate for oracle
    error_rate_oracle(round) = error_oracle/size(test,1); round_err_oracle = round_err_oracle + error_rate_oracle(round);

end   %round

% Mean and standard deviation of the per-fold error rates.
avg_err_bayes  = round_err_bayes/folder;   std_dev_bayes  = std(error_rate_bayes);
avg_err_1nn    = round_err_1nn/folder;     std_dev_1nn    = std(error_rate_1nn);
avg_err_knn    = round_err_knn/folder;     std_dev_knn    = std(error_rate_knn);
avg_err_parzen = round_err_parzen/folder;  std_dev_parzen = std(error_rate_parzen);
avg_err_mlp    = round_err_mlp/folder;     std_dev_mlp    = std(error_rate_mlp);
avg_err_cmc    = round_err_cmc/folder;     std_dev_cmc    = std(error_rate_cmc);
avg_err_oracle = round_err_oracle/folder;  std_dev_oracle = std(error_rate_oracle);


% Summary table: classifier name, mean error rate, standard deviation.
fprintf('\n\n=======================================================\n');
fprintf('Bayes\t%5.4f\t%5.4f\n', avg_err_bayes, std_dev_bayes );
fprintf('1NN\t%5.4f\t%5.4f\n', avg_err_1nn, std_dev_1nn );
fprintf('KNN\t%5.4f\t%5.4f\n', avg_err_knn, std_dev_knn );
fprintf('Parzen\t%5.4f\t%5.4f\n', avg_err_parzen, std_dev_parzen );
fprintf('MLP\t%5.4f\t%5.4f\n', avg_err_mlp, std_dev_mlp );
fprintf('CMC\t%5.4f\t%5.4f\n', avg_err_cmc, std_dev_cmc );
fprintf('Oracle\t%5.4f\t%5.4f\n', avg_err_oracle, std_dev_oracle );


% Plot every classifier's per-fold error curve on one figure.  'hold on'
% persists until released, so a single call suffices for all overlays.
plot(error_rate_bayes,'k','Marker','s');
hold on;
plot(error_rate_1nn,'m','LineStyle','-');
plot(error_rate_knn,'r','Marker','*');
plot(error_rate_parzen,'b','LineStyle',':');
plot(error_rate_mlp,'g','Marker','o');
plot(error_rate_cmc,'k','Marker','v');
plot(error_rate_oracle,'k','Marker','.');

legend('Bayes','1-NN','K-NN','Parzen','MLP','CMC','Oracle');
xlabel('Test no. in 10-fold Cross-Validation');
ylabel('Error Rate');
title('LON-CAPA: Comparison of classifiers on PHY183 Data, 10-fold CV (9 Classes)');
--minaeibi1031059045--