clear; close all;
%%%%% source code for homework6 %%%%%

%% 1. load the data %%%%%%%%%%%%
% Expects MNIST_data.mat to provide: traindata, trainlabel, testdata, testlabel.
% Data layout assumed: one example per COLUMN (indexing below is data(:,idx)).
fprintf(1,'\nMNIST data loading...\n');
load MNIST_data;
whos

%% 2. Specify the training and testing data involved in this task. %%%%%%%%%%%%
target_digits = [1 3 5 7] ; % 1:10;
if length(target_digits)<10
    [train_index, test_index] = deal(cell(1, length(target_digits)));
    % look for the index of the training/testing data for each target digit
    for i = 1 : length(target_digits)
        train_index{i} = find(trainlabel == target_digits(i));
        test_index{i} = find(testlabel == target_digits(i));
    end
    % concatenate the per-digit index sets and restore original ordering
    % NOTE(review): cat(2, ...) assumes find() returned row vectors, i.e.
    % trainlabel/testlabel are row vectors -- confirm against MNIST_data.mat
    train_index = sort(cat(2, train_index{:}));
    test_index = sort(cat(2, test_index{:}));
    % keep only the columns (examples) belonging to the target digits
    traindata = traindata(:,train_index);
    testdata = testdata(:,test_index);
    % groundtruth labels
    trainlabel = trainlabel(train_index);
    testlabel = testlabel(test_index);
end
gn = size(traindata,2);   % number of training examples
tn = size(testdata,2);    % number of testing examples
fprintf(1,'learning to classify digits [%s] \n', num2str(target_digits));
fprintf(1,' %d examples for training, %d for testing \n', gn, tn);

%% 3. classification with minimum distance classifiers %%%%%%%%%%%%
tic;
fprintf(1,'\nLearning minimum-distance classifier ...\n');
[centers, ulabels] = mdist_learn(traindata, trainlabel);
[clabel, err_rate] = mdist_classify(centers, ulabels, testdata, testlabel);
fprintf(1,'\n the error rate is %.3f%%\n', err_rate*100);
toc;

%% 4. 1NN classification with Euclidean metric %%%%%%%%%%%%
tic;
fprintf(1,'\nClassification with 1-NN ...\n');
% final error rate on the test set
err_rate = NN_euclidean(traindata, trainlabel, testdata, testlabel);
fprintf(1,'\n the error rate is %.3f%%\n', err_rate*100);
toc;

%% 5. k-NN classification with L3 metric %%%%%%%%%%%%

%% 6. linear perceptron with subsampled training data %%%%%%%%%%%%
% Randomly subsample up to 10000 training examples to speed up training.
% Guard with min(...) so this also works when fewer than 10000 examples
% survive the digit filtering above (originally randid(1:10000) would error).
nsub = min(10000, gn);
randid = randperm(gn);
randid = sort(randid(1:nsub));
traindata = traindata(:, randid);
trainlabel = trainlabel(randid);
%%% your linear perceptron training code here