Implementation
% --- LDANet training/testing demo on CIFAR-10, with a liblinear SVM classifier ---
% NOTE(review): this script is an incomplete extraction. The training-batch
% loading loop body, the per-test-image loop header, and the LDANet feature
% extraction producing `ftrain` are missing; `xLabel_est`, `idx`, and
% `TestData_ImgCell` are referenced but never defined in the visible code.
% Recover the original source before running.
clear all;
close all;
clc;
addpath('./Utils');
addpath('./Liblinear');
make;  % compile the liblinear MEX binaries
%TrnSize = 10000;
TrnSize = 100;  % number of training samples (reduced from 10000 for a quick demo)
ImgSize = 32;   % CIFAR-10 images are 32x32
DataPath = 'cifar-10-batches-mat';
TrnLabels = [];
TrnData = [];
for i = 1:5
% NOTE(review): loop body missing in this extract -- presumably it loads
% data_batch_i.mat and appends to TrnData / TrnLabels; confirm against original.
end
load(fullfile(DataPath,'test_batch.mat'));
TestData = data';   % one test image per column
%figure,imshow(TestData);
TestLabels = labels;
TrnLabels = double(TrnLabels);
TestLabels = double(TestLabels);
% plz comment out the following four lines for a complete test.
% when you want to do so, please ensure that your computer memory is more than 64GB.
% training linear SVM classifier on large amount of high dimensional data would
TrnLabels = TrnLabels(1:500:end); % subsample labels for the reduced demo
%%%%%%%%%%%%%%%%%%%%%%%
nTestImg = length(TestLabels);
%% LDANet parameters (they should be funed based on validation set; i.e., ValData &
% ValLabel)
LDANet.NumStages = 2;
LDANet.PatchSize = [5 5];
LDANet.HistBlockSize = [8 8];
LDANet.BlkOverLapRatio = 0.5;
LDANet.Pyramid = [4 2 1];
LDANet  % no semicolon: echoes the parameter struct to the console
%% LDANet Training with 10000 samples
tic;
% NOTE(review): the LDANet training / feature-extraction call that should
% produce `ftrain` is missing from this extract.
LDANet_TrnTime = toc;
c = 10;  % SVM regularization constant C
tic;
models = train(TrnLabels, ftrain', ['-s 1 -c ' num2str(c) ' -q']); % we use linear SVM classifier (C
% = 10), calling liblinear library
LinearSVM_TrnTime = toc;
clear TestData;
nCorrRecog = 0;
RecHistory = zeros(nTestImg,1);  % 1 where a test image is classified correctly
tic;
% NOTE(review): the enclosing `for idx = 1:nTestImg` loop header and the
% per-image prediction producing `xLabel_est` are missing from this extract;
% the stray `end` below closes that missing loop.
if xLabel_est == TestLabels(idx)
RecHistory(idx) = 1;
nCorrRecog = nCorrRecog + 1;
end
if 0==mod(idx,nTestImg/1000);
% NOTE(review): progress-report body missing in this extract.
end
TestData_ImgCell{idx} = [];  % release the processed image to limit memory use
end
Averaged_TimeperTest = toc/nTestImg;
Accuracy = nCorrRecog/nTestImg;
ErRate = 1 - Accuracy;  % classification error rate
%% Results display
% --- Backpropagation neural-network recognition demo with image display ---
% NOTE(review): this script is an incomplete extraction. `P` and `N`
% (training/test pattern matrices), `a` (random-init range end), `S1`/`S2`
% (layer sizes), the training loop header matching the orphaned `break`/`else`,
% `s1`, `itr`, `threshold`, `prompt`, `dlg_title`, `TrainDatabasePath`,
% `TestDatabasePath`, and `OutputName` are all referenced but never defined
% in the visible code. Recover the original source before running.
clear memory
clear all
clc
close all
tic;
% To open the browse option in the front end we fix the initial directory paths
% testing images
% Normalization
P=P/256;  % scale training patterns into [0,1)
N=N/256;  % scale test patterns into [0,1)
% targets
img=[196 35 234;
232 59 244 ;
243 57 226]';
T=img/256;;  % target outputs, scaled like the inputs
% backpropagation targets
algorithm;
[R,Q]=size(P);  % R = input dimension, Q = number of patterns
b=-0.3;
% NOTE(review): `a`, the other end of the [a,b] random-init range, is
% undefined here -- presumably a = +0.3 or similar; confirm against original.
W1=a + (b-a) *rand(S1,R); % Weights between Input and Hidden Neurons 9 columns 5 rows
W2=a + (b-a) *rand(S2,S1); % Weights between Hidden and Output Neurons 9 columns 5
% rows
b1=a + (b-a) *rand(S1,1); % Weights between Input and Hidden Neurons 1 column 5 rows
b2=a + (b-a) *rand(S2,1); % Weights between Hidden and Output Neurons 1 column 5 rows
n1=W1*P;        % hidden-layer net input
A1=logsig(n1);  % hidden-layer activation
n2=W2*A1;       % output-layer net input
A2=logsig(n2);  % output-layer activation
e=A2-T;         % output error
nntwarn off
% NOTE(review): the enclosing training loop (`for itr = ...`) and the
% convergence test guarding this `break`/`else` are missing from this extract.
break
else
for i=1:Q
df1=dlogsig(n1,A1(:,i));  % logsig derivative at the hidden layer
df2=dlogsig(n2,A2(:,i));  % logsig derivative at the output layer
s2 = -2*diag(df2) * e(:,i);  % output-layer sensitivity (backprop)
W2 = W2-0.1*s2*A1(:,i)';  % gradient-descent update, learning rate 0.1
b2 = b2-0.1*s2;
% NOTE(review): `s1` (hidden-layer sensitivity) is never computed in this
% extract; a line such as s1 = diag(df1)*W2'*s2 is presumably missing above.
W1 = W1-0.1*s1*P(:,i)';
b1 = b1-0.1*s1;
A1(:,i)=logsig(W1*P(:,i),b1);  % forward pass again with updated weights
A2(:,i)=logsig(W2*A1(:,i),b2);
end
e = T - A2;
error =0.5*mean(mean(e.*e));  % mean squared error over outputs and patterns
mse(itr)=error;  % record the learning curve for this iteration
end
end
TrnOutput=real(A2);
TrnOutput=real(A2>threshold);  % binarize training outputs (overwrites line above)
%n1=W1*N;
n1=W1*N;  % forward pass on the test patterns
A1=logsig(n1);
n2=W2*A1;
A2test=logsig(n2);
%TstOutput=real(A2test)
TstOutput=real(A2test>threshold);  % binarize test outputs
% recognition rate
wrong=size(find(TstOutput-T),1);  % count of mismatched output entries
num_lines= 1;
def = {'1'};
TestImage = inputdlg(prompt,dlg_title,num_lines,def);  % ask user for a test image number
TestImage = strcat(TestDatabasePath,'\',char(TestImage),'.bmp');
im = imread(TestImage);
toc;
recognition_rate=abs(50*(size(N,2)-wrong)/size(N,2))
T = CreateDatabase(TrainDatabasePath);  % project helper; builds the face database (overwrites target matrix T)
SelectedImage = strcat(TrainDatabasePath,'\',OutputName);
SelectedImage = imread(SelectedImage);
imshow(im)
title('Image to be tested');
figure,imshow(SelectedImage);
title('Equivalent Image');
disp('Final Result');
RESULTS