Você está na página 1 de 31

PATTERN

RECOGNITION
ASSIGNMENT - 1

By,

M RAKESH VAIDEESWARAN
ECE – B
Third Year
108116048
PART 1

REGULARISATION AND PSEUDO-INVERSE


TECHNIQUES FOR REGRESSION
x = linspace(-1,1,21);

t = [5.12 4.97 4.92 4.83 4.9 5.06 5.29 5.34 5.36 5.76 5.99 6.3 6.66
6.7 7.49 7.92 8.48 9.09 9.7 10.3 10.98];

true_value = [5 4.92 4.88 4.88 4.92 5 5.12 5.28 5.48 5.72 6 6.32 6.68
7.08 7.52 8 8.52 9.08 9.68 10.32 11];

train_1_x = [-1 -0.7 -0.4 -0.1 0.2 0.5 0.8];


train_1_t = [5.12 4.83 5.29 5.76 6.66 7.92 9.70];
train_2_x = [-0.9 -0.6 -0.3 0.0 0.3 0.6 0.9];
train_2_t = [4.97 4.90 5.34 5.99 6.70 8.48 10.30];
train_3_x = [-0.8 -0.5 -0.2 0.1 0.4 0.7 1.0];
train_3_t = [4.92 5.06 5.36 6.30 7.49 9.09 10.98];

lambd = [0 2 4 5 6 10];

row_num = 6;
column_num = 3;

VARIANCE = [];
BIAS = [];
W_1=zeros(row_num,column_num);
W_2=zeros(row_num,column_num);
W_3=zeros(row_num,column_num);

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
for l=1:1:numel(lambd)

basis_train_1 = [];
basis_train_1 = basis(train_1_x);

basis_transpose_1 = basis_train_1'; % transpose of basis function

I = [1 0 0;0 1 0; 0 0 1];

regularisation = (lambd(l))*I;

a = pinv((basis_train_1*basis_transpose_1) + regularisation);
b = a*basis_train_1;

w_1 = b*(train_1_t');% calculate w using pseudoinverse technique

train_1_y = (basis_transpose_1*w_1)'; % model y using calculated w

figure((l-1)*4 + 1)

scatter(train_1_x,train_1_t,40,'filled');
xlabel('x (input)');

1
ylabel('output')
title(['Training set 1 (Regularisation Constant = '
num2str(lambd(l)) ' )']);
legend('t')
hold on;
p=plot(train_1_x,train_1_y,'DisplayName','y-train-1');
p.LineWidth = 2;

x_out = linspace(-0.95,0.95,20);

basis_xout = basis(x_out);

hold on;
plot(x_out',((basis_xout')*w_1)','DisplayName','y-out','Color',[0 0
0]);
legend('Location','northwest');

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
basis_transpose_2 = [];

basis_train_2 = basis(train_2_x);

basis_transpose_2 = basis_train_2'; % transpose of basis function

a = pinv((basis_train_2*basis_transpose_2) + regularisation);
b = a*basis_train_2;

w_2 = b*(train_2_t'); % calculate w using pseudoinverse technique

train_2_y = (basis_transpose_2*w_2)'; % model y using calculated w

figure((l-1)*4 + 2)

scatter(train_2_x,train_2_t,40,'filled');
xlabel('x (input)');
ylabel('output')
title(['Training set 2 (Regularisation Constant = '
num2str(lambd(l)) ' )']);
legend('t');
hold on;
p = plot(train_2_x,train_2_y,'DisplayName','y-train-2');
p.LineWidth = 2;

hold on;
plot(x_out',((basis_xout')*w_2)','DisplayName','y-out','Color',[0 0
0]);
legend('Location','northwest');

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

2
basis_transpose_3 = [];

basis_train_3 = basis(train_3_x);

basis_transpose_3 = basis_train_3'; % transpose of basis function

a = pinv((basis_train_3*basis_transpose_3) + regularisation);
b = a*basis_train_3;

w_3 = b*(train_3_t'); % calculate w using pseudoinverse technique

train_3_y = (basis_transpose_3*w_3)'; % model y using calculated w

figure((l-1)*4 + 3)
scatter(train_3_x,train_3_t,40,'filled');
xlabel('x (input)');
ylabel('output')
title(['Training set 3 (Regularisation Constant = '
num2str(lambd(l)) ' )']);
legend('t');
hold on;
p = plot(train_3_x,train_3_y','DisplayName','y-train-3');
p.LineWidth = 2;

hold on;
p = plot(x_out',((basis_xout')*w_3)','DisplayName','y-out','Color',[0
0 0]);

legend('Location','northwest');

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

figure((l-1)*4 + 4)

sz = 40;
scatter(train_1_x,train_1_t,sz,'MarkerEdgeColor',[0.5 0 0],...
'MarkerFaceColor',[0.5 0 0],...
'LineWidth',1.5)

hold on;

scatter(train_2_x,train_2_t,sz,'MarkerEdgeColor',[0 .5 0],...
'MarkerFaceColor',[0 .5 0],...
'LineWidth',1.5);
hold on;

scatter(train_3_x,train_3_t,sz,'MarkerEdgeColor',[0 0 .5],...
'MarkerFaceColor',[0 0 .5],...

3
'LineWidth',1.5);

legend({'t1','t2','t3'});
hold on;
p = plot(train_1_x,((basis_train_1')*w_1)','Color',[0.5 0
0],'DisplayName','Model-1');
p.LineWidth = 2;
hold on;

hold on;
p = plot(train_2_x,((basis_train_2')*w_2)','Color',[0 .5
0],'DisplayName','Model-2');
p.LineWidth = 2;
hold on;

hold on;
p = plot(train_3_x',((basis_train_3')*w_3)','Color',[0 0
0.5],'DisplayName','Model-3');
p.LineWidth = 2;

hold on;

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

pred_model_1 = (basis(x)')*w_1;
pred_model_2 = (basis(x)')*w_2;
pred_model_3 = (basis(x)')*w_3;

pred_mean = (pred_model_1 + pred_model_2 + pred_model_3)/3;

p = plot(x,pred_mean,'Color',[0.8 0.8 0.2],'DisplayName','Mean');


title(['Different Models (Regularisation Constant = '
num2str(lambd(l)) ' )'])
xlabel('Input')
ylabel('Output')
p.LineWidth = 2;
legend('Location','northwest');
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

variance_1 = (sum((pred_model_1 - pred_mean).^2))/numel(x);


variance_2 = (sum((pred_model_2 - pred_mean).^2))/numel(x);
variance_3 = (sum((pred_model_3 - pred_mean).^2))/numel(x);

variance_overall = (variance_1 + variance_2 + variance_3)/3;


VARIANCE = [VARIANCE, variance_overall];

4
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

bias_1 = (sum((pred_model_1' - true_value).^2))/numel(x);


bias_2 = (sum((pred_model_2' - true_value).^2))/numel(x);
bias_3 = (sum((pred_model_3' - true_value).^2))/numel(x);

bias_overall = (bias_1 + bias_2 + bias_3)/3;

BIAS = [BIAS, bias_overall];

W_1(l,:) = w_1;
W_2(l,:) = w_2;
W_3(l,:) = w_3;

end

Regularisation_Constant = lambd';
Model_1_weights = W_1;
Model_2_weights = W_2;
Model_3_weights = W_3;
Variance = VARIANCE';
Bias = BIAS';

T1 = table(Regularisation_Constant, Model_1_weights)
T2 = table(Regularisation_Constant, Model_2_weights)
T3 = table(Regularisation_Constant, Model_3_weights)
T4 = table(Regularisation_Constant, Variance)
T5 = table(Regularisation_Constant, Bias)

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

figure(25)
plot(lambd,BIAS);
title('Regularisation constant vs Bias')
xlabel('Regularisation constant')
ylabel('Bias')

figure(26)
plot(lambd, VARIANCE);
title('Regularisation constant vs Variance')
xlabel('Regularisation constant')
ylabel('Variance')

function b = basis(x)
% BASIS  Evaluate the three regression basis functions at each element of x.
%
%   b = BASIS(x) returns a 3 x numel(x) matrix whose rows are:
%     1) a constant (bias) term of ones,
%     2) a Gaussian bump centred at +0.5 with width parameter 0.1,
%     3) a Gaussian bump centred at -0.5 with width parameter 0.1.
%
%   x is expected to be a row vector of input locations.

basis_1 = ones(1, numel(x));            % constant term (vectorised, no loop)
basis_2 = exp(-((x - 0.5).^2)/0.1);     % Gaussian centred at +0.5
basis_3 = exp(-((x + 0.5).^2)/0.1);     % Gaussian centred at -0.5

b = [basis_1; basis_2; basis_3];

end

T1 =

6×2 table

Regularisation_Constant Model_1_weights
_______________________ ____________________________

0 6.2132 2.4837 -1.4916


2 4.5751 1.8929 0.33193
4 3.7912 1.4532 0.48305
5 3.4956 1.3096 0.49414
6 3.2435 1.1943 0.49107
10 2.5191 0.89041 0.43721

T2 =

6×2 table

Regularisation_Constant Model_2_weights
_______________________ ____________________________

0 7.1079 1.173 -2.8258


2 4.7852 1.7609 0.18076
4 3.9384 1.3944 0.4098
5 3.626 1.2656 0.43717
6 3.361 1.16 0.44492
10 2.6051 0.87429 0.41219

T3 =

6×2 table

6
Regularisation_Constant Model_3_weights
_______________________ __________________________________

0 8.1534 -0.074749 -4.2017


2 5.1233 1.665 -0.0094696
4 4.1877 1.3666 0.31761
5 3.8494 1.2501 0.36596
6 3.5643 1.1519 0.38774
10 2.7563 0.87803 0.38234

T4 =

6×2 table

Regularisation_Constant Variance
_______________________ _________

0 0.16512
2 0.028759
4 0.019096
5 0.016158
6 0.013875
10 0.0083373

T5 =

6×2 table

Regularisation_Constant Bias
_______________________ ______

0 2.1745
2 4.9094
4 8.4765
5 10.233
6 11.914
10 17.695

7
8
9
10
11
12
13
14
15
16
17
18
19
20
Published with MATLAB® R2017b

21
PART 2

SEQUENTIAL LEARNING
% PART 2: Sequential learning of the basis-function weights with the
% LMS (stochastic gradient descent) update rule. The first 14 grid points
% train the model; the last 7 are held out, and the test-set SSE is
% recorded after every epoch.

x = linspace(-1,1,21);

t = [5.12 4.97 4.92 4.83 4.9 5.06 5.29 5.34 5.36 5.76 5.99 6.3 6.66 ...
     6.7 7.49 7.92 8.48 9.09 9.7 10.3 10.98];

number_of_epochs = 10;   % number of full passes over the training data
eta = 0.1;               % learning rate
w = [0 0 0];             % initial weights (one per basis function)

% Fixed train/test split; hoisted out of the epoch loop (it never changes).
train_x = x(1:14);
train_t = t(1:14);
test_x = x(15:21);
test_t = t(15:21);

sse = zeros(1, number_of_epochs);   % test SSE after each epoch

for k = 1:number_of_epochs

    % One pass over the training data, updating w after every sample.
    for i = 1:numel(train_x)
        b1 = 1;
        b2 = exp(-((train_x(i) - 0.5)^2)/0.1);
        b3 = exp(-((train_x(i) + 0.5)^2)/0.1);

        phi = [b1 b2 b3];                 % basis-function row vector
        err = train_t(i) - phi*w';        % prediction error for this sample
        w = w + eta*err*phi;              % LMS weight update
    end

    % Sum of squared errors on the held-out points after this epoch.
    sqer = 0;
    for j = 1:numel(test_x)
        b1 = 1;
        b2 = exp(-((test_x(j) - 0.5)^2)/0.1);
        b3 = exp(-((test_x(j) + 0.5)^2)/0.1);

        phi_test = [b1; b2; b3];
        e = test_t(j) - w*phi_test;
        sqer = sqer + e*e;
    end

    sse(k) = sqer;

end

epoch = 1:number_of_epochs;

final_weights = w;
disp('The Weight matrix after 10 epochs is: ')
disp(final_weights)

figure(1)
plot(epoch, sse, 'Color', [0 0 0]);
title('SSE (Sum Squared Error) vs Epoch')
xlabel('Epoch')
ylabel('SSE')
The Weight matrix after 10 epochs is:


5.5282 1.4603 -0.0137

2
Published with MATLAB® R2017b

3
PART 3

KERNEL TRICK FOR REGRESSION


TECHNIQUES
% PART 3: Nadaraya-Watson kernel regression. The kernel variance is
% selected by minimising SSE on a validation set; predictions and SSE are
% then reported for the test set at the chosen variance.

train_x = [-1 -0.7 -0.4 -0.1 0.2 0.5 0.8];
train_t = [5.12 4.83 5.29 5.76 6.66 7.92 9.70];

validation_x = [-0.9 -0.6 -0.3 0.0 0.3 0.6 0.9];
validation_t = [4.97 4.90 5.34 5.99 6.70 8.48 10.30];

test_x = [-0.8 -0.5 -0.2 0.1 0.4 0.7 1.0];
test_t = [4.92 5.06 5.36 6.30 7.49 9.09 10.98];

variances = 0.1:0.1:0.5;   % candidate kernel variances

SSE_VALIDATION = [];
SSE_TEST = [];
TEST_Y = [];

for var = variances

    % Kernel-weighted average of the training targets at each validation point.
    validation_y = [];
    for k = 1:numel(validation_x)
        M = [];
        for i = 1:numel(train_x)
            M = [M, kernel(train_x(i), validation_x(k), var)];
        end
        M = M/sum(M);                         % normalise kernel weights
        validation_y = [validation_y, M*train_t'];
    end

    SSE_validation = (validation_y - validation_t)*(validation_y - validation_t)';
    SSE_VALIDATION = [SSE_VALIDATION; SSE_validation];

    % Same smoother evaluated at the test points.
    test_y = [];
    for k = 1:numel(test_x)
        M = [];
        for i = 1:numel(train_x)
            M = [M, kernel(train_x(i), test_x(k), var)];
        end
        M = M/sum(M);
        test_y = [test_y, M*train_t'];
    end

    SSE_test = (test_y - test_t)*(test_y - test_t)';
    SSE_TEST = [SSE_TEST; SSE_test];
    TEST_Y = [TEST_Y; test_y];

end

Variance = num2cell(variances');

T = table(Variance, SSE_VALIDATION)

% BUG FIX: the original used find(min(SSE_VALIDATION)), which returns 1 for
% ANY nonzero minimum value and only happened to give the right answer
% because the best variance was the first candidate. Use the index output
% of min() instead.
[~, best_idx] = min(SSE_VALIDATION);

disp('Optimum SSE_Validation is for Variance:')
disp(variances(best_idx))

disp('Predictions for Test Set:')

Actual_Output = test_t';
Predicted_Output = TEST_Y(best_idx, :)';

T = table(Actual_Output, Predicted_Output)

disp('SSE for Test Data is:')
disp(SSE_TEST(best_idx))

function k = kernel(x1, x2, var)
% KERNEL  Gaussian (RBF) similarity between scalar inputs x1 and x2.
%   k = exp(-(x1 - x2)^2 / (2*var)), where var controls the kernel width.

d = x1 - x2;
k = exp(-(d*d)/(2*var));

end

T =

5×2 table

Variance SSE_VALIDATION
________ ______________

[0.1000] 2.2453
[0.2000] 4.3229
[0.3000] 6.2575
[0.4000] 8.0186
[0.5000] 9.5804

Optimum SSE_Validation is for Variance:

2
0.1000

Predictions for Test Set:

T =

7×2 table

Actual_Output Predicted_Output
_____________ ________________

4.92 5.0624
5.06 5.2603
5.36 5.7373
6.3 6.5161
7.49 7.5413
9.09 8.5095
10.98 9.1397

SSE for Test Data is:


3.9757

Published with MATLAB® R2017b

Você também pode gostar