Perceptron and Binary Classification in MATLAB

29 April 2022

One can find the explanation in the file below if you prefer a more textual treatment.

Explain Me with Code!

% Prepare the environment.
clear;
clc;
% Load the variables from the data file.
load("LAB1_Data.mat");
% Choose the data set assigned to you.
data = zesp_2;
% Create a new figure and plot our data into it.
figure;
% The first 5 columns belong to Class A; the others belong to Class B.
plot(data(1,1:5), data(2,1:5), 'or')
hold on
plot(data(1,6:11), data(2,6:11), '*g')
legend("Class A", "Class B")

% Move the legend to the top left of the plot.
legend("Position",[0.1631,0.80913,0.16607,0.082143])

grid on
title("Graph of All Data")
% Remove the non-fitting datapoint (column 6) from the data.
new_data = [data(:,1:5) data(:,7:11)];

% Create new figure.
figure

% Plot the graph without non-fitted data.
plot(new_data(1, 1:5), new_data(2, 1:5), 'or')
hold on
plot(new_data(1, 6:10), new_data(2, 6:10), '*g')

% Graph Settings
legend("Class A", "Class B")
legend("Position",[0.1631,0.80913,0.16607,0.082143])
grid on
title("Graph of the Cleaned Data")
% Divide the data into inputs and answers (the targets).
IN = new_data(1:2, :);
TARGET = new_data(3, :);
% newp(): Creates a new perceptron. "hardlims" gives a -1/+1 step
% function, whereas "hardlim" gives a 0/1 step function.
net_all_data = newp(IN, TARGET, 'hardlims');
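% A quick sanity check (arbitrary sample values, not from the data set)
% showing how the two transfer functions differ on negative inputs:
disp(hardlims([-2 0 3])) % prints -1  1  1
disp(hardlim([-2 0 3]))  % prints  0  1  1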

% sim(): Finds the outputs of the created perceptron for given inputs.
% Then we compute the error against the known targets.
out_net_all_data = sim(net_all_data, IN);
error_net_all_data = TARGET - out_net_all_data;

% train(): Adjusts the weights, trying to find the best model.
% It opens a GUI that shows the training details.

net_all_data = train(net_all_data, IN, TARGET);
out_net_all_data_train = sim(net_all_data,IN);
error_net_all_data_train = TARGET - out_net_all_data_train;

% The GUI will open. Input (2) means there are two inputs,
% Layer (1) means there is one perceptron, and the same goes for the output.
% After running the section, one can see that error_net_all_data is
% worse than error_net_all_data_train, since the latter comes from the
% trained network.
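% To make the comparison concrete, count the misclassified points:
% the error is nonzero exactly where the output and target disagree.
fprintf('Misclassified before training: %d\n', sum(error_net_all_data ~= 0));
fprintf('Misclassified after training:  %d\n', sum(error_net_all_data_train ~= 0));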
% Weights of first property of the neuron.
weights_1a = net_all_data.IW{1}(1);
% Weights of second property of the neuron.
weights_2a = net_all_data.IW{1}(2);
% Bias of the neuron.
bias_a = net_all_data.b{1}(1);

$$
w_1 x_1 + w_2 x_2 + b = 0 \;\;\Rightarrow\;\; x_2 = -\frac{w_1}{w_2}\,x_1 - \frac{b}{w_2}
$$

% Plot the classification line.
plot([-5 3],[-weights_1a/weights_2a*(-5)-bias_a/weights_2a -weights_1a/weights_2a*3-bias_a/weights_2a], "b")
legend("Class A", "Class B", "Classification Function Line");

grid on
legend("Position", [0.15433,0.77401,0.325,0.11905])
% Eliminate four datapoints (columns 2, 4, 8, and 10) to see the
% importance of individual points.
new2_data = new_data(:, [1, 3, 5, 6, 7, 9]);
figure;
% Plot all of the Class A data.
plot(new_data(1, 1:5),new_data(2, 1:5),'or');
hold on;
% Plot the remaining Class A data.
plot(new2_data(1, 1:3), new2_data(2, 1:3),'*r');
% Plot all of the Class B data.
plot(new_data(1, 6:10), new_data(2, 6:10),'*g');
% Plot the remaining Class B data.
plot(new2_data(1, 4:6), new2_data(2, 4:6),'og');

legend("Class A","Remained Class A", "Class B", "Remained Class B");

grid on
legend("Position", [0.14606,0.75675,0.26964,0.15595])
% Select the inputs and targets to feed into the neuron.
IN_w = new2_data(1:2,:);
TARGET_w = new2_data(3,:);

% Create perceptron neuron.
net_ch_data = newp(IN_w, TARGET_w, 'hardlims');
% Simulate with the validation set (same as the input set).
out_net_ch_data = sim(net_ch_data, IN_w);
% Find the error.
error_net_ch_data = TARGET_w - out_net_ch_data;
% Train the perceptron weights.
net_ch_data = train(net_ch_data, IN_w, TARGET_w);
% Simulate it again.
out_net_ch_data_train = sim(net_ch_data,IN_w);
% Find the error.
error_net_ch_data_train = TARGET_w - out_net_ch_data_train;

% Get the weights and bias.
weights_1c = net_ch_data.IW{1}(1);
weights_2c = net_ch_data.IW{1}(2);
bias_c = net_ch_data.b{1}(1);
% Plot the new classification line
plot([-5 3],[-weights_1c/weights_2c*(-5)- bias_c/weights_2c -weights_1c/weights_2c*3- bias_c/weights_2c], "m-")
legend("Class A", "Class B", "Classification Trend Line");

grid on
legend("Position",[0.15985,0.78175,0.325,0.11905])
title("Classification Graph of Deleted Data")
% Let's see everything in one graph.

% Create new figure.
figure;
% Plot the original Class A data.
plot(data(1, 1:5), data(2, 1:5), 'or');
hold on;
% Plot the original Class B data.
plot(data(1, 6:11), data(2, 6:11), 'og');
% Mark the non-fitting datapoint with a diamond.
plot(data(1, 6), data(2, 6),'db');
% Mark the remaining datapoints with stars.
plot(new2_data(1,1:3), new2_data(2,1:3), '*r')
plot(new2_data(1,4:6),new2_data(2,4:6),'*g')
% Plot the trend lines.
plot([-5 3],[-weights_1a/weights_2a*(-5)- bias_a/weights_2a -weights_1a/weights_2a*3- bias_a/weights_2a],"b", 'LineWidth', 2)
plot([-5 3],[-weights_1c/weights_2c*(-5)- bias_c/weights_2c -weights_1c/weights_2c*3- bias_c/weights_2c],"c", 'LineWidth',2)
legend("Class A", ...
    "Class B", ...
    "Not-fitted Data", ...
    "Remaining Class A", ...
    "Remaining Class B", ...
    "Classification for All Data", ...
    "Classification for Remaining Data" ...
    )

grid on
legend("Position",[0.064727,0.68633,0.45161,0.26731])

Files