ANN With GA

%--------------------------------------------------------------------------
% Training
%--------------------------------------------------------------------------

clear % clear the workspace
clc % clear the command window
tic % start the stopwatch

% Task 1: Load and normalize training data
%--------------------------------------------------------------------------

load('Lightning_Data.mat'); % Load the historical data (assuming it loads as variable 'LightningData')

tr_pattern = 300; % Number of patterns for training data
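% The remaining patterns (tr_pattern+1 to 483) are reserved for testing later in this script.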

% Extract input and target data as matrices (not tables)
inputrain = table2array(LightningData(9:19, 1:tr_pattern)); % 11 input data
targetrain = table2array(LightningData(1:8, 1:tr_pattern)); % 8 target data

% Normalize using mapminmax
[pn, input_settings] = mapminmax(inputrain);
[tn, target_settings] = mapminmax(targetrain);
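
% Note: the settings structures returned by mapminmax must be reused whenever
% new data is scaled, so that it lands in the same [-1, 1] range as the
% training data. A minimal sketch (the variable names below are hypothetical):
%   new_pn  = mapminmax('apply',   new_inputs,  input_settings);
%   raw_out = mapminmax('reverse', new_outputs, target_settings);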

% Task 2: ANN design and initialization
%--------------------------------------------------------------------------

net = newff(minmax(pn), [64, 8], {'logsig', 'purelin'}, 'trainlm');
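% Architecture: 11 normalized inputs -> one hidden layer of 64 log-sigmoid
% neurons -> 8 linear outputs, trained with Levenberg-Marquardt ('trainlm').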

net = init(net); % initialize the network
net.trainParam.show = 1; % frequency for updates during training
net.trainParam.lr = 0.01; % learning rate (alpha); note: not used by 'trainlm'
net.trainParam.mc = 0.5; % momentum constant (beta); note: used by gradient-descent training functions, not by 'trainlm'
net.trainParam.epochs = 1000; % iterations
net.trainParam.goal = 1e-2; % goal
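
% Optional sketch (assumption, not part of the original design): part of the
% training patterns could be held out for validation-based early stopping:
%   net.divideFcn = 'dividerand';
%   net.divideParam.trainRatio = 0.8;
%   net.divideParam.valRatio   = 0.2;
%   net.divideParam.testRatio  = 0;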

% Task 3: Training and save the network (if successful)
%--------------------------------------------------------------------------

[net, tr] = train(net, pn, tn); % train the network

train_mse = tr.best_perf; % mean square error
train_goal = net.trainParam.goal; % goal

if train_mse <= train_goal

save ANN_Lightning.mat net input_settings target_settings; % save the trained network and the normalization settings
save training_patterns.mat tr_pattern; % no. of training patterns
fprintf('\n Training was successful. Proceed to GA optimization...\n\n\n');
pause(2)

%--------------------------------------------------------------------------
% Genetic Algorithm Optimization
%--------------------------------------------------------------------------

% Define GA options
options = optimoptions('ga', 'MaxGenerations', 50, 'PopulationSize', 20, 'Display', 'iter');

% Define the fitness function for GA
fitnessFunc = @(x) annFitness(x, net, pn, tn); % Create fitness function handle
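
% Sketch (assumption): the GA search can be seeded with the weights already
% found by backpropagation, so it starts near the trained solution:
%   w0 = getwb(net)'; % current weights and biases as a row vector
%   options = optimoptions(options, 'InitialPopulationMatrix', w0);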

% Run Genetic Algorithm (one variable per network weight/bias element)
nvars = net.numWeightElements; % total number of weights and biases
[bestParams, bestFitness] = ga(fitnessFunc, nvars, [], [], [], [], [], [], [], options);

% Update network weights and biases with the best parameters found
net = setwb(net, bestParams(:));

% Re-save so the testing section below loads the GA-optimized network
save ANN_Lightning.mat net input_settings target_settings;

% Proceed to testing
fprintf('\n GA optimization completed. Proceeding to testing...\n\n\n');
%--------------------------------------------------------------------------
% Testing
%--------------------------------------------------------------------------

clear % clear the workspace

% Task 4: Load the testing data
%--------------------------------------------------------------------------
load('Lightning_Data.mat'); % Reload the historical data
load('training_patterns.mat'); % Load the no. of training patterns
inputest = table2array(LightningData(9:19, tr_pattern+1:483)); % 11 input data
targetest = table2array(LightningData(1:8, tr_pattern+1:483)); % 8 target data

% Task 5: Network call, normalization, simulation, and post-regression analysis
%--------------------------------------------------------------------------

load('ANN_Lightning.mat'); % Call the trained network and the normalization settings

% Normalize the test data with the same settings used for the training data
pn = mapminmax('apply', inputest, input_settings);
tn = mapminmax('apply', targetest, target_settings);

an = sim(net, pn); % Simulate the GA-optimized network

[m, b, r] = postreg(an(:)', tn(:)'); % Post-regression analysis (overall R across all outputs)

if r >= 0.95 % If correlation coefficient at least 0.95, then:
save ANN_successful.mat net; % Save the successful network
end
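
% Sketch (assumption): per-output regression statistics can also be examined
% by calling postreg on each of the eight target rows separately:
%   for k = 1:size(tn, 1)
%       [mk, bk, rk] = postreg(an(k, :), tn(k, :));
%   end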
% Task 6: Comparison of predicted output vs actual output
%--------------------------------------------------------------------------
xx = (tr_pattern+1):483;
a = mapminmax('reverse', an, target_settings); % Denormalize 'an'
predicted_vs_actual = [xx' a' targetest']; % [hour, predicted outputs, actual outputs]
% Plotting the graph
figure(2);
plot(xx, a, 'r', xx, targetest, 'b');
xlabel('Hour');
ylabel('Output');
legend('Predicted', 'Actual (target)');
title('Predicted versus actual output');
fprintf('\n Testing of ANN completed with R = %.4f\n', r);
else
fprintf('\n Training failed. Re-train the network\n\n\n');
end

toc % Stop watch

%--------------------------------------------------------------------------
% Fitness Function for GA
%--------------------------------------------------------------------------
function mse = annFitness(params, net, pn, tn)
% Set network weights and biases (ga passes a row vector; coerce to a column)
net = setwb(net, params(:));
% Simulate the network on the normalized training inputs
an = sim(net, pn);
% Calculate the scalar mean squared error over all outputs and patterns
mse = mean((an(:) - tn(:)).^2);
end
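
% Note: the same scalar error could be obtained with the toolbox helper,
% assuming the network keeps its default 'mse' performance function:
%   mse = perform(net, tn, an);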
