Commit 6f41b9f

Add files via upload
1 parent 666980f commit 6f41b9f

11 files changed: +455 -0 lines changed

BEEFCN.m

Lines changed: 138 additions & 0 deletions
@@ -0,0 +1,138 @@

function bestfis=BEEFCN(fis,data)
% Variables
p0=GettingFuzzyParameters(fis);
Problem.CostFunction=@(x) FuzzyCost(x,fis,data);
Problem.nVar=numel(p0);
alpha=1;
Problem.VarMin=-(10^alpha);
Problem.VarMax=10^alpha;
% Bees Algorithm Parameters
Params.MaxIt=15;
Params.nScoutBee = 10; % Number of Scout Bees
Params.nSelectedSite = round(0.5*Params.nScoutBee); % Number of Selected Sites
Params.nEliteSite = round(0.4*Params.nSelectedSite); % Number of Selected Elite Sites
Params.nSelectedSiteBee = round(0.5*Params.nScoutBee); % Number of Recruited Bees for Selected Sites
Params.nEliteSiteBee = 2*Params.nSelectedSiteBee; % Number of Recruited Bees for Elite Sites
Params.r = 0.1*(Problem.VarMax-Problem.VarMin); % Neighborhood Radius
Params.rdamp = 0.95; % Neighborhood Radius Damp Rate
% Starting Bees Algorithm
results=Runbee(Problem,Params);
% Getting the Results
p=results.BestSol.Position.*p0;
bestfis=FuzzyParameters(fis,p);
end

%% Bees
function results=Runbee(Problem,Params)
disp('Starting Bees Algorithm Training :)');
% Cost Function
CostFunction=Problem.CostFunction;
% Number of Decision Variables
nVar=Problem.nVar;
% Size of Decision Variables Matrix
VarSize=[1 nVar];
% Lower Bound of Variables
VarMin=Problem.VarMin;
% Upper Bound of Variables
VarMax=Problem.VarMax;
% Maximum possible distance in the search space (kept for reference; not used below)
if isscalar(VarMin) && isscalar(VarMax)
    dmax = (VarMax-VarMin)*sqrt(nVar);
else
    dmax = norm(VarMax-VarMin);
end
%% Bees Algorithm Parameters
MaxIt=Params.MaxIt;
nScoutBee = Params.nScoutBee; % Number of Scout Bees
nSelectedSite = Params.nSelectedSite; % Number of Selected Sites
nEliteSite = Params.nEliteSite; % Number of Selected Elite Sites
nSelectedSiteBee = Params.nSelectedSiteBee; % Number of Recruited Bees for Selected Sites
nEliteSiteBee = Params.nEliteSiteBee; % Number of Recruited Bees for Elite Sites
r = Params.r; % Neighborhood Radius
rdamp = Params.rdamp; % Neighborhood Radius Damp Rate
%% Initialization
% Empty Bee Structure
empty_bee.Position = [];
empty_bee.Cost = [];
% Initialize Bees Array
bee = repmat(empty_bee, nScoutBee, 1);
% Create New Solutions
for i = 1:nScoutBee
    bee(i).Position = unifrnd(VarMin, VarMax, VarSize);
    bee(i).Cost = CostFunction(bee(i).Position);
end
% Sort
[~, SortOrder] = sort([bee.Cost]);
bee = bee(SortOrder);
% Update Best Solution Ever Found
BestSol = bee(1);
% Array to Hold Best Cost Values
BestCost = zeros(MaxIt, 1);
%% Bees Algorithm Main Body
for it = 1:MaxIt
    % Elite Sites
    for i = 1:nEliteSite
        bestnewbee.Cost = inf;
        for j = 1:nEliteSiteBee
            newbee.Position = PerformBeeDance(bee(i).Position, r);
            newbee.Cost = CostFunction(newbee.Position);
            if newbee.Cost<bestnewbee.Cost
                bestnewbee = newbee;
            end
        end
        if bestnewbee.Cost<bee(i).Cost
            bee(i) = bestnewbee;
        end
    end
    % Selected Non-Elite Sites
    for i = nEliteSite+1:nSelectedSite
        bestnewbee.Cost = inf;
        for j = 1:nSelectedSiteBee
            newbee.Position = PerformBeeDance(bee(i).Position, r);
            newbee.Cost = CostFunction(newbee.Position);
            if newbee.Cost<bestnewbee.Cost
                bestnewbee = newbee;
            end
        end
        if bestnewbee.Cost<bee(i).Cost
            bee(i) = bestnewbee;
        end
    end
    % Non-Selected Sites (random global scouting)
    for i = nSelectedSite+1:nScoutBee
        bee(i).Position = unifrnd(VarMin, VarMax, VarSize);
        bee(i).Cost = CostFunction(bee(i).Position);
    end
    % Sort
    [~, SortOrder] = sort([bee.Cost]);
    bee = bee(SortOrder);
    % Update Best Solution Ever Found
    BestSol = bee(1);
    % Store Best Cost Ever Found
    BestCost(it) = BestSol.Cost;
    % Display Iteration Information
    disp(['In Iteration Number ' num2str(it) ' Bees Algorithm Fittest Value Is = ' num2str(BestCost(it))]);
    % Damp Neighborhood Radius
    r = r*rdamp;
end
disp('Bees Algorithm Came To an End :)');
% Store Results
results.BestSol=BestSol;
results.BestCost=BestCost;
% Plot Bees Algorithm Training Stages
set(gcf, 'Position', [600, 300, 500, 400])
plot(BestCost,':',...
    'LineWidth',2,...
    'MarkerSize',8,...
    'MarkerEdgeColor','g',...
    'Color',[0.1,0.9,0.1]);
title('Bees Algorithm Training')
xlabel('Bees Algorithm Iteration Number','FontSize',10,...
    'FontWeight','bold','Color','m');
ylabel('Bees Algorithm Fittest Value','FontSize',10,...
    'FontWeight','bold','Color','m');
legend({'Bees Algorithm Train'});
end

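A minimal, hypothetical sketch of driving BEEFCN on its own with a toy weight row, outside the main pipeline. It bypasses the JustLoad helper (referenced in the main script but not shown on this page) by filling the TrainInputs/TrainTargets fields that FuzzyCost expects directly; the vector length and cluster count are illustrative assumptions, not values from this commit.

% Hypothetical standalone usage of BEEFCN (illustrative only)
w = rand(1, 16);                             % toy stand-in for one row of fully connected weights
data.TrainInputs  = w(:);                    % one input variable, 16 samples
data.TrainTargets = w(:);                    % identity mapping, as in the main script
fis0 = GenerateFuzzy(data, 3);               % initial Sugeno FIS from FCM with 3 clusters
bestfis = BEEFCN(fis0, data);                % evolve the FIS parameters with the Bees Algorithm
w_fit = evalfis(data.TrainInputs, bestfis);  % evolved approximation of the weight row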

Bees CNN Algorithm.m

Lines changed: 178 additions & 0 deletions
@@ -0,0 +1,178 @@

%% Bees CNN Algorithm (A Fuzzy Evolutionary Deep Learning) - Created on 20 Jan 2022 by Seyed Muhammad Hossein Mousavi
% Deep learning weights and biases can be fitted with an evolutionary
% algorithm right after the training stage. Here, a CNN is used to classify 8
% face classes. After CNN training, an initial fuzzy model is created for each
% class to aid the learning process. Finally, the CNN weights (from the fully
% connected layer) are fitted with the Bees Algorithm, so they are tuned in a
% nature-inspired manner (here, the behavior of bees). You can use your own
% data with any number of samples and classes. Remember, the code's
% parameters are adjusted for this data, so if you replace the data you may
% have to change the parameters. The image data is 64*64 pixels, 2-dimensional,
% and stored in the 'CNNDat' folder. The important parameters are as below:
% 1.
% 'numTrainFiles' = change this based on the number of samples in each class.
% For example, if each class has 120 samples, 90 is good enough, as 90 samples
% are used for training and the rest for testing.
% 2.
% 'imageInputLayer' = the size of your image data, e.g. [64 64 1]
% 3.
% 'fullyConnectedLayer' = the number of your classes, e.g. (8)
% 4.
% 'MaxEpochs' = the more the better, at the price of run time, e.g. 40
% 5.
% 'ClusNum' = Fuzzy C-Means (FCM) cluster number; 3 or 4 is nice
% 6.
% These two are from the "BEEFCN.m" function:
% 'Params.MaxIt' = the iteration number of the Bees Algorithm; 20 is good
% 'Params.nScoutBee' = the population size of the Bees Algorithm, e.g. 10
% ------------------------------------------------
% Feel free to contact me if you find any problem using the code:
% Author: SeyedMuhammadHosseinMousavi
% My Email: mosavi.a.i.buali@gmail.com
% My Google Scholar: https://scholar.google.com/citations?user=PtvQvAQAAAAJ&hl=en
% My GitHub: https://github.com/SeyedMuhammadHosseinMousavi?tab=repositories
% My ORCID: https://orcid.org/0000-0001-6906-2152
% My Scopus: https://www.scopus.com/authid/detail.uri?authorId=57193122985
% My MathWorks: https://www.mathworks.com/matlabcentral/profile/authors/9763916#
% My RG: https://www.researchgate.net/profile/Seyed-Mousavi-17
% ------------------------------------------------
% Hope it helps you, enjoy the code, and wish me luck :)

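% Hedged illustration only (not part of the original commit): for a
% hypothetical dataset with 5 classes of 100 samples each, stored as 32*32
% grayscale images, the parameters described above (and set in the script
% below) would typically be adapted roughly as:
%   numTrainFiles = 75;           % ~75 of 100 samples per class for training
%   imageInputLayer([32 32 1])    % new image size
%   fullyConnectedLayer(5)        % new number of classes
%   ClusNum = 3;                  % FCM cluster count can usually stay small
% The pooling sizes in the layer stack may also need adjusting so the
% activations do not shrink below 1*1 for smaller images.
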
%% Cleaning
clear;
clc;
warning('off');

%% CNN Deep Neural Network
% Load the deep sample data as an image datastore.
deepDatasetPath = fullfile('CNNDat');
imds = imageDatastore(deepDatasetPath, ...
    'IncludeSubfolders',true, ...
    'LabelSource','foldernames');
% Divide the data into training and validation data sets
numTrainFiles = 90;
[imdsTrain,imdsValidation] = splitEachLabel(imds,numTrainFiles,'randomize');
% Define the convolutional neural network architecture.
layers = [
    % Image Input Layer
    imageInputLayer([64 64 1])
    % Convolutional Layer
    convolution2dLayer(3,8,'Padding','same')
    % Batch Normalization Layer
    batchNormalizationLayer
    % ReLU Layer
    reluLayer
    % Max Pooling Layer
    % (larger pooling sizes mean fewer downstream weights)
    maxPooling2dLayer(4,'Stride',4)
    %------------------------------
    convolution2dLayer(3,8,'Padding','same')
    batchNormalizationLayer
    reluLayer
    maxPooling2dLayer(5,'Stride',5)
    convolution2dLayer(3,8,'Padding','same')
    batchNormalizationLayer
    reluLayer
    % Fully Connected Layer (Number of Classes)
    fullyConnectedLayer(8)
    % Softmax Layer
    softmaxLayer
    % Classification Layer (final layer)
    classificationLayer];
% Specify the training options
options = trainingOptions('sgdm', ...
    'InitialLearnRate',0.001, ...
    'MaxEpochs',20, ...
    'Shuffle','every-epoch', ...
    'ValidationData',imdsValidation, ...
    'ValidationFrequency',8, ...
    'Verbose',false, ...
    'Plots','training-progress');
% Train the network
[net,info]= trainNetwork(imdsTrain,layers,options);

%% Bees Algorithm Weight Fitting
% Converting the Series Network to an Object
netobj = net.saveobj;
% Extracting the Fully Connected Layer's Weights To Evolve
FullConn=netobj.Layers(13, 1).Weights;
netbias=netobj.Layers(13, 1).Bias;

%% Data for Each Weight
103+
sizefinal=size(FullConn);
104+
sizefinal=sizefinal(1,1);
105+
for i=1:sizefinal
106+
Inputs=FullConn(i,:);
107+
Targets=Inputs;
108+
data.Inputs=Inputs;
109+
data.Targets=Targets;
110+
datam{i}=JustLoad(data);
111+
end;
112+
113+
%% Making Basic Fuzzy Model for Each Class Weight
114+
% Fuzzy C Means (FCM) Cluster Number
115+
ClusNum=3;
116+
% Creating Initial Fuzzy Model to Employ for Each Class Weight
117+
for i=1:sizefinal
118+
fism{i}=GenerateFuzzy(datam{i},ClusNum);
119+
end
120+
121+
%% Tarining Bees Algorithm
122+
% Fitting Fully Connected Layer's Weights with Bees Algorithm
123+
for i=1:sizefinal
124+
disp(['Bees Are Working on Weights of Class # (' num2str(i) ')']);
125+
BeesFISm{i}=BEEFCN(fism{i},datam{i});
126+
end;
127+
128+
%% Train Output Extraction
129+
for i=1:sizefinal
130+
TrTar{i}=datam{i}.TrainTargets;
131+
TrInp{i}=datam{i}.TrainInputs;
132+
TrainOutputs{i}=evalfis(TrInp{i},BeesFISm{i});
133+
end;
134+
% Train Errors
135+
for i=1:sizefinal
136+
tmp=datam{i};
137+
tt=tmp.TrainTargets;
138+
tp=TrainOutputs{i};
139+
Errors{i}=tt-tp;
140+
MSE{i}=mean(Errors{i}.^2);
141+
RMSE{i}=sqrt(MSE{i});
142+
error_mean{i}=mean(Errors{i});
143+
error_std{i}=std(Errors{i});
144+
end;
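
% Hypothetical aggregate report (not in the original commit): summarizes the
% per-row fitting errors computed above in a single number.
overallRMSE=mean(cell2mat(RMSE));
fprintf('Mean train RMSE over all fully connected weight rows = %0.6f\n',overallRMSE);
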
% Converting the Output Cell to a Matrix
for i=1:sizefinal
    EvolvedFullConn(i,:)=TrainOutputs{i}';
end

%% Replacing the Evolved Weights
netobj.Layers(13, 1).Weights=EvolvedFullConn;
% New Network
Newnet=netobj.Layers;
% Converting the Layer Array Back to a Series Network
BeesNet = assembleNetwork(Newnet);

%% Predict The Labels
% Common CNN Accuracy
YPred = classify(net,imdsValidation);
YValidation = imdsValidation.Labels;
CNNaccuracy = sum(YPred == YValidation)/numel(YValidation);
% Bees CNN Accuracy
YPredbee = classify(BeesNet,imdsValidation);
YValidationbee = imdsValidation.Labels;
Beesaccuracy = sum(YPredbee == YValidationbee)/numel(YValidationbee);

%% Confusion Matrix
figure;
plotconfusion(YPred,YValidation);
title(['CNN Accuracy = ' num2str(CNNaccuracy)]);
figure;
plotconfusion(YPredbee,YValidationbee);
title(['Bees-CNN Accuracy = ' num2str(Beesaccuracy)]);

%% Statistics
fprintf('The CNN Accuracy Is = %0.4f.\n',CNNaccuracy*100)
fprintf('The Bees CNN Accuracy Is = %0.4f.\n',Beesaccuracy*100)

Bees CNN.jpg

650 KB

CNNDat.rar

1.39 MB
Binary file not shown.

FuzzyCost.m

Lines changed: 21 additions & 0 deletions
@@ -0,0 +1,21 @@
function [z, out]=FuzzyCost(x,fis,data)
% Clip near-zero scale factors to a small magnitude
MinAbs=1e-5;
if any(abs(x)<MinAbs)
    S=(abs(x)<MinAbs);
    x(S)=MinAbs.*sign(x(S));
end
% Scale the base FIS parameters by x and write them back into the FIS
p0=GettingFuzzyParameters(fis);
p=x.*p0;
fis=FuzzyParameters(fis,p);
% Evaluate the scaled FIS on the training data and return the RMSE as the cost
x=data.TrainInputs;
t=data.TrainTargets;
y=evalfis(x,fis);
e=t-y;
MSE=mean(e(:).^2);
RMSE=sqrt(MSE);
z=RMSE;
out.fis=fis;
out.MSE=MSE;
out.RMSE=RMSE;

end
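As a hedged illustration (continuing the toy data and fis0 from the sketch after BEEFCN.m, not values from this commit), the cost function can be probed directly; a scale vector of ones evaluates the unscaled FIS:

p0 = GettingFuzzyParameters(fis0);                  % baseline parameter vector
[z, out] = FuzzyCost(ones(size(p0)), fis0, data);   % z is the train RMSE of the unscaled fis0
disp(out.RMSE)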

FuzzyParameters.m

Lines changed: 20 additions & 0 deletions
@@ -0,0 +1,20 @@
function fis=FuzzyParameters(fis,p)
% Writes the flat parameter vector p back into the membership functions of
% the FIS, consuming p in the same order used by GettingFuzzyParameters.
nInput=numel(fis.input);
for i=1:nInput
    nMF=numel(fis.input(i).mf);
    for j=1:nMF
        k=numel(fis.input(i).mf(j).params);
        fis.input(i).mf(j).params=p(1:k);
        p(1:k)=[];
    end
end
nOutput=numel(fis.output);
for i=1:nOutput
    nMF=numel(fis.output(i).mf);
    for j=1:nMF
        k=numel(fis.output(i).mf(j).params);
        fis.output(i).mf(j).params=p(1:k);
        p(1:k)=[];
    end
end
end

GenerateFuzzy.m

Lines changed: 16 additions & 0 deletions
@@ -0,0 +1,16 @@

function fis=GenerateFuzzy(data,nCluster)
% Builds an initial Sugeno FIS from the training data with FCM clustering.
if ~exist('nCluster','var')
    nCluster='auto';
end
x=data.TrainInputs;
t=data.TrainTargets;
% Important FCM parameters
fcm_U=2;            % fuzziness exponent
fcm_MaxIter=100;    % maximum number of FCM iterations
fcm_MinImp=1e-5;    % minimum improvement before stopping
fcm_Display=false;  % no iteration display
fcm_options=[fcm_U fcm_MaxIter fcm_MinImp fcm_Display];
fis=genfis3(x,t,'sugeno',nCluster,fcm_options);
end
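A quick hedged sanity check for GenerateFuzzy on synthetic one-dimensional data (the sine target and cluster count are illustrative assumptions only):

data.TrainInputs = linspace(0,1,50)';           % 50 samples, one input variable
data.TrainTargets = sin(2*pi*data.TrainInputs); % toy target
fis = GenerateFuzzy(data, 4);                   % Sugeno FIS with 4 FCM clusters
yhat = evalfis(data.TrainInputs, fis);          % fitted output before any tuning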

GettingFuzzyParameters.m

Lines changed: 17 additions & 0 deletions
@@ -0,0 +1,17 @@
function p=GettingFuzzyParameters(fis)
% Collects every membership function parameter of the FIS (inputs first,
% then outputs) into a single flat row vector p.
p=[];
nInput=numel(fis.input);
for i=1:nInput
    nMF=numel(fis.input(i).mf);
    for j=1:nMF
        p=[p fis.input(i).mf(j).params];
    end
end
nOutput=numel(fis.output);
for i=1:nOutput
    nMF=numel(fis.output(i).mf);
    for j=1:nMF
        p=[p fis.output(i).mf(j).params];
    end
end
end
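GettingFuzzyParameters and FuzzyParameters act as inverses over the flat parameter vector; a hedged round-trip sketch, reusing the toy fis from the GenerateFuzzy example above:

p = GettingFuzzyParameters(fis);    % flatten all MF parameters into one row vector
fis2 = FuzzyParameters(fis, p);     % write the same vector straight back
% fis2 should match fis, e.g. isequal(fis.input, fis2.input) is expected to be true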
