%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%Gaining-Sharing Knowledge Based Algorithm for Solving Optimization
%%Problems: A Novel Nature-Inspired Algorithm
%% Authors: Ali Wagdy Mohamed, Anas A. Hadi, Ali Khater Mohamed
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% GSK benchmark driver: runs the Gaining-Sharing Knowledge based algorithm
% on the 30 CEC2017 test functions for dimensions {10,30,50,100}.
% For each (function, dimension) pair it performs Run_No independent runs,
% records the best-so-far error per evaluation, and saves per-function
% outcomes, convergence curves, and a summary statistics matrix under the
% Results/ and Figures/ folders.
clc;
clearvars;          % preferred over 'clear all' (does not purge compiled code/breakpoints)
format long;

Alg_Name   = 'GSK';
n_problems = 30;    % number of CEC2017 benchmark functions
ConvDisp   = 1;     % 1 -> collect convergence data for figures
Run_No     = 51;    % independent runs per function

for problem_size = [10 30 50 100]
    max_nfes = 10000 * problem_size;   % CEC2017 function-evaluation budget
    rng('shuffle');                    % replaces deprecated rand('seed', sum(100*clock))
    val_2_reach = 10^(-8);             % error below this threshold counts as solved
    max_region =  100.0;
    min_region = -100.0;
    % Row 1: lower bounds, row 2: upper bounds of the search region.
    lu = [min_region * ones(1, problem_size); max_region * ones(1, problem_size)];
    fhd = @cec17_func;
    analysis = zeros(n_problems, 6);   % was hard-coded 30; keep consistent with n_problems

    for func = 1 : n_problems
        optimum = func * 100.0;        % known optimum of CEC2017 function #func
        outcome = [];                  % best error value of each run
        fprintf('\n-------------------------------------------------------\n')
        fprintf('Function = %d, Dimension size = %d\n', func, problem_size)
        dim1 = [];                     % per-run evaluation indices (convergence x-axis)
        dim2 = [];                     % per-run log10 error values (convergence y-axis)

        for run_id = 1 : Run_No
            run_funcvals = [];
            pop_size = 100;
            G_Max = fix(max_nfes / pop_size);   % total number of generations

            %% Initialize the population uniformly inside [lu(1,:), lu(2,:)]
            popold = repmat(lu(1, :), pop_size, 1) + rand(pop_size, problem_size) .* (repmat(lu(2, :) - lu(1, :), pop_size, 1));
            pop = popold;              % the old population becomes the current population
            fitness = feval(fhd, pop', func);
            fitness = fitness';

            nfes = 0;
            bsf_fit_var = 1e+300;      % best-so-far fitness (minimization)

            % Record the best-so-far value for the initial evaluations.
            for i = 1 : pop_size
                nfes = nfes + 1;
                if nfes > max_nfes; break; end
                if fitness(i) < bsf_fit_var
                    bsf_fit_var = fitness(i);
                end
                run_funcvals = [run_funcvals; bsf_fit_var];
            end

            %% GSK parameter settings
            KF = 0.5;                      % Knowledge Factor
            KR = 0.9;                      % Knowledge Ratio
            K  = 10 * ones(pop_size, 1);   % Knowledge Rate
            g  = 0;                        % generation counter

            %% Main generation loop
            while nfes < max_nfes
                g = g + 1;
                % Number of dimensions handled by the junior scheme shrinks
                % as g approaches G_Max; the senior scheme takes the rest.
                D_Gained_Shared_Junior = ceil((problem_size) * (1 - g / G_Max).^K);
                D_Gained_Shared_Senior = problem_size - D_Gained_Shared_Junior;

                pop = popold;          % the old population becomes the current population
                [~, indBest] = sort(fitness, 'ascend');
                [Rg1, Rg2, Rg3] = Gained_Shared_Junior_R1R2R3(indBest);
                [R1, R2, R3] = Gained_Shared_Senior_R1R2R3(indBest);

                %% Junior gaining-sharing phase
                R01 = 1:pop_size;
                Gained_Shared_Junior = zeros(pop_size, problem_size);
                ind1 = fitness(R01) > fitness(Rg3);
                if (sum(ind1) > 0)
                    Gained_Shared_Junior(ind1, :) = pop(ind1, :) + KF * ones(sum(ind1), problem_size) .* (pop(Rg1(ind1), :) - pop(Rg2(ind1), :) + pop(Rg3(ind1), :) - pop(ind1, :));
                end
                ind1 = ~ind1;
                if (sum(ind1) > 0)
                    Gained_Shared_Junior(ind1, :) = pop(ind1, :) + KF * ones(sum(ind1), problem_size) .* (pop(Rg1(ind1), :) - pop(Rg2(ind1), :) + pop(ind1, :) - pop(Rg3(ind1), :));
                end

                %% Senior gaining-sharing phase
                R0 = 1:pop_size;
                Gained_Shared_Senior = zeros(pop_size, problem_size);
                ind = fitness(R0) > fitness(R2);
                if (sum(ind) > 0)
                    Gained_Shared_Senior(ind, :) = pop(ind, :) + KF * ones(sum(ind), problem_size) .* (pop(R1(ind), :) - pop(ind, :) + pop(R2(ind), :) - pop(R3(ind), :));
                end
                ind = ~ind;
                if (sum(ind) > 0)
                    Gained_Shared_Senior(ind, :) = pop(ind, :) + KF * ones(sum(ind), problem_size) .* (pop(R1(ind), :) - pop(R2(ind), :) + pop(ind, :) - pop(R3(ind), :));
                end

                % Repair components that left the search region.
                Gained_Shared_Junior = boundConstraint(Gained_Shared_Junior, pop, lu);
                Gained_Shared_Senior = boundConstraint(Gained_Shared_Senior, pop, lu);

                % Build trial vectors: each dimension is taken from the junior
                % or senior candidate, both gated by the knowledge ratio KR.
                D_Gained_Shared_Junior_mask = rand(pop_size, problem_size) <= (D_Gained_Shared_Junior(:, ones(1, problem_size)) ./ problem_size);
                D_Gained_Shared_Senior_mask = ~D_Gained_Shared_Junior_mask;
                D_Gained_Shared_Junior_rand_mask = rand(pop_size, problem_size) <= KR * ones(pop_size, problem_size);
                D_Gained_Shared_Junior_mask = and(D_Gained_Shared_Junior_mask, D_Gained_Shared_Junior_rand_mask);
                D_Gained_Shared_Senior_rand_mask = rand(pop_size, problem_size) <= KR * ones(pop_size, problem_size);
                D_Gained_Shared_Senior_mask = and(D_Gained_Shared_Senior_mask, D_Gained_Shared_Senior_rand_mask);
                ui = pop;
                ui(D_Gained_Shared_Junior_mask) = Gained_Shared_Junior(D_Gained_Shared_Junior_mask);
                ui(D_Gained_Shared_Senior_mask) = Gained_Shared_Senior(D_Gained_Shared_Senior_mask);

                % Evaluate the trial population.
                children_fitness = feval(fhd, ui', func);
                children_fitness = children_fitness';

                for i = 1 : pop_size
                    nfes = nfes + 1;
                    if nfes > max_nfes; break; end
                    if children_fitness(i) < bsf_fit_var
                        bsf_fit_var = children_fitness(i);
                        bsf_solution = ui(i, :);
                    end
                    run_funcvals = [run_funcvals; bsf_fit_var];
                end

                % Greedy selection: keep the better of parent and child.
                [fitness, Child_is_better_index] = min([fitness, children_fitness], [], 2);
                popold = pop;
                popold(Child_is_better_index == 2, :) = ui(Child_is_better_index == 2, :);
            end % end while loop

            bsf_error_val = bsf_fit_var - optimum;
            if bsf_error_val < val_2_reach
                bsf_error_val = 0;     % below threshold counts as exactly solved
            end
            fprintf('%d th run, best-so-far error value = %1.8e\n', run_id, bsf_error_val)
            outcome = [outcome bsf_error_val];

            %% Collect convergence data for this run
            if (ConvDisp)
                run_funcvals = run_funcvals - optimum;
                run_funcvals = run_funcvals';
                dim1(run_id, :) = 1:length(run_funcvals);
                dim2(run_id, :) = log10(run_funcvals);
            end
        end % end 1 run

        %% Save statistical output in the analysis matrix
        analysis(func, 1) = min(outcome);
        analysis(func, 2) = median(outcome);
        analysis(func, 3) = max(outcome);
        analysis(func, 4) = mean(outcome);
        analysis(func, 5) = std(outcome);
        % Index of the run closest to the median outcome.  Using min(abs(...))
        % instead of find(outcome == median(outcome)) always yields a single
        % scalar index: the original find() could return a vector on ties, or
        % an empty array if Run_No were even (the median is then an average
        % that may match no individual run), breaking int2str/indexing below.
        [~, median_figure] = min(abs(outcome - median(outcome)));
        analysis(func, 6) = median_figure;

        % fullfile builds platform-correct paths (the original sprintf baked
        % in Windows-only backslash separators).
        file_name = fullfile('Results', sprintf('%s_CEC2017_Problem#%s_problem_size#%s', Alg_Name, int2str(func), int2str(problem_size)));
        save(file_name, 'outcome');

        %% Print statistical output and save convergence figures
        fprintf('%e\n', min(outcome));
        fprintf('%e\n', median(outcome));
        fprintf('%e\n', mean(outcome));
        fprintf('%e\n', max(outcome));
        fprintf('%e\n', std(outcome));
        file_name = fullfile('Figures', sprintf('Figure_Problem#%s_Run#%s', int2str(func), int2str(median_figure)));
        save(file_name, 'dim1', 'dim2');
    end % end 1 function run

    file_name = fullfile('Results', sprintf('analysis_%s_CEC2017_problem_size#%s', Alg_Name, int2str(problem_size)));
    save(file_name, 'analysis');
end % end all function runs in all dimensions