%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% Set-based stochastic optimization for
% evolving optimum
% instinct-learning balance
%
% By: Kevin Passino
% Version: 5/5/01
%
% Note: This code can take a relatively long
% time to run (of course depending on
% what computer you are using).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

clear all

% Initialize memory

S=100;            % Set the size of the population
NL=200;           % Set the number of steps in the lifetime of each organism
                  % in the population
time=1:NL;        % Time steps during a lifetime
Ng=200;           % Set the number of generations
timeepoch=1:Ng;

% Set environmental characteristics
y=zeros(NL,Ng);   % Initialize/allocate memory for sensed variable from environment
z=zeros(NL,Ng);   % Initialize the noise samples
% FIX: ones(Ng) allocated an Ng-by-Ng matrix, but only an Ng-by-1 vector is
% ever used (indexed as sigmaz2(ell)).
sigmaz2=0.5*ones(Ng,1); % Initialize the variance in the variable we want to estimate
                        % (could change during a lifetime or over generations)
xmin=0;           % Set limits on size of x values (assume that same amount above, below 2)
xmax=4;
xbar=2;           % For case where have an unknown constant in the environment
                  % that we want to estimate
x=xbar*ones(NL,Ng);  % Set variable want to estimate as a constant
xhat=zeros(NL,Ng);   % Per-time-step estimates during one individual's lifetime

% Initialize the estimator initial conditions (the "instincts").
% NOTE: columns 1..Ng+1 since generation ell writes the next generation into
% column ell+1 (the original under-allocated and relied on silent array growth).
xhat0=zeros(S,Ng+1);
xhat0(:,1)=ones(S,1);  % Sets initial condition (as if all individuals are the
                       % same, with rather poor instincts)

Ndivn=20;         % Number of divisions on n space, same as the max value considered for n
nmax=20;
nmin=1;
n=zeros(S,Ng+1);  % Memory length of each individual, per generation (see NOTE above)
n(:,1)=ones(S,1); % Set initial conditions on memory (so only can hold one value)

% Initialize cost function (fitness function, but where we interpret it
% in a different way compared to the fitness function used for genetic
% algorithm).
J=zeros(S,Ng);

% Set weighting parameters for cost function
% A choice to illustrate some concepts:
w1=0.01;          % Weight on cost of memory storage
w2=1;             % Weight on quality of estimation
w3=0.05;          % Weight on cost of instinct

% Parameters for the set-based opt. method
beta=0.01;        % Sets variance on cloud of points along xhat0 dimension
                  % using a normal distribution with zero mean
ndev=1;           % Set the max deviation on cloud for n (up and down) (a pos integer)
pm=0.1;           % Mutation probability

% Preallocate per-generation bookkeeping used for plotting
Jbest=zeros(1,Ng);
bestone=zeros(1,Ng);
nbest=zeros(1,Ng);
xhat0best=zeros(1,Ng);

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Start loop for generation of data for response surfaces
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

for ell=1:Ng  % Loop for testing multiple times...

    % Have the environment suddenly change at ell=100 such that there is
    % more uncertainty
    if ell>=100, sigmaz2(ell)=0.75; end

    % Evaluate the cost for each individual
    for i=1:S

        % Simulate an individual:
        clear Y  % Dimension changes on the regressor vector so clear it
                 % each time it changes
        Y=xhat0(i,ell)*ones(n(i,ell),1); % Set initial condition (memory is
                                         % filled with the instinct value)

        % Run estimator for lifetime of animal
        for k=1:NL
            % Generate sensed signals, including initial values that are instincts.
            % FIX: sigmaz2 is a variance, so the unit-variance randn must be
            % scaled by its square root (the standard deviation), not by the
            % variance itself.
            z(k,ell)=sqrt(sigmaz2(ell))*randn;   % Generate noise
            y(k,ell)=x(k,ell)+z(k,ell);          % Generate signal that is sensed
            Y=[y(k,ell); Y(1:(n(i,ell)-1),1)];   % Shift regression vector, load in new value
            xhat(k,ell)=mean(Y);                 % Estimate = sample mean of memory
        end

        J(i,ell)=w1*n(i,ell)...  % Cost of storage
            +w2*(1/NL)*(x(:,ell)-xhat(:,ell))'*(x(:,ell)-xhat(:,ell))... % Quality of estimation
            +w3*exp(-(x(1,ell)-xhat0(i,ell))^2/((0.1)^2)); % Models cost of instinct

    end % End i loop for individuals...

    [Jbest(ell),bestone(ell)]=min(J(:,ell)); % Find the best point in the cloud
    nbest(ell)=n(bestone(ell),ell);          % Store for plotting
    % FIX: the original read xhat(bestone(ell),ell), which indexes the
    % per-time-step estimate array by an individual index; the best
    % individual's instinct (initial condition) lives in xhat0.
    xhat0best(ell)=xhat0(bestone(ell),ell);

    % Generate next generation of parameters:
    xhat0(bestone(ell),ell+1)=xhat0(bestone(ell),ell); % Use elitism - keep the best one
    n(bestone(ell),ell+1)=n(bestone(ell),ell);         % Use elitism - keep the best one

    % Create a cloud of points around the best one
    for ss=1:S
        if ss ~= bestone(ell)
            xhat0(ss,ell+1)=xhat0(bestone(ell),ell)+beta*randn; % Perturb points on xhat0 dimension
            n(ss,ell+1)=n(bestone(ell),ell)+round((1-2*rand)*ndev); % The last term generates a
                                                                    % random integer between
                                                                    % -ndev and +ndev
            % FIX: reconstructed saturation tests (the comparison operators
            % were lost in the original text); clamp to the declared limits.
            if xhat0(ss,ell+1)<xmin, xhat0(ss,ell+1)=xmin; end
            if xhat0(ss,ell+1)>xmax, xhat0(ss,ell+1)=xmax; end
            if n(ss,ell+1)<nmin, n(ss,ell+1)=nmin; end
            if n(ss,ell+1)>nmax, n(ss,ell+1)=nmax; end
        end % End if loop
    end % End ss loop

    % Next place a mutant, do not mutate the best one
    if pm>rand,
        % Replace either the first or last member, provided not the best one
        nvec=randperm(Ndivn);
        % FIX: the original (xmax-xmin)*rand-((xmax-xmin)/2) produced values
        % uniform on [-2,2], half of them outside [xmin,xmax]; generate a
        % random parameter uniform on [xmin,xmax] as the comments intend.
        if bestone(ell) ~= 1
            xhat0(1,ell+1)=xmin+(xmax-xmin)*rand; % Generates a random parameter in range
            n(1,ell+1)=nvec(1);                   % Generates a random parameter in range 1,2,...,Ndivn
        else
            % So, the bestone is the first one so replace the last
            xhat0(S,ell+1)=xmin+(xmax-xmin)*rand;
            n(S,ell+1)=nvec(1);
        end
    end

end % End ell loop

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Plot
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

figure(1)
clf
plot(timeepoch,Jbest,'k-')
zoom
grid on
title('Cost function J (fitness function) for best estimator')
xlabel('Generation')

% No semicolons: display the two averages in the command window
earlyavg=mean(xhat0best(1:100))
lateavg=mean(xhat0best(101:200))

figure(2)
clf
plot(timeepoch,xhat0best,'k-')
zoom
grid on
hold on
plot(timeepoch,[earlyavg*ones(size(xhat0best(1:100))) lateavg*ones(size(xhat0best(101:200)))],'r--')
title('Initial condition (solid), average of first and last 100 generations (dashed)')
xlabel('Generation')

figure(3)
clf
stairs(timeepoch,nbest,'k-')
zoom
grid on
title('n')
xlabel('Generation')

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% End of program
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%