SMAgent.java
package amakaque;
import java.util.ArrayList;
import java.util.List;
import baseOptiAgent.BaseAgent;
import eval.Eval;
import mas.core.Agent;
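/*
 * Spider monkey agent for a Spider Monkey Optimization (SMO)-style algorithm.
 * Each agent carries one candidate solution (vector) and cycles through the six
 * SMO phases, reading shared state from SMEnv in perceive() and updating its
 * position (and the environment) in act().
 */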
public class SMAgent extends BaseAgent {
private SMEnv env;
    // Leadership flags: is this agent its group's local leader / the swarm's global leader?
    private boolean isLL;
    private boolean isGL;
    private int currentGroup;
    private double pr; // perturbation rate: a dimension is updated only when U(0,1) >= pr
    private Phase phase;
    // Local Leader Phase (LLP) state
    private List<Double> randomLocalMember;
    private List<Double> localLeader;
    // Global Leader Phase (GLP) state (randomLocalMember is reused)
    private double maxFitness;
    private List<Double> globalLeader;
    private int groupSize;
    private int count;
    private boolean allDone;
    // Global Leader Learning (GLL) state
    private SMAgent nextGL;
    private boolean isNewGL;
    // Local Leader Learning (LLL) state
    private SMAgent nextLL;
    private boolean isNewLL;
    // Local Leader Decision (LLD) state (globalLeader and localLeader are reused)
    private int localLimit;
    private int localLimitCount;
    // Global Leader Decision (GLD) state
    private int globalLimit;
    private int globalLimitCount;
    private int numberOfGroups;
    private int numberOfGroupsLimit;
    /*
     * Initialize the spider monkey at a random starting point within the
     * search-space bounds defined by the evaluator.
     */
public SMAgent(int _id, double _pr, SMEnv _env, Eval _eval) {
env = _env;
eval = _eval;
id = _id;
        // Agent 0 starts out as both global leader and local leader of group 0.
        isLL = (id == 0);
        isGL = (id == 0);
currentGroup = 0;
vector = generateRandomVector();
evaluate = evaluate(vector);
fitness = fitness(evaluate);
pr = _pr;
phase = Phase.LOCAL_LEADER_PHASE;
}
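    /*
     * Typical use (a minimal sketch; the driver loop and the agents collection
     * are assumptions, not part of this class):
     *
     *   for (int step = 0; step < maxSteps; step++) {
     *       for (SMAgent a : agents) a.perceive(); // read shared state from SMEnv
     *       for (SMAgent a : agents) a.act();      // update position, advance phase
     *   }
     *
     * All perceive() calls should run before any act() in a step, since act()
     * mutates the shared environment the other agents read.
     */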
@Override
public void perceive() {
if (phase == Phase.LOCAL_LEADER_PHASE) {
localLeader = env.getLocalLeader(currentGroup);
randomLocalMember = env.getRandomGroupMember(currentGroup, id);
}
else if(phase == Phase.GLOBAL_LEADER_PHASE){
randomLocalMember = env.getRandomGroupMember(currentGroup, id);
globalLeader = env.getGlobalLeader();
groupSize = env.getGroupSize(currentGroup);
maxFitness = env.getBestFitness();
count = env.getCount(currentGroup);
allDone = env.allDone();
}
else if(phase == Phase.GLOBAL_LEADER_LEARNING) {
if (isGL) {
nextGL = env.findNextGL();
}
this.isNewGL = false;
}
else if(phase == Phase.LOCAL_LEADER_LEARNING) {
if (isLL) {
nextLL = env.findNextLL(currentGroup);
}
isNewLL = false;
}
else if(phase == Phase.LOCAL_LEADER_DECISION) {
globalLeader = env.getGlobalLeader();
localLeader = env.getLocalLeader(currentGroup);
localLimit = env.getLocalLimit();
localLimitCount = env.getLocalLimitCount(currentGroup);
}
else if(phase == Phase.GLOBAL_LEADER_DECISION) {
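            // Deferred from the LLD phase: the reset happens here so that every
            // group member saw the exceeded counter during its own LLD step
            // before the local leader clears it.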
if (localLimitCount > localLimit) {
if (isLL) {
env.resetLocalCount(currentGroup);
}
}
globalLimit = env.getGlobalLimit();
globalLimitCount = env.getGlobalLimitCount();
numberOfGroups = env.getNbrGroup();
numberOfGroupsLimit = env.getGroupLimit();
}
}
    private void nextPhase() {
        // Advance through the six SMO phases in their fixed cyclic order.
        switch (phase) {
            case LOCAL_LEADER_PHASE:     phase = Phase.GLOBAL_LEADER_PHASE;    break;
            case GLOBAL_LEADER_PHASE:    phase = Phase.GLOBAL_LEADER_LEARNING; break;
            case GLOBAL_LEADER_LEARNING: phase = Phase.LOCAL_LEADER_LEARNING;  break;
            case LOCAL_LEADER_LEARNING:  phase = Phase.LOCAL_LEADER_DECISION;  break;
            case LOCAL_LEADER_DECISION:  phase = Phase.GLOBAL_LEADER_DECISION; break;
            case GLOBAL_LEADER_DECISION: phase = Phase.LOCAL_LEADER_PHASE;     break;
        }
    }
private void localLeaderPhase() {
        /*
         * To find the food source, generate a new position from the agent's own
         * experience, the local leader's experience, and a random group member's
         * experience.
         */
List<Double> newValues = new ArrayList<Double>();
for (int i = 0; i < vector.size(); i++) {
Double currentValue = vector.get(i);
if (Math.random() >= pr) {
double value = currentValue
+ Math.random() * (localLeader.get(i) - currentValue)
+ 2 * (Math.random() - 0.5) * (randomLocalMember.get(i) - currentValue);
if (value > eval.getMax(i)) {
value = eval.getMax(i);
}
else if (value < eval.getMin(i)) {
value = eval.getMin(i);
}
newValues.add(value);
}
else {
newValues.add(currentValue);
}
}
        /*
         * Greedy selection: keep whichever of the existing and newly generated
         * positions has the better fitness.
         */
compareAndUpdate(newValues);
}
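    /*
     * Worked example of the LLP update for one dimension (illustrative numbers
     * only): with current value 2.0, local leader 3.0, random member 1.5, and
     * draws U(0,1) = 0.4 and U(-1,1) = -0.6:
     *   2.0 + 0.4 * (3.0 - 2.0) + (-0.6) * (1.5 - 2.0) = 2.7
     * The result is then clamped to [eval.getMin(i), eval.getMax(i)].
     */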
private void globalLeaderPhase() {
        /*
         * Selection probability of this member, proportional to its fitness:
         * prob = 0.9 * (fitness / maxFitness) + 0.1, which ranges over [0.1, 1.0].
         */
        double prob = 0.9 * fitness / maxFitness + 0.1;
        /*
         * With probability prob, produce a new position for this member on one
         * randomly chosen dimension, using its own experience, the global leader's
         * experience, and a random group member's experience.
         */
if (count < groupSize) {
if (Math.random() < prob) {
env.count(currentGroup);
int j = (int)(Math.random() * vector.size());
List<Double> newValues = new ArrayList<Double>(vector);
Double currentValue = vector.get(j);
double value = currentValue
+ Math.random() * (globalLeader.get(j) - currentValue)
+ (2*Math.random() - 1) * (randomLocalMember.get(j) - currentValue);
if (value > eval.getMax(j)) {
value = eval.getMax(j);
}
else if (value < eval.getMin(j)) {
value = eval.getMin(j);
}
newValues.set(j,value);
compareAndUpdate(newValues);
}
}
}
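    /*
     * Example of the selection probability (illustrative numbers only): an agent
     * with fitness 0.5 when the best fitness is 1.0 gets
     *   prob = 0.9 * (0.5 / 1.0) + 0.1 = 0.55,
     * so even a member with fitness near zero keeps a 0.1 chance of updating.
     */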
private void globalLeaderLearning() {
        /*
         * The global leader position is updated by greedy selection over the whole
         * population: the SM with the best fitness becomes the new global leader.
         * If the global leader's position did not change, GlobalLimitCount is
         * incremented by 1.
         */
        if (isGL && !isNewGL) {
if (nextGL.equals(this)) {
env.addGlobalLimitCount();
}
else {
nextGL.becomeGL();
isGL = false;
}
}
}
private void localLeaderLearning() {
        /*
         * The local leader position is updated by greedy selection within the group:
         * the SM with the best fitness in the group becomes the new local leader.
         * If the local leader's position did not change, LocalLimitCount is
         * incremented by 1.
         */
        if (isLL && !isNewLL) {
if (nextLL.equals(this)) {
env.addLocalLimit(currentGroup);
}
else {
nextLL.becomeLL();
isLL = false;
}
}
}
private void localLeaderDecision() {
        /*
         * If the local leader has not updated her position for LocalLeaderLimit
         * consecutive iterations, redirect all members of that group to forage:
         * each dimension is either re-initialized at random or pushed toward the
         * global leader and away from the local leader.
         */
if (localLimitCount > localLimit) {
List<Double> newValues = new ArrayList<Double>();
for (int j = 0; j < vector.size(); j++) {
Double currentValue = vector.get(j);
if (Math.random() >= pr) {
newValues.add(eval.getMin(j) + Math.random() * (eval.getMax(j) - eval.getMin(j)));
}
                else {
                    double value = currentValue
                        + Math.random() * (globalLeader.get(j) - currentValue)
                        + Math.random() * (currentValue - localLeader.get(j));
                    // Clamp to the search-space bounds, as in the other phases.
                    if (value > eval.getMax(j)) {
                        value = eval.getMax(j);
                    }
                    else if (value < eval.getMin(j)) {
                        value = eval.getMin(j);
                    }
                    newValues.add(value);
                }
}
vector = newValues;
evaluate = evaluate(vector);
fitness = fitness(evaluate);
}
}
private void globalLeaderDecision() {
        /*
         * If the global leader has not updated her position for GlobalLeaderLimit
         * consecutive iterations, she splits the population into more groups, up to
         * the group limit; once the limit is reached, all groups are fused back into one.
         */
if (isGL) {
if (globalLimitCount > globalLimit) {
env.resetGlobalCount();
if (numberOfGroups < numberOfGroupsLimit) {
env.combineAllGroups();
env.splitInNGroups(numberOfGroups+1);
for (int i = 0; i < numberOfGroups+1; i++ ) {
env.findNextLL(i).becomeLL();
}
}
else {
env.combineAllGroups();
env.findNextLL(0).becomeLL();
}
}
}
}
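    /*
     * This split-or-fuse step mirrors the fission-fusion social structure that
     * SMO models: stagnation first increases the number of foraging groups, and
     * once the group limit is reached the swarm is fused back into a single group.
     */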
@Override
public void act() {
if (phase == Phase.LOCAL_LEADER_PHASE) {
localLeaderPhase();
nextPhase();
            if (isLL) { // the local leader resets its group's GLP counter before the next phase
                env.resetGroupCount(currentGroup);
            }
}
else if(phase == Phase.GLOBAL_LEADER_PHASE){
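            // Agents remain in the GLP until every group has produced groupSize
            // updates (allDone), so this phase can span several scheduler steps.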
if (allDone) {
nextPhase();
}
else {
globalLeaderPhase();
}
}
else if(phase == Phase.GLOBAL_LEADER_LEARNING) {
globalLeaderLearning();
nextPhase();
}
else if(phase == Phase.LOCAL_LEADER_LEARNING) {
localLeaderLearning();
nextPhase();
}
else if(phase == Phase.LOCAL_LEADER_DECISION) {
localLeaderDecision();
nextPhase();
}
else if(phase == Phase.GLOBAL_LEADER_DECISION) {
globalLeaderDecision();
nextPhase();
}
}
public boolean isLL() {
return isLL;
}
public boolean isGL() {
return isGL;
}
public void becomeGL() {
this.isGL = true;
this.isNewGL = true;
}
public void becomeLL() {
isLL = true;
isNewLL = true;
}
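    /*
     * becomeGL()/becomeLL() are invoked by an outgoing leader during the learning
     * phases, and becomeLL() also after a regrouping in the global leader decision
     * phase; the isNewGL/isNewLL flags keep a freshly promoted leader from handing
     * leadership off again within the same phase.
     */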
public void giveLocalLeadership() {
isLL = false;
}
public void enterNewGroup(int group) {
currentGroup = group;
}
public Phase getPhase() {
return phase;
}
    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof SMAgent)) return false;
        SMAgent other = (SMAgent) obj;
        return this.id == other.id;
    }
    @Override
    public int hashCode() {
        // Agents are identified by id alone, so hashCode must agree with equals.
        return id;
    }
}