Commit c9529981 authored by Sebastien GOYON

SM agent now also uses BaseAgent

parent a499b2e4
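
For context, here is a minimal sketch of the BaseAgent surface that the hunks below compile against, reconstructed from this diff (the vector / evaluate / fitness fields, generateRandomVector(), evaluate(), fitness(), compareAndUpdate(), and the getters SMEnv now calls). Only the compareAndUpdate() body is copied from the BaseAgent hunk; the modifiers, the placement of the id field, and the fitness transform (assumed to have moved from the removed SMAgent.fitness()) are assumptions and may differ from the repository's actual baseOptiAgent code.

    package baseOptiAgent;

    import java.util.ArrayList;
    import java.util.List;

    import eval.Eval;        // existing project interface: dimensions, bounds, objective
    import mas.core.Agent;   // existing AMAS base class

    // Approximation for reference only -- not the committed file.
    public abstract class BaseAgent extends Agent {

        protected int id;
        protected Eval eval;
        protected List<Double> vector;   // current position in the search space
        protected double evaluate;       // raw objective value of `vector`
        protected double fitness;        // maximisation-friendly transform of `evaluate`

        // Uniform random position within the bounds exposed by Eval
        // (mirrors the loop removed from the SMAgent constructor).
        protected List<Double> generateRandomVector() {
            List<Double> v = new ArrayList<Double>();
            for (int i = 0; i < eval.getDim(); i++) {
                v.add(eval.getMin(i) + Math.random() * (eval.getMax(i) - eval.getMin(i)));
            }
            return v;
        }

        protected double evaluate(List<Double> values) {
            return eval.evaluate(values);
        }

        // Assumed to carry the transform previously defined in SMAgent.fitness(List<Double>):
        // higher fitness is always better, whatever the sign of the objective value.
        protected double fitness(double fun) {
            return (fun >= 0) ? 1 / (1 + fun) : 1 + Math.abs(fun);
        }

        // Greedy selection added by this commit (body copied from the BaseAgent hunk below).
        protected boolean compareAndUpdate(List<Double> otherVec) {
            double newEval = evaluate(otherVec);
            double newFitness = fitness(newEval);
            if (fitness < newFitness) {
                vector = otherVec;
                fitness = newFitness;
                evaluate = newEval;
                return true;
            }
            return false;
        }

        public List<Double> getVector() { return vector; }
        public double getEvaluate()     { return evaluate; }
        public double getFitness()      { return fitness; }
    }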
MainAmakaque.java

package amakaque;

import java.util.ArrayList;
import java.util.List;

import eval.Eval;
import eval.fun.Rastrigin;
import eval.fun.SchwefelFunction;
import eval.fun.SchwefelFunction1_2;
import eval.fun.StepFunction;
import mas.core.Schedulable;

public class MainAmakaque {

    public static void main(String[] args) {
        // [PARAM]
        int nbrAgent = 50;
        // [0.1 : 0.9]
        double pr = 0.1;
        //
        int maxGroup = 5;
        int localLimit = 1500;
        int globalLimit = 50;

        int res = 0;
        int nbrTry = 100;
        long startTime = System.currentTimeMillis();
        for (int i = 0; i < nbrTry; i++) {
            res += findSolution(nbrAgent, pr, maxGroup, localLimit, globalLimit);
            // System.out.println("run : "+i);
        }
        long estimatedTime = System.currentTimeMillis() - startTime;
        System.out.println("Time : "+estimatedTime/1000+ "s "+estimatedTime%1000+"ms");
        // System.out.println(res);
        System.out.println("Average cycle : "+(double)res/(double)nbrTry);

        // ******************** SCHEDULER ************************
        /*
        res = 0;
        startTime = System.currentTimeMillis();
        for (int i = 0; i < nbrTry; i++) {
            res += findSolution2(nbrAgent, pr, maxGroup, localLimit, globalLimit);
            // System.out.println("run : "+i);
        }
        estimatedTime = System.currentTimeMillis() - startTime;
        System.out.println("Time : "+estimatedTime/1000+ "s "+estimatedTime%1000+"ms");
        // System.out.println(res);
        System.out.println("Average cycle : "+(double)res/(double)nbrTry);
        */
    }

    private static int findSolution(
            int nbrAgent,
            double pr,
            int maxGroup,
            int localLimit,
            int globalLimit
    ) {
        // [INIT]
        Eval eval = new StepFunction();
        //Eval eval = new SchwefelFunction1_2();
        //Eval eval = new SchwefelFunction();
        //Eval eval = new Rastrigin();
        Env env = new Env(localLimit, globalLimit, maxGroup, eval);
        List<SMAgent> agents = new ArrayList<SMAgent>();
        for (int i = 0; i < nbrAgent; i++) {
            agents.add(new SMAgent(i, pr, env, eval));
        }
        Group group = new Group();
        group.addAgents(agents);
        env.initGroup(group);

        // [RUN]
        int cycle = 0;
        while(Math.abs(eval.evaluate(env.getGlobalLeader())-eval.getObjective())>eval.getErrorDelta()) {
            do {
                for (SMAgent agent : agents) {
                    agent.perceive();
                }
                for (SMAgent agent : agents) {
                    agent.decide();
                    agent.act();
                }
                cycle++;
            }while (agents.get(0).getPhase() != Phase.LOCAL_LEADER_PHASE);
            //cycle ++;
            //System.out.println("Cycle : "+cycle+" Value : "+eval.evaluate(env.getGlobalLeader()));
            //System.out.println(env.getGlobalLeader());
            /*
            // Step by step
            try {
                System.in.read();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
            */
        }
        //System.out.println("Best solution : "+env.getGlobalLeader());
        //System.out.println("That evaluate to : "+eval.evaluate(env.getGlobalLeader()));
        //System.out.println("Eval : "+eval.getCount());
        return cycle;
    }

    private static int findSolution2(
            int nbrAgent,
            double pr,
            int maxGroup,
            int localLimit,
            int globalLimit) {
        // [INIT]
        Eval eval = new StepFunction();
        //Eval eval = new SchwefelFunction1_2();
        //Eval eval = new SchwefelFunction();
        //Eval eval = new Rastrigin();
        Env env = new Env(localLimit, globalLimit, maxGroup, eval);
        List<SMAgent> agents = new ArrayList<SMAgent>();
        for (int i = 0; i < nbrAgent; i++) {
            agents.add(new SMAgent(i, pr, env, eval));
        }
        Group group = new Group();
        group.addAgents(agents);
        env.initGroup(group);

        // [RUN]
        Scheduler scheduler = new Scheduler(env, eval, agents.toArray(new SMAgent[agents.size()]));
        scheduler.start();
        scheduler.waitUntilFinish();
        return scheduler.getNbOfCycles();
    }
}

SMAgent.java

@@ -3,12 +3,13 @@ package amakaque;
 import java.util.ArrayList;
 import java.util.List;
+import baseOptiAgent.BaseAgent;
 import eval.Eval;
 import mas.core.Agent;
-public class SMAgent extends Agent {
+public class SMAgent extends BaseAgent {
-    private Env env;
+    private SMEnv env;
     /*
      * Is local leader/ is global leader
@@ -17,17 +18,11 @@ public class SMAgent extends Agent {
     private boolean isGL;
     private int currentGroup;
-    private int id;
-    private List<Double> dimVector;
-    private double fitness;
     private Double pr;
     private Phase phase;
-    private Eval eval;
     // LLP
     private List<Double> randomLocalMember;
     private List<Double> localLeader;
@@ -63,7 +58,7 @@ public class SMAgent extends Agent {
     /*
      * Init the SM with random starting point within the env dimension
      */
-    public SMAgent(int _id, double _pr, Env _env, Eval _eval) {
+    public SMAgent(int _id, double _pr, SMEnv _env, Eval _eval) {
         env = _env;
         eval = _eval;
         id = _id;
@@ -77,13 +72,9 @@ public class SMAgent extends Agent {
         }
         currentGroup = 0;
-        dimVector = new ArrayList<Double>();
-        for (int i = 0; i < eval.getDim(); i++) {
-            dimVector.add(eval.getMin(i) + Math.random() * (eval.getMax(i) - eval.getMin(i)));
-        }
-        fitness = fitness(dimVector);
+        vector = generateRandomVector();
+        evaluate = evaluate(vector);
+        fitness = fitness(evaluate);
         pr = _pr;
         phase = Phase.LOCAL_LEADER_PHASE;
@@ -102,7 +93,7 @@ public class SMAgent extends Agent {
         randomLocalMember = env.getRandomGroupMember(currentGroup, id);
         globalLeader = env.getGlobalLeader();
         groupSize = env.getGroupSize(currentGroup);
-        maxFitness = env.getMaxFitness();
+        maxFitness = env.getBestFitness();
         count = env.getCount(currentGroup);
         allDone = env.allDone();
     }
@@ -137,16 +128,6 @@ public class SMAgent extends Agent {
         }
     }
-    private double fitness(List<Double> values) {
-        double fun = eval.evaluate(values);
-        if (fun >= 0) {
-            return 1/(1 + fun);
-        }
-        else {
-            return 1 + Math.abs(fun);
-        }
-    }
     private void nextPhase() {
         if (phase == Phase.LOCAL_LEADER_PHASE) {
@@ -184,9 +165,9 @@ public class SMAgent extends Agent {
         List<Double> newValues = new ArrayList<Double>();
-        for (int i = 0; i < dimVector.size(); i++) {
-            Double currentValue = dimVector.get(i);
+        for (int i = 0; i < vector.size(); i++) {
+            Double currentValue = vector.get(i);
             if (Math.random() >= pr) {
                 double value = currentValue
@@ -209,11 +190,7 @@ public class SMAgent extends Agent {
         * Apply the greedy selection process between existing position and newly generated position,
         * based on fitness and select the better one;
         */
-        double new_fitness = fitness(newValues);
-        if (fitness<new_fitness) {
-            dimVector = newValues;
-            fitness = new_fitness;
-        }
+        compareAndUpdate(newValues);
     }
     private void globalLeaderPhase() {
@@ -230,10 +207,10 @@ public class SMAgent extends Agent {
         if (count < groupSize) {
             if (Math.random() < prob) {
                 env.count(currentGroup);
-                int j = (int)(Math.random() * dimVector.size());
-                List<Double> newValues = new ArrayList<Double>(dimVector);
-                Double currentValue = dimVector.get(j);
+                int j = (int)(Math.random() * vector.size());
+                List<Double> newValues = new ArrayList<Double>(vector);
+                Double currentValue = vector.get(j);
                 double value = currentValue
                     + Math.random() * (globalLeader.get(j) - currentValue)
                     + (2*Math.random() - 1) * (randomLocalMember.get(j) - currentValue);
@@ -246,11 +223,7 @@ public class SMAgent extends Agent {
                 newValues.set(j,value);
-                double new_fitness = fitness(newValues);
-                if (fitness<new_fitness) {
-                    dimVector = newValues;
-                    fitness = new_fitness;
-                }
+                compareAndUpdate(newValues);
             }
         }
     }
@@ -300,9 +273,9 @@ public class SMAgent extends Agent {
         if (localLimitCount > localLimit) {
             List<Double> newValues = new ArrayList<Double>();
-            for (int j = 0; j < dimVector.size(); j++) {
-                Double currentValue = dimVector.get(j);
+            for (int j = 0; j < vector.size(); j++) {
+                Double currentValue = vector.get(j);
                 if (Math.random() >= pr) {
                     newValues.add(eval.getMin(j) + Math.random() * (eval.getMax(j) - eval.getMin(j)));
@@ -316,8 +289,9 @@ public class SMAgent extends Agent {
                 }
             }
-            dimVector = newValues;
-            fitness = fitness(dimVector);
+            vector = newValues;
+            evaluate = evaluate(vector);
+            fitness = fitness(evaluate);
         }
     }
@@ -391,14 +365,6 @@ public class SMAgent extends Agent {
         }
     }
-    public List<Double> getDimVector() {
-        return dimVector;
-    }
-    public int getId() {
-        return id;
-    }
     public boolean isLL() {
         return isLL;
     }
@@ -429,10 +395,6 @@ public class SMAgent extends Agent {
         return phase;
     }
-    public double getFitness() {
-        return fitness;
-    }
     @Override
     public boolean equals(Object obj) {
         if (!(obj instanceof SMAgent)) return false;

SMEnv.java

@@ -4,9 +4,10 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
+import baseOptiAgent.Env;
 import eval.Eval;
-public class Env {
+public class SMEnv extends Env{
     private int globalLimitCount;
@@ -15,9 +16,7 @@ public class Env {
     private int groupLimit;
     private List<Group> groups;
-    private Eval eval;
-    public Env(int _localLimit, int _globalLimit, int _groupLimit, Eval _eval) {
+    public SMEnv(int _localLimit, int _globalLimit, int _groupLimit, Eval _eval) {
         globalLimitCount = 0;
         eval = _eval;
@@ -71,7 +70,7 @@ public class Env {
             randomagent = group.getAgents().get(rand.nextInt(group.getAgents().size()));
         }
-        return randomagent.getDimVector();
+        return randomagent.getVector();
     }
     public List<Double> getLocalLeader(int i_group){
@@ -80,13 +79,14 @@ public class Env {
         List<SMAgent> agents = group.getAgents();
         for (SMAgent agent: agents) {
             if(agent.isLL()) {
-                return agent.getDimVector();
+                return agent.getVector();
             }
         }
         throw new java.lang.Error("The group :"+String.valueOf(i_group)+" don t have a local leader.");
     }
-    public double getMaxFitness() {
+    @Override
+    public double getBestFitness() {
         double max = 0;
         for (Group group: groups) {
@@ -105,7 +105,7 @@ public class Env {
         for (Group group: groups) {
             for (SMAgent agent: group.getAgents()) {
                 if (agent.isGL()) {
-                    return agent.getDimVector();
+                    return agent.getVector();
                 }
             }
         }
@@ -230,4 +230,18 @@ public class Env {
         }
         throw new java.lang.Error("No global leader found.");
     }
+    @Override
+    public double getBestEval() {
+        SMAgent maxAgent = groups.get(0).getAgents().get(0);
+        for (Group group: groups) {
+            for (SMAgent agent: group.getAgents()) {
+                if(agent.getFitness() > maxAgent.getFitness()) {
+                    maxAgent = agent;
+                }
+            }
+        }
+        return maxAgent.getEvaluate();
+    }
 }

SMSolver.java

package amakaque;

import java.util.ArrayList;
import java.util.List;

import baseOptiAgent.Result;
import baseOptiAgent.Solver;
import eval.Eval;

public class SMSolver extends Solver{

    public SMSolver(Eval _eval, int _maxCycle, int _maxEval) {
        super(_eval, _maxCycle, _maxEval);
        name = "Spider-Monkey";
    }

    @Override
    public Result solve() {
        // [PARAM]
        int nbrAgent = 50;
        // [0.1 : 0.9]
        double pr = 0.1;
        //
        int maxGroup = 5;
        int localLimit = 1500;
        int globalLimit = 50;

        // [INIT]
        SMEnv env = new SMEnv(localLimit, globalLimit, maxGroup, eval);
        List<SMAgent> agents = new ArrayList<SMAgent>();
        for (int i = 0; i < nbrAgent; i++) {
            agents.add(new SMAgent(i, pr, env, eval));
        }
        Group group = new Group();
        group.addAgents(agents);
        env.initGroup(group);

        return findSolution(agents, env, 6);
    }
}

Scheduler.java

@@ -6,11 +6,11 @@ import mas.implementation.schedulers.variations.ThreeStepCycling;
 public class Scheduler extends ThreeStepCycling{
-    public Env env;
+    public SMEnv env;
     public Eval eval;
-    public Scheduler(Env _env, Eval _eval, ThreeStepCyclable... _threeStepCyclables) {
+    public Scheduler(SMEnv _env, Eval _eval, ThreeStepCyclable... _threeStepCyclables) {
         super(_threeStepCyclables);
         env = _env;
         eval = _eval;

BaseAgent.java

@@ -66,4 +66,16 @@ public abstract class BaseAgent extends Agent{
         return fitness;
     }
+    protected boolean compareAndUpdate(List<Double> otherVec) {
+        double newEval = evaluate(otherVec);
+        double newFitness = fitness(newEval);
+        if (fitness<newFitness) {
+            vector = otherVec;
+            fitness = newFitness;
+            evaluate = newEval;
+            return true;
+        }
+        return false;
+    }
 }
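
As an illustration of the call pattern that the SMAgent and Bee hunks adopt, here is a hedged sketch of an improvement step built on the new helper; the method name and step size below are invented for the example and are not part of the commit.

    // Hypothetical improvement step inside a BaseAgent subclass (illustration only).
    private boolean tryRandomStep() {
        // Copy the current position and perturb one randomly chosen dimension.
        List<Double> candidate = new ArrayList<Double>(vector);
        int dim = (int) (Math.random() * candidate.size());
        candidate.set(dim, candidate.get(dim) + (2 * Math.random() - 1) * 0.1); // arbitrary step size
        // compareAndUpdate() keeps the candidate only if its fitness improves on the current one.
        return compareAndUpdate(candidate);
    }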

Env.java (baseOptiAgent)

@@ -4,7 +4,7 @@ import eval.Eval;
 public abstract class Env {
-    Eval eval;
+    protected Eval eval;

Ihm.java

@@ -4,6 +4,7 @@ import java.io.FileWriter;
 import java.io.IOException;
 import java.util.stream.Stream;
+import amakaque.SMSolver;
 import ant.AntSolver;
 import bee.BeeSolver;
 import eval.Eval;
@@ -18,8 +19,9 @@ public class Ihm {
     int maxCycle = 200000;
     int maxEval = 1000000;
-    Solver solver = new BeeSolver(eval, maxCycle, maxEval);
+    Solver solver = new SMSolver(eval, maxCycle, maxEval);
+    //Solver solver = new BeeSolver(eval, maxCycle, maxEval);
     //Solver solver = new AntSolver(eval, maxCycle, maxEval);
     Result res = solver.solve();

Bee.java

@@ -71,18 +71,9 @@ public class Bee extends BaseAgent{
         newVector.set(dim, vector.get(dim) + 2 * (Math.random() - 0.5) * (vector.get(dim) - randomAgent.get(dim)));
-        this.boundValue(newVector);
-        double newEval = evaluate(newVector);
-        double newFit = fitness(newEval);
-        if (fitness < newFit) {
-            //System.out.println("Update from : "+evaluate+" to "+newEval);
-            vector = newVector;
-            evaluate = newEval;
-            fitness = newFit;
-            return true;
-        }
-        return false;
+        boundValue(newVector);
+        return compareAndUpdate(newVector);
     }
     private void employedPhase() {