diff --git a/aabc/ABeeSolver.java b/aabc/ABeeSolver.java
index 0265404c0baf23077319fc80e7fbdd5147b2037c..94bd444007a6be7d1f34bc1277d65366e0e68aad 100644
--- a/aabc/ABeeSolver.java
+++ b/aabc/ABeeSolver.java
@@ -9,7 +9,7 @@ import baseOptiAgent.Result;
 import baseOptiAgent.Solver;
 import eval.Eval;
 
-public class ABeeSolver extends Solver{
+public class ABeeSolver extends Solver {
 
 	public ABeeSolver(Eval _eval, int _maxCycle, int _maxEval) {
 		super(_eval, _maxCycle, _maxEval);
@@ -18,21 +18,21 @@ public class ABeeSolver extends Solver{
 
 	@Override
 	public Result solve() {
-		
+
 		// [PARAM]
 		int nbrAgent = 100;
 		int maxTry = 200;
-		
+
 		// [INIT]
 		BeeEnv env = new BeeEnv(eval);
-		
+
 		List<Bee> agents = new ArrayList<Bee>();
 		for (int i_agent = 0; i_agent < nbrAgent; i_agent++) {
 			agents.add(new Bee(eval, i_agent, env, maxTry));
 		}
-		
+
 		env.initAgent(agents);
-		
+
 		return findSolution(agents, env, 3);
 	}
 
diff --git a/aabc/Bee.java b/aabc/Bee.java
index 7c58b77b17dc064366a1b0faa2437563723a3247..7c90713f6ebc675f3d9f8ea699c7a091f91b5419 100644
--- a/aabc/Bee.java
+++ b/aabc/Bee.java
@@ -6,140 +6,125 @@ import java.util.List;
 import baseOptiAgent.BaseAgent;
 import eval.Eval;
 
-public class Bee extends BaseAgent{
-
+public class Bee extends BaseAgent {
 
 	BeeEnv env;
-	
+
 	int maxCount;
-	
+
 	Phase phase;
 	int stuckCount;
-	
+
 	// EMPLOYED
 	List<List<Double>> randomAgents;
 	double groupValue;
-	
+
 	// ONLOOKERS
 	double bestFitness;
-	//List<List<Double>> randomAgents;
-	//double groupValue;
+	// List<List<Double>> randomAgents;
+	// double groupValue;
 
-	
 	public Bee(Eval _eval, int _id, BeeEnv _env, int _maxCount) {
 		super(_eval, _id);
 
 		env = _env;
-		
+
 		maxCount = _maxCount;
-		
+
 		phase = Phase.EMPLOYED;
 		stuckCount = 0;
-		
+
 		vector = generateRandomVector();
 		evaluate = this.evaluate(vector);
 		fitness = this.fitness(evaluate);
 	}
-	
+
 	private void nextPhase() {
 		if (phase == Phase.EMPLOYED) {
 			phase = Phase.ONLOOKERS;
-		}
-		else if (phase == Phase.ONLOOKERS) {
+		} else if (phase == Phase.ONLOOKERS) {
 			phase = Phase.SCOUTS;
-		}
-		else if (phase == Phase.SCOUTS) {
+		} else if (phase == Phase.SCOUTS) {
 			phase = Phase.EMPLOYED;
 		}
 	}
-	
+
 	@Override
 	public void perceive() {
 
 		if (phase == Phase.EMPLOYED) {
 			randomAgents = env.getRandomAgents(id);
 			groupValue = env.getGroup(this.getFitness());
-		}
-		else if (phase == Phase.ONLOOKERS) {
+		} else if (phase == Phase.ONLOOKERS) {
 			randomAgents = env.getRandomAgents(id);
 			groupValue = env.getGroup(this.getFitness());
 			bestFitness = env.getBestFitness();
+		} else if (phase == Phase.SCOUTS) {
 		}
-		else if (phase == Phase.SCOUTS) {
-		}
-		
+
 	}
-	
+
 	private boolean randomlyUpdate() {
 
 		List<Double> newVector = new ArrayList<Double>(vector);
-		
-		
-		for (int i_dim = 0; i_dim < eval.getDim(); i_dim ++) {
-			if(Math.random() < groupValue) {
-				newVector.set(
-						i_dim,
-							vector.get(i_dim) 
-							+ 2 * (Math.random() - 0.5) 
-								* (vector.get(i_dim) - randomAgents.get(i_dim).get(i_dim))
-						);
+
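+		// For each dimension, with probability groupValue, perturb the value towards or away
+		// from a randomly chosen neighbour by a factor drawn uniformly from [-1, 1].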
+		for (int i_dim = 0; i_dim < eval.getDim(); i_dim++) {
+			if (Math.random() < groupValue) {
+				newVector.set(i_dim, vector.get(i_dim)
+						+ 2 * (Math.random() - 0.5) * (vector.get(i_dim) - randomAgents.get(i_dim).get(i_dim)));
 			}
 		}
-		
+
 		boundValue(newVector);
-		
+
 		return compareAndUpdate(newVector);
 	}
-	
+
 	private void employedPhase() {
 		boolean res = randomlyUpdate();
 		if (!res) {
-			stuckCount ++;
-		}
-		else {
+			stuckCount++;
+		} else {
 			stuckCount = 0;
 		}
 	}
-	
+
 	private void onlookerPhase() {
-		
-		// change p to this ? https://www.hindawi.com/journals/jam/2014/402616/ 
+
+		// TODO: consider replacing p with the variant from https://www.hindawi.com/journals/jam/2014/402616/
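+		// Fitness-proportional acceptance: p = 0.9 * fitness / bestFitness + 0.1, so with the
+		// check below, fitter bees skip the random update more often.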
 		double p = (0.9 * fitness / bestFitness) + 0.1;
-		
+
 		if (Math.random() > p) {
 			randomlyUpdate();
 		}
 	}
-	
+
 	private void scoutPhase() {
 		if (stuckCount >= maxCount) {
 			stuckCount = 0;
-			
-			
+
 			double oldEval = evaluate;
 			vector = generateRandomVector();
 			evaluate = this.evaluate(vector);
 			fitness = this.fitness(evaluate);
 		}
 	}
-	
+
 	@Override
 	public void act() {
-		
+
 		if (phase == Phase.EMPLOYED) {
 			employedPhase();
-		}
-		else if (phase == Phase.ONLOOKERS) {
+		} else if (phase == Phase.ONLOOKERS) {
 			onlookerPhase();
-		}
-		else if (phase == Phase.SCOUTS) {
+		} else if (phase == Phase.SCOUTS) {
 			scoutPhase();
 		}
 		nextPhase();
 	}
-	
+
 	public Phase getPhase() {
 		return phase;
 	}
-	
+
 }
diff --git a/aabc/BeeEnv.java b/aabc/BeeEnv.java
index 4958605039c4f898f681fff294d7c81e4d3c7c9a..896cd5c4654b13f4de2d7d021bc5fcb62a41b2bd 100644
--- a/aabc/BeeEnv.java
+++ b/aabc/BeeEnv.java
@@ -7,25 +7,25 @@ import java.util.Random;
 import baseOptiAgent.Env;
 import eval.Eval;
 
-public class BeeEnv extends Env{
+public class BeeEnv extends Env {
 
 	private List<Bee> agents;
-	
+
 	public BeeEnv(Eval _eval) {
 		eval = _eval;
 	}
-	
+
 	public void initAgent(List<Bee> _agents) {
 		agents = new ArrayList<Bee>(_agents);
 	}
-	
+
 	public List<List<Double>> getRandomAgents(int id) {
 		if (agents.size() == 1) {
 			throw new java.lang.Error("Cannot return random agent because the environment only know 1 agent.");
 		}
-		
+
 		Random rand = new Random();
-		
+
 		List<List<Double>> randomAgents = new ArrayList<List<Double>>();
 
 		Bee randomagent = agents.get(rand.nextInt(agents.size()));
@@ -35,34 +35,31 @@ public class BeeEnv extends Env{
 		for (int i = 0; i < eval.getDim(); i++) {
 			randomAgents.add(randomagent.getVector());
 		}
-		
+
 		/*
-		for (int i = 0; i < eval.getDim(); i++) {
-			Bee randomagent = agents.get(rand.nextInt(agents.size()));
-			while (randomagent.getId() == id) {
-				randomagent = agents.get(rand.nextInt(agents.size()));
-			}
-			randomAgents.add(randomagent.getVector());
-		}
-		*/
+		 * for (int i = 0; i < eval.getDim(); i++) { Bee randomagent =
+		 * agents.get(rand.nextInt(agents.size())); while (randomagent.getId() == id) {
+		 * randomagent = agents.get(rand.nextInt(agents.size())); }
+		 * randomAgents.add(randomagent.getVector()); }
+		 */
 
 		return randomAgents;
 	}
-	
+
 	public double getGroup(double fitness) {
 		int res = 0;
-		for(Bee bee : agents) {
+		for (Bee bee : agents) {
 			if (bee.getFitness() > fitness) {
-				res ++;
+				res++;
 			}
 		}
-		return Math.ceil((double)res * 5.0 / (double) agents.size()) /10 ;
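+		// Rank-based group value: the share of agents with a higher fitness, mapped into {0.0, 0.1, ..., 0.5};
+		// used as the per-dimension update probability in Bee.randomlyUpdate().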
+		return Math.ceil((double) res * 5.0 / (double) agents.size()) / 10;
 	}
 
 	@Override
 	public double getBestFitness() {
 		double res = agents.get(0).getFitness();
-		for(Bee agent: agents) {
+		for (Bee agent : agents) {
 			if (res < agent.getFitness()) {
 				res = agent.getFitness();
 			}
@@ -73,12 +70,12 @@ public class BeeEnv extends Env{
 	@Override
 	public double getBestEval() {
 		Bee res = agents.get(0);
-		for(Bee agent: agents) {
+		for (Bee agent : agents) {
 			if (res.getFitness() < agent.getFitness()) {
 				res = agent;
 			}
 		}
 		return res.getEvaluate();
 	}
-	
+
 }
diff --git a/aabc/Phase.java b/aabc/Phase.java
index 14b6a1e3fb6342f316835068dc663e4e6bf9f20f..e91874234dc4c610220d4926fd3cb53142dbc1c1 100644
--- a/aabc/Phase.java
+++ b/aabc/Phase.java
@@ -1,7 +1,5 @@
 package aabc;
 
 public enum Phase {
-	EMPLOYED,
-	ONLOOKERS,
-	SCOUTS
+	EMPLOYED, ONLOOKERS, SCOUTS
 }
diff --git a/amakaque/Group.java b/amakaque/Group.java
index beed888b6fcbe6ab10b85cf4fa454f4434f2c268..e6474a75eb97b72ebfa7ae9b24155fc37cbbf658 100644
--- a/amakaque/Group.java
+++ b/amakaque/Group.java
@@ -8,49 +8,49 @@ public class Group {
 
 	private AtomicInteger count;
 	private int localLimitCount;
-	
+
 	private List<SMAgent> agents;
-	
+
 	public Group() {
 		count = new AtomicInteger(1);
 		localLimitCount = 0;
 		agents = new ArrayList<SMAgent>();
 	}
-	
+
 	public void count() {
 		count.getAndIncrement();
 	}
-	
+
 	public int getCount() {
 		return count.intValue();
 	}
-	
+
 	public void resetCount() {
 		count.set(1);
 	}
-	
+
 	public List<SMAgent> getAgents() {
 		return agents;
 	}
-	
+
 	public void removeAgent(SMAgent agent) {
 		agents.remove(agent);
 	}
-	
+
 	public void addAgents(List<SMAgent> _agents) {
-		for (SMAgent agent: _agents) {
+		for (SMAgent agent : _agents) {
 			agents.add(agent);
 		}
 	}
-	
+
 	public void addLocalLimit() {
-		localLimitCount ++;
+		localLimitCount++;
 	}
-	
+
 	public void resetLocalLimit() {
 		localLimitCount = 0;
 	}
-	
+
 	public int getLocallimit() {
 		return this.localLimitCount;
 	}
diff --git a/amakaque/Phase.java b/amakaque/Phase.java
index 9900e31908516f5eac4012a3aa0ac0ade52a9e17..339acbd93a2a43f7988bd44dfa04c0144743d7f0 100644
--- a/amakaque/Phase.java
+++ b/amakaque/Phase.java
@@ -1,10 +1,6 @@
 package amakaque;
 
 public enum Phase {
-	LOCAL_LEADER_PHASE,
-	GLOBAL_LEADER_PHASE,
-	GLOBAL_LEADER_LEARNING,
-	LOCAL_LEADER_LEARNING,
-	LOCAL_LEADER_DECISION,
+	LOCAL_LEADER_PHASE, GLOBAL_LEADER_PHASE, GLOBAL_LEADER_LEARNING, LOCAL_LEADER_LEARNING, LOCAL_LEADER_DECISION,
 	GLOBAL_LEADER_DECISION,
 }
diff --git a/amakaque/SMAgent.java b/amakaque/SMAgent.java
index debf14e92f3ed8c10255f01c7d0203503b5f049a..c51b2f2aada7a403acbd46e7a0f6d2e1bf657974 100644
--- a/amakaque/SMAgent.java
+++ b/amakaque/SMAgent.java
@@ -8,25 +8,25 @@ import eval.Eval;
 import mas.core.Agent;
 
 public class SMAgent extends BaseAgent {
-	
+
 	private SMEnv env;
-	
-	/* 
+
+	/*
 	 * Is local leader/ is global leader
-	 * */
+	 */
 	private boolean isLL;
 	private boolean isGL;
-	
+
 	private int currentGroup;
-	
+
 	private Double pr;
-	
+
 	private Phase phase;
-	
+
 	// LLP
 	private List<Double> randomLocalMember;
 	private List<Double> localLeader;
-	
+
 	// GLP
 	// private List<Double> randomLocalMember;
 	private double maxFitness;
@@ -34,22 +34,22 @@ public class SMAgent extends BaseAgent {
 	private int groupSize;
 	private int count;
 	private boolean allDone;
-	
+
 	// GLL
 	private SMAgent nextGL;
 	private boolean isNewGL;
-	
-	//LLL
+
+	// LLL
 	private SMAgent nextLL;
 	private boolean isNewLL;
-	
-	//LLD
+
+	// LLD
 	// private List<Double> globalLeader;
 	// private List<Double> localLeader;
 	private int localLimit;
 	private int localLimitCount;
-	
-	//GLD
+
+	// GLD
 	private int globalLimit;
 	private int globalLimitCount;
 	private int numberOfGroups;
@@ -57,349 +57,324 @@ public class SMAgent extends BaseAgent {
 
 	/*
 	 * Init the SM with random starting point within the env dimension
-	 */	
+	 */
 	public SMAgent(int _id, double _pr, SMEnv _env, Eval _eval) {
 		super(_eval, _id);
-		
+
 		env = _env;
-		
+
 		if (id == 0) {
 			isLL = true;
 			isGL = true;
-		}
-		else {
+		} else {
 			isLL = false;
 			isGL = false;
 		}
 		currentGroup = 0;
-		
+
 		vector = generateRandomVector();
 		evaluate = evaluate(vector);
 		fitness = fitness(evaluate);
-		
+
 		pr = _pr;
 		phase = Phase.LOCAL_LEADER_PHASE;
 	}
 
-    @Override
-    public void perceive() {
-
-    	
-    	if (phase == Phase.LOCAL_LEADER_PHASE) {
-    		
-    		localLeader = env.getLocalLeader(currentGroup);
-    		randomLocalMember = env.getRandomGroupMember(currentGroup, id);
-    	}
-    	else if(phase == Phase.GLOBAL_LEADER_PHASE){
-    		randomLocalMember = env.getRandomGroupMember(currentGroup, id);
-    		globalLeader = env.getGlobalLeader();
-    		groupSize = env.getGroupSize(currentGroup);
-    		maxFitness = env.getBestFitness();
-    		count = env.getCount(currentGroup);
-    		allDone = env.allDone();
-    	}
-    	else if(phase == Phase.GLOBAL_LEADER_LEARNING) {
-    		if (isGL) {
-    			nextGL = env.findNextGL();
-    		}
-    		this.isNewGL = false;
-    	}
-    	else if(phase == Phase.LOCAL_LEADER_LEARNING) {
-    		if (isLL) {
-    			nextLL = env.findNextLL(currentGroup);
-    		}
-    		isNewLL = false;
-    	}
-    	else if(phase == Phase.LOCAL_LEADER_DECISION) {
-    		globalLeader = env.getGlobalLeader();
-    		localLeader = env.getLocalLeader(currentGroup);
-    		localLimit = env.getLocalLimit();
-    		localLimitCount = env.getLocalLimitCount(currentGroup);
-    	}
-    	else if(phase == Phase.GLOBAL_LEADER_DECISION) {
-        	if (localLimitCount > localLimit) {
-        		if (isLL) {
-        			env.resetLocalCount(currentGroup);
-        		}
-        	}
-    		globalLimit = env.getGlobalLimit();
-    		globalLimitCount = env.getGlobalLimitCount();
-    		numberOfGroups = env.getNbrGroup();
-    		numberOfGroupsLimit = env.getGroupLimit();
-    	}
-    	
-    }
-    
-    private void nextPhase() {
-    	if (phase == Phase.LOCAL_LEADER_PHASE) {
-    		phase = Phase.GLOBAL_LEADER_PHASE;
-    		return;
-    	}
-    	else if(phase == Phase.GLOBAL_LEADER_PHASE){
-    		phase = Phase.GLOBAL_LEADER_LEARNING;
-    		return;
-    	}
-    	else if(phase == Phase.GLOBAL_LEADER_LEARNING){
-    		phase = Phase.LOCAL_LEADER_LEARNING;
-    		return;
-    	}
-    	else if(phase == Phase.LOCAL_LEADER_LEARNING){
-    		phase = Phase.LOCAL_LEADER_DECISION;
-    		return;
-    	}
-    	else if(phase == Phase.LOCAL_LEADER_DECISION){
-    		phase = Phase.GLOBAL_LEADER_DECISION;
-    		return;
-    	}
-    	else if(phase == Phase.GLOBAL_LEADER_DECISION){
-    		phase = Phase.LOCAL_LEADER_PHASE;
-    		return;
-    	}
-    }
-    
-    private void localLeaderPhase() {
-    	/*
-    	 * For finding the objective (Food Source),
-    	 *  generate the new positions for all the group members by using self experience,
-    	 *   local leader experience and group members experience
-    	 */
-
-    	List<Double> newValues = new ArrayList<Double>();
-    	
+	@Override
+	public void perceive() {
+
+		if (phase == Phase.LOCAL_LEADER_PHASE) {
+
+			localLeader = env.getLocalLeader(currentGroup);
+			randomLocalMember = env.getRandomGroupMember(currentGroup, id);
+		} else if (phase == Phase.GLOBAL_LEADER_PHASE) {
+			randomLocalMember = env.getRandomGroupMember(currentGroup, id);
+			globalLeader = env.getGlobalLeader();
+			groupSize = env.getGroupSize(currentGroup);
+			maxFitness = env.getBestFitness();
+			count = env.getCount(currentGroup);
+			allDone = env.allDone();
+		} else if (phase == Phase.GLOBAL_LEADER_LEARNING) {
+			if (isGL) {
+				nextGL = env.findNextGL();
+			}
+			this.isNewGL = false;
+		} else if (phase == Phase.LOCAL_LEADER_LEARNING) {
+			if (isLL) {
+				nextLL = env.findNextLL(currentGroup);
+			}
+			isNewLL = false;
+		} else if (phase == Phase.LOCAL_LEADER_DECISION) {
+			globalLeader = env.getGlobalLeader();
+			localLeader = env.getLocalLeader(currentGroup);
+			localLimit = env.getLocalLimit();
+			localLimitCount = env.getLocalLimitCount(currentGroup);
+		} else if (phase == Phase.GLOBAL_LEADER_DECISION) {
+			if (localLimitCount > localLimit) {
+				if (isLL) {
+					env.resetLocalCount(currentGroup);
+				}
+			}
+			globalLimit = env.getGlobalLimit();
+			globalLimitCount = env.getGlobalLimitCount();
+			numberOfGroups = env.getNbrGroup();
+			numberOfGroupsLimit = env.getGroupLimit();
+		}
+
+	}
+
+	private void nextPhase() {
+		if (phase == Phase.LOCAL_LEADER_PHASE) {
+			phase = Phase.GLOBAL_LEADER_PHASE;
+			return;
+		} else if (phase == Phase.GLOBAL_LEADER_PHASE) {
+			phase = Phase.GLOBAL_LEADER_LEARNING;
+			return;
+		} else if (phase == Phase.GLOBAL_LEADER_LEARNING) {
+			phase = Phase.LOCAL_LEADER_LEARNING;
+			return;
+		} else if (phase == Phase.LOCAL_LEADER_LEARNING) {
+			phase = Phase.LOCAL_LEADER_DECISION;
+			return;
+		} else if (phase == Phase.LOCAL_LEADER_DECISION) {
+			phase = Phase.GLOBAL_LEADER_DECISION;
+			return;
+		} else if (phase == Phase.GLOBAL_LEADER_DECISION) {
+			phase = Phase.LOCAL_LEADER_PHASE;
+			return;
+		}
+	}
+
+	private void localLeaderPhase() {
+		/*
+		 * To find the objective (food source), generate new positions for all the
+		 * group members using self experience, local-leader experience and the
+		 * group members' experience.
+		 */
+
+		List<Double> newValues = new ArrayList<Double>();
+
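+		// Per dimension, with probability (1 - pr):
+		// new_i = x_i + U(0,1) * (localLeader_i - x_i) + U(-1,1) * (randomMember_i - x_i), clamped to the bounds.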
 		for (int i = 0; i < vector.size(); i++) {
-			
+
 			Double currentValue = vector.get(i);
-			
+
 			if (Math.random() >= pr) {
-				double value = currentValue 
-						+ Math.random() * (localLeader.get(i) - currentValue)
+				double value = currentValue + Math.random() * (localLeader.get(i) - currentValue)
 						+ 2 * (Math.random() - 0.5) * (randomLocalMember.get(i) - currentValue);
 				if (value > eval.getMax(i)) {
 					value = eval.getMax(i);
-				}
-				else if (value < eval.getMin(i)) {
+				} else if (value < eval.getMin(i)) {
 					value = eval.getMin(i);
 				}
 				newValues.add(value);
-			}
-			else {
+			} else {
 				newValues.add(currentValue);
 			}
 		}
-		
+
 		/*
-		 *  Apply the greedy selection process between existing position and newly generated position,
-		 *   based on fitness and select the better one;
+		 * Apply the greedy selection process between existing position and newly
+		 * generated position, based on fitness and select the better one;
 		 */
 		compareAndUpdate(newValues);
-    }
-    
-    private void globalLeaderPhase() {
-    	/*
+	}
+
+	private void globalLeaderPhase() {
+		/*
 		 * Calculate the probability prob for all the group members using equation
 		 */
-		double prob = 0.9 * fitness/maxFitness + 0.1;
-		
+		double prob = 0.9 * fitness / maxFitness + 0.1;
+
 		/*
-		 *  Produce new positions for the all the group members, selected by probi , 
-		 *  by using self experience, global leader experience and group members experiences.
+		 * Produce new positions for the group members selected by prob_i, using
+		 * self experience, global-leader experience and group members' experience.
 		 */
-		
+
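+		// Each selected member perturbs one random dimension j; the group-wide counter limits
+		// how many members update before the phase is considered done (see allDone()).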
 		if (count < groupSize) {
 			if (Math.random() < prob) {
 				env.count(currentGroup);
-				int j = (int)(Math.random() * vector.size());
-				
+				int j = (int) (Math.random() * vector.size());
+
 				List<Double> newValues = new ArrayList<Double>(vector);
 				Double currentValue = vector.get(j);
-				double value = currentValue 
-						+ Math.random() * (globalLeader.get(j) - currentValue)
-						+ (2*Math.random() - 1) * (randomLocalMember.get(j) - currentValue);
+				double value = currentValue + Math.random() * (globalLeader.get(j) - currentValue)
+						+ (2 * Math.random() - 1) * (randomLocalMember.get(j) - currentValue);
 				if (value > eval.getMax(j)) {
 					value = eval.getMax(j);
-				}
-				else if (value < eval.getMin(j)) {
+				} else if (value < eval.getMin(j)) {
 					value = eval.getMin(j);
 				}
-				newValues.set(j,value);
-				
+				newValues.set(j, value);
 
 				compareAndUpdate(newValues);
 			}
 		}
-    }
-    
-    private void globalLeaderLearning() {
-    	/* 
-    	 * In this phase, the position of the global leader is updated by applying the greedy selection in the population i.e.,
-    	 *  the position of the SM having best fitness in the population is selected as the updated position of the global leader.
-    	 *   Further, it is checked that the position of global leader is updating or not and if not then the GlobalLimitCount is incremented by 1.
-    	 */
-    	
-    	if (isGL && ! this.isNewGL ) {
-    		if (nextGL.equals(this)) {
-    			env.addGlobalLimitCount();
-    		}
-    		else {
-        		nextGL.becomeGL();
-        		isGL = false;
-    		}
-    	}
-    }
-    
-    private void localLeaderLearning() {
-    	/*
-    	 * In this phase, the position of the local leader is updated by applying the greedy selection in that group i.e.,
-    	 * the position of the SM having best fitness in that group is selected as the updated position of the local leader.
-    	 *  Next, the updated position of the local leader is compared with the old one 
-    	 *  and if the local leader is not updated then the LocalLimitCount is incremented by 1.
-    	 */
-    	if (isLL && ! this.isNewLL ) {
-    		if (nextLL.equals(this)) {
-    			env.addLocalLimit(currentGroup);
-    		}
-    		else {
-    			nextLL.becomeLL();
-    			isLL = false;
-    		}
-    	}
-    }
- 
-    private void localLeaderDecision() {
-    	
-    	/*
-    	 * If any Local group leader is not updating her position after a specified number of times (LocalLeaderLimit)
-    	 * then re-direct all members of that particular group for foraging by algorithm 
-    	 */
-    	if (localLimitCount > localLimit) {
-        	List<Double> newValues = new ArrayList<Double>();
-    		
-    		for (int j = 0; j < vector.size(); j++) {
-
-    			Double currentValue = vector.get(j);
-    			
-        		if (Math.random() >= pr) {
-        			newValues.add(eval.getMin(j) + Math.random() * (eval.getMax(j) - eval.getMin(j)));
-        		}
-        		else {
-        			newValues.add(
-        					currentValue
-        					+ Math.random() * (globalLeader.get(j) - currentValue)
-        					+ Math.random() * (currentValue - localLeader.get(j))
-        					);
-        		}
-    		}
-    		
-    		vector = newValues;
-    		evaluate = evaluate(vector);
-    		fitness = fitness(evaluate);
-    		
-    	}
-    }
-    
-    private void globalLeaderDecision() {
-    	
-    	/*
-    	 * If Global Leader is not updating her position for a specified number of times (GlobalLeaderLimit)
-    	 *  then she divides the group into smaller groups by algorithm
-    	 */
-    	if (isGL) {
-    		if (globalLimitCount > globalLimit) {
-    			env.resetGlobalCount();
-    			
-    			if (numberOfGroups < numberOfGroupsLimit) {
-    				env.combineAllGroups();
-    				env.splitInNGroups(numberOfGroups+1);
-
-        			for (int i = 0; i < numberOfGroups+1; i++ ) {
-        				env.findNextLL(i).becomeLL();
-        			}
-    				
-    			}
-    			else {
-    				env.combineAllGroups();
-    				env.findNextLL(0).becomeLL();
-    			}
-    			
-    			
-    		}
-    	}
-    	
-    	
-    }
-    
-    @Override
-    public void act() {
-    	
-    	if (phase == Phase.LOCAL_LEADER_PHASE) {
-    		localLeaderPhase();
-    		nextPhase();
-    		
-    		if (isLL) { // local leader reset his group count before next phase
-    			env.resetGroupCount(currentGroup);
-    		}
-    		
-    	}
-    	else if(phase == Phase.GLOBAL_LEADER_PHASE){
-    		if (allDone) {
-    			nextPhase();
-    		}
-    		else {
-        		globalLeaderPhase();
-    		}
-    	}
-    	else if(phase == Phase.GLOBAL_LEADER_LEARNING) {
-    		globalLeaderLearning();
+	}
+
+	private void globalLeaderLearning() {
+		/*
+		 * In this phase the position of the global leader is updated by applying
+		 * greedy selection over the whole population, i.e. the position of the SM
+		 * with the best fitness becomes the new global-leader position. If the
+		 * global leader's position has not improved, GlobalLimitCount is
+		 * incremented by 1.
+		 */
+
+		if (isGL && !this.isNewGL) {
+			if (nextGL.equals(this)) {
+				env.addGlobalLimitCount();
+			} else {
+				nextGL.becomeGL();
+				isGL = false;
+			}
+		}
+	}
+
+	private void localLeaderLearning() {
+		/*
+		 * In this phase the position of the local leader is updated by applying
+		 * greedy selection within the group, i.e. the position of the SM with the
+		 * best fitness in that group becomes the new local-leader position. If the
+		 * local leader's position has not improved, LocalLimitCount is incremented
+		 * by 1.
+		 */
+		if (isLL && !this.isNewLL) {
+			if (nextLL.equals(this)) {
+				env.addLocalLimit(currentGroup);
+			} else {
+				nextLL.becomeLL();
+				isLL = false;
+			}
+		}
+	}
+
+	private void localLeaderDecision() {
+
+		/*
+		 * If a local group leader has not updated its position for a specified
+		 * number of iterations (LocalLeaderLimit), redirect all members of that
+		 * group to forage again.
+		 */
+		if (localLimitCount > localLimit) {
+			List<Double> newValues = new ArrayList<Double>();
+
+			for (int j = 0; j < vector.size(); j++) {
+
+				Double currentValue = vector.get(j);
+
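+				// With probability (1 - pr) re-initialise this dimension uniformly at random;
+				// otherwise move it using both the global-leader and local-leader positions.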
+				if (Math.random() >= pr) {
+					newValues.add(eval.getMin(j) + Math.random() * (eval.getMax(j) - eval.getMin(j)));
+				} else {
+					newValues.add(currentValue + Math.random() * (globalLeader.get(j) - currentValue)
+							+ Math.random() * (currentValue - localLeader.get(j)));
+				}
+			}
+
+			vector = newValues;
+			evaluate = evaluate(vector);
+			fitness = fitness(evaluate);
+
+		}
+	}
+
+	private void globalLeaderDecision() {
+
+		/*
+		 * If the global leader has not updated its position for a specified number
+		 * of iterations (GlobalLeaderLimit), split the population into more groups,
+		 * or merge all groups back into one once the group limit is reached.
+		 */
+		if (isGL) {
+			if (globalLimitCount > globalLimit) {
+				env.resetGlobalCount();
+
+				if (numberOfGroups < numberOfGroupsLimit) {
+					env.combineAllGroups();
+					env.splitInNGroups(numberOfGroups + 1);
+
+					for (int i = 0; i < numberOfGroups + 1; i++) {
+						env.findNextLL(i).becomeLL();
+					}
+
+				} else {
+					env.combineAllGroups();
+					env.findNextLL(0).becomeLL();
+				}
+
+			}
+		}
+
+	}
+
+	@Override
+	public void act() {
+
+		if (phase == Phase.LOCAL_LEADER_PHASE) {
+			localLeaderPhase();
 			nextPhase();
-    	}
-    	else if(phase == Phase.LOCAL_LEADER_LEARNING) {
-    		localLeaderLearning();
+
+			if (isLL) { // the local leader resets its group count before the next phase
+				env.resetGroupCount(currentGroup);
+			}
+
+		} else if (phase == Phase.GLOBAL_LEADER_PHASE) {
+			if (allDone) {
+				nextPhase();
+			} else {
+				globalLeaderPhase();
+			}
+		} else if (phase == Phase.GLOBAL_LEADER_LEARNING) {
+			globalLeaderLearning();
+			nextPhase();
+		} else if (phase == Phase.LOCAL_LEADER_LEARNING) {
+			localLeaderLearning();
 			nextPhase();
-    	}
-    	else if(phase == Phase.LOCAL_LEADER_DECISION) {
-    		localLeaderDecision();
+		} else if (phase == Phase.LOCAL_LEADER_DECISION) {
+			localLeaderDecision();
 			nextPhase();
-    	}
-    	else if(phase == Phase.GLOBAL_LEADER_DECISION) {
-    		globalLeaderDecision();
+		} else if (phase == Phase.GLOBAL_LEADER_DECISION) {
+			globalLeaderDecision();
 			nextPhase();
-    	}
-    }
-    
-    public boolean isLL() {
+		}
+	}
+
+	public boolean isLL() {
 		return isLL;
 	}
-    
-    public boolean isGL() {
+
+	public boolean isGL() {
 		return isGL;
 	}
-    
-    public void becomeGL() {
-    	this.isGL = true;
-    	this.isNewGL = true;
-    }
-    
-    public void becomeLL() {
-    	isLL = true;
-    	isNewLL = true;
-    }
-    
-    public void giveLocalLeadership() {
-    	isLL = false;
-    }
-    
-    public void enterNewGroup(int group) {
-    	currentGroup = group;
-    }
-    
-    public Phase getPhase() {
+
+	public void becomeGL() {
+		this.isGL = true;
+		this.isNewGL = true;
+	}
+
+	public void becomeLL() {
+		isLL = true;
+		isNewLL = true;
+	}
+
+	public void giveLocalLeadership() {
+		isLL = false;
+	}
+
+	public void enterNewGroup(int group) {
+		currentGroup = group;
+	}
+
+	public Phase getPhase() {
 		return phase;
 	}
-    
-    @Override
+
+	@Override
 	public boolean equals(Object obj) {
-    	if (!(obj instanceof SMAgent)) return false;
-    	SMAgent SMobj = (SMAgent) obj;
+		if (!(obj instanceof SMAgent))
+			return false;
+		SMAgent SMobj = (SMAgent) obj;
 		return this.id == SMobj.id;
 	}
 }
diff --git a/amakaque/SMEnv.java b/amakaque/SMEnv.java
index b4c811e5374c8d3f618771bad54fd0e30661aac4..93989dc9b8a91d4efcf468f03ce48cb1a483b150 100644
--- a/amakaque/SMEnv.java
+++ b/amakaque/SMEnv.java
@@ -7,103 +7,102 @@ import java.util.Random;
 import baseOptiAgent.Env;
 import eval.Eval;
 
-public class SMEnv extends Env{
-
+public class SMEnv extends Env {
 
 	private int globalLimitCount;
 	private int localLimit;
 	private int globalLimit;
 	private int groupLimit;
 	private List<Group> groups;
-	
+
 	public SMEnv(int _localLimit, int _globalLimit, int _groupLimit, Eval _eval) {
 		globalLimitCount = 0;
 		eval = _eval;
-		
+
 		localLimit = _localLimit;
 		globalLimit = _globalLimit;
-		
+
 		groupLimit = _groupLimit;
-		
+
 		groups = new ArrayList<Group>();
 	}
-	
+
 	public void initGroup(Group group) {
 		groups.add(group);
 	}
-	
+
 	public int getGlobalLimitCount() {
 		return globalLimitCount;
 	}
-	
+
 	public int getGlobalLimit() {
 		return globalLimit;
 	}
-	
+
 	public int getNbrGroup() {
 		return this.groups.size();
 	}
-	
+
 	public int getGroupLimit() {
 		return this.groupLimit;
 	}
-	
+
 	public void addGlobalLimitCount() {
-		globalLimitCount ++;
+		globalLimitCount++;
 	}
-	
+
 	public void resetGlobalCount() {
 		globalLimitCount = 0;
 	}
-	
+
 	public List<Double> getRandomGroupMember(int i_group, int self_id) {
 		Group group = groups.get(i_group);
-		
+
 		Random rand = new Random();
 		SMAgent randomagent = group.getAgents().get(rand.nextInt(group.getAgents().size()));
-		
+
 		if (group.getAgents().size() == 1) {
-			throw new java.lang.Error("The group :"+String.valueOf(i_group)+" only have 1 member.");
+			throw new java.lang.Error("The group " + String.valueOf(i_group) + " only has 1 member.");
 		}
-		
+
 		while (randomagent.getId() == self_id) {
 			randomagent = group.getAgents().get(rand.nextInt(group.getAgents().size()));
 		}
 
 		return randomagent.getVector();
 	}
-	
-	public List<Double> getLocalLeader(int i_group){
+
+	public List<Double> getLocalLeader(int i_group) {
 		Group group = groups.get(i_group);
-		
+
 		List<SMAgent> agents = group.getAgents();
-		for (SMAgent agent: agents) {
-			if(agent.isLL()) {
+		for (SMAgent agent : agents) {
+			if (agent.isLL()) {
 				return agent.getVector();
 			}
 		}
-		throw new java.lang.Error("The group :"+String.valueOf(i_group)+" don t have a local leader.");
+		throw new java.lang.Error("The group " + String.valueOf(i_group) + " doesn't have a local leader.");
 	}
 
 	@Override
 	public double getBestFitness() {
 		double max = 0;
-		
-		for (Group group: groups) {
-			for (SMAgent agent: group.getAgents()) {
+
+		for (Group group : groups) {
+			for (SMAgent agent : group.getAgents()) {
 				double fitness = agent.getFitness();
-				if(fitness > max) {
+				if (fitness > max) {
 					max = fitness;
 				}
 			}
 		}
 		return max;
 	}
-	
+
 	public List<Double> getGlobalLeader() {
 
-		for (Group group: groups) {
-			for (SMAgent agent: group.getAgents()) {
+		for (Group group : groups) {
+			for (SMAgent agent : group.getAgents()) {
 				if (agent.isGL()) {
 					return agent.getVector();
 				}
@@ -111,102 +110,100 @@ public class SMEnv extends Env{
 		}
 		throw new java.lang.Error("No global leader found.");
 	}
-	
+
 	public int getGroupSize(int i_group) {
 		return groups.get(i_group).getAgents().size();
 	}
-	
+
 	public int getCount(int i_group) {
 		return groups.get(i_group).getCount();
 	}
-	
+
 	public void count(int i_group) {
 		groups.get(i_group).count();
 	}
-	
+
 	public void resetGroupCount(int i_group) {
 		groups.get(i_group).resetCount();
 	}
-	
+
 	public boolean allDone() {
-		for (Group group: groups) {
+		for (Group group : groups) {
 			if (group.getCount() < group.getAgents().size()) {
 				return false;
 			}
 		}
 		return true;
 	}
-	
+
 	public SMAgent findNextGL() {
 		SMAgent nextGL = null;
-		for (Group group: groups) {
-			for (SMAgent agent: group.getAgents()) {
+		for (Group group : groups) {
+			for (SMAgent agent : group.getAgents()) {
 				if (nextGL == null) {
 					nextGL = agent;
-				}
-				else if (nextGL.getFitness() < agent.getFitness()) {
+				} else if (nextGL.getFitness() < agent.getFitness()) {
 					nextGL = agent;
 				}
 			}
 		}
 		return nextGL;
 	}
-	
+
 	public void addLocalLimit(int i_group) {
 		groups.get(i_group).addLocalLimit();
 	}
-	
+
 	public SMAgent findNextLL(int i_group) {
 		SMAgent nextLL = null;
-		for (SMAgent agent: groups.get(i_group).getAgents()) {
+		for (SMAgent agent : groups.get(i_group).getAgents()) {
 			if (nextLL == null) {
 				nextLL = agent;
-			}
-			else if (nextLL.getFitness() < agent.getFitness()) {
+			} else if (nextLL.getFitness() < agent.getFitness()) {
 				nextLL = agent;
-				
+
 			}
 		}
 		return nextLL;
 	}
-	
+
 	public int getLocalLimitCount(int i_group) {
 		return groups.get(i_group).getLocallimit();
 	}
-	
+
 	public void resetLocalCount(int i_group) {
 		groups.get(i_group).resetLocalLimit();
 	}
-	
+
 	public int getLocalLimit() {
 		return localLimit;
 	}
 
 	public void combineAllGroups() {
 		while (groups.size() > 1) {
-			for(SMAgent agent: groups.get(1).getAgents()) {
+			for (SMAgent agent : groups.get(1).getAgents()) {
 				agent.giveLocalLeadership();
 				agent.enterNewGroup(0);
 			}
-			
+
 			groups.get(0).addAgents(groups.get(1).getAgents());
 			groups.remove(1);
 		}
 	}
-	
+
 	public void splitInNGroups(int n) {
 
-		for (int i = 0; i < n-1; i++) {
+		for (int i = 0; i < n - 1; i++) {
 			groups.add(new Group());
 		}
 
 		List<SMAgent> agents = new ArrayList<SMAgent>();
 		agents.addAll(groups.get(0).getAgents());
-		
+
 		for (SMAgent agent : agents) {
 			groups.get(0).removeAgent(agent);
 		}
-		
+
 		for (SMAgent agent : agents) {
 			agent.giveLocalLeadership();
 			agent.enterNewGroup(agent.getId() % n);
@@ -215,14 +212,14 @@ public class SMEnv extends Env{
 			groups.get(agent.getId() % n).addAgents(list);
 		}
 	}
-	
+
 	public List<Group> getGroups() {
 		return groups;
 	}
-	
+
 	public double getGLValue() {
-		for (Group group: groups) {
-			for (SMAgent agent: group.getAgents()) {
+		for (Group group : groups) {
+			for (SMAgent agent : group.getAgents()) {
 				if (agent.isGL()) {
 					return agent.getFitness();
 				}
@@ -234,10 +231,10 @@ public class SMEnv extends Env{
 	@Override
 	public double getBestEval() {
 		SMAgent maxAgent = groups.get(0).getAgents().get(0);
-		
-		for (Group group: groups) {
-			for (SMAgent agent: group.getAgents()) {
-				if(agent.getFitness() > maxAgent.getFitness()) {
+
+		for (Group group : groups) {
+			for (SMAgent agent : group.getAgents()) {
+				if (agent.getFitness() > maxAgent.getFitness()) {
 					maxAgent = agent;
 				}
 			}
diff --git a/amakaque/SMSolver.java b/amakaque/SMSolver.java
index 7a1ed05d4e8f51ed5df49f21f34a774170ee392e..103093f20ed8b2ea704d19a4d86d66cea8a2f1f6 100644
--- a/amakaque/SMSolver.java
+++ b/amakaque/SMSolver.java
@@ -7,7 +7,7 @@ import baseOptiAgent.Result;
 import baseOptiAgent.Solver;
 import eval.Eval;
 
-public class SMSolver extends Solver{
+public class SMSolver extends Solver {
 
 	public SMSolver(Eval _eval, int _maxCycle, int _maxEval) {
 		super(_eval, _maxCycle, _maxEval);
@@ -16,31 +16,31 @@ public class SMSolver extends Solver{
 
 	@Override
 	public Result solve() {
-		
+
 		// [PARAM]
 		int nbrAgent = 50;
-		
+
 		// [0.1 : 0.9]
 		double pr = 0.1;
-		
+
 		//
 		int maxGroup = 5;
-		
+
 		int localLimit = 1500;
 		int globalLimit = 50;
-		
+
 		// [INIT]
 		SMEnv env = new SMEnv(localLimit, globalLimit, maxGroup, eval);
-		
+
 		List<SMAgent> agents = new ArrayList<SMAgent>();
 		for (int i = 0; i < nbrAgent; i++) {
 			agents.add(new SMAgent(i, pr, env, eval));
 		}
-		
+
 		Group group = new Group();
 		group.addAgents(agents);
 		env.initGroup(group);
-		
+
 		return findSolution(agents, env, 6);
 	}
 }
diff --git a/amakaque/Scheduler.java b/amakaque/Scheduler.java
index 997aede50c69141fc454986e300fd6f0ad129661..cc828a77f962cf3c15df92b9070005c62e19712a 100644
--- a/amakaque/Scheduler.java
+++ b/amakaque/Scheduler.java
@@ -4,23 +4,22 @@ import eval.Eval;
 import mas.core.ThreeStepCyclable;
 import mas.implementation.schedulers.variations.ThreeStepCycling;
 
-public class Scheduler extends ThreeStepCycling{
+public class Scheduler extends ThreeStepCycling {
 
 	public SMEnv env;
 	public Eval eval;
-	
-	
+
 	public Scheduler(SMEnv _env, Eval _eval, ThreeStepCyclable... _threeStepCyclables) {
 		super(_threeStepCyclables);
 		env = _env;
 		eval = _eval;
 	}
-	
+
 	@Override
 	public boolean stopCondition() {
 		if (env.getGroups().get(0).getAgents().get(0).getPhase() != Phase.LOCAL_LEADER_PHASE) {
 			return false;
 		}
-		return Math.abs(eval.evaluate(env.getGlobalLeader())-eval.getObjective())<=eval.getErrorDelta();
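+		// Stop once the global leader's evaluation is within errorDelta of the objective,
+		// checked only at the start of a full phase cycle (LOCAL_LEADER_PHASE).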
+		return Math.abs(eval.evaluate(env.getGlobalLeader()) - eval.getObjective()) <= eval.getErrorDelta();
 	}
 }
diff --git a/ant/Ant.java b/ant/Ant.java
index 3534a06ff202235eb812343df0f3c37620f6f0b4..cc133e3f844abe0c7e02161a6715e2a958bcc27e 100644
--- a/ant/Ant.java
+++ b/ant/Ant.java
@@ -7,141 +7,129 @@ import java.util.Random;
 import baseOptiAgent.BaseAgent;
 import eval.Eval;
 
-public class Ant extends BaseAgent{
-	
+public class Ant extends BaseAgent {
+
 	AntEnv env;
-	
+
 	Phase phase;
 
 	private double evaporationRate;
-	
+
 	// EVAL
-	
+
 	// BUILD NEW
 	private List<Solution> archive;
 	private List<Double> archiveProba;
-	
+
 	public Ant(int _id, Eval _eval, AntEnv _env, double _evaporationRate) {
 		super(_eval, _id);
 
 		env = _env;
 
 		evaporationRate = _evaporationRate;
-		
+
 		vector = new ArrayList<Double>();
 		for (int i = 0; i < eval.getDim(); i++) {
 			vector.add(eval.getMin(i) + Math.random() * (eval.getMax(i) - eval.getMin(i)));
 		}
-		
+
 		evaluate = evaluate(vector);
-		
+
 		phase = Phase.EVALUATE;
 	}
-	
-    private void nextPhase() {
+
+	private void nextPhase() {
 		if (phase == Phase.EVALUATE) {
 			phase = Phase.BUILD_NEW;
-		}
-		else if (phase == Phase.BUILD_NEW) {
+		} else if (phase == Phase.BUILD_NEW) {
 			phase = Phase.EVALUATE;
 		}
-    }
-    
+	}
+
 	@Override
 	public void perceive() {
 		/*
-		if(id == 0) {
-			System.out.println("\n\n");
-		}
-		System.out.println(this);
-		*/
-		
-		
+		 * if(id == 0) { System.out.println("\n\n"); } System.out.println(this);
+		 */
+
 		if (phase == Phase.EVALUATE) {
-			if(id == 0) {
-				
+			if (id == 0) {
+
 			}
-		}
-		else if (phase == Phase.BUILD_NEW) {
+		} else if (phase == Phase.BUILD_NEW) {
 			archive = env.getArchive();
 			archiveProba = env.getArchiveProba();
 		}
 	}
-	
+
 	@Override
 	public void act() {
 		if (phase == Phase.EVALUATE) {
-			if(id == 0) {
+			if (id == 0) {
 				env.updateArchive();
 				env.computeProba();
 			}
-		}
-		else if (phase == Phase.BUILD_NEW) {
-			
+		} else if (phase == Phase.BUILD_NEW) {
+
 			vector = buildNewSolution();
 			evaluate = evaluate(vector);
-			
+
 		}
 		nextPhase();
 	}
-	
+
 	public int chooseSolution() {
 		double prob = Math.random();
-		
+
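+		// Roulette-wheel selection over the archive probabilities.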
 		double res = archiveProba.get(0);
 		int i = 0;
-		while (res<prob) {
-			i ++;
+		while (res < prob) {
+			i++;
 			res += archiveProba.get(i);
 		}
 		return i;
 	}
-	
-	public List<Double> buildNewSolution(){
+
+	public List<Double> buildNewSolution() {
 
 		int archiveLen = archive.size();
-		
-        Random ran = new Random();
-		
+
+		Random ran = new Random();
+
 		List<Double> sol = new ArrayList<Double>();
-		
-		
+
 		for (int j = 0; j < eval.getDim(); j++) {
 			int i_archive = chooseSolution();
 			List<Double> init = archive.get(i_archive).getVector();
-			
+
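+			// Gaussian sampling around the chosen archive solution: the standard deviation is the
+			// evaporation-rate-scaled mean distance of the archive to that solution in dimension j (ACO_R-style).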
 			double d = 0;
-			for(int i = 0; i < archiveLen; i++) {
-				d += Math.abs(archive.get(i).getVector().get(j) - init.get(j))/(archiveLen-1);
+			for (int i = 0; i < archiveLen; i++) {
+				d += Math.abs(archive.get(i).getVector().get(j) - init.get(j)) / (archiveLen - 1);
 			}
-			
+
 			double o = evaporationRate * d;
-			
+
 			double possibleSol = ran.nextGaussian(init.get(j), o);
-			
+
 			if (possibleSol > eval.getMax(j)) {
 				possibleSol = eval.getMax(j);
-			}
-			else if (possibleSol < eval.getMin(j)) {
+			} else if (possibleSol < eval.getMin(j)) {
 				possibleSol = eval.getMin(j);
 			}
 
-	        sol.add(possibleSol);
+			sol.add(possibleSol);
 		}
-		
+
 		return sol;
 	}
-	
+
 	public Phase getPhase() {
 		return phase;
 	}
-	
+
 	@Override
 	public String toString() {
-		return 
-				"[AGENT] " + id + "\n" +
-				" - Phase " + phase + "\n" +
-				" - Eval " + evaluate + "\n" +
-				" - Vector" + vector + "\n";
+		return "[AGENT] " + id + "\n" + " - Phase " + phase + "\n" + " - Eval " + evaluate + "\n" + " - Vector "
+				+ vector + "\n";
 	}
 }
diff --git a/ant/AntEnv.java b/ant/AntEnv.java
index 0db7d3c43596d2a3797ed4200784fae3abfaa672..01534b414e6e546361a5ad225d1c0642e66591c8 100644
--- a/ant/AntEnv.java
+++ b/ant/AntEnv.java
@@ -9,33 +9,33 @@ import java.util.Random;
 import baseOptiAgent.Env;
 import eval.Eval;
 
-public class AntEnv extends Env{
-	
+public class AntEnv extends Env {
+
 	private List<Solution> archive;
 	private List<Double> archiveProba;
-	
+
 	private Eval eval;
-	
+
 	private List<Ant> agents;
-	
+
 	private int archiveLen;
 	private double q;
-	
+
 	public AntEnv(int _archiveLen, double _q, Eval _eval) {
 		archiveLen = _archiveLen;
 		q = _q;
-		
+
 		archive = new ArrayList<Solution>();
 		archiveProba = new ArrayList<Double>();
-		
+
 		eval = _eval;
 	}
-	
+
 	public void initAgents(List<Ant> _agents) {
 		agents = new ArrayList<Ant>(_agents);
 		updateArchive();
 	}
-	
+
 	private void sortArchive() {
 		Collections.sort(archive, new Comparator<Solution>() {
 			@Override
@@ -50,53 +50,53 @@ public class AntEnv extends Env{
 			}
 		});
 	}
-	
+
 	private void sortAndRemoveSolution() {
 		sortArchive();
-		
-		while(archive.size() > archiveLen) {
-			archive.remove(archive.size()-1);
+
+		while (archive.size() > archiveLen) {
+			archive.remove(archive.size() - 1);
 		}
 	}
-	
+
 	public void updateArchive() {
-		for (Ant agent: agents) {
+		for (Ant agent : agents) {
 			archive.add(new Solution(agent.getVector(), agent.getEvaluate()));
 		}
 
 		sortAndRemoveSolution();
 	}
-	
+
 	public void computeProba() {
 		archiveProba = new ArrayList<Double>();
-		
+
 		List<Double> w = new ArrayList<Double>();
 		double wMax = 0;
-		
-		for(int i = 0; i < archiveLen; i++) {
-			double wi = 1/(q * archiveLen * Math.sqrt(2 * Math.PI))
-					* Math.exp(-Math.pow(i,2) / (2 * Math.pow(q, 2) * Math.pow(archiveLen, 2)));
+
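+		// Rank-based Gaussian weights: w_i = exp(-i^2 / (2 q^2 k^2)) / (q k sqrt(2 pi)) with k = archiveLen,
+		// then normalised so the archive probabilities sum to 1.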
+		for (int i = 0; i < archiveLen; i++) {
+			double wi = 1 / (q * archiveLen * Math.sqrt(2 * Math.PI))
+					* Math.exp(-Math.pow(i, 2) / (2 * Math.pow(q, 2) * Math.pow(archiveLen, 2)));
 			w.add(wi);
 			wMax += wi;
 		}
-		
-		for(int i = 0; i < archiveLen; i++) {
-			archiveProba.add(w.get(i)/wMax);
+
+		for (int i = 0; i < archiveLen; i++) {
+			archiveProba.add(w.get(i) / wMax);
 		}
 	}
-	
+
 	public List<Solution> getArchive() {
 		return archive;
 	}
-	
+
 	public List<Double> getArchiveProba() {
 		return archiveProba;
 	}
-	
+
 	public Eval getEval() {
 		return eval;
 	}
-	
+
 	public List<Ant> getAgents() {
 		return agents;
 	}
@@ -109,6 +109,6 @@ public class AntEnv extends Env{
 
 	@Override
 	public double getBestEval() {
-		return archive.get(archive.size()-1).getEval();
+		return archive.get(archive.size() - 1).getEval();
 	}
 }
diff --git a/ant/AntSolver.java b/ant/AntSolver.java
index 53e9c89252a6d12e96040e1e6d91830de766d8db..dd4995d1a2aa5910b98ac78e834eb6cc1262def1 100644
--- a/ant/AntSolver.java
+++ b/ant/AntSolver.java
@@ -7,7 +7,7 @@ import baseOptiAgent.Result;
 import baseOptiAgent.Solver;
 import eval.Eval;
 
-public class AntSolver extends Solver{
+public class AntSolver extends Solver {
 
 	public AntSolver(Eval _eval, int _maxCycle, int _maxEval) {
 		super(_eval, _maxCycle, _maxEval);
@@ -16,24 +16,24 @@ public class AntSolver extends Solver{
 
 	@Override
 	public Result solve() {
-		
+
 		// [PARAM]
 		int nbrAgent = 40;
 		int archiveSize = 10;
-		
+
 		double evaporation = 0.6;
 		double q = 0.2;
-		
+
 		// [INIT]
 		AntEnv env = new AntEnv(archiveSize, q, eval);
-		
+
 		List<Ant> agents = new ArrayList<Ant>();
 		for (int i_agent = 0; i_agent < nbrAgent; i_agent++) {
 			agents.add(new Ant(i_agent, eval, env, evaporation));
 		}
-		
+
 		env.initAgents(agents);
-		
+
 		return findSolution(agents, env, 2);
 	}
 
diff --git a/ant/Phase.java b/ant/Phase.java
index d6d70260942c736c95d1ee4d67cc8a4cee367bb7..5e571b23a64aaeee0e61c94004e6ce3851045d58 100644
--- a/ant/Phase.java
+++ b/ant/Phase.java
@@ -1,6 +1,5 @@
 package ant;
 
 public enum Phase {
-	BUILD_NEW,
-	EVALUATE
+	BUILD_NEW, EVALUATE
 }
diff --git a/ant/Solution.java b/ant/Solution.java
index 1142fb126a58740a6987fc76e32232505b554d1b..9652a133287f8133fa0b139a7a03d520302ce4e8 100644
--- a/ant/Solution.java
+++ b/ant/Solution.java
@@ -6,22 +6,22 @@ import java.util.List;
 public class Solution {
 	private List<Double> vector;
 	private double eval;
-	
+
 	public Solution(List<Double> _vector, double _eval) {
 		eval = _eval;
 		vector = new ArrayList<Double>(_vector);
 	}
-	
+
 	public double getEval() {
 		return eval;
 	}
-	
+
 	public List<Double> getVector() {
 		return vector;
 	}
-	
+
 	@Override
 	public String toString() {
-		return ""+eval;
+		return "" + eval;
 	}
 }
diff --git a/bacteria/BFOSolver.java b/bacteria/BFOSolver.java
index 134b54ad7d8820f9e8e66d3bfca17a2b046fa10a..28acdd6a6572c6f076adb9310178d78e248449ed 100644
--- a/bacteria/BFOSolver.java
+++ b/bacteria/BFOSolver.java
@@ -7,7 +7,7 @@ import baseOptiAgent.Result;
 import baseOptiAgent.Solver;
 import eval.Eval;
 
-public class BFOSolver extends Solver{
+public class BFOSolver extends Solver {
 
 	public BFOSolver(Eval _eval, int _maxCycle, int _maxEval) {
 		super(_eval, _maxCycle, _maxEval);
@@ -16,10 +16,10 @@ public class BFOSolver extends Solver{
 
 	@Override
 	public Result solve() {
-		
+
 		// [PARAM]
 		int nbrAgent = 50;
-		
+
 		int maxChemotaxis = 100;
 		int maxSwim = 4;
 		double stepSize = 0.1;
@@ -29,30 +29,18 @@ public class BFOSolver extends Solver{
 		double wRepellan = 10;
 		double ped = 0.25;
 		int maxRepr = 4;
-		
+
 		// [INIT]
 		BacEnv env = new BacEnv();
-		
+
 		List<Bacteria> agents = new ArrayList<Bacteria>();
 		for (int i_agent = 0; i_agent < nbrAgent; i_agent++) {
-			agents.add(new Bacteria(
-					i_agent,
-					eval,
-					env,
-					maxChemotaxis,
-					maxSwim,
-					stepSize,
-					dAttractant,
-					wAttractant,
-					hRepellant,
-					wRepellan,
-					ped,
-					maxRepr
-					));
+			agents.add(new Bacteria(i_agent, eval, env, maxChemotaxis, maxSwim, stepSize, dAttractant, wAttractant,
+					hRepellant, wRepellan, ped, maxRepr));
 		}
-		
+
 		env.initAgent(agents);
-		
+
 		return findSolution(agents, env, 4);
 	}
 
diff --git a/bacteria/BacEnv.java b/bacteria/BacEnv.java
index 9dfa0b678ae0de86d1eb928e67c66fed29b2c522..3c6e2800f9cefee3c27726fd88bc7411f4e26082 100644
--- a/bacteria/BacEnv.java
+++ b/bacteria/BacEnv.java
@@ -8,46 +8,46 @@ import java.util.Random;
 
 import baseOptiAgent.Env;
 
-public class BacEnv extends Env{
+public class BacEnv extends Env {
 
 	List<Bacteria> agents;
-	
+
 	public void initAgent(List<Bacteria> _agents) {
 		agents = new ArrayList<Bacteria>(_agents);
 	}
-	
+
 	public void sortAgents() {
-		Collections.sort(agents, new Comparator<Bacteria>(){
-		    public int compare(Bacteria s1, Bacteria s2) {
-		        return ((Double) s1.getHealth()).compareTo(((Double) s2.getHealth()));
-		    }
+		Collections.sort(agents, new Comparator<Bacteria>() {
+			public int compare(Bacteria s1, Bacteria s2) {
+				return ((Double) s1.getHealth()).compareTo(((Double) s2.getHealth()));
+			}
 		});
 	}
-	
-	public List<Double> getNewGenValue(int id){
-		if(id < (agents.size() / 2 )) {
+
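+	// Reproduction lookup on the health-sorted population: ids in the lower half map to
+	// index id, the others to index id / 2.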
+	public List<Double> getNewGenValue(int id) {
+		if (id < (agents.size() / 2)) {
 			return agents.get(id).getVector();
 		}
-		return agents.get(id/2).getVector();
+		return agents.get(id / 2).getVector();
 	}
-	
-	public double getNewGenFit(int id){
-		if(id < (agents.size() / 2 )) {
+
+	public double getNewGenFit(int id) {
+		if (id < (agents.size() / 2)) {
 			return agents.get(id).getFitness();
 		}
-		return agents.get(id/2).getFitness();
+		return agents.get(id / 2).getFitness();
 	}
-	
-	public double getNewGenEval(int id){
-		if(id < (agents.size() / 2 )) {
+
+	public double getNewGenEval(int id) {
+		if (id < (agents.size() / 2)) {
 			return agents.get(id).getEvaluate();
 		}
-		return agents.get(id/2).getEvaluate();
+		return agents.get(id / 2).getEvaluate();
 	}
-	
-	public List<List<Double>> getValues(){
+
+	public List<List<Double>> getValues() {
 		List<List<Double>> res = new ArrayList<List<Double>>();
-		for(Bacteria agent : agents) {
+		for (Bacteria agent : agents) {
 			res.add(agent.getVector());
 		}
 		return res;
@@ -56,7 +56,7 @@ public class BacEnv extends Env{
 	@Override
 	public double getBestFitness() {
 		double res = agents.get(0).getFitness();
-		for(Bacteria agent: agents) {
+		for (Bacteria agent : agents) {
 			if (res < agent.getFitness()) {
 				res = agent.getFitness();
 			}
@@ -67,7 +67,7 @@ public class BacEnv extends Env{
 	@Override
 	public double getBestEval() {
 		Bacteria res = agents.get(0);
-		for(Bacteria agent: agents) {
+		for (Bacteria agent : agents) {
 			if (res.getFitness() < agent.getFitness()) {
 				res = agent;
 			}
diff --git a/bacteria/Bacteria.java b/bacteria/Bacteria.java
index 061eaaa0481b40ed03d25771822c8ec0095a3aa4..344f8d9804becdbb82a7b66dce710579e853834f 100644
--- a/bacteria/Bacteria.java
+++ b/bacteria/Bacteria.java
@@ -7,229 +7,201 @@ import baseOptiAgent.BaseAgent;
 import bee.BeeEnv;
 import eval.Eval;
 
-public class Bacteria extends BaseAgent{
+public class Bacteria extends BaseAgent {
 
 	private BacEnv env;
-	
+
 	private List<Double> del;
-	
+
 	private double health;
-	
+
 	private Phase phase;
-	
+
 	private int repCount;
-	
+
 	// [PARAM]
 	private int maxChemotaxis;
 	private int maxSwim;
 	private double stepSize;
-	
+
 	private double dAttractant;
 	private double wAttractant;
 	private double hRepellant;
 	private double wRepellant;
-	
+
 	private int maxRepr;
-	
+
 	private double ped; // [0,1] -> probability elimination
-	
+
 	// SWARFMING
 	List<List<Double>> bacteriaValues;
-	
+
 	// REPRODUCTION
 	List<Double> nextGenValue;
 	double nextGenFit;
 	double nectGenEval;
-	
-	public Bacteria(int _id, Eval _eval, BacEnv _env, 
-			int _maxChemotaxis,
-			int _maxSwim,
-			double _stepSize,
-			double _dAttractant,
-			double _wAttractant,
-			double _hRepellant,
-			double _wRepellant,
-			double _ped,
-			int _maxRepr
-			) {
+
+	public Bacteria(int _id, Eval _eval, BacEnv _env, int _maxChemotaxis, int _maxSwim, double _stepSize,
+			double _dAttractant, double _wAttractant, double _hRepellant, double _wRepellant, double _ped,
+			int _maxRepr) {
 		super(_eval, _id);
 		env = _env;
-		
+
 		maxChemotaxis = _maxChemotaxis;
 		maxSwim = _maxSwim;
 		stepSize = _stepSize;
-		
+
 		dAttractant = _dAttractant;
 		wAttractant = _wAttractant;
 		hRepellant = _hRepellant;
 		wRepellant = _wRepellant;
 		ped = _ped;
 		maxRepr = _maxRepr;
-		
+
 		phase = Phase.CHEMOTAXIS;
-		
+
 		repCount = 0;
-		
 
 		vector = generateRandomVector();
 		evaluate = this.evaluate(vector);
 		fitness = this.fitness(evaluate);
 	}
-	
+
 	private void generateDel() {
 		del = new ArrayList<Double>();
 		double norm = 0;
-		
-		for(int i = 0; i < eval.getDim(); i++) {
+
+		for (int i = 0; i < eval.getDim(); i++) {
 			del.add((Math.random() - 0.5) * 2);
 			norm += (del.get(i) * del.get(i));
 		}
 		norm = Math.sqrt(norm);
-		for(int i = 0; i < eval.getDim(); i++) {
-			del.set(i, del.get(i)/norm);
+		for (int i = 0; i < eval.getDim(); i++) {
+			del.set(i, del.get(i) / norm);
 		}
 	}
-	
+
 	private double computeJcc() {
 		double jcc = 0;
-		
-		for(int i = 0; i < bacteriaValues.size(); i++ ) {
-			
+
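+		// BFO cell-to-cell signalling: sum an attractant term and a repellant term over the
+		// squared distance to every other bacterium's position.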
+		for (int i = 0; i < bacteriaValues.size(); i++) {
+
 			double tmp = 0;
-			for(int m = 0; m < eval.getDim(); m++) {
+			for (int m = 0; m < eval.getDim(); m++) {
 				tmp += Math.pow(vector.get(m) - bacteriaValues.get(i).get(m), 2);
 			}
 			jcc += (-dAttractant * Math.exp(-wAttractant * tmp));
 		}
-		
 
-		for(int i = 0; i < bacteriaValues.size(); i++ ) {
-			
+		for (int i = 0; i < bacteriaValues.size(); i++) {
+
 			double tmp = 0;
-			for(int m = 0; m < eval.getDim(); m++) {
+			for (int m = 0; m < eval.getDim(); m++) {
 				tmp += Math.pow(vector.get(m) - bacteriaValues.get(i).get(m), 2);
 			}
 			jcc += (-hRepellant * Math.exp(-wRepellant * tmp));
 		}
-		
+
 		return jcc;
 	}
-	
+
 	private void chemotaxis() {
 
 		int chemo = 0;
-		
+
 		health = 0;
-		
-		while(chemo < maxChemotaxis) {
-			
+
+		while (chemo < maxChemotaxis) {
+
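+			// Tumble: pick a random unit direction (del), then take up to maxSwim steps of
+			// size stepSize along it, re-evaluating the combined cost after each step.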
 			generateDel();
 
 			double jcc = computeJcc();
 			double jlast = fitness + jcc;
-			
+
 			int swim = 0;
-			
+
 			while (chemo < maxChemotaxis && swim < maxSwim && fitness + jcc <= jlast) {
-				swim ++;
+				swim++;
 				jlast = fitness + jcc;
-				
+
 				List<Double> newVector = new ArrayList<Double>();
-				
-				for(int i = 0; i < eval.getDim(); i++) {
+
+				for (int i = 0; i < eval.getDim(); i++) {
 					newVector.add(vector.get(i) + stepSize * del.get(i));
 				}
-				
+
 				boundValue(newVector);
 				compareAndUpdate(newVector);
 				jcc = computeJcc();
 				health = getHealth() + fitness + jcc;
 
-				
 			}
 			chemo++;
 		}
 	}
-	
+
 	private void reproduction() {
 		vector = nextGenValue;
 		evaluate = nectGenEval;
 		fitness = nextGenFit;
-		repCount ++;
+		repCount++;
 	}
-	
+
 	private void elimination() {
-		
-		if(repCount >= maxRepr) {
+
+		if (repCount >= maxRepr) {
 			return;
 		}
 		repCount = 0;
-		
-		
+
 		// Elimination
-		if(Math.random() > ped) {
+		if (Math.random() > ped) {
 			// PASS
-		}
-		else {
+		} else {
 			vector = generateRandomVector();
 			evaluate = this.evaluate(vector);
 			fitness = this.fitness(evaluate);
 		}
 	}
-	
+
 	private void nextPhase() {
-		if(phase == Phase.CHEMOTAXIS) {
+		if (phase == Phase.CHEMOTAXIS) {
 			phase = Phase.ENV;
-		}
-		else if (phase == Phase.ENV) {
+		} else if (phase == Phase.ENV) {
 			phase = Phase.REPRODUCTION;
-		}
-		else if (phase == Phase.REPRODUCTION) {
+		} else if (phase == Phase.REPRODUCTION) {
 			phase = Phase.ELIMINATION;
-		}
-		else {
+		} else {
 			phase = Phase.CHEMOTAXIS;
 		}
 	}
-	
+
 	@Override
 	public void perceive() {
-		if(phase == Phase.CHEMOTAXIS) {
+		if (phase == Phase.CHEMOTAXIS) {
 			bacteriaValues = env.getValues();
-		}
-		else if (phase == Phase.ENV) {
-			
-		}
-		else if (phase == Phase.REPRODUCTION) {
+		} else if (phase == Phase.ENV) {
+
+		} else if (phase == Phase.REPRODUCTION) {
 			nextGenValue = env.getNewGenValue(id);
 			nextGenFit = env.getNewGenFit(id);
 			nectGenEval = env.getNewGenEval(id);
-		}
-		else {
+		} else {
 
 		}
 	}
-	
+
 	@Override
 	public void act() {
-		if(id == 0) {
-			System.out.println(" ");
-			System.out.println(" ");
-			System.out.println(" ");
-		}
-		System.out.println(this);
-		if(phase == Phase.CHEMOTAXIS) {
+		if (phase == Phase.CHEMOTAXIS) {
 			chemotaxis();
-		}
-		else if (phase == Phase.ENV) {
-			if(id == 0) {
+		} else if (phase == Phase.ENV) {
+			if (id == 0) {
 				env.sortAgents();
 			}
-		}
-		else if (phase == Phase.REPRODUCTION) {
+		} else if (phase == Phase.REPRODUCTION) {
 			reproduction();
-		}
-		else {
+		} else {
 			elimination();
 		}
 		nextPhase();
@@ -238,11 +210,10 @@ public class Bacteria extends BaseAgent{
 	public double getHealth() {
 		return health;
 	}
-	
+
 	@Override
 	public String toString() {
-		return "["+id+"] Eval / Fit -> "+this.evaluate+" / "+this.fitness+"\n"
-				+ "Vec -> "+this.vector;
+		return "[" + id + "] Eval / Fit -> " + this.evaluate + " / " + this.fitness + "\n" + "Vec -> " + this.vector;
 	}
-	
+
 }
diff --git a/bacteria/Phase.java b/bacteria/Phase.java
index ba82f09dc753a1b173eea3a249473ebee20e7275..4c471a676fb090de9ccfeb63208d9451f4ea01da 100644
--- a/bacteria/Phase.java
+++ b/bacteria/Phase.java
@@ -1,8 +1,5 @@
 package bacteria;
 
 public enum Phase {
-	CHEMOTAXIS,
-	ENV,
-	REPRODUCTION,
-	ELIMINATION
+	CHEMOTAXIS, ENV, REPRODUCTION, ELIMINATION
 }
diff --git a/baseOptiAgent/BaseAgent.java b/baseOptiAgent/BaseAgent.java
index 7fe08b335bb1f55854caefa0e745ead556bbb92e..ce726a5dbb61c6fe35d0fcefe3f3c1311bda7401 100644
--- a/baseOptiAgent/BaseAgent.java
+++ b/baseOptiAgent/BaseAgent.java
@@ -6,75 +6,73 @@ import java.util.List;
 import eval.Eval;
 import mas.core.Agent;
 
-public abstract class BaseAgent extends Agent{
+public abstract class BaseAgent extends Agent {
 
 	protected List<Double> vector;
 	protected double fitness;
 	protected double evaluate;
-	
+
 	protected Eval eval;
-	
+
 	protected int id;
-	
+
 	public BaseAgent(Eval _eval, int _id) {
 		eval = _eval;
 		id = _id;
 	}
-	
+
 	protected void boundValue(List<Double> vector) {
-		for(int i = 0; i < vector.size(); i++) {
+		for (int i = 0; i < vector.size(); i++) {
 			if (vector.get(i) > eval.getMax(i)) {
 				vector.set(i, eval.getMax(i));
-			}
-			else if (vector.get(i) < eval.getMin(i)) {
+			} else if (vector.get(i) < eval.getMin(i)) {
 				vector.set(i, eval.getMin(i));
 			}
 		}
 	}
-	
-	protected List<Double> generateRandomVector(){
+
+	protected List<Double> generateRandomVector() {
 		List<Double> result = new ArrayList<Double>();
-		
+
 		for (int i = 0; i < eval.getDim(); i++) {
 			result.add(eval.getMin(i) + Math.random() * (eval.getMax(i) - eval.getMin(i)));
 		}
 		boundValue(result);
 		return result;
 	}
-	
+
 	protected double evaluate(List<Double> vector) {
 		return eval.evaluate(vector);
 	}
-	
+
 	protected double fitness(double value) {
-    	if (value >= 0) {
-        	return 1/(1 + value);
-    	}
-    	else {
-    		return 1 + Math.abs(value);
-    	}
-    }
-	
+		if (value >= 0) {
+			return 1 / (1 + value);
+		} else {
+			return 1 + Math.abs(value);
+		}
+	}
+
 	public int getId() {
 		return id;
 	}
-	
+
 	public List<Double> getVector() {
 		return vector;
 	}
-	
+
 	public double getEvaluate() {
 		return evaluate;
 	}
-	
+
 	public double getFitness() {
 		return fitness;
 	}
-	
+
 	protected boolean compareAndUpdate(List<Double> otherVec) {
 		double newEval = evaluate(otherVec);
 		double newFitness = fitness(newEval);
-		if (fitness<newFitness) {
+		if (fitness < newFitness) {
 			vector = otherVec;
 			fitness = newFitness;
 			evaluate = newEval;
@@ -82,5 +80,5 @@ public abstract class BaseAgent extends Agent{
 		}
 		return false;
 	}
-	
+
 }
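The fitness(double) helper above converts a minimisation objective into a fitness to maximise, and compareAndUpdate only accepts a candidate vector when that fitness strictly improves, so every agent move is greedy. A minimal standalone sketch of the mapping (the class name FitnessMappingDemo is made up for illustration):

// Illustrative only: a standalone copy of the mapping used in BaseAgent.fitness(double).
public class FitnessMappingDemo {

	// Smaller non-negative objective values map to fitness closer to 1;
	// negative objective values map to fitness above 1 and so rank highest.
	static double fitness(double value) {
		if (value >= 0) {
			return 1 / (1 + value);
		} else {
			return 1 + Math.abs(value);
		}
	}

	public static void main(String[] args) {
		System.out.println(fitness(0.0));  // 1.0  (objective reached)
		System.out.println(fitness(4.0));  // 0.2
		System.out.println(fitness(-2.0)); // 3.0
	}
}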
diff --git a/baseOptiAgent/Cycle.java b/baseOptiAgent/Cycle.java
index 2d2566ed697069363ffa27dc38c6932f02d842e6..4bb541241d8ea3f07f0e7848249427d531845139 100644
--- a/baseOptiAgent/Cycle.java
+++ b/baseOptiAgent/Cycle.java
@@ -1,21 +1,21 @@
 package baseOptiAgent;
 
 public class Cycle {
-	
+
 	private int eval;
 	private double bestSolution;
-	
+
 	public Cycle(int _eval, double _bestSolution) {
 		eval = _eval;
-		bestSolution = _bestSolution;	
+		bestSolution = _bestSolution;
 	}
-	
+
 	public double getBestSolution() {
 		return bestSolution;
 	}
-	
+
 	public int getEval() {
 		return eval;
 	}
-	
+
 }
diff --git a/baseOptiAgent/Env.java b/baseOptiAgent/Env.java
index dad602cadc31e5172aa4af62c83cd603c0c857d0..ec5af5f591578a202566c2606e2d610666b32da0 100644
--- a/baseOptiAgent/Env.java
+++ b/baseOptiAgent/Env.java
@@ -3,12 +3,10 @@ package baseOptiAgent;
 import eval.Eval;
 
 public abstract class Env {
-	
+
 	protected Eval eval;
-	
-	
-	
+
 	public abstract double getBestFitness();
-	
+
 	public abstract double getBestEval();
 }
diff --git a/baseOptiAgent/Ihm.java b/baseOptiAgent/Ihm.java
index bd97c55f1e7e505f89f59c0202cc4a7410253082..0bce798de4fda6ef135d39b4dc095fe4fe831e7d 100644
--- a/baseOptiAgent/Ihm.java
+++ b/baseOptiAgent/Ihm.java
@@ -18,57 +18,53 @@ import eval.fun.Cigar;
 import eval.fun.Rastrigin;
 import eval.fun.SchwefelFunction1_2;
 import firefly.FASolver;
+import smac.SmacSolver;
 
 public class Ihm {
 	public static void main(String[] args) {
-		
+
 		// [PARAM]
-		
-		//Eval eval = new StepFunction();
-		//Eval eval = new SchwefelFunction1_2();
-		//Eval eval = new SchwefelFunction();
+
+		// Eval eval = new StepFunction();
+		// Eval eval = new SchwefelFunction1_2();
+		// Eval eval = new SchwefelFunction();
 		Eval eval = new Rastrigin();
-		//Eval eval = new Cigar();
-		//Eval eval = new AxisParallelHyper_Ellipsoid();
-		//Eval eval = new Beale();
-		
-		int maxCycle = 200_000;
+		// Eval eval = new Cigar();
+		// Eval eval = new AxisParallelHyper_Ellipsoid();
+		// Eval eval = new Beale();
+
+		int maxCycle = 1_000;
 		int maxEval = 1_000_000;
-		
+
 		/*
-		// [INIT]
-		//Solver solver = new SMSolver(eval, maxCycle, maxEval);
-		//Solver solver = new BeeSolver(eval, maxCycle, maxEval);
-		//Solver solver = new AntSolver(eval, maxCycle, maxEval);
-		//Solver solver = new ABeeSolver(eval, maxCycle, maxEval);
-		Solver solver = new FASolver(eval, maxCycle, maxEval);
-		
-		// [SOLVING]
-		Result res = solver.solve();
-		
-		
-		// [RESULT]
-		
-		System.out.println("Cycle : "+res.totalCycle+" Out of "+maxCycle);
-		System.out.println("Eval : "+res.totalEval+" Out of "+maxEval);
-		System.out.println("Solution found "+res.optiFound);
-		
-		writeEval(res);
-		*/
-		
-
-		
+		 * // [INIT] //Solver solver = new SMSolver(eval, maxCycle, maxEval); //Solver
+		 * solver = new BeeSolver(eval, maxCycle, maxEval); //Solver solver = new
+		 * AntSolver(eval, maxCycle, maxEval); //Solver solver = new ABeeSolver(eval,
+		 * maxCycle, maxEval); Solver solver = new FASolver(eval, maxCycle, maxEval);
+		 * 
+		 * // [SOLVING] Result res = solver.solve();
+		 * 
+		 * 
+		 * // [RESULT]
+		 * 
+		 * System.out.println("Cycle : "+res.totalCycle+" Out of "+maxCycle);
+		 * System.out.println("Eval : "+res.totalEval+" Out of "+maxEval);
+		 * System.out.println("Solution found "+res.optiFound);
+		 * 
+		 * writeEval(res);
+		 */
+
 		startAll(maxCycle, maxEval);
 	}
-	
+
 	private static void writeEval(Result res) {
 		try {
-			FileWriter csvWriter = new FileWriter("eval_"+res.name+".csv");
+			FileWriter csvWriter = new FileWriter("eval_" + res.name + ".csv");
 			csvWriter.append("cycle,eval,sol\n");
 
-
-			for (Integer name: res.cycle.keySet()) {
-			    csvWriter.append(name+","+res.cycle.get(name).getEval()+","+res.cycle.get(name).getBestSolution()+"\n");
+			for (Integer name : res.cycle.keySet()) {
+				csvWriter.append(name + "," + res.cycle.get(name).getEval() + ","
+						+ res.cycle.get(name).getBestSolution() + "\n");
 			}
 			csvWriter.flush();
 			csvWriter.close();
@@ -76,66 +72,41 @@ public class Ihm {
 			e.printStackTrace();
 		}
 	}
-	
+
 	private static void startAll(int maxCycle, int maxEval) {
-		//Eval eval = new StepFunction();
-		//Eval eval = new SchwefelFunction1_2();
-		//Eval eval = new SchwefelFunction();
-		Eval eval = new Rastrigin();
-		//Eval eval = new Cigar();
-		//Eval eval = new AxisParallelHyper_Ellipsoid();
-		//Eval eval = new Beale();
-		
-
-		eval = new SchwefelFunction1_2();
-		Solver solver = new SMSolver(eval, maxCycle, maxEval);
-		Result res = solver.solve();
-		writeEval(res);
-		
-		eval = new SchwefelFunction1_2();
-		solver = new BeeSolver(eval, maxCycle, maxEval);
-		res = solver.solve();
-		writeEval(res);
-		
-		eval = new SchwefelFunction1_2();
-		solver = new AntSolver(eval, maxCycle, maxEval);
-		res = solver.solve();
-		writeEval(res);
-		
-		eval = new SchwefelFunction1_2();
-		solver = new ABeeSolver(eval, maxCycle, maxEval);
-		res = solver.solve();
-		writeEval(res);
-		
-		eval = new SchwefelFunction1_2();
-		solver = new FASolver(eval, maxCycle, maxEval);
-		res = solver.solve();
-		writeEval(res);
-		
-		eval = new SchwefelFunction1_2();
-		solver = new BFOSolver(eval, maxCycle, maxEval);
-		res = solver.solve();
-		writeEval(res);
-		
-		System.out.println(res.optiFound);
-		System.out.println(res.totalCycle);
-		System.out.println(res.totalEval);
-		System.out.println(res.cycle);
-		System.out.println(res.cycle.get(0).getBestSolution());
-		System.out.println(res.cycle.get(0).getEval());
-		
+		Eval eval = new SchwefelFunction1_2();
+		Solver solver = new SMSolver(new SchwefelFunction1_2(), maxCycle, maxEval);
+		writeEval(solver.solve());
+
+		solver = new BeeSolver(new SchwefelFunction1_2(), maxCycle, maxEval);
+		writeEval(solver.solve());
+
+		solver = new AntSolver(new SchwefelFunction1_2(), maxCycle, maxEval);
+		writeEval(solver.solve());
+
+		solver = new ABeeSolver(new SchwefelFunction1_2(), maxCycle, maxEval);
+		writeEval(solver.solve());
+
+		solver = new FASolver(new SchwefelFunction1_2(), maxCycle, maxEval);
+		writeEval(solver.solve());
+
+		solver = new BFOSolver(new SchwefelFunction1_2(), maxCycle, maxEval);
+		writeEval(solver.solve());
+
+		solver = new SmacSolver(new SchwefelFunction1_2(), maxCycle, maxEval);
+		writeEval(solver.solve());
+
 		System.out.println("Done");
 	}
-	
+
 	private void benchmarkAlgo(int maxCycle, int maxEval) {
-		
+
 		int nbrTry = 100;
-				
-		for(int i = 0; i < nbrTry; i++) {
+
+		for (int i = 0; i < nbrTry; i++) {
 			Eval eval = new Rastrigin();
 			Solver solver = new SMSolver(eval, maxCycle, maxEval);
 			Result res = solver.solve();
 		}
-		
 	}
 }
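The writeEval helper above produces one eval_<solver name>.csv file per run with the columns cycle,eval,sol. A minimal sketch of reading such a file back for plotting; the file name eval_Smac.csv is only an example of that naming pattern, matching the "Smac" result name introduced later in this diff:

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

// Illustrative only: not part of the repository.
public class ReadEvalCsv {
	public static void main(String[] args) throws IOException {
		try (BufferedReader reader = new BufferedReader(new FileReader("eval_Smac.csv"))) {
			reader.readLine(); // skip the "cycle,eval,sol" header line
			String line;
			while ((line = reader.readLine()) != null) {
				String[] parts = line.split(",");
				int cycle = Integer.parseInt(parts[0]);     // cycle index
				int evalCount = Integer.parseInt(parts[1]); // cumulative objective evaluations at that cycle
				double best = Double.parseDouble(parts[2]); // best objective value found so far
				System.out.printf("cycle %d: %d evals, best = %f%n", cycle, evalCount, best);
			}
		}
	}
}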
diff --git a/baseOptiAgent/Result.java b/baseOptiAgent/Result.java
index ef1e12711be7b3fa1257c690e6428497f149af02..64f003f9031c972db8042fa5395cfd9dec4b934c 100644
--- a/baseOptiAgent/Result.java
+++ b/baseOptiAgent/Result.java
@@ -5,20 +5,20 @@ import java.util.Map;
 import java.util.TreeMap;
 
 public class Result {
-	
+
 	public String name;
-	
+
 	public int totalEval;
 	public int totalCycle;
-	
+
 	public boolean optiFound;
-	
+
 	public Map<Integer, Cycle> cycle;
-	
+
 	public Result(String _name) {
 		name = _name;
 		optiFound = false;
 		cycle = new TreeMap<Integer, Cycle>();
 	}
-	
+
 }
diff --git a/baseOptiAgent/Solver.java b/baseOptiAgent/Solver.java
index f7bec412d524251a038ec92860038602ec0bd8e4..3cee2ef6cfe91eb03a0352995397b6ebb6abd6c2 100644
--- a/baseOptiAgent/Solver.java
+++ b/baseOptiAgent/Solver.java
@@ -7,32 +7,31 @@ import eval.Eval;
 public abstract class Solver {
 
 	protected String name;
-	
+
 	protected Eval eval;
-	
-	int maxCycle;
-	int maxEval;
-	
+
+	protected int maxCycle;
+	protected int maxEval;
+
 	public Solver(Eval _eval, int _maxCycle, int _maxEval) {
 		eval = _eval;
 		maxCycle = _maxCycle;
 		maxEval = _maxEval;
 	}
-	
+
 	public abstract Result solve();
 	// Define name, init env and agents, call findSolution
-	
-	protected <A extends BaseAgent, E extends Env> Result findSolution(List<A> agents, E  env, int cycleModulo) {
+
+	protected <A extends BaseAgent, E extends Env> Result findSolution(List<A> agents, E env, int cycleModulo) {
 		Result res = new Result(name);
-		
 
 		int cycle = 0;
-		
+
 		double bestEval = env.getBestEval();
-		
-		do  {
+
+		do {
 			res.cycle.put(cycle, new Cycle(eval.getCount(), env.getBestEval()));
-			for(int i = 0; i < cycleModulo; i++) {
+			for (int i = 0; i < cycleModulo; i++) {
 				for (BaseAgent agent : agents) {
 					agent.perceive();
 				}
@@ -41,36 +40,27 @@ public abstract class Solver {
 					agent.act();
 				}
 			}
-			
+
 			double nextEval = env.getBestEval();
-			
+
 			if (nextEval < bestEval) {
 				bestEval = nextEval;
 			}
-			
-			cycle ++;
-			
+
+			cycle++;
+
 			/*
-			// Pas à Pas
-			try {
-				System.in.read();
-			} catch (IOException e) {
-				// TODO Auto-generated catch block
-				e.printStackTrace();
-			}
-			*/
-			
-			
-		} while(
-				Math.abs(env.getBestEval()-eval.getObjective())>eval.getErrorDelta()
-				&& cycle < maxCycle
-				&& eval.getCount() < maxEval
-				);
-		
+			 * // Step by step try { System.in.read(); } catch (IOException e) { // TODO
+			 * Auto-generated catch block e.printStackTrace(); }
+			 */
+
+		} while (Math.abs(env.getBestEval() - eval.getObjective()) > eval.getErrorDelta() && cycle < maxCycle
+				&& eval.getCount() < maxEval);
+
 		res.totalCycle = cycle;
 		res.totalEval = eval.getCount();
-		res.optiFound = Math.abs(env.getBestEval()-eval.getObjective())<=eval.getErrorDelta();
+		res.optiFound = Math.abs(env.getBestEval() - eval.getObjective()) <= eval.getErrorDelta();
 		return res;
 	}
-	
+
 }
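findSolution above runs cycleModulo full perceive/act sweeps over all agents per recorded Cycle, then re-checks its three stopping criteria. A sketch of that continuation test pulled out as a helper, so the criteria are explicit (not repository code, names illustrative):

// Sketch only: the loop condition at the bottom of Solver.findSolution.
public class StoppingTest {
	static boolean shouldContinue(double bestEval, double objective, double errorDelta,
			int cycle, int maxCycle, int evalCount, int maxEval) {
		boolean notConverged = Math.abs(bestEval - objective) > errorDelta; // target not reached within tolerance
		return notConverged && cycle < maxCycle && evalCount < maxEval;     // and both budgets still available
	}
}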
diff --git a/bee/Bee.java b/bee/Bee.java
index 07a36ffeb63d508a4d76d304ccb8984e5136f514..4767fdd3812d4aec3cc76388372c4fad2c8a9a8e 100644
--- a/bee/Bee.java
+++ b/bee/Bee.java
@@ -6,126 +6,118 @@ import java.util.List;
 import baseOptiAgent.BaseAgent;
 import eval.Eval;
 
-public class Bee extends BaseAgent{
-	
+public class Bee extends BaseAgent {
+
 	BeeEnv env;
-	
+
 	int maxCount;
-	
+
 	Phase phase;
 	int stuckCount;
-	
+
 	// EMPLOYED
 	List<Double> randomAgent;
-	
+
 	// ONLOOKERS
 	double bestFitness;
-	//List<Double> randomAgent;
+	// List<Double> randomAgent;
 
 	public Bee(int _id, Eval _eval, BeeEnv _env, int _maxCount) {
 		super(_eval, _id);
 
 		env = _env;
-		
+
 		maxCount = _maxCount;
-		
+
 		phase = Phase.EMPLOYED;
 		stuckCount = 0;
-		
+
 		vector = generateRandomVector();
 		evaluate = this.evaluate(vector);
 		fitness = this.fitness(evaluate);
 	}
-	
+
 	private void nextPhase() {
 		if (phase == Phase.EMPLOYED) {
 			phase = Phase.ONLOOKERS;
-		}
-		else if (phase == Phase.ONLOOKERS) {
+		} else if (phase == Phase.ONLOOKERS) {
 			phase = Phase.SCOUTS;
-		}
-		else if (phase == Phase.SCOUTS) {
+		} else if (phase == Phase.SCOUTS) {
 			phase = Phase.EMPLOYED;
 		}
 	}
-	
+
 	@Override
 	public void perceive() {
 
 		if (phase == Phase.EMPLOYED) {
 			randomAgent = env.getRandomAgent(id);
-		}
-		else if (phase == Phase.ONLOOKERS) {
+		} else if (phase == Phase.ONLOOKERS) {
 			randomAgent = env.getRandomAgent(id);
 			bestFitness = env.getBestFitness();
+		} else if (phase == Phase.SCOUTS) {
 		}
-		else if (phase == Phase.SCOUTS) {
-		}
-		
+
 	}
-	
+
 	private boolean randomlyUpdate() {
-		int dim = (int)(Math.random() * vector.size());
-		
+		int dim = (int) (Math.random() * vector.size());
+
 		List<Double> newVector = new ArrayList<Double>(vector);
-		
+
 		newVector.set(dim, vector.get(dim) + 2 * (Math.random() - 0.5) * (vector.get(dim) - randomAgent.get(dim)));
-		
+
 		boundValue(newVector);
-		
+
 		return compareAndUpdate(newVector);
 	}
-	
+
 	private void employedPhase() {
 		boolean res = randomlyUpdate();
 		if (!res) {
-			stuckCount ++;
-		}
-		else {
+			stuckCount++;
+		} else {
 			stuckCount = 0;
 		}
 	}
-	
+
 	private void onlookerPhase() {
-		
-		// change p to this ? https://www.hindawi.com/journals/jam/2014/402616/ 
+
+		// change p to this ? https://www.hindawi.com/journals/jam/2014/402616/
 		double p = (0.9 * fitness / bestFitness) + 0.1;
-		
+
 		if (Math.random() > p) {
 			randomlyUpdate();
 		}
 	}
-	
+
 	private void scoutPhase() {
 		if (stuckCount >= maxCount) {
 			stuckCount = 0;
-			
-			
+
 			double oldEval = evaluate;
 			vector = generateRandomVector();
 			evaluate = this.evaluate(vector);
 			fitness = this.fitness(evaluate);
-			//System.out.println("Reseted : "+oldEval+" to "+evaluate);
+			// System.out.println("Reset : "+oldEval+" to "+evaluate);
 		}
 	}
-	
+
 	@Override
 	public void act() {
-		
+
 		if (phase == Phase.EMPLOYED) {
 			employedPhase();
-		}
-		else if (phase == Phase.ONLOOKERS) {
+		} else if (phase == Phase.ONLOOKERS) {
 			onlookerPhase();
-		}
-		else if (phase == Phase.SCOUTS) {
+		} else if (phase == Phase.SCOUTS) {
 			scoutPhase();
 		}
 		nextPhase();
 	}
-	
+
 	public Phase getPhase() {
 		return phase;
 	}
-	
+
 }
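Two small formulas drive the Bee agent above: the single-dimension neighbour move in randomlyUpdate and the selection probability computed in onlookerPhase. A sketch of both as pure functions (class and method names are illustrative, not repository code); note that onlookerPhase only refines the source when Math.random() > p, so sources with high fitness are left untouched more often:

// Sketch only: formulas from bee/Bee, isolated for readability.
public class BeeFormulas {

	// Mirrors randomlyUpdate: move one coordinate away from or toward a random partner.
	static double neighbourMove(double current, double partner) {
		double phi = 2 * (Math.random() - 0.5); // uniform in [-1, 1]
		return current + phi * (current - partner);
	}

	// Mirrors onlookerPhase: probability scaled so it reaches 1.0 for the best bee
	// and stays at least 0.1 for the worst.
	static double onlookerProbability(double fitness, double bestFitness) {
		return 0.9 * fitness / bestFitness + 0.1;
	}
}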
diff --git a/bee/BeeEnv.java b/bee/BeeEnv.java
index ca64a0237d3a2a632f4e404699a3fd84f4de7155..628d4992a5de951635bf1f0abe58c733bd1894d5 100644
--- a/bee/BeeEnv.java
+++ b/bee/BeeEnv.java
@@ -6,19 +6,19 @@ import java.util.Random;
 
 import baseOptiAgent.Env;
 
-public class BeeEnv extends Env{
+public class BeeEnv extends Env {
 
 	private List<Bee> agents;
-	
+
 	public void initAgent(List<Bee> _agents) {
 		agents = new ArrayList<Bee>(_agents);
 	}
-	
+
 	public List<Double> getRandomAgent(int id) {
 		if (agents.size() == 1) {
 			throw new java.lang.Error("Cannot return a random agent because the environment only knows 1 agent.");
 		}
-		
+
 		Random rand = new Random();
 		Bee randomagent = agents.get(rand.nextInt(agents.size()));
 		while (randomagent.getId() == id) {
@@ -30,7 +30,7 @@ public class BeeEnv extends Env{
 	@Override
 	public double getBestFitness() {
 		double res = agents.get(0).getFitness();
-		for(Bee agent: agents) {
+		for (Bee agent : agents) {
 			if (res < agent.getFitness()) {
 				res = agent.getFitness();
 			}
@@ -41,12 +41,12 @@ public class BeeEnv extends Env{
 	@Override
 	public double getBestEval() {
 		Bee res = agents.get(0);
-		for(Bee agent: agents) {
+		for (Bee agent : agents) {
 			if (res.getFitness() < agent.getFitness()) {
 				res = agent;
 			}
 		}
 		return res.getEvaluate();
 	}
-	
+
 }
diff --git a/bee/BeeSolver.java b/bee/BeeSolver.java
index b44bf082a0190787a5cee66a1442c1ed1916519b..20ce9cc6a5e77419df067095ee9f19f43fc20efa 100644
--- a/bee/BeeSolver.java
+++ b/bee/BeeSolver.java
@@ -9,7 +9,7 @@ import baseOptiAgent.Result;
 import baseOptiAgent.Solver;
 import eval.Eval;
 
-public class BeeSolver extends Solver{
+public class BeeSolver extends Solver {
 
 	public BeeSolver(Eval _eval, int _maxCycle, int _maxEval) {
 		super(_eval, _maxCycle, _maxEval);
@@ -18,21 +18,21 @@ public class BeeSolver extends Solver{
 
 	@Override
 	public Result solve() {
-		
+
 		// [PARAM]
 		int nbrAgent = 100;
 		int maxTry = 200;
-		
+
 		// [INIT]
 		BeeEnv env = new BeeEnv();
-		
+
 		List<Bee> agents = new ArrayList<Bee>();
 		for (int i_agent = 0; i_agent < nbrAgent; i_agent++) {
 			agents.add(new Bee(i_agent, eval, env, maxTry));
 		}
-		
+
 		env.initAgent(agents);
-		
+
 		return findSolution(agents, env, 3);
 	}
 
diff --git a/bee/Phase.java b/bee/Phase.java
index 95a926eb2eb97a83626f703ba36e1ed91539d7b6..8a217b75c1d74351ed2d0fc52865c3d6a575b4a6 100644
--- a/bee/Phase.java
+++ b/bee/Phase.java
@@ -1,7 +1,5 @@
 package bee;
 
 public enum Phase {
-	EMPLOYED,
-	ONLOOKERS,
-	SCOUTS
+	EMPLOYED, ONLOOKERS, SCOUTS
 }
diff --git a/eval/Eval.java b/eval/Eval.java
index d775864fad0779ce0ec7efb567f8012480c26931..333a941eff10f0423f978dc277523e7651fd13f3 100644
--- a/eval/Eval.java
+++ b/eval/Eval.java
@@ -10,31 +10,31 @@ public abstract class Eval {
 	protected List<Double> min;
 	protected List<Double> max;
 	protected double errorDelta;
-	
+
 	protected AtomicInteger count;
-	
+
 	public abstract double evaluate(List<Double> value);
-	
+
 	public Double getMin(int i) {
 		return min.get(i);
 	}
-	
+
 	public Double getMax(int i) {
 		return max.get(i);
 	}
-	
+
 	public double getErrorDelta() {
 		return errorDelta;
 	}
-	
+
 	public double getObjective() {
 		return objective;
 	}
-	
+
 	public int getDim() {
 		return dim;
 	}
-	
+
 	public int getCount() {
 		return count.intValue();
 	}
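The abstract Eval above fixes the contract that the benchmark functions further down implement: set dim, objective, errorDelta and the per-dimension bounds in the constructor, and count every call to evaluate. As a hedged illustration of that pattern, a hypothetical Sphere benchmark that is not part of the repository:

package eval.fun;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

import eval.Eval;

// Illustrative only: follows the same structure as the Eval subclasses in this diff.
public class Sphere extends Eval {

	public Sphere() {
		count = new AtomicInteger(0);

		dim = 30;
		objective = (double) 0;
		errorDelta = 0.001;

		min = new ArrayList<Double>();
		max = new ArrayList<Double>();
		for (int i = 0; i < dim; i++) {
			min.add(-5.12);
			max.add(5.12);
		}
	}

	@Override
	public double evaluate(List<Double> value) {
		count.getAndIncrement();
		double result = 0;
		for (int i_dim = 0; i_dim < dim; i_dim++) {
			result += value.get(i_dim) * value.get(i_dim);
		}
		return result;
	}
}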
diff --git a/eval/fun/AxisParallelHyper_Ellipsoid.java b/eval/fun/AxisParallelHyper_Ellipsoid.java
index 3eaf94ddd9b6c3c692f122fbb2401bd7bdb32968..daaaba94f3c001c338d05069599efd171f49e232 100644
--- a/eval/fun/AxisParallelHyper_Ellipsoid.java
+++ b/eval/fun/AxisParallelHyper_Ellipsoid.java
@@ -6,15 +6,15 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import eval.Eval;
 
-public class AxisParallelHyper_Ellipsoid extends Eval{
+public class AxisParallelHyper_Ellipsoid extends Eval {
 
 	public AxisParallelHyper_Ellipsoid() {
 		count = new AtomicInteger(0);
-		
+
 		dim = 30;
 		objective = (double) 0;
 		errorDelta = 0.000_01;
-		
+
 		min = new ArrayList<Double>();
 		max = new ArrayList<Double>();
 		for (int i = 0; i < dim; i++) {
@@ -27,11 +27,11 @@ public class AxisParallelHyper_Ellipsoid extends Eval{
 	public double evaluate(List<Double> value) {
 		count.getAndIncrement();
 		double result = 0;
-		
-		for(int i_dim = 0; i_dim < dim; i_dim++ ) {
+
+		for (int i_dim = 0; i_dim < dim; i_dim++) {
 			result += (i_dim * value.get(i_dim) * value.get(i_dim));
 		}
-		
+
 		return result;
 	}
 }
\ No newline at end of file
diff --git a/eval/fun/Beale.java b/eval/fun/Beale.java
index f11a3aa16f68b6b4e9e8b4a3cf64e0dde90b3f07..a265b1f9eb4eadeec4cc03c8224e32c713e3597e 100644
--- a/eval/fun/Beale.java
+++ b/eval/fun/Beale.java
@@ -6,15 +6,15 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import eval.Eval;
 
-public class Beale extends Eval{
+public class Beale extends Eval {
 
 	public Beale() {
 		count = new AtomicInteger(0);
-		
+
 		dim = 2;
 		objective = (double) 0;
 		errorDelta = 0.000_01;
-		
+
 		min = new ArrayList<Double>();
 		max = new ArrayList<Double>();
 		for (int i = 0; i < dim; i++) {
@@ -26,13 +26,11 @@ public class Beale extends Eval{
 	@Override
 	public double evaluate(List<Double> value) {
 		count.getAndIncrement();
-		
+
 		double x = value.get(0);
 		double y = value.get(1);
-		
-		double result = 
-				Math.pow(1.5 - x * (1 - y), 2)
-				+ Math.pow(2.25 - x * (1 - y * y), 2)
+
+		double result = Math.pow(1.5 - x * (1 - y), 2) + Math.pow(2.25 - x * (1 - y * y), 2)
 				+ Math.pow(2.625 - x * (1 - y * y * y), 2);
 		return result;
 	}
diff --git a/eval/fun/Cigar.java b/eval/fun/Cigar.java
index 74dfc512a7ed799b50dc0106dcd05c22bea96bb6..b1c05327063e143374dd6cf5f949111e2cc0f8bc 100644
--- a/eval/fun/Cigar.java
+++ b/eval/fun/Cigar.java
@@ -6,16 +6,15 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import eval.Eval;
 
-public class Cigar extends Eval{
+public class Cigar extends Eval {
 
-	
 	public Cigar() {
 		count = new AtomicInteger(0);
-		
+
 		dim = 30;
 		objective = (double) 0;
 		errorDelta = 0.000_01;
-		
+
 		min = new ArrayList<Double>();
 		max = new ArrayList<Double>();
 		for (int i = 0; i < dim; i++) {
@@ -28,12 +27,12 @@ public class Cigar extends Eval{
 	public double evaluate(List<Double> value) {
 		count.getAndIncrement();
 		double result = value.get(0) * value.get(0);
-		
+
 		double other = 0;
-		for(int i_dim = 0; i_dim < dim; i_dim++ ) {
+		for (int i_dim = 0; i_dim < dim; i_dim++) {
 			other += value.get(i_dim) * value.get(i_dim);
 		}
-		
+
 		return result + 100_000 * other;
 	}
 }
diff --git a/eval/fun/Rastrigin.java b/eval/fun/Rastrigin.java
index 6fde05c346414b0f36d961896e2b4c62fc69b330..fe95f0835d5f256726f0e2ca1b7696b4acc047c8 100644
--- a/eval/fun/Rastrigin.java
+++ b/eval/fun/Rastrigin.java
@@ -6,15 +6,15 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import eval.Eval;
 
-public class Rastrigin extends Eval{
+public class Rastrigin extends Eval {
 
 	public Rastrigin() {
 		count = new AtomicInteger(0);
-		
+
 		dim = 30;
 		objective = (double) 0;
 		errorDelta = 0.001;
-		
+
 		min = new ArrayList<Double>();
 		max = new ArrayList<Double>();
 		for (int i = 0; i < dim; i++) {
@@ -26,14 +26,13 @@ public class Rastrigin extends Eval{
 	@Override
 	public double evaluate(List<Double> value) {
 		count.getAndIncrement();
-		double result = 10*dim;
-		
-		for(int i_dim = 0; i_dim < dim; i_dim++ ) {
+		double result = 10 * dim;
+
+		for (int i_dim = 0; i_dim < dim; i_dim++) {
 			result += (value.get(i_dim) * value.get(i_dim) - 10 * Math.cos(2 * Math.PI * value.get(i_dim)));
 		}
-		
+
 		return result;
 	}
-	
 
 }
\ No newline at end of file
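A quick sanity check on the Rastrigin implementation above: at the origin every term x_i^2 - 10*cos(2*pi*x_i) equals -10, which cancels the 10*dim offset, so evaluate returns the objective of 0. An illustrative check program (not repository code):

import java.util.ArrayList;
import java.util.List;

import eval.fun.Rastrigin;

public class RastriginCheck {
	public static void main(String[] args) {
		Rastrigin eval = new Rastrigin();
		List<Double> origin = new ArrayList<Double>();
		for (int i = 0; i < eval.getDim(); i++) {
			origin.add(0.0);
		}
		System.out.println(eval.evaluate(origin)); // 0.0, since cos(0) = 1 cancels the 10 * dim term
	}
}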
diff --git a/eval/fun/SchwefelFunction.java b/eval/fun/SchwefelFunction.java
index 739e29673ac4cd718789b7f9d69687d0278ec69b..cb037590fd4e9e5459cdba46844416529acee712 100644
--- a/eval/fun/SchwefelFunction.java
+++ b/eval/fun/SchwefelFunction.java
@@ -6,15 +6,15 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import eval.Eval;
 
-public class SchwefelFunction extends Eval{
+public class SchwefelFunction extends Eval {
 
 	public SchwefelFunction() {
 		count = new AtomicInteger(0);
-		
+
 		dim = 30;
-		objective = (double) -418.9829* dim;
+		objective = (double) -418.9829 * dim;
 		errorDelta = 0.001;
-		
+
 		min = new ArrayList<Double>();
 		max = new ArrayList<Double>();
 		for (int i = 0; i < dim; i++) {
@@ -27,13 +27,12 @@ public class SchwefelFunction extends Eval{
 	public double evaluate(List<Double> value) {
 		count.getAndIncrement();
 		double result = 0;
-		
-		for(int i_dim = 0; i_dim < dim; i_dim++ ) {
+
+		for (int i_dim = 0; i_dim < dim; i_dim++) {
 			result += (value.get(i_dim) * Math.sin(Math.sqrt(Math.abs(value.get(i_dim)))));
 		}
-		
+
 		return result;
 	}
-	
 
 }
diff --git a/eval/fun/SchwefelFunction1_2.java b/eval/fun/SchwefelFunction1_2.java
index f20c23a8339ec2a9a173c0b7809445b23222f4bf..7832e3b81271fc80778a024a7b509e23da512aff 100644
--- a/eval/fun/SchwefelFunction1_2.java
+++ b/eval/fun/SchwefelFunction1_2.java
@@ -6,16 +6,15 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import eval.Eval;
 
-public class SchwefelFunction1_2 extends Eval{
+public class SchwefelFunction1_2 extends Eval {
 
-	
 	public SchwefelFunction1_2() {
 		count = new AtomicInteger(0);
-		
+
 		dim = 30;
 		objective = (double) 0;
 		errorDelta = 0.001;
-		
+
 		min = new ArrayList<Double>();
 		max = new ArrayList<Double>();
 		for (int i = 0; i < dim; i++) {
@@ -28,15 +27,15 @@ public class SchwefelFunction1_2 extends Eval{
 	public double evaluate(List<Double> value) {
 		count.getAndIncrement();
 		double result = 0;
-		
-		for(int i_dim = 0; i_dim < dim; i_dim++ ) {
+
+		for (int i_dim = 0; i_dim < dim; i_dim++) {
 			double tmp_res = 0;
-			for(int j_dim = 0; j_dim < i_dim; j_dim++ ) {
+			for (int j_dim = 0; j_dim < i_dim; j_dim++) {
 				tmp_res += value.get(j_dim);
 			}
-			result += tmp_res*tmp_res;
+			result += tmp_res * tmp_res;
 		}
-		
+
 		return result;
 	}
 
diff --git a/eval/fun/StepFunction.java b/eval/fun/StepFunction.java
index 91e30e70548084c9fa8bfd96f046d0dfea57f4b5..dc7f7fc031e3c329e1d844aa7cf418170ac76f6b 100644
--- a/eval/fun/StepFunction.java
+++ b/eval/fun/StepFunction.java
@@ -6,15 +6,15 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import eval.Eval;
 
-public class StepFunction extends Eval{
+public class StepFunction extends Eval {
 
 	public StepFunction() {
 		count = new AtomicInteger(0);
-		
+
 		dim = 30;
 		objective = (double) 0;
 		errorDelta = 0.01;
-		
+
 		min = new ArrayList<Double>();
 		max = new ArrayList<Double>();
 		for (int i = 0; i < dim; i++) {
@@ -27,12 +27,11 @@ public class StepFunction extends Eval{
 	public double evaluate(List<Double> value) {
 		count.getAndIncrement();
 		double result = 0;
-		
-		for(int i_dim = 0; i_dim < dim; i_dim++ ) {
-			result += Math.pow(Math.floor(0.5+value.get(i_dim)), 2);
+
+		for (int i_dim = 0; i_dim < dim; i_dim++) {
+			result += Math.pow(Math.floor(0.5 + value.get(i_dim)), 2);
 		}
 		return result;
 	}
 
-
 }
diff --git a/eval/twoDFun/Squared.java b/eval/twoDFun/Squared.java
index e540ba4f5626e4d36daeb7829e380912e196ae26..c6a15dd5b727ccf7b206c9198201fa130bbe1d6c 100644
--- a/eval/twoDFun/Squared.java
+++ b/eval/twoDFun/Squared.java
@@ -6,15 +6,15 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import eval.Eval;
 
-public class Squared extends Eval{
+public class Squared extends Eval {
 
 	public Squared() {
 		count = new AtomicInteger(0);
-		
+
 		dim = 2;
 		objective = (double) 0;
 		errorDelta = 0.000_01;
-		
+
 		min = new ArrayList<Double>();
 		max = new ArrayList<Double>();
 		for (int i = 0; i < dim; i++) {
@@ -26,10 +26,10 @@ public class Squared extends Eval{
 	@Override
 	public double evaluate(List<Double> value) {
 		count.getAndIncrement();
-		
+
 		double x = value.get(0);
 		double y = value.get(1);
-		
+
 		double result = Math.abs(x) * Math.abs(y) + Math.abs(x) + Math.abs(y);
 		return result;
 	}
diff --git a/firefly/Calibration.java b/firefly/Calibration.java
index f0c874f2f2c67e8daec3b736f44e12326de76c1d..ab7984d7037c3466744a5b37c24eaff227dd88ef 100644
--- a/firefly/Calibration.java
+++ b/firefly/Calibration.java
@@ -14,16 +14,15 @@ import eval.fun.Rastrigin;
 
 public class Calibration {
 	public static void main(String[] args) {
-		
+
 		// [PARAM]
 		int maxCycle = 200_000;
 		int maxEval = 1_000_000;
-		
-		
+
 		double wr = 0;
 		double afe = 0;
 		double bestSol = 0;
-		
+
 		int nbrTry = 10;
 
 		// [INIT]
@@ -31,53 +30,53 @@ public class Calibration {
 		double bMax = 1; // [0, 1]
 		double y = 0.2; // [0.1, 10] theoretically [0, inf)
 		double randomAlpha = 1; // [0, 1]
-		
+
 		for (int i = 0; i < nbrTry; i++) {
-			
-			//Eval eval = new StepFunction();
-			//Eval eval = new SchwefelFunction1_2();
-			//Eval eval = new SchwefelFunction();
+
+			// Eval eval = new StepFunction();
+			// Eval eval = new SchwefelFunction1_2();
+			// Eval eval = new SchwefelFunction();
 			Eval eval = new Rastrigin();
-			//Eval eval = new Cigar();
-			//Eval eval = new AxisParallelHyper_Ellipsoid();
-			//Eval eval = new Beale();
-			
+			// Eval eval = new Cigar();
+			// Eval eval = new AxisParallelHyper_Ellipsoid();
+			// Eval eval = new Beale();
+
 			// [INIT]
 			FAEnv env = new FAEnv(bMax, y);
-			
+
 			List<Firefly> agents = new ArrayList<Firefly>();
 			for (int i_agent = 0; i_agent < nbrAgent; i_agent++) {
 				agents.add(new Firefly(i_agent, eval, env, randomAlpha));
 			}
 			env.initAgent(agents);
-			
+
 			solve(env, agents, eval, maxCycle, maxEval);
-			
-			if(Math.abs(env.getBestEval()-eval.getObjective())<=eval.getErrorDelta()) {
+
+			if (Math.abs(env.getBestEval() - eval.getObjective()) <= eval.getErrorDelta()) {
 				wr++;
 			}
 			afe += eval.getCount();
 			bestSol += env.getBestEval();
 			System.out.println("done");
-			
+
 		}
 		wr = wr / (double) nbrTry;
 		afe = afe / (double) nbrTry;
 		bestSol = bestSol / (double) nbrTry;
 		System.out.println("wr " + wr);
-		System.out.println("afe "+afe);
-		System.out.println("best sol "+bestSol);
-		
+		System.out.println("afe " + afe);
+		System.out.println("best sol " + bestSol);
+
 	}
-	
+
 	private static void solve(FAEnv env, List<Firefly> agents, Eval eval, int maxCycle, int maxEval) {
 		int cycleModulo = 2;
 		int cycle = 0;
-		
+
 		double bestEval = env.getBestEval();
-		
-		do  {
-			for(int i = 0; i < cycleModulo; i++) {
+
+		do {
+			for (int i = 0; i < cycleModulo; i++) {
 				for (BaseAgent agent : agents) {
 					agent.perceive();
 				}
@@ -86,18 +85,15 @@ public class Calibration {
 					agent.act();
 				}
 			}
-			
+
 			double nextEval = env.getBestEval();
-			
+
 			if (nextEval < bestEval) {
 				bestEval = nextEval;
 			}
-			
-			cycle ++;
-		} while(
-				Math.abs(env.getBestEval()-eval.getObjective())>eval.getErrorDelta()
-				&& cycle < maxCycle
-				&& eval.getCount() < maxEval
-				);
+
+			cycle++;
+		} while (Math.abs(env.getBestEval() - eval.getObjective()) > eval.getErrorDelta() && cycle < maxCycle
+				&& eval.getCount() < maxEval);
 	}
 }
diff --git a/firefly/FAEnv.java b/firefly/FAEnv.java
index e859b353ff0f3fe9c934ddbc236384468eb7d867..01b7c13ee3eeb8441a248e912c891daf5b1a803f 100644
--- a/firefly/FAEnv.java
+++ b/firefly/FAEnv.java
@@ -5,71 +5,70 @@ import java.util.List;
 
 import baseOptiAgent.Env;
 
-public class FAEnv extends Env{
+public class FAEnv extends Env {
 
 	private List<Firefly> agents;
 	private List<List<Double>> attractiveness;
-	
+
 	private double bMax;
 	private double y;
-	
+
 	public FAEnv(double _bMax, double _y) {
 		bMax = _bMax;
 		y = _y;
 	}
-	
-	
+
 	public void initAgent(List<Firefly> _agents) {
 		agents = new ArrayList<Firefly>(_agents);
 	}
-	
+
 	private double distance(List<Double> a, List<Double> b) {
 		double res = 0;
-		for(int i = 0; i < a.size(); i++) {
+		for (int i = 0; i < a.size(); i++) {
 			res += Math.pow(a.get(i) - b.get(i), 2);
 		}
 		return Math.sqrt(res);
 	}
-	
+
 	public void computeAttractiveness() {
 		attractiveness = new ArrayList<List<Double>>();
-		
-		for(int i = 0; i < agents.size(); i++) {
+
+		for (int i = 0; i < agents.size(); i++) {
 			List<Double> tmp = new ArrayList<Double>();
-			for(int j = 0; j < i; j++) {
+			for (int j = 0; j < i; j++) {
 				double dist = distance(agents.get(i).getVector(), agents.get(j).getVector());
 				tmp.add(bMax * Math.exp(-y * dist));
 			}
 			attractiveness.add(tmp);
 		}
-		
+
 	}
-	
+
 	public List<List<Double>> getAttractiveness() {
 		return attractiveness;
 	}
-	
+
 	public List<Double> getFitness() {
 		List<Double> res = new ArrayList<Double>();
-		for(int i = 0; i < agents.size(); i++) {
+		for (int i = 0; i < agents.size(); i++) {
 			res.add(agents.get(i).getFitness());
 		}
-		return res;		
+		return res;
 	}
-	
-	public List<List<Double>> getValues(){
+
+	public List<List<Double>> getValues() {
 		List<List<Double>> res = new ArrayList<List<Double>>();
-		for(int i = 0; i < agents.size(); i++) {
+		for (int i = 0; i < agents.size(); i++) {
 			res.add(agents.get(i).getVector());
 		}
-		return res;		
-		
+		return res;
+
 	}
-	
+
 	@Override
 	public double getBestFitness() {
 		double res = agents.get(0).getFitness();
-		for(Firefly agent: agents) {
+		for (Firefly agent : agents) {
 			if (res < agent.getFitness()) {
 				res = agent.getFitness();
 			}
@@ -80,7 +79,7 @@ public class FAEnv extends Env{
 	@Override
 	public double getBestEval() {
 		Firefly res = agents.get(0);
-		for(Firefly agent: agents) {
+		for (Firefly agent : agents) {
 			if (res.getFitness() < agent.getFitness()) {
 				res = agent;
 			}
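FAEnv.computeAttractiveness above uses beta(r) = bMax * exp(-y * r) on the pairwise Euclidean distances (note the plain r rather than the r^2 sometimes used in firefly variants). A small illustrative program showing how quickly attractiveness decays with the defaults set in FASolver.solve:

// Illustrative only: not part of the repository.
public class AttractivenessDemo {
	public static void main(String[] args) {
		double bMax = 0.2; // same defaults as FASolver.solve below
		double y = 1.0;
		for (double r : new double[] { 0.0, 0.5, 1.0, 2.0, 5.0 }) {
			System.out.printf("r = %.1f -> beta = %.4f%n", r, bMax * Math.exp(-y * r));
		}
	}
}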
diff --git a/firefly/FASolver.java b/firefly/FASolver.java
index 4652b6e60c90fe730b4fd957254a059c87563950..46378d2b7181b1c18402bc15538d7b4ecd87c4d7 100644
--- a/firefly/FASolver.java
+++ b/firefly/FASolver.java
@@ -7,7 +7,7 @@ import baseOptiAgent.Result;
 import baseOptiAgent.Solver;
 import eval.Eval;
 
-public class FASolver extends Solver{
+public class FASolver extends Solver {
 
 	public FASolver(Eval _eval, int _maxCycle, int _maxEval) {
 		super(_eval, _maxCycle, _maxEval);
@@ -16,25 +16,25 @@ public class FASolver extends Solver{
 
 	@Override
 	public Result solve() {
-		
+
 		// [PARAM]
 		int nbrAgent = 10;
 		double bMax = 0.2; // [0, 1]
 		double y = 1; // [0.1, 10] theoretically [0, inf)
 		double randomAlpha = 0.5; // [0, 1]
-		
+
 		// [INIT]
 		FAEnv env = new FAEnv(bMax, y);
-		
+
 		List<Firefly> agents = new ArrayList<Firefly>();
 		for (int i_agent = 0; i_agent < nbrAgent; i_agent++) {
 			agents.add(new Firefly(i_agent, eval, env, randomAlpha));
 		}
-		
+
 		env.initAgent(agents);
-		
+
 		Result res = findSolution(agents, env, 2);
-		
+
 		return res;
 	}
 
diff --git a/firefly/Firefly.java b/firefly/Firefly.java
index a25254831d5087d352c21333f0f4690d360e4ed4..73c0fefe693c0653ef05d5f9508ddd09bc91a247 100644
--- a/firefly/Firefly.java
+++ b/firefly/Firefly.java
@@ -6,114 +6,103 @@ import java.util.List;
 import baseOptiAgent.BaseAgent;
 import eval.Eval;
 
-public class Firefly extends BaseAgent{
-	
+public class Firefly extends BaseAgent {
+
 	FAEnv env;
-	
+
 	Phase phase;
-	
+
 	double randomAlpha;
-	
-	//CYCLE
+
+	// CYCLE
 	List<List<Double>> agents;
 	List<List<Double>> attractiveness;
 	List<Double> agentsFit;
-	
 
 	public Firefly(int _id, Eval _eval, FAEnv _env, double _randomAlpha) {
 		super(_eval, _id);
 
 		env = _env;
-		
+
 		randomAlpha = _randomAlpha;
-		
+
 		phase = Phase.COMPUTE_DIST;
-		
+
 		vector = generateRandomVector();
 		evaluate = this.evaluate(vector);
 		fitness = this.fitness(evaluate);
 	}
-	
+
 	private void nextPhase() {
 		if (phase == Phase.COMPUTE_DIST) {
 			phase = Phase.CYCLE;
-		}
-		else {
+		} else {
 			phase = Phase.COMPUTE_DIST;
 		}
 	}
-	
+
 	@Override
 	public void perceive() {
-		if(phase == Phase.COMPUTE_DIST) {
-			if(id == 0) {
-				
+		if (phase == Phase.COMPUTE_DIST) {
+			if (id == 0) {
+
 			}
-		}
-		else {
+		} else {
 			agents = env.getValues();
 			attractiveness = env.getAttractiveness();
 			agentsFit = env.getFitness();
 		}
 	}
-	
+
 	@Override
 	public void act() {
-		if(phase == Phase.COMPUTE_DIST) {
-			if(id == 0) {
+		if (phase == Phase.COMPUTE_DIST) {
+			if (id == 0) {
 				env.computeAttractiveness();
 			}
-		}
-		else {
+		} else {
 			mainAct();
 		}
 		nextPhase();
 	}
-	
+
 	private void mainAct() {
 		int update = 0;
-		
-		for(int i = 0; i < agents.size(); i++ ) {
 
-			if( i != id &&  agentsFit.get(i) > fitness ) {
-				
+		for (int i = 0; i < agents.size(); i++) {
+
+			if (i != id && agentsFit.get(i) > fitness) {
+
 				double att;
-				if(i<id) {
+				if (i < id) {
 					att = attractiveness.get(id).get(i);
-				}
-				else {
+				} else {
 					att = attractiveness.get(i).get(id);
 				}
-				
+
 				List<Double> newvector = new ArrayList<Double>();
-				
-				for(int dim = 0; dim < eval.getDim(); dim ++) {
-					newvector.add(
-							vector.get(dim) // TODO Both variant without (1-att)
+
+				for (int dim = 0; dim < eval.getDim(); dim++) {
+					newvector.add(vector.get(dim) // TODO Both variant without (1-att)
 							+ att * (agents.get(i).get(dim) - vector.get(dim))
-							+ (Math.random()-0.5) * randomAlpha * (eval.getMax(dim) - eval.getMin(dim))
-							);
-					update ++;
+							+ (Math.random() - 0.5) * randomAlpha * (eval.getMax(dim) - eval.getMin(dim)));
+					update++;
 				}
 				this.boundValue(newvector);
 				this.compareAndUpdate(newvector);
 			}
 		}
-		
-		if(update == 0) {
+
+		if (update == 0) {
 			List<Double> newvector = new ArrayList<Double>();
-			
-			
-			for(int dim = 0; dim < eval.getDim(); dim ++) {
-				newvector.add(
-						vector.get(dim)
-						+ (Math.random()-0.5)* randomAlpha
-						);
-				update ++;
+
+			for (int dim = 0; dim < eval.getDim(); dim++) {
+				newvector.add(vector.get(dim) + (Math.random() - 0.5) * randomAlpha);
+				update++;
 			}
 			this.boundValue(newvector);
 			this.compareAndUpdate(newvector);
 		}
-		
+
 	}
 }
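The movement rule in Firefly.mainAct above combines attraction toward a brighter firefly with a random walk scaled to each dimension's search range. A pure-function sketch of that per-dimension step (names illustrative, not repository code):

// Sketch only: one coordinate of the move toward a brighter firefly j.
public class FireflyStep {
	static double step(double xi, double xj, double att, double randomAlpha,
			double dimMin, double dimMax) {
		return xi
				+ att * (xj - xi)                                           // attraction toward j
				+ (Math.random() - 0.5) * randomAlpha * (dimMax - dimMin);  // random walk scaled to the range
	}
}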
diff --git a/firefly/Phase.java b/firefly/Phase.java
index d975c2a538f21f86e6ccc7eea0456cc6287db459..d9a450d1934e90805589605aa37a7e3da193f5b5 100644
--- a/firefly/Phase.java
+++ b/firefly/Phase.java
@@ -1,6 +1,5 @@
 package firefly;
 
 public enum Phase {
-	COMPUTE_DIST,
-	CYCLE
+	COMPUTE_DIST, CYCLE
 }
diff --git a/smac/Main.java b/smac/Main.java
index cd1a870d8d06588cbe801de22c271be73318d4df..33c3582f346ff51332f15579fef0dc0a260c6ae2 100644
--- a/smac/Main.java
+++ b/smac/Main.java
@@ -14,29 +14,28 @@ import mas.ui.SchedulerToolbar;
 
 public class Main {
 
-    public static void main(String[] args) {
-
-        int nAgents = 50;
-        double perception = 50.0;
-        double maxDensity = 0.3;
-        int maxInactivity = 5;
-        double bMax = 1;
-        double vision = 1;
-
-        List<Smagent> agents = new ArrayList<Smagent>();
-        Eval eval = new Squared();
-        
-        SmacEnv env = new SmacEnv(perception);
-
-        for(int i = 0; i<nAgents ; i++){
-        	agents.add(new Smagent(eval,i, env, nAgents, maxDensity, maxInactivity, bMax, vision));
-        }
-        
-        env.initAgents(agents);
-
-        TwoDCycling scheduler = new TwoDCycling(agents.toArray(new Smagent[agents.size()]));
-
-        MainWindow.instance();
-        MainWindow.addToolbar(new SchedulerToolbar("Amas", scheduler));
-    }
+	public static void main(String[] args) {
+
+		int nAgents = 50;
+		double perception = 50.0;
+		double maxDensity = 0.3;
+		int maxInactivity = 5;
+		int maxMemory = 1500;
+
+		List<Smagent> agents = new ArrayList<Smagent>();
+		Eval eval = new Squared();
+
+		SmacEnv env = new SmacEnv(perception);
+
+		for (int i = 0; i < nAgents; i++) {
+			agents.add(new Smagent(eval, i, env, nAgents, maxDensity, maxInactivity, maxMemory));
+		}
+
+		env.initAgents(agents);
+
+		TwoDCycling scheduler = new TwoDCycling(agents.toArray(new Smagent[agents.size()]));
+
+		MainWindow.instance();
+		MainWindow.addToolbar(new SchedulerToolbar("Amas", scheduler));
+	}
 }
diff --git a/smac/Phase.java b/smac/Phase.java
index 43360ab6613cd41dc5b386003997c37c53b78c43..90ec8ff980059d512f1afc088aa7ce77ebde6c03 100644
--- a/smac/Phase.java
+++ b/smac/Phase.java
@@ -1,6 +1,5 @@
 package smac;
 
 public enum Phase {
-	CYCLING,
-	DIST
+	CYCLING, DIST
 }
diff --git a/smac/SmacEnv.java b/smac/SmacEnv.java
index 3eae3d83818c6db88e31db0b9964d16e5e5882b2..e29e64ec649ba76b7bf3d3eab5eec0d1c597c3e0 100644
--- a/smac/SmacEnv.java
+++ b/smac/SmacEnv.java
@@ -5,72 +5,104 @@ import java.util.List;
 
 import baseOptiAgent.Env;
 
-public class SmacEnv extends Env{
+public class SmacEnv extends Env {
 
 	private List<Smagent> agents;
 	private List<List<Double>> distance;
-	
+	private List<Double> density;
+
 	private double maxDist;
-	
+
 	public SmacEnv(double _maxDist) {
 		maxDist = _maxDist;
 	}
-	
+
 	public void initAgents(List<Smagent> _agents) {
 		agents = _agents;
 	}
-	
+
 	private double dist(List<Double> a, List<Double> b) {
 		double res = 0;
-		for(int i = 0; i < a.size(); i++) {
+		for (int i = 0; i < a.size(); i++) {
 			res += Math.pow(a.get(i) - b.get(i), 2);
 		}
 		return Math.sqrt(res);
 	}
-	
+
 	public void computeDistance() {
 		distance = new ArrayList<List<Double>>();
-		for(int i_agent = 0; i_agent < agents.size(); i_agent ++) {
+		for (int i_agent = 0; i_agent < agents.size(); i_agent++) {
 			List<Double> tmpDist = new ArrayList<Double>();
-			for(int j_agent = 0; j_agent < agents.size(); j_agent ++) {
+			for (int j_agent = 0; j_agent < agents.size(); j_agent++) {
 				tmpDist.add(0.0);
 			}
 			distance.add(tmpDist);
 		}
-		
-		for(int i_agent = 0; i_agent < agents.size(); i_agent ++) {
-			for(int j_agent = 0; j_agent < i_agent; j_agent ++) {
+
+		for (int i_agent = 0; i_agent < agents.size(); i_agent++) {
+			for (int j_agent = 0; j_agent < i_agent; j_agent++) {
 				double dist = dist(agents.get(i_agent).getVector(), agents.get(j_agent).getVector());
 				distance.get(i_agent).set(j_agent, dist);
 				distance.get(j_agent).set(i_agent, dist);
 			}
 		}
 	}
-	
-	public List<List<Double>> getNei(int id){
+
+	public List<List<Double>> getNei(int id) {
 		List<List<Double>> res = new ArrayList<List<Double>>();
-		for(int i_agent = 0; i_agent < agents.size(); i_agent ++) {
-			if(distance.get(id).get(i_agent) <= maxDist) {
+		for (int i_agent = 0; i_agent < agents.size(); i_agent++) {
+			if (distance.get(id).get(i_agent) <= maxDist) {
 				res.add(agents.get(i_agent).getVector());
 			}
 		}
 		return res;
 	}
-	
-	public List<Double> getFit(int id){
+
+	public List<Double> getFit(int id) {
 		List<Double> res = new ArrayList<Double>();
-		for(int i_agent = 0; i_agent < agents.size(); i_agent ++) {
-			if(distance.get(id).get(i_agent) <= maxDist) {
+		for (int i_agent = 0; i_agent < agents.size(); i_agent++) {
+			if (distance.get(id).get(i_agent) <= maxDist) {
 				res.add(agents.get(i_agent).getFitness());
 			}
 		}
 		return res;
 	}
-	
+
+	public List<Double> getDensity(int id) {
+		List<Double> res = new ArrayList<Double>();
+		for (int i_agent = 0; i_agent < agents.size(); i_agent++) {
+			if (distance.get(id).get(i_agent) <= maxDist) {
+				int nbrNei = getNei(i_agent).size();
+				res.add((double) nbrNei / this.agents.size());
+			}
+		}
+		return res;
+	}
+
+	public List<List<Double>> getMemory(int id) {
+		List<List<Double>> res = new ArrayList<List<Double>>();
+		for (int i_agent = 0; i_agent < agents.size(); i_agent++) {
+			if (distance.get(id).get(i_agent) <= maxDist) {
+				res.addAll(agents.get(i_agent).getMemoryValue());
+			}
+		}
+		return res;
+	}
+
+	public List<Double> getMemoryFit(int id) {
+		List<Double> res = new ArrayList<Double>();
+		for (int i_agent = 0; i_agent < agents.size(); i_agent++) {
+			if (distance.get(id).get(i_agent) <= maxDist) {
+				res.addAll(agents.get(i_agent).getMemoryFit());
+			}
+		}
+		return res;
+	}
+
 	@Override
 	public double getBestFitness() {
 		double res = agents.get(0).getFitness();
-		for(Smagent agent: agents) {
+		for (Smagent agent : agents) {
 			if (res < agent.getFitness()) {
 				res = agent.getFitness();
 			}
@@ -81,11 +113,21 @@ public class SmacEnv extends Env{
 	@Override
 	public double getBestEval() {
 		Smagent res = agents.get(0);
-		for(Smagent agent: agents) {
+		for (Smagent agent : agents) {
 			if (res.getFitness() < agent.getFitness()) {
 				res = agent;
 			}
 		}
 		return res.getEvaluate();
 	}
+
+	public List<Double> getBest() {
+		Smagent res = agents.get(0);
+		for (Smagent agent : agents) {
+			if (res.getFitness() < agent.getFitness()) {
+				res = agent;
+			}
+		}
+		return res.getVector();
+	}
 }
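SmacEnv.getDensity above reports, for each neighbour within maxDist of an agent, that neighbour's own crowding level as a fraction of the whole population. A sketch of that per-agent density measure computed directly from position vectors (class and method names are illustrative); the agent counts toward its own density, since its self-distance of 0 is always within maxDist:

import java.util.List;

// Sketch only: not part of the repository.
public class DensityDemo {
	static double density(List<List<Double>> positions, int id, double maxDist) {
		int within = 0;
		for (List<Double> other : positions) {
			double squared = 0;
			for (int d = 0; d < other.size(); d++) {
				squared += Math.pow(positions.get(id).get(d) - other.get(d), 2);
			}
			if (Math.sqrt(squared) <= maxDist) {
				within++;
			}
		}
		return (double) within / positions.size();
	}
}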
diff --git a/smac/SmacSolver.java b/smac/SmacSolver.java
new file mode 100644
index 0000000000000000000000000000000000000000..d480ca271feb86b0ea895c3cc7a1f7753d7f1a8a
--- /dev/null
+++ b/smac/SmacSolver.java
@@ -0,0 +1,39 @@
+package smac;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import baseOptiAgent.Result;
+import baseOptiAgent.Solver;
+import eval.Eval;
+
+public class SmacSolver extends Solver {
+
+	public SmacSolver(Eval _eval, int _maxCycle, int _maxEval) {
+		super(_eval, _maxCycle, _maxEval);
+		name = "Smac";
+	}
+
+	@Override
+	public Result solve() {
+
+		int nAgents = 50;
+		double perception = 50.0;
+		double maxDensity = 0.3;
+		int maxInactivity = 5;
+		int maxMemory = 1500;
+
+		List<Smagent> agents = new ArrayList<Smagent>();
+
+		SmacEnv env = new SmacEnv(perception);
+
+		for (int i = 0; i < nAgents; i++) {
+			agents.add(new Smagent(eval, i, env, nAgents, maxDensity, maxInactivity, maxMemory));
+		}
+
+		env.initAgents(agents);
+
+		return findSolution(agents, env, 2);
+	}
+
+}
diff --git a/smac/Smagent.java b/smac/Smagent.java
index 42dd93d7c487a9c41f77b62bc8e54a1f943d6e47..fdb19988d450b60f1324338b39ac98f5927bc5e4 100644
--- a/smac/Smagent.java
+++ b/smac/Smagent.java
@@ -1,6 +1,5 @@
 package smac;
 
-
 import java.util.ArrayList;
 import java.util.List;
 
@@ -9,201 +8,290 @@ import eval.Eval;
 import mas.ui.VUI;
 import mas.ui.drawables.DrawableImage;
 
-public class Smagent extends BaseAgent{
-	
+public class Smagent extends BaseAgent {
+
 	private int nbrAgent;
-	
-    private DrawableImage image;
-    
-    private double maxDensity;
-    
-    private Phase phase;
-    
-    private SmacEnv env;
-    
-    private int countInactivity;
-    private int maxInactivity;
-    
-    private double bMax;
-    private double vision;
-    
-    // PHASE CYCLING
-    private List<List<Double>> neiValue;
-    private List<Double> neiFit;
-	
-	public Smagent(Eval _eval, int _id, SmacEnv _env, 
-			int _nbrAgent,
-			double _maxDensity,
-			int _maxInactivity,
-			double _bMax,
-			double _vision
-			
-			) {
+
+	private DrawableImage image;
+
+	private double maxDensity;
+
+	private Phase phase;
+
+	private SmacEnv env;
+
+	private int countInactivity;
+	private int maxInactivity;
+
+	private List<Double> memoryFit;
+	private List<List<Double>> memoryValue;
+	private int maxMemory;
+
+	// PHASE CYCLING
+	private List<List<Double>> neiValue;
+	private List<Double> neiFit;
+	private List<Double> neiDensity;
+	private List<List<Double>> neiMem;
+	private List<Double> neiMemFit;
+
+	private double bestFit;
+	private List<Double> bestPos;
+
+	public Smagent(Eval _eval, int _id, SmacEnv _env, int _nbrAgent, double _maxDensity, int _maxInactivity,
+			int _maxMemory) {
 		super(_eval, _id);
-		
+
 		env = _env;
-		
+
+		maxMemory = _maxMemory;
+		memoryFit = new ArrayList<Double>();
+		memoryValue = new ArrayList<List<Double>>();
+
 		maxInactivity = _maxInactivity;
-		bMax = _bMax;
-		vision = _vision;
-		
+
 		vector = generateRandomVector();
 		evaluate = this.evaluate(vector);
 		fitness = this.fitness(evaluate);
 
-        image = VUI.get().createImage(vector.get(0), vector.get(1), "example/randomants/ressources/ant.png");
-        
-        nbrAgent = _nbrAgent;
-        maxDensity = _maxDensity;
-        
-        phase = Phase.DIST;
-        
-        countInactivity = 0;
+		// image = VUI.get().createImage(vector.get(0), vector.get(1),
+		// "example/randomants/ressources/ant.png");
+
+		nbrAgent = _nbrAgent;
+		maxDensity = _maxDensity;
+
+		phase = Phase.DIST;
+
+		countInactivity = 0;
 	}
 
 	@Override
 	public void perceive() {
-		if(phase == Phase.DIST) {
-			if(id == 0) {
+		if (phase == Phase.DIST) {
+			if (id == 0) {
 				env.computeDistance();
 			}
-		}
-		else {
+		} else {
 			neiValue = env.getNei(id);
 			neiFit = env.getFit(id);
+			neiDensity = env.getDensity(id);
+			neiMem = env.getMemory(id);
+			neiMemFit = env.getMemoryFit(id);
+
+			bestFit = env.getBestFitness();
+			bestPos = env.getBest();
 		}
 	}
-	
-	public void actionPhase() {
+
+	@Override
+	protected double evaluate(List<Double> vector) {
+		double res = eval.evaluate(vector);
+		memoryFit.add(fitness(res));
+		memoryValue.add(vector);
+
+		if (memoryFit.size() > maxMemory) {
+			memoryFit.remove(0);
+			memoryValue.remove(0);
+		}
+
+		return res;
+	}
+
+	private double attractiveness(double dest) {
+
+		// x = current agent fitness
+		// x' = destination fitness
+		// xm = max fitness known
+
+		// attractiveness = abs(x' - x) / xm
+
+		return Math.abs(dest - fitness) / (bestFit);
+	}
+
+	private List<Double> explorationValue() {
+
 		/*
-		 * neiValue List<List<Double>> -> Contain all neighbors vectors 
-		 * neiFit List<Double> -> Contain all neighbors fitness
+		 * List<Double> res = new ArrayList<Double>(); for(int dim = 0; dim <
+		 * eval.getDim(); dim ++) { res.add((Math.random()-0.5) * 2); } return res;
+		 * 
 		 */
+		if (memoryFit.size() < 2) {
+			List<Double> res = new ArrayList<Double>();
+			for (int dim = 0; dim < eval.getDim(); dim++) {
+				res.add((Math.random() - 0.5) * 2);
+			}
+			return res;
+		}
 
-		int update = 0; // count if the agent will update himself
-		// TODO : REWORK UPDATE
-		
-		double randStepSize = 1; 
-		
-		if((double) neiValue.size()/ (double) nbrAgent < maxDensity) { // Seuil densité bas
-			
-			
-			for(int i = 0; i < neiValue.size(); i++) {
-				
-				/*
-				 * ### Go closer to other agents ###
-				 * if other has better fitness
-				 * - compute attractivness ( FA with fitness ?)
-				 * 
-				 * Generate new solution : currentPos + attractivness * (other - currentPos) +- randomValue
-				 * 
-				 * Evaluate new solution, if better use it instead of the current one
-				 */
-				
-				if(neiFit.get(i) > fitness) { // Other have better fitness
-					
-					// TODO : compute attractiveness
-					double attractiveness = bMax * Math.exp(-vision * 1 / neiFit.get(i)); 
-					
-					List<Double> newvector = new ArrayList<Double>();
-					
-					for(int dim = 0; dim < eval.getDim(); dim ++) {
-						newvector.add(
-								vector.get(dim) // TODO Variant with vector.get(dim) * (1 - attractiveness)
-								+ attractiveness * (neiValue.get(i).get(dim) - vector.get(dim))
-								+ (Math.random()-0.5) * randStepSize
-								);
-					}
-					boundValue(newvector); // minValue <= newvector <= maxValue 
-					if (compareAndUpdate(newvector)) {//Evaluate, update if better
-						update ++;
-					}; 
+		List<Double> res = new ArrayList<Double>();
+		for (int dim = 0; dim < eval.getDim(); dim++) {
+			res.add((double) 0);
+		}
+
+		for (int i_mem = 0; i_mem < memoryFit.size(); i_mem++) {
+			for (int dim = 0; dim < eval.getDim(); dim++) {
+				double addValue = attractiveness(memoryFit.get(i_mem)) * memoryValue.get(i_mem).get(dim);
+				if (fitness < memoryFit.get(i_mem)) {
+					// Attraction
+					res.set(dim, res.get(dim) + addValue);
+				} else {
+					// repulsion
+					res.set(dim, res.get(dim) - addValue);
 				}
 			}
-			
-			if(neiValue.size() == 0) { // no one's arround
-				/*
-				 * ### Explore around ###
-				 * Generate new solution : currentPos +- randomValue
-				 * Evaluate new solution, if better use it instead of the current one
-				 */
-				List<Double> newvector = new ArrayList<Double>();
-				
-				for(int dim = 0; dim < eval.getDim(); dim ++) {
-					newvector.add(
-							vector.get(dim)
-							+ (Math.random()-0.5) * randStepSize
-							);
+		}
+
+		for (int i_mem = 0; i_mem < neiMem.size(); i_mem++) {
+			for (int dim = 0; dim < eval.getDim(); dim++) {
+				double addValue = attractiveness(neiMemFit.get(i_mem)) * neiMem.get(i_mem).get(dim);
+				if (fitness < neiMemFit.get(i_mem)) {
+					// Attraction
+					res.set(dim, res.get(dim) + addValue);
+				} else {
+					// repulsion
+					res.set(dim, res.get(dim) - addValue);
 				}
-				boundValue(newvector); // minValue <= newvector <= maxValue 
-				if (compareAndUpdate(newvector)) {//Evaluate, update if better
-					update ++;
-				}; 
 			}
 		}
-		else { // TODO : Seuil densité haut (& not local best ?)
-			for(int i = 0; i < neiValue.size(); i++) {
-				
-				/*
-				 * compute the repulsion
-				 * 
-				 * Generate new solution : currentPos + repulsion * (other - currentPos) +- randomValue
-				 * 
-				 * Evaluate new solution, and use it
-				 */
 
-			}
+		double dist = 0;
+		for (int dim = 0; dim < eval.getDim(); dim++) {
+			dist += Math.pow(res.get(dim), 2);
 		}
-		
-		if (update == 0) {
-			countInactivity ++;
+		dist = Math.sqrt(dist);
+		for (int dim = 0; dim < eval.getDim(); dim++) {
+			res.set(dim, res.get(dim) / dist);
 		}
-		else {
-			countInactivity = 0;
+
+		return res;
+	}
+
+	public void actionPhase() {
+
+		int i = 0; // index of the neighbour selected to move toward
+
+		if ((double) neiValue.size() / (double) nbrAgent < maxDensity) {
+			// Low Density
+
+			// find the best nei (max local fitness)
+			for (int nextAgent = 0; nextAgent < neiValue.size(); nextAgent++) {
+				if (neiFit.get(nextAgent) > neiFit.get(i)) {
+					i = nextAgent;
+				}
+			}
+
+		} else {
+			// High Density
+
+			// find the nei with lowest density that have a better fitness
+			for (int nextAgent = 0; nextAgent < neiValue.size(); nextAgent++) {
+				if (neiDensity.get(nextAgent) < neiDensity.get(i) && neiFit.get(nextAgent) > fitness) {
+					i = nextAgent;
+				}
+			}
+
 		}
-		
-		if (countInactivity > maxInactivity) { // TODO : no update in a while & not best
-			/*
-			 * Find a new place to go : (?)
-			 * 	- Randomly generate new solution <- THIS
-			 *  - Go forward Global best solution
-			 *  - Find promising place
-			 */
-			vector = generateRandomVector();
-			evaluate = this.evaluate(vector);
-			fitness = this.fitness(evaluate);
+
+		List<Double> randomStep = explorationValue();
+		if (neiFit.get(i) <= fitness) { // this agent is the local best
+			List<Double> newvector = new ArrayList<Double>();
+
+			for (int dim = 0; dim < eval.getDim(); dim++) {
+				newvector.add(vector.get(dim) + randomStep.get(dim));
+			}
+			boundValue(newvector); // minValue <= newvector <= maxValue
+			if (compareAndUpdate(newvector)) {// Evaluate, update if better
+				countInactivity = 0;
+			} else {
+				countInactivity++;
+			}
+		} else {
+			double att = attractiveness(neiFit.get(i));
+
+			// Step toward him
+			List<Double> newvector = new ArrayList<Double>();
+
+			for (int dim = 0; dim < eval.getDim(); dim++) {
+				newvector.add(vector.get(dim) // TODO Variant with vector.get(dim) * (1 - attractiveness)
+						+ att * (neiValue.get(i).get(dim) - vector.get(dim)) + randomStep.get(dim));
+			}
+			boundValue(newvector); // minValue <= newvector <= maxValue
+			if (compareAndUpdate(newvector)) {// Evaluate, update if better
+				countInactivity = 0;
+			} else {
+				countInactivity++;
+			}
+
+			if (countInactivity > maxInactivity) {
+				countInactivity = 0;
+
+				// select nei with lowest density -> step toward him.
+				if (neiValue.size() != 0) {
+					i = 0;
+					for (int nextAgent = 1; nextAgent < neiValue.size(); nextAgent++) {
+						if (neiDensity.get(nextAgent) < neiDensity.get(i)) {
+							i = nextAgent;
+						}
+					}
+
+					att = attractiveness(neiFit.get(i));
+					newvector = new ArrayList<Double>();
+					randomStep = explorationValue();
+
+					for (int dim = 0; dim < eval.getDim(); dim++) {
+						newvector.add(vector.get(dim) // TODO Variant with vector.get(dim) * (1 - attractiveness)
+								+ att * (neiValue.get(i).get(dim) - vector.get(dim)) + randomStep.get(dim));
+					}
+
+				} else { // no neighbour -> step toward the global best
+					newvector = new ArrayList<Double>();
+					att = attractiveness(bestFit);
+
+					for (int dim = 0; dim < eval.getDim(); dim++) {
+						newvector.add(vector.get(dim) // TODO Variant with vector.get(dim) * (1 - attractiveness)
+								+ att * (this.bestPos.get(dim) - vector.get(dim)) + randomStep.get(dim));
+					}
+				}
+				boundValue(newvector); // minValue <= newvector <= maxValue
+				vector = newvector;
+				evaluate = this.evaluate(vector);
+				fitness = this.fitness(evaluate);
+			}
 		}
+
 	}
-	
-	
+
 	private void nextPhase() {
-		if(phase == Phase.DIST) {
+		if (phase == Phase.DIST) {
 			phase = Phase.CYCLING;
-		}
-		else {
+		} else {
 			phase = Phase.DIST;
 		}
 	}
-	
+
 	@Override
 	public void act() {
-		if(phase == Phase.DIST) {
-			
-		}
-		else {
+		if (phase == Phase.DIST) {
+
+		} else {
 			actionPhase();
-	        image.move(vector.get(0), vector.get(1));
-	        //System.out.println(this);
+			// image.move(vector.get(0), vector.get(1));
+			// System.out.println(this);
 		}
 		nextPhase();
 	}
-	
-	
+
+	public List<Double> getMemoryFit() {
+		return memoryFit;
+	}
+
+	public List<List<Double>> getMemoryValue() {
+		return memoryValue;
+	}
+
 	@Override
 	public String toString() {
-		return "[AGENT] pos -> "+this.vector.get(0)+" / "+this.vector.get(1)+"\n"
-				+"Fit -> "+fitness+" / "+" Eval -> "+evaluate;
+		return "[AGENT] pos -> " + this.vector.get(0) + " / " + this.vector.get(1) + "\n" + "Fit -> " + fitness + " / "
+				+ " Eval -> " + evaluate;
 	}
-	
 
 }
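Smagent.explorationValue above sums attraction and repulsion contributions from the agent's own memory and its neighbours' memories, then rescales the accumulated direction to unit length. A sketch of that final normalisation step (helper names are illustrative); an exactly zero direction would divide 0 by 0 and produce NaN, which a caller may want to guard against:

import java.util.ArrayList;
import java.util.List;

// Sketch only: the unit-length rescaling at the end of Smagent.explorationValue.
public class DirectionNormaliser {
	static List<Double> normalise(List<Double> direction) {
		double length = 0;
		for (double component : direction) {
			length += component * component;
		}
		length = Math.sqrt(length);

		List<Double> unit = new ArrayList<Double>();
		for (double component : direction) {
			unit.add(component / length); // NaN if the direction is exactly zero
		}
		return unit;
	}
}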