diff --git a/ELLSAonAMAK/src/experiments/simpleLauncherExternalSource/LaunchExampleXPWithArgs.java b/ELLSAonAMAK/src/experiments/simpleLauncherExternalSource/LaunchExampleXPWithArgs.java
new file mode 100644
index 0000000000000000000000000000000000000000..8fe3e9996ab065490847404108f76ca696b97056
--- /dev/null
+++ b/ELLSAonAMAK/src/experiments/simpleLauncherExternalSource/LaunchExampleXPWithArgs.java
@@ -0,0 +1,342 @@
+package experiments.simpleLauncherExternalSource;
+
+
+import agents.head.REQUEST;
+import agents.head.SITUATION;
+import experiments.mathematicalModels.Model_Manager;
+import fr.irit.smac.amak.Configuration;
+import kernel.ELLSA;
+import kernel.StudiedSystem;
+import kernel.backup.BackupSystem;
+import kernel.backup.IBackupSystem;
+import utils.*;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.OptionalDouble;
+
+public class LaunchExampleXPWithArgs {
+
+    private static CSVWriter xpCSV;
+    private static HashMap<String, Double> perceptions = new HashMap<String, Double>();
+
+
+    public static void main (String[] args)  { // Entry point: maps 49 positional CLI args (indices 0-48) onto the static PARAMS holder, then runs the experiment.
+
+
+
+        TRACE.minLevel = TRACE_LEVEL.CYCLE; // restrict trace output to cycle-level messages
+
+        PARAMS.dimension = Integer.parseInt(args[0]);
+        PARAMS.configFile = args[1] +".xml"; // config name given without extension; resolved under "resources/" at load time
+
+        PARAMS.nbLearningCycle = Integer.parseInt(args[2]);
+        PARAMS.nbExploitationCycle = Integer.parseInt(args[3]);
+        PARAMS.nbEpisodes = Integer.parseInt(args[4]);
+
+        // Neighborhood
+        PARAMS.validityRangesPrecision =  Double.parseDouble(args[5]);
+        PARAMS.neighborhoodRadiusCoefficient = Double.parseDouble(args[6]);
+        PARAMS.influenceRadiusCoefficient = Double.parseDouble(args[7]);
+        PARAMS.modelErrorMargin = Double.parseDouble(args[8]);
+
+        // Learning
+        PARAMS.setActiveLearning = Boolean.parseBoolean(args[9]); // NOTE(review): parseBoolean silently yields false for any token other than "true" — typos in flags go undetected
+        PARAMS.setSelfLearning = Boolean.parseBoolean(args[10]);
+
+        //NCS
+        PARAMS.setConflictDetection = Boolean.parseBoolean(args[11]);
+        PARAMS.setConcurrenceDetection = Boolean.parseBoolean(args[12]);
+        PARAMS.setIncompetenceDetection = Boolean.parseBoolean(args[13]);
+        PARAMS.setSubIncompetencedDetection = Boolean.parseBoolean(args[14]);
+        PARAMS.setRangeAmbiguityDetection = Boolean.parseBoolean(args[15]);
+        PARAMS.setModelAmbiguityDetection = Boolean.parseBoolean(args[16]);
+        PARAMS.setCompleteRedundancyDetection = Boolean.parseBoolean(args[17]);
+        PARAMS.setPartialRedundancyDetection = Boolean.parseBoolean(args[18]);
+
+        PARAMS.setDream = Boolean.parseBoolean(args[19]);
+        PARAMS.setDreamCycleLaunch = Integer.parseInt(args[20]);
+
+        PARAMS.setCooperativeNeighborhoodLearning = Boolean.parseBoolean(args[21]);
+        PARAMS.nbOfNeighborForLearningFromNeighbors = Integer.parseInt(args[22]);
+        PARAMS.nbOfNeighborForContexCreationWithouOracle = Integer.parseInt(args[23]);
+        PARAMS.nbOfNeighborForVoidDetectionInSelfLearning =  PARAMS.nbOfNeighborForContexCreationWithouOracle; // void-detection neighbor count deliberately mirrors the creation-without-oracle count
+
+        PARAMS.setisCreationWithNeighbor = Boolean.parseBoolean(args[24]);
+
+
+
+
+        PARAMS.model = args[25]; // model name string, interpreted by Model_Manager
+        PARAMS.setbootstrapCycle = Integer.parseInt(args[26]);
+
+
+
+        PARAMS.exogenousLearningWeight = Double.parseDouble(args[27]);
+        PARAMS.endogenousLearningWeight = Double.parseDouble(args[28]);
+
+        PARAMS.LEARNING_WEIGHT_ACCURACY = Double.parseDouble(args[29]);
+        PARAMS.LEARNING_WEIGHT_PROXIMITY = Double.parseDouble(args[30]);
+        PARAMS.LEARNING_WEIGHT_EXPERIENCE = Double.parseDouble(args[31]);
+        PARAMS.LEARNING_WEIGHT_GENERALIZATION = Double.parseDouble(args[32]);
+
+        PARAMS.EXPLOITATION_WEIGHT_PROXIMITY = Double.parseDouble(args[33]);
+        PARAMS.EXPLOITATION_WEIGHT_EXPERIENCE = Double.parseDouble(args[34]);
+        PARAMS.EXPLOITATION_WEIGHT_GENERALIZATION = Double.parseDouble(args[35]);
+
+        PARAMS.perceptionsGenerationCoefficient = Double.parseDouble(args[36]);
+
+        PARAMS.modelSimilarityThreshold = Double.parseDouble(args[37]);
+
+        PARAMS.maxRangeRadiusCoefficient = Double.parseDouble(args[38]);
+        PARAMS.rangeSimilarityCoefficient = Double.parseDouble(args[39]);
+        PARAMS.minimumRangeCoefficient = Double.parseDouble(args[40]);
+
+        PARAMS.isAllContextSearchAllowedForLearning = Boolean.parseBoolean(args[41]);
+        PARAMS.isAllContextSearchAllowedForExploitation = Boolean.parseBoolean(args[42]);
+
+        PARAMS.probabilityOfRangeAmbiguity = Double.parseDouble(args[43]);
+
+        PARAMS.transferCyclesRatio = Double.parseDouble(args[44]);
+
+        PARAMS.nbEndoExploitationCycle = Integer.parseInt(args[45]);
+        PARAMS.setActiveExploitation = Boolean.parseBoolean(args[46]);
+
+        PARAMS.noiseRange = Double.parseDouble(args[47]);
+
+        PARAMS.extension = args[48]; // suffix used for the output CSV file name
+
+
+
+
+        experimentation(); // runs all episodes and writes results
+
+        System.out.print(" DONE");
+
+        System.exit(1); // NOTE(review): exit code 1 conventionally signals failure; 0 is expected on success — confirm no script depends on 1
+    }
+
+
+    public static void experimentation() { // Runs PARAMS.nbEpisodes learning episodes headlessly and writes aggregated results + timing to CSV.
+
+        xpCSV = new CSVWriter( PARAMS.extension ); // output file named after PARAMS.extension
+
+        // Set AMAK configuration before creating an AMOEBA
+        Configuration.multiUI=true;
+        Configuration.commandLineMode = true; // headless: no GUI scheduler
+        Configuration.allowedSimultaneousAgentsExecution = 1; // sequential agent execution for determinism
+        Configuration.waitForGUI = false;
+        Configuration.plotMilliSecondsUpdate = 20000;
+
+        Pair<ArrayList<List<String>>,HashMap<String, ArrayList<Double>>> dataPair = WRITER.getData(); // column headers (A) and per-metric value lists (B)
+        ArrayList<List<String>> dataStrings = dataPair.getA();
+        HashMap<String, ArrayList<Double>> data = dataPair.getB();
+
+        double start = System.nanoTime();
+
+        for (int i = 0; i < PARAMS.nbEpisodes; ++i) {
+            //System.out.print(i + " ");
+            learningEpisode(data); // each episode appends its metrics into data
+        }
+        //System.out.println(" ");
+        double total = (System.nanoTime()- start)/1000; // NOTE(review): ns/1000 is MICROseconds, yet logged below as "s" — likely should divide by 1e9
+        double mean = total/ PARAMS.nbEpisodes;
+        System.out.println("[TIME MEAN] " + mean + " s");
+        System.out.println("[TIME TOTAL] " + total + " s");
+
+        WRITER.writeData(xpCSV, data, dataStrings, total, mean);
+
+        data = null; // redundant: local reference goes out of scope immediately after
+    }
+
+
+
+    private static void learningEpisode(HashMap<String, ArrayList<Double>> data) { // One full episode: build ELLSA, copy PARAMS, learn, exploit, compute error/time stats, append to data.
+        RAND_REPEATABLE.setSeed(0); // fixed seed so every episode is repeatable
+        ELLSA ellsa = new ELLSA(null,  null); // headless: no UI window, no VUI
+        StudiedSystem studiedSystem = new Model_Manager(PARAMS.spaceSize, PARAMS.dimension, PARAMS.nbOfModels, PARAMS.normType, PARAMS.randomExploration, PARAMS.explorationIncrement, PARAMS.explorationWidht, PARAMS.limitedToSpaceZone, PARAMS.noiseRange);
+        ellsa.setStudiedSystem(studiedSystem);
+        IBackupSystem backupSystem = new BackupSystem(ellsa);
+        File file = new File("resources/"+ PARAMS.configFile); // load agent topology/percepts from XML config
+        backupSystem.load(file);
+        ellsa.getEnvironment().setSeed(0); // environment RNG also pinned for repeatability
+
+
+        ellsa.allowGraphicalScheduler(false);
+        ellsa.setRenderUpdate(false);
+
+
+        ellsa.getEnvironment().setMappingErrorAllowed(PARAMS.validityRangesPrecision); // --- copy all tuning PARAMS into this ELLSA instance ---
+        ellsa.data.PARAM_modelErrorMargin = PARAMS.modelErrorMargin;
+        ellsa.data.PARAM_bootstrapCycle = PARAMS.setbootstrapCycle;
+        ellsa.data.PARAM_exogenousLearningWeight = PARAMS.exogenousLearningWeight;
+        ellsa.data.PARAM_endogenousLearningWeight = PARAMS.endogenousLearningWeight;
+
+        ellsa.data.PARAM_neighborhoodRadiusCoefficient = PARAMS.neighborhoodRadiusCoefficient;
+        ellsa.data.PARAM_influenceRadiusCoefficient = PARAMS.influenceRadiusCoefficient;
+        ellsa.data.PARAM_maxRangeRadiusCoefficient = PARAMS.maxRangeRadiusCoefficient;
+        ellsa.data.PARAM_rangeSimilarityCoefficient = PARAMS.rangeSimilarityCoefficient;
+        ellsa.data.PARAM_minimumRangeCoefficient = PARAMS.minimumRangeCoefficient;
+
+        ellsa.data.PARAM_creationNeighborNumberForVoidDetectionInSelfLearning = PARAMS.nbOfNeighborForVoidDetectionInSelfLearning;
+        ellsa.data.PARAM_creationNeighborNumberForContexCreationWithouOracle = PARAMS.nbOfNeighborForContexCreationWithouOracle;
+
+        ellsa.data.PARAM_perceptionsGenerationCoefficient = PARAMS.perceptionsGenerationCoefficient // statement continues on next line (stray line break before the semicolon)
+        ;
+        ellsa.data.PARAM_modelSimilarityThreshold = PARAMS.modelSimilarityThreshold;
+
+        ellsa.data.PARAM_LEARNING_WEIGHT_ACCURACY = PARAMS.LEARNING_WEIGHT_ACCURACY;
+        ellsa.data.PARAM_LEARNING_WEIGHT_PROXIMITY = PARAMS.LEARNING_WEIGHT_PROXIMITY;
+        ellsa.data.PARAM_LEARNING_WEIGHT_EXPERIENCE = PARAMS.LEARNING_WEIGHT_EXPERIENCE;
+        ellsa.data.PARAM_LEARNING_WEIGHT_GENERALIZATION = PARAMS.LEARNING_WEIGHT_GENERALIZATION;
+
+        ellsa.data.PARAM_EXPLOITATION_WEIGHT_PROXIMITY = PARAMS.EXPLOITATION_WEIGHT_PROXIMITY;
+        ellsa.data.PARAM_EXPLOITATION_WEIGHT_EXPERIENCE = PARAMS.EXPLOITATION_WEIGHT_EXPERIENCE;
+        ellsa.data.PARAM_EXPLOITATION_WEIGHT_GENERALIZATION = PARAMS.EXPLOITATION_WEIGHT_GENERALIZATION;
+
+
+        ellsa.data.PARAM_isActiveLearning = PARAMS.setActiveLearning;
+        ellsa.data.PARAM_isSelfLearning = PARAMS.setSelfLearning;
+
+        ellsa.data.PARAM_NCS_isConflictDetection = PARAMS.setConflictDetection;
+        ellsa.data.PARAM_NCS_isConcurrenceDetection = PARAMS.setConcurrenceDetection;
+        ellsa.data.PARAM_NCS_isVoidDetection = PARAMS.setIncompetenceDetection; // naming shift: "incompetence" PARAMS map onto "void" NCS flags
+        ellsa.data.PARAM_NCS_isSubVoidDetection = PARAMS.setSubIncompetencedDetection;
+        ellsa.data.PARAM_NCS_isConflictResolution = PARAMS.setConflictResolution; // NOTE(review): setConflictResolution/setConcurrenceResolution are not set from args above — rely on PARAMS defaults; confirm intended
+        ellsa.data.PARAM_NCS_isConcurrenceResolution = PARAMS.setConcurrenceResolution;
+        ellsa.data.PARAM_NCS_isFrontierRequest = PARAMS.setRangeAmbiguityDetection;
+        ellsa.data.PARAM_NCS_isSelfModelRequest = PARAMS.setModelAmbiguityDetection;
+        ellsa.data.PARAM_NCS_isFusionResolution = PARAMS.setCompleteRedundancyDetection;
+        ellsa.data.PARAM_NCS_isRetrucstureResolution = PARAMS.setPartialRedundancyDetection;
+
+        ellsa.data.PARAM_NCS_isCreationWithNeighbor = PARAMS.setisCreationWithNeighbor;
+
+
+        ellsa.data.PARAM_isLearnFromNeighbors = PARAMS.setCooperativeNeighborhoodLearning;
+        ellsa.data.PARAM_nbOfNeighborForLearningFromNeighbors = PARAMS.nbOfNeighborForLearningFromNeighbors;
+        ellsa.data.PARAM_isDream = PARAMS.setDream;
+        ellsa.data.PARAM_DreamCycleLaunch = PARAMS.setDreamCycleLaunch;
+
+
+        ellsa.data.PARAM_isAutonomousMode = PARAMS.setAutonomousMode;
+
+        ellsa.data.PARAM_NCS_isAllContextSearchAllowedForLearning = PARAMS.isAllContextSearchAllowedForLearning;
+        ellsa.data.PARAM_NCS_isAllContextSearchAllowedForExploitation = PARAMS.isAllContextSearchAllowedForExploitation;
+
+        ellsa.data.PARAM_probabilityOfRangeAmbiguity = PARAMS.probabilityOfRangeAmbiguity;
+
+
+
+        ellsa.getEnvironment().PARAM_minTraceLevel = PARAMS.traceLevel;
+
+
+
+        ellsa.setSubPercepts(experiments.roboticArmDistributedControl.PARAMS.subPercepts); // NOTE(review): reads subPercepts from the roboticArmDistributedControl package's PARAMS, not this package's — confirm not a copy-paste slip
+
+
+        ArrayList<Double> allLearningCycleTimes = new ArrayList<>(); // per-cycle wall times, in ms
+        ArrayList<Double> allExploitationCycleTimes = new ArrayList<>();
+
+        for (int i = 0; i < PARAMS.nbLearningCycle; ++i) { // --- learning phase ---
+            double start = System.nanoTime();
+            studiedSystem.playOneStep();
+            perceptions = studiedSystem.getOutput();
+            ellsa.learn(perceptions);
+            allLearningCycleTimes.add((System.nanoTime()- start)/1000000); // ns -> ms (double division: start is double)
+
+        }
+		/*while(ellsa.getContexts().size()>5 || ellsa.getCycle()<50){
+			ellsa.cycle();
+		}
+		System.out.println(ellsa.getCycle());*/
+
+		/*while(ellsa.data.STATE_DreamCompleted!=1){
+			ellsa.cycle();
+		}*/
+
+        HashMap<String, Double> mappingScores;
+        HashMap<REQUEST, Integer> requestCounts;
+        HashMap<SITUATION, Integer> situationsCounts;
+        double[] executionTimes;
+
+        ArrayList<Double> allPredictionErrors;
+
+        if(PARAMS.setActiveExploitation){ // --- exploitation phase, variant with endogenous requests first ---
+
+            ellsa.data.PARAM_isExploitationActive = true;
+
+            for (int i = 0; i < PARAMS.nbEndoExploitationCycle; ++i) {
+                //studiedSystem.getErrorOnRequest(ellsa);
+
+                studiedSystem.playOneStep();
+                perceptions = studiedSystem.getOutput();
+                ellsa.request(perceptions); // request-only cycles: no oracle feedback
+            }
+
+            ellsa.data.PARAM_isExploitationActive = false;
+
+            mappingScores = ellsa.getHeadAgent().getMappingScores(); // snapshot metrics before error measurement
+            requestCounts = ellsa.data.requestCounts;
+            situationsCounts = ellsa.data.situationsCounts;
+            executionTimes = ellsa.data.executionTimesSums;
+            allPredictionErrors = new ArrayList<>();
+
+            for (int i = 0; i < PARAMS.nbExploitationCycle; ++i) {
+                double start = System.nanoTime();
+                allPredictionErrors.add(new Double(studiedSystem.getErrorOnRequest(ellsa))); // NOTE(review): new Double(...) is deprecated — autoboxing/Double.valueOf preferred
+                allExploitationCycleTimes.add((System.nanoTime()- start)/1000000); // ns -> ms
+
+            }
+
+        }else{ // same measurement, skipping the endogenous request warm-up
+
+            mappingScores = ellsa.getHeadAgent().getMappingScores();
+            requestCounts = ellsa.data.requestCounts;
+            situationsCounts = ellsa.data.situationsCounts;
+            executionTimes = ellsa.data.executionTimesSums;
+            allPredictionErrors = new ArrayList<>();
+
+            for (int i = 0; i < PARAMS.nbExploitationCycle; ++i) {
+                double start = System.nanoTime();
+                allPredictionErrors.add(new Double(studiedSystem.getErrorOnRequest(ellsa)));
+                allExploitationCycleTimes.add((System.nanoTime()- start)/1000000);
+
+            }
+        }
+
+
+
+        OptionalDouble averageError = allPredictionErrors.stream().mapToDouble(a->a).average(); // NOTE(review): getAsDouble below throws NoSuchElementException if nbExploitationCycle == 0 — unguarded
+        Double errorDispersion = allPredictionErrors.stream().mapToDouble(a->Math.pow((a- averageError.getAsDouble()),2)).sum(); // sum of squared deviations
+        double predictionError = averageError.getAsDouble();
+        double predictionDispersion = Math.sqrt(errorDispersion /allPredictionErrors.size()); // population standard deviation (divides by n, not n-1)
+
+        OptionalDouble averageLearningCycleTime = allLearningCycleTimes.stream().mapToDouble(a->a).average();
+        Double learningcycleTimeDispersion = allLearningCycleTimes.stream().mapToDouble(a->Math.pow((a- averageLearningCycleTime.getAsDouble()),2)).sum();
+        double averageLearningCycleTimeDouble = averageLearningCycleTime.getAsDouble();
+        double learningcycleTimeDispersionDouble = Math.sqrt(learningcycleTimeDispersion /allLearningCycleTimes.size());
+
+        OptionalDouble averageExploitationCycleTime = allExploitationCycleTimes.stream().mapToDouble(a->a).average();
+        Double ExploitationcycleTimeDispersion = allExploitationCycleTimes.stream().mapToDouble(a->Math.pow((a- averageExploitationCycleTime.getAsDouble()),2)).sum();
+        double averageExploitationCycleTimeDouble = averageExploitationCycleTime.getAsDouble();
+        double ExploitationcycleTimeDispersionDouble = Math.sqrt(ExploitationcycleTimeDispersion /allExploitationCycleTimes.size());
+
+        /*System.out.println(mappingScores);
+        System.out.println(requestCounts);
+        System.out.println(predictionError*100 + " [+-" + predictionDispersion*100 + "]");
+        System.out.println(ellsa.getContexts().size() + " Agents");*/
+
+        WRITER.setData(data, ellsa, mappingScores, requestCounts, situationsCounts, executionTimes, predictionError, predictionDispersion, averageLearningCycleTimeDouble, learningcycleTimeDispersionDouble, averageExploitationCycleTimeDouble, ExploitationcycleTimeDispersionDouble); // append this episode's metrics to the shared data map
+
+
+        ellsa = null; // drop references so the per-episode agent graph can be collected
+        studiedSystem = null;
+
+    }
+
+
+
+
+}
\ No newline at end of file
diff --git a/ELLSAonAMAK/src/experiments/simpleLauncherExternalSource/LaunchExampleXPWithArgsManualy.java b/ELLSAonAMAK/src/experiments/simpleLauncherExternalSource/LaunchExampleXPWithArgsManualy.java
new file mode 100644
index 0000000000000000000000000000000000000000..154e32d6c16a4365732b7a484034cf8f019d1147
--- /dev/null
+++ b/ELLSAonAMAK/src/experiments/simpleLauncherExternalSource/LaunchExampleXPWithArgsManualy.java
@@ -0,0 +1,406 @@
+package experiments.simpleLauncherExternalSource;
+
+
+
+
+
+import agents.head.REQUEST;
+import agents.head.SITUATION;
+import experiments.mathematicalModels.Model_Manager;
+import fr.irit.smac.amak.Configuration;
+import kernel.ELLSA;
+import kernel.StudiedSystem;
+import kernel.backup.BackupSystem;
+import kernel.backup.IBackupSystem;
+import utils.*;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.*;
+
+public class LaunchExampleXPWithArgsManualy {
+
+    private static CSVWriter xpCSV;
+    private static HashMap<String, Double> perceptions = new HashMap<String, Double>();
+
+    public static void main (String[] args)  { // Entry point for manual runs: all PARAMS hard-coded here instead of parsed from args (args are ignored).
+
+
+
+
+
+        PARAMS.dimension = 2; // active setting; commented alternatives below select other dimensions/configs
+        PARAMS.configFile =  "twoDimensionsLauncher" +".xml";
+
+        /*PARAMS.dimension = 3;
+        PARAMS.configFile =  "threeDimensionsLauncher" +".xml";*/
+
+/*        PARAMS.dimension = 10;
+        PARAMS.configFile =  "tenDimensionsLauncher" +".xml";*/
+
+        /*PARAMS.dimension = 4;
+        PARAMS.configFile =  "fourDimensionsLauncher" +".xml";*/
+
+        /*PARAMS.dimension = 5;
+        PARAMS.configFile =  "fiveDimensionsLauncher" +".xml";*/
+
+        PARAMS.nbLearningCycle = 500;
+        PARAMS.nbExploitationCycle = 250;
+        PARAMS.nbEpisodes = 1;
+
+        // Neighborhood
+        PARAMS.validityRangesPrecision =  0.04;
+        PARAMS.neighborhoodRadiusCoefficient = 2;
+        PARAMS.influenceRadiusCoefficient = 0.50;
+        PARAMS.modelErrorMargin = 1.0;
+
+        // Learning
+        PARAMS.setActiveLearning = false;
+        PARAMS.setSelfLearning = true;
+        PARAMS.setCooperativeNeighborhoodLearning = true;
+
+//        PARAMS.setActiveLearning = true;
+//        PARAMS.setSelfLearning = false;
+//        PARAMS.setLearnFromNeighbors = false;
+
+        //NCS
+
+        PARAMS.setModelAmbiguityDetection = true;
+        PARAMS.setConflictDetection = true;
+        PARAMS.setConcurrenceDetection = true;
+        PARAMS.setIncompetenceDetection = true;
+        PARAMS.setCompleteRedundancyDetection = true;
+        PARAMS.setPartialRedundancyDetection = true;
+        PARAMS.setRangeAmbiguityDetection = true;
+
+        PARAMS.setSubIncompetencedDetection = false; // the only NCS detection left off in this manual setup
+
+        PARAMS.setDream = false;
+        PARAMS.setDreamCycleLaunch = 1500;
+
+
+        PARAMS.setisCreationWithNeighbor = true;
+
+        PARAMS.nbOfNeighborForLearningFromNeighbors = 1;
+        PARAMS.nbOfNeighborForContexCreationWithouOracle = 7;
+        PARAMS.nbOfNeighborForVoidDetectionInSelfLearning =  PARAMS.nbOfNeighborForContexCreationWithouOracle; // deliberately mirrors the creation-without-oracle count
+
+
+//        PARAMS.model = "multi";
+//        PARAMS.model = "disc";
+//        PARAMS.model = "square";
+        PARAMS.model = "squareFixed"; // active model; commented lines list the other Model_Manager options
+//        PARAMS.model = "triangle";
+//        PARAMS.model = "gaussian";
+//        PARAMS.model = "polynomial";
+//        PARAMS.model = "gaussianCos2";
+//        PARAMS.model = "cosX";
+//        PARAMS.model = "cosSinX";
+//        PARAMS.model = "rosenbrock";
+//        PARAMS.model = "squareSplitTriangle";
+//        PARAMS.model = "squareSplitFixed";
+//        PARAMS.model = "squareDiscLos";
+
+
+        String dateAndHour = new SimpleDateFormat("ddMMyyyy_HHmmss").format(new Date()); // timestamp used as CSV suffix so runs never overwrite each other
+        PARAMS.extension = dateAndHour;
+
+        PARAMS.setbootstrapCycle = 10;
+
+        PARAMS.exogenousLearningWeight = 0.1;
+        PARAMS.endogenousLearningWeight = 0.1;
+
+        PARAMS.LEARNING_WEIGHT_ACCURACY = 1.0;
+        PARAMS.LEARNING_WEIGHT_PROXIMITY = 0.0;
+        PARAMS.LEARNING_WEIGHT_EXPERIENCE = 1.0;
+        PARAMS.LEARNING_WEIGHT_GENERALIZATION = 1.0;
+
+        PARAMS.EXPLOITATION_WEIGHT_PROXIMITY = 1.0;
+        PARAMS.EXPLOITATION_WEIGHT_EXPERIENCE = 1.0;
+        PARAMS.EXPLOITATION_WEIGHT_GENERALIZATION = 1.0;
+
+        PARAMS.perceptionsGenerationCoefficient = 0.1;
+
+        PARAMS.modelSimilarityThreshold = 0.001;
+
+        PARAMS.maxRangeRadiusCoefficient = 2.0;
+        PARAMS.rangeSimilarityCoefficient = 0.375;
+        PARAMS.minimumRangeCoefficient = 0.25;
+
+        PARAMS.isAllContextSearchAllowedForLearning = true;
+        PARAMS.isAllContextSearchAllowedForExploitation = true;
+
+        PARAMS.probabilityOfRangeAmbiguity = 0.1;
+
+        PARAMS.transferCyclesRatio = 0.3;//0.429;
+
+        PARAMS.nbEndoExploitationCycle = 0;
+        PARAMS.setActiveExploitation = false;
+
+        PARAMS.noiseRange = 0.0;
+
+        TRACE.minLevel = TRACE_LEVEL.CYCLE; // restrict trace output to cycle-level messages
+
+
+
+
+
+        experimentation(); // runs the configured episode(s) and writes results
+
+        System.out.print(" DONE");
+
+        System.exit(1); // NOTE(review): exit code 1 conventionally signals failure; 0 is expected on success — confirm no script depends on 1
+    }
+
+
+    public static void experimentation() { // Runs PARAMS.nbEpisodes learning episodes headlessly and writes aggregated results + timing to CSV.
+
+        xpCSV = new CSVWriter( // output named "<model>_PARAMS_<timestamp>"
+                PARAMS.model
+                        +"_PARAMS_" + PARAMS.extension
+
+        );
+
+        // Set AMAK configuration before creating an ELLSA
+        Configuration.multiUI=true;
+        Configuration.commandLineMode = true; // headless: no GUI scheduler
+        Configuration.allowedSimultaneousAgentsExecution = 1; // sequential agent execution for determinism
+        Configuration.waitForGUI = false;
+        Configuration.plotMilliSecondsUpdate = 20000;
+
+
+        Pair<ArrayList<List<String>>,HashMap<String, ArrayList<Double>>> dataPair = WRITER.getData(); // column headers (A) and per-metric value lists (B)
+        ArrayList<List<String>> dataStrings = dataPair.getA();
+        HashMap<String, ArrayList<Double>> data = dataPair.getB();
+
+        double start = System.nanoTime();
+
+        for (int i = 0; i < PARAMS.nbEpisodes; ++i) {
+            //System.out.print(i + " ");
+
+            learningEpisode(data, i); // each episode appends its metrics into data
+
+        }
+        //System.out.println(" ");
+        double total = (System.nanoTime()- start)/1000000000; // ns -> s (double division: start is double)
+        double mean = total/ PARAMS.nbEpisodes;
+        System.out.println("[TIME MEAN] " + mean + " s");
+        System.out.println("[TIME TOTAL] " + total + " s");
+
+        WRITER.writeData(xpCSV,data, dataStrings, total, mean);
+
+        data = null; // redundant: local reference goes out of scope immediately after
+    }
+
+
+
+
+    private static void learningEpisode(HashMap<String, ArrayList<Double>> data, int episodeIndice) { // One full episode: build ELLSA, copy PARAMS, learn (with heap probes), exploit, compute stats, append to data. episodeIndice is currently unused.
+
+        RAND_REPEATABLE.setSeed(0); // fixed seed so every episode is repeatable
+        ELLSA ellsa = new ELLSA(null,  null); // headless: no UI window, no VUI
+        StudiedSystem studiedSystem = new Model_Manager(PARAMS.spaceSize, PARAMS.dimension, PARAMS.nbOfModels, PARAMS.normType, PARAMS.randomExploration, PARAMS.explorationIncrement, PARAMS.explorationWidht, PARAMS.limitedToSpaceZone, PARAMS.noiseRange);
+        ellsa.setStudiedSystem(studiedSystem);
+        IBackupSystem backupSystem = new BackupSystem(ellsa);
+        File file = new File("resources/"+ PARAMS.configFile); // load agent topology/percepts from XML config
+        backupSystem.load(file);
+        ellsa.getEnvironment().setSeed(0); // environment RNG also pinned for repeatability
+
+
+        ellsa.allowGraphicalScheduler(false);
+        ellsa.setRenderUpdate(false);
+
+
+        ellsa.getEnvironment().setMappingErrorAllowed(PARAMS.validityRangesPrecision); // --- copy all tuning PARAMS into this ELLSA instance ---
+        ellsa.data.PARAM_modelErrorMargin = PARAMS.modelErrorMargin;
+        ellsa.data.PARAM_bootstrapCycle = PARAMS.setbootstrapCycle;
+        ellsa.data.PARAM_exogenousLearningWeight = PARAMS.exogenousLearningWeight;
+        ellsa.data.PARAM_endogenousLearningWeight = PARAMS.endogenousLearningWeight;
+
+        ellsa.data.PARAM_neighborhoodRadiusCoefficient = PARAMS.neighborhoodRadiusCoefficient;
+        ellsa.data.PARAM_influenceRadiusCoefficient = PARAMS.influenceRadiusCoefficient;
+        ellsa.data.PARAM_maxRangeRadiusCoefficient = PARAMS.maxRangeRadiusCoefficient;
+        ellsa.data.PARAM_rangeSimilarityCoefficient = PARAMS.rangeSimilarityCoefficient;
+        ellsa.data.PARAM_minimumRangeCoefficient = PARAMS.minimumRangeCoefficient;
+
+        ellsa.data.PARAM_creationNeighborNumberForVoidDetectionInSelfLearning = PARAMS.nbOfNeighborForVoidDetectionInSelfLearning;
+        ellsa.data.PARAM_creationNeighborNumberForContexCreationWithouOracle = PARAMS.nbOfNeighborForContexCreationWithouOracle;
+
+        ellsa.data.PARAM_perceptionsGenerationCoefficient = PARAMS.perceptionsGenerationCoefficient // statement continues on next line (stray line break before the semicolon)
+        ;
+        ellsa.data.PARAM_modelSimilarityThreshold = PARAMS.modelSimilarityThreshold;
+
+        ellsa.data.PARAM_LEARNING_WEIGHT_ACCURACY = PARAMS.LEARNING_WEIGHT_ACCURACY;
+        ellsa.data.PARAM_LEARNING_WEIGHT_PROXIMITY = PARAMS.LEARNING_WEIGHT_PROXIMITY;
+        ellsa.data.PARAM_LEARNING_WEIGHT_EXPERIENCE = PARAMS.LEARNING_WEIGHT_EXPERIENCE;
+        ellsa.data.PARAM_LEARNING_WEIGHT_GENERALIZATION = PARAMS.LEARNING_WEIGHT_GENERALIZATION;
+
+        ellsa.data.PARAM_EXPLOITATION_WEIGHT_PROXIMITY = PARAMS.EXPLOITATION_WEIGHT_PROXIMITY;
+        ellsa.data.PARAM_EXPLOITATION_WEIGHT_EXPERIENCE = PARAMS.EXPLOITATION_WEIGHT_EXPERIENCE;
+        ellsa.data.PARAM_EXPLOITATION_WEIGHT_GENERALIZATION = PARAMS.EXPLOITATION_WEIGHT_GENERALIZATION;
+
+
+        ellsa.data.PARAM_isActiveLearning = PARAMS.setActiveLearning;
+        ellsa.data.PARAM_isSelfLearning = PARAMS.setSelfLearning;
+
+        ellsa.data.PARAM_NCS_isConflictDetection = PARAMS.setConflictDetection;
+        ellsa.data.PARAM_NCS_isConcurrenceDetection = PARAMS.setConcurrenceDetection;
+        ellsa.data.PARAM_NCS_isVoidDetection = PARAMS.setIncompetenceDetection; // naming shift: "incompetence" PARAMS map onto "void" NCS flags
+        ellsa.data.PARAM_NCS_isSubVoidDetection = PARAMS.setSubIncompetencedDetection;
+        ellsa.data.PARAM_NCS_isConflictResolution = PARAMS.setConflictResolution; // NOTE(review): setConflictResolution/setConcurrenceResolution are not assigned in main above — rely on PARAMS defaults; confirm intended
+        ellsa.data.PARAM_NCS_isConcurrenceResolution = PARAMS.setConcurrenceResolution;
+        ellsa.data.PARAM_NCS_isFrontierRequest = PARAMS.setRangeAmbiguityDetection;
+        ellsa.data.PARAM_NCS_isSelfModelRequest = PARAMS.setModelAmbiguityDetection;
+        ellsa.data.PARAM_NCS_isFusionResolution = PARAMS.setCompleteRedundancyDetection;
+        ellsa.data.PARAM_NCS_isRetrucstureResolution = PARAMS.setPartialRedundancyDetection;
+
+        ellsa.data.PARAM_NCS_isCreationWithNeighbor = PARAMS.setisCreationWithNeighbor;
+
+
+        ellsa.data.PARAM_isLearnFromNeighbors = PARAMS.setCooperativeNeighborhoodLearning;
+        ellsa.data.PARAM_nbOfNeighborForLearningFromNeighbors = PARAMS.nbOfNeighborForLearningFromNeighbors;
+        ellsa.data.PARAM_isDream = PARAMS.setDream;
+        ellsa.data.PARAM_DreamCycleLaunch = PARAMS.setDreamCycleLaunch;
+
+
+        ellsa.data.PARAM_isAutonomousMode = PARAMS.setAutonomousMode;
+
+        ellsa.data.PARAM_NCS_isAllContextSearchAllowedForLearning = PARAMS.isAllContextSearchAllowedForLearning;
+        ellsa.data.PARAM_NCS_isAllContextSearchAllowedForExploitation = PARAMS.isAllContextSearchAllowedForExploitation;
+
+        ellsa.data.PARAM_probabilityOfRangeAmbiguity = PARAMS.probabilityOfRangeAmbiguity;
+
+
+
+        ellsa.getEnvironment().PARAM_minTraceLevel = PARAMS.traceLevel;
+
+
+
+        ellsa.setSubPercepts(PARAMS.subPercepts); // uses this package's PARAMS (unlike the args-driven launcher variant)
+
+
+        ArrayList<Double> allLearningCycleTimes = new ArrayList<>(); // per-cycle wall times, in ms
+        ArrayList<Double> allExploitationCycleTimes = new ArrayList<>();
+
+        for (int i = 0; i < PARAMS.nbLearningCycle; ++i) { // --- learning phase ---
+            double start = System.nanoTime();
+
+            studiedSystem.playOneStep();
+            perceptions = studiedSystem.getOutput();
+            ellsa.learn(perceptions);
+            double end = System.nanoTime()- start; // elapsed ns for this cycle
+
+            allLearningCycleTimes.add(end/1000000); // ns -> ms
+
+            //System.out.println(ellsa.getCycle() + " " + ellsa.getContexts().size());
+
+            if(ellsa.getCycle()%200 == 0){ // NOTE(review): heap probe every 200 cycles — all three values are computed but never used (prints commented out); dead code candidate
+                // Get maximum size of heap in bytes. The heap cannot grow beyond this size.// Any attempt will result in an OutOfMemoryException.
+                long heapMaxSize = (long)Runtime.getRuntime().maxMemory();
+                //System.out.println("heapMaxSize\t\t" + heapMaxSize);
+
+                // Get current size of heap in bytes
+                long heapSize = (Runtime.getRuntime().totalMemory());
+                //System.out.println("heapSize\t\t" + heapSize);
+
+                // Get amount of free memory within the heap in bytes. This size will increase // after garbage collection and decrease as new objects are created.
+                long heapFreeSize = Runtime.getRuntime().freeMemory();; // NOTE(review): stray second semicolon (harmless empty statement)
+                //System.out.println("heapFreeSize\t" + heapFreeSize + "\n");
+            }
+        }
+		/*while(ellsa.getContexts().size()>5 || ellsa.getCycle()<50){
+			ellsa.cycle();
+		}
+		System.out.println(ellsa.getCycle());*/
+
+		/*while(ellsa.data.STATE_DreamCompleted!=1){
+			ellsa.cycle();
+		}*/
+
+        HashMap<String, Double> mappingScores = ellsa.getHeadAgent().getMappingScores(); // snapshot metrics before error measurement
+        HashMap<REQUEST, Integer> requestCounts = ellsa.data.requestCounts;
+        HashMap<SITUATION, Integer> situationsCounts = ellsa.data.situationsCounts;
+        double[] executionTimes = ellsa.data.executionTimesSums;
+
+        ArrayList<Double> allPredictionErrors = new ArrayList<>();
+
+        if(PARAMS.setActiveExploitation){ // --- exploitation phase, variant with endogenous requests first ---
+
+            ellsa.data.PARAM_isExploitationActive = true;
+
+            for (int i = 0; i < PARAMS.nbEndoExploitationCycle; ++i) {
+                //studiedSystem.getErrorOnRequest(ellsa);
+                studiedSystem.playOneStep();
+                perceptions = studiedSystem.getOutput();
+                ellsa.request(perceptions); // request-only cycles: no oracle feedback
+            }
+
+            ellsa.data.PARAM_isExploitationActive = false;
+
+            for (int i = 0; i < PARAMS.nbExploitationCycle; ++i) {
+                double start = System.nanoTime();
+                allPredictionErrors.add(new Double(studiedSystem.getErrorOnRequest(ellsa))); // NOTE(review): new Double(...) is deprecated — autoboxing/Double.valueOf preferred
+                allExploitationCycleTimes.add((System.nanoTime()- start)/1000000); // ns -> ms
+
+            }
+
+        }else{ // same measurement, skipping the endogenous request warm-up
+            for (int i = 0; i < PARAMS.nbExploitationCycle; ++i) {
+                double start = System.nanoTime();
+                allPredictionErrors.add(new Double(studiedSystem.getErrorOnRequest(ellsa)));
+                allExploitationCycleTimes.add((System.nanoTime()- start)/1000000);
+
+            }
+        }
+
+        OptionalDouble averageError = allPredictionErrors.stream().mapToDouble(a->a).average(); // NOTE(review): getAsDouble below throws NoSuchElementException if nbExploitationCycle == 0 — unguarded
+        Double errorDispersion = allPredictionErrors.stream().mapToDouble(a->Math.pow((a- averageError.getAsDouble()),2)).sum(); // sum of squared deviations
+        double predictionError = averageError.getAsDouble();
+        double predictionDispersion = Math.sqrt(errorDispersion /allPredictionErrors.size()); // population standard deviation (divides by n, not n-1)
+
+        OptionalDouble averageLearningCycleTime = allLearningCycleTimes.stream().mapToDouble(a->a).average();
+        Double learningcycleTimeDispersion = allLearningCycleTimes.stream().mapToDouble(a->Math.pow((a- averageLearningCycleTime.getAsDouble()),2)).sum();
+        double averageLearningCycleTimeDouble = averageLearningCycleTime.getAsDouble();
+        double learningcycleTimeDispersionDouble = Math.sqrt(learningcycleTimeDispersion /allLearningCycleTimes.size());
+
+        OptionalDouble averageExploitationCycleTime = allExploitationCycleTimes.stream().mapToDouble(a->a).average();
+        Double ExploitationcycleTimeDispersion = allExploitationCycleTimes.stream().mapToDouble(a->Math.pow((a- averageExploitationCycleTime.getAsDouble()),2)).sum();
+        double averageExploitationCycleTimeDouble = averageExploitationCycleTime.getAsDouble();
+        double ExploitationcycleTimeDispersionDouble = Math.sqrt(ExploitationcycleTimeDispersion /allExploitationCycleTimes.size());
+
+        System.out.println(mappingScores); // manual-run diagnostics (kept verbose, unlike the args-driven variant)
+        System.out.println(requestCounts);
+        System.out.println(situationsCounts);
+        System.out.println(predictionError*100 + " [+-" + predictionDispersion*100 + "]");
+        System.out.println(ellsa.getContexts().size() + " Agents");
+
+        System.out.println(ellsa.getContexts().get(0).getVolume() + " Vol"); // NOTE(review): get(0) throws IndexOutOfBoundsException if no context agent survived — confirm always non-empty here
+        System.out.println(ellsa.getHeadAgent().getMinMaxVolume()+ " MinMaxVol");
+
+        System.out.println(ellsa.data.minMaxPerceptsStatesAfterBoostrap);
+        System.out.println("RAND_REPEATABLE.requestsCounts " + RAND_REPEATABLE.requestsCounts); // RNG draw counters, useful to verify repeatability across runs
+        System.out.println("RAND_REPEATABLE.requestsCountsRandom " + RAND_REPEATABLE.requestsCountsRandom);
+        System.out.println("RAND_REPEATABLE.requestsCountsRandomGauss " + RAND_REPEATABLE.requestsCountsRandomGauss);
+        System.out.println("RAND_REPEATABLE.requestsCountsRandomInt " + RAND_REPEATABLE.requestsCountsRandomInt);
+
+
+
+        WRITER.setData(data, ellsa, mappingScores, requestCounts, situationsCounts, executionTimes, predictionError, predictionDispersion, averageLearningCycleTimeDouble, learningcycleTimeDispersionDouble, averageExploitationCycleTimeDouble, ExploitationcycleTimeDispersionDouble); // append this episode's metrics to the shared data map
+
+
+
+
+
+
+
+
+        ellsa = null; // drop references so the per-episode agent graph can be collected
+        studiedSystem = null;
+
+    }
+
+
+
+
+
+}
diff --git a/ELLSAonAMAK/src/experiments/simpleLauncherExternalSource/LauncherUI.java b/ELLSAonAMAK/src/experiments/simpleLauncherExternalSource/LauncherUI.java
new file mode 100644
index 0000000000000000000000000000000000000000..5257d7dd3b031834af408385105416ae751c9b18
--- /dev/null
+++ b/ELLSAonAMAK/src/experiments/simpleLauncherExternalSource/LauncherUI.java
@@ -0,0 +1,171 @@
+package experiments.simpleLauncherExternalSource;
+
+import experiments.mathematicalModels.Model_Manager;
+import fr.irit.smac.amak.Configuration;
+import fr.irit.smac.amak.ui.VUIMulti;
+import gui.EllsaMultiUIWindow;
+import javafx.application.Application;
+import javafx.stage.Stage;
+import kernel.ELLSA;
+import kernel.StudiedSystem;
+import kernel.backup.BackupSystem;
+import kernel.backup.IBackupSystem;
+import utils.RAND_REPEATABLE;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.HashMap;
+
+
+/**
+ * The Class BadContextLauncherEasy.
+ */
+public class LauncherUI extends Application implements Serializable {
+
+	private HashMap<String, Double> perceptions = new HashMap<String, Double>();
+
+	public static void main(String[] args) throws IOException {
+		
+		
+		Application.launch(args);
+
+
+	}
+	
+
+	@Override
+	public void start(Stage arg0) throws Exception {
+
+
+		// Set AMAK configuration before creating an AMOEBA
+		Configuration.multiUI=true;
+		Configuration.commandLineMode = false;
+		Configuration.allowedSimultaneousAgentsExecution = 1;
+		Configuration.waitForGUI = true;
+		Configuration.plotMilliSecondsUpdate = 20000;
+		RAND_REPEATABLE.setSeed(0);
+
+
+
+		StudiedSystem studiedSystem = new Model_Manager(PARAMS.spaceSize, PARAMS.dimension, PARAMS.nbOfModels, PARAMS.normType, PARAMS.randomExploration, PARAMS.explorationIncrement, PARAMS.explorationWidht, PARAMS.limitedToSpaceZone, PARAMS.noiseRange);
+		VUIMulti amoebaVUI = new VUIMulti("2D");
+		EllsaMultiUIWindow amoebaUI = new EllsaMultiUIWindow("ELLSA", amoebaVUI, studiedSystem);
+		ELLSA ellsa = new ELLSA(amoebaUI,  amoebaVUI);
+		ellsa.setStudiedSystem(studiedSystem);
+		IBackupSystem backupSystem = new BackupSystem(ellsa);
+		File file = new File("resources/"+ PARAMS.configFile);
+		backupSystem.load(file);
+		ellsa.getEnvironment().setSeed(0);
+
+
+		//ellsa.saver = new SaveHelperImpl(ellsa, amoebaUI);
+		
+		ellsa.allowGraphicalScheduler(true);
+		ellsa.setRenderUpdate(false);
+
+		ellsa.getEnvironment().setMappingErrorAllowed(PARAMS.validityRangesPrecision);
+		ellsa.data.PARAM_modelErrorMargin = PARAMS.modelErrorMargin;
+		ellsa.data.PARAM_bootstrapCycle = PARAMS.setbootstrapCycle;
+		ellsa.data.PARAM_exogenousLearningWeight = PARAMS.exogenousLearningWeight;
+		ellsa.data.PARAM_endogenousLearningWeight = PARAMS.endogenousLearningWeight;
+
+		ellsa.data.PARAM_neighborhoodRadiusCoefficient = PARAMS.neighborhoodRadiusCoefficient;
+		ellsa.data.PARAM_influenceRadiusCoefficient = PARAMS.influenceRadiusCoefficient;
+		ellsa.data.PARAM_maxRangeRadiusCoefficient = PARAMS.maxRangeRadiusCoefficient;
+		ellsa.data.PARAM_rangeSimilarityCoefficient = PARAMS.rangeSimilarityCoefficient;
+		ellsa.data.PARAM_minimumRangeCoefficient = PARAMS.minimumRangeCoefficient;
+
+		ellsa.data.PARAM_creationNeighborNumberForVoidDetectionInSelfLearning = PARAMS.nbOfNeighborForVoidDetectionInSelfLearning;
+		ellsa.data.PARAM_creationNeighborNumberForContexCreationWithouOracle = PARAMS.nbOfNeighborForContexCreationWithouOracle;
+
+		ellsa.data.PARAM_perceptionsGenerationCoefficient = PARAMS.perceptionsGenerationCoefficient;
+		ellsa.data.PARAM_modelSimilarityThreshold = PARAMS.modelSimilarityThreshold;
+
+		ellsa.data.PARAM_LEARNING_WEIGHT_ACCURACY = PARAMS.LEARNING_WEIGHT_ACCURACY;
+		ellsa.data.PARAM_LEARNING_WEIGHT_PROXIMITY = PARAMS.LEARNING_WEIGHT_PROXIMITY;
+		ellsa.data.PARAM_LEARNING_WEIGHT_EXPERIENCE = PARAMS.LEARNING_WEIGHT_EXPERIENCE;
+		ellsa.data.PARAM_LEARNING_WEIGHT_GENERALIZATION = PARAMS.LEARNING_WEIGHT_GENERALIZATION;
+
+		ellsa.data.PARAM_EXPLOITATION_WEIGHT_PROXIMITY = PARAMS.EXPLOITATION_WEIGHT_PROXIMITY;
+		ellsa.data.PARAM_EXPLOITATION_WEIGHT_EXPERIENCE = PARAMS.EXPLOITATION_WEIGHT_EXPERIENCE;
+		ellsa.data.PARAM_EXPLOITATION_WEIGHT_GENERALIZATION = PARAMS.EXPLOITATION_WEIGHT_GENERALIZATION;
+
+
+		ellsa.data.PARAM_isActiveLearning = PARAMS.setActiveLearning;
+		ellsa.data.PARAM_isSelfLearning = PARAMS.setSelfLearning;
+
+		ellsa.data.PARAM_NCS_isConflictDetection = PARAMS.setConflictDetection;
+		ellsa.data.PARAM_NCS_isConcurrenceDetection = PARAMS.setConcurrenceDetection;
+		ellsa.data.PARAM_NCS_isVoidDetection = PARAMS.setIncompetenceDetection;
+		ellsa.data.PARAM_NCS_isSubVoidDetection = PARAMS.setSubIncompetencedDetection;
+		ellsa.data.PARAM_NCS_isConflictResolution = PARAMS.setConflictResolution;
+		ellsa.data.PARAM_NCS_isConcurrenceResolution = PARAMS.setConcurrenceResolution;
+		ellsa.data.PARAM_NCS_isFrontierRequest = PARAMS.setRangeAmbiguityDetection;
+		ellsa.data.PARAM_NCS_isSelfModelRequest = PARAMS.setModelAmbiguityDetection;
+		ellsa.data.PARAM_NCS_isFusionResolution = PARAMS.setCompleteRedundancyDetection;
+		ellsa.data.PARAM_NCS_isRetrucstureResolution = PARAMS.setPartialRedundancyDetection;
+
+		ellsa.data.PARAM_NCS_isCreationWithNeighbor = PARAMS.setisCreationWithNeighbor;
+
+
+		ellsa.data.PARAM_isLearnFromNeighbors = PARAMS.setCooperativeNeighborhoodLearning;
+		ellsa.data.PARAM_nbOfNeighborForLearningFromNeighbors = PARAMS.nbOfNeighborForLearningFromNeighbors;
+		ellsa.data.PARAM_isDream = PARAMS.setDream;
+        ellsa.data.PARAM_DreamCycleLaunch = PARAMS.setDreamCycleLaunch;
+
+
+		ellsa.data.PARAM_isAutonomousMode = PARAMS.setAutonomousMode;
+
+		ellsa.data.PARAM_NCS_isAllContextSearchAllowedForLearning = PARAMS.isAllContextSearchAllowedForLearning;
+		ellsa.data.PARAM_NCS_isAllContextSearchAllowedForExploitation = PARAMS.isAllContextSearchAllowedForExploitation;
+
+		ellsa.data.PARAM_probabilityOfRangeAmbiguity = PARAMS.probabilityOfRangeAmbiguity;
+
+		ellsa.data.PARAM_isExploitationActive = PARAMS.setActiveExploitation;
+
+		ellsa.getEnvironment().PARAM_minTraceLevel = PARAMS.traceLevel;
+
+
+
+		ellsa.setSubPercepts(experiments.roboticArmDistributedControl.PARAMS.subPercepts);
+
+
+
+		ellsa.data.STOP_UI = PARAMS.STOP_UI;
+		ellsa.data.STOP_UI_cycle = PARAMS.STOP_UI_cycle;
+
+		ellsa.data.PARAM_numberOfPointsForRegression_ASUPPRIMER = PARAMS.regressionPoints;
+
+		amoebaUI.toggleRender.setSelected(false);
+
+		for (int i=0;i<PARAMS.nbLearningCycle;i++){
+
+			studiedSystem.playOneStep();
+			perceptions = studiedSystem.getOutput();
+			ellsa.learn(perceptions);
+
+
+		}
+
+		for (int i=0;i<PARAMS.nbExploitationCycle;i++){
+
+			studiedSystem.playOneStep();
+			perceptions = studiedSystem.getOutput();
+			ellsa.request(perceptions);
+
+
+		}
+
+
+		
+	}
+
+	
+	
+
+
+
+
+	
+}
diff --git a/ELLSAonAMAK/src/experiments/simpleLauncherExternalSource/PARAMS.java b/ELLSAonAMAK/src/experiments/simpleLauncherExternalSource/PARAMS.java
new file mode 100644
index 0000000000000000000000000000000000000000..d32f444fd736c904493bc7282d790730447ff48a
--- /dev/null
+++ b/ELLSAonAMAK/src/experiments/simpleLauncherExternalSource/PARAMS.java
@@ -0,0 +1,177 @@
+package experiments.simpleLauncherExternalSource;
+
+import utils.TRACE_LEVEL;
+
+import java.util.ArrayList;
+
/**
 * Global, mutable configuration for the simpleLauncherExternalSource experiments.
 *
 * All values are public static fields so that {@code LaunchExampleXPWithArgs}
 * can overwrite them from command-line arguments before a run, {@code LauncherUI}
 * can copy them into the ELLSA data holder, and {@code WRITER} can dump them
 * into the result CSV. Field initialization order matters: several fields are
 * derived from earlier ones (e.g. {@code randomExploration}, {@code regressionPoints},
 * {@code setConflictResolution}, {@code STOP_UI_cycle}).
 */
public class PARAMS {


    // Mathematical model learned by ELLSA; exactly one assignment is active,
    // the alternatives are kept commented out for quick switching.
//    public static String model = "los";
//    public static String model = "squareDisc";
//    public static String model = "squareDiscLos";
//    public static String model = "multi";
//    public static String model = "disc";
//    public static String model = "square";
    public static String model = "squareFixed";
//    public static String model = "triangle";
//    public static String model = "gaussian";
//    public static String model = "polynomial";
//    public static String model = "gaussianCos2";
//    public static String model = "cosX";
//    public static String model = "cosSinX";
//    public static String model = "rosenbrock";
//    public static String model = "squareSplitTriangle";
//    public static String model = "squareSplitFixed";




    // Suffix appended to output names (empty by default).
    public static String extension = "";

    // Subset of percepts forwarded to ELLSA.
    // NOTE(review): declared as a raw ArrayList — element type not visible here; confirm before adding generics.
    public static ArrayList subPercepts = new ArrayList<>();

    // XML agent-configuration file (looked up under resources/) and the matching
    // input-space dimension; the commented pairs below select other setups.
    public static  String configFile = "twoDimensionsLauncher.xml";
    public static  int dimension = 2;

    /*public static  String configFile = "tenDimensionsLauncher.xml";
    public static  int dimension = 10;*/

/*    public static  String configFile = "threeDimensionsLauncher.xml";
    public static  int dimension = 3;*/

    /*public static  String configFile = "fourDimensionsLauncher.xml";
    public static  int dimension = 4;*/

    /*public static  String configFile = "fiveDimensionsLauncher.xml";
    public static  int dimension = 5;*/

    // Cycle counts for the different run phases.
    public static  int nbLearningCycle = 500;
    public static  int nbEndoExploitationCycle = 2000;
    public static  int nbExploitationCycle = 250;
    public static  boolean setActiveExploitation = false ;
    public static  int nbEpisodes = 1;
    public static  double transferCyclesRatio = 0.3;//0.429;

    // Size of the (half) input space; WRITER reports spaceSize*4 as the full extent.
    public static  double spaceSize = 50.0	;
    // Allowed mapping error for validity ranges (passed to setMappingErrorAllowed).
    public static double validityRangesPrecision = 0.02 ;

    /* LEARNING — criteria weights used when selecting a context during learning */

    public static double LEARNING_WEIGHT_ACCURACY = 1.0;
    public static double LEARNING_WEIGHT_PROXIMITY = 0.0;
    public static double LEARNING_WEIGHT_EXPERIENCE = 1.0;
    public static double LEARNING_WEIGHT_GENERALIZATION = 1.0;

    /* EXPLOITATION — criteria weights used when selecting a context during exploitation */

    public static double EXPLOITATION_WEIGHT_PROXIMITY = 1.0;
    public static double EXPLOITATION_WEIGHT_EXPERIENCE = 1.0;
    public static double EXPLOITATION_WEIGHT_GENERALIZATION = 1.0;


    /* NEIGHBORHOOD — radius coefficients for neighbor/influence computations */

    public static  double neighborhoodRadiusCoefficient = 2;
    public static  double influenceRadiusCoefficient = 0.5;
    //public static double neighborhoodRadiusCoefficient = 2;
    //public static double influenceRadiusCoefficient = 0.5;
    public static double maxRangeRadiusCoefficient = 2.0;
    public static double rangeSimilarityCoefficient = 0.375;
    public static double minimumRangeCoefficient = 0.25;

    /* PREDICTION — tolerated model error (model-dependent; see alternatives) */
    public static  double modelErrorMargin = 1; //Multi
//    public static  double modelErrorMargin = 0.05; //SinCos
//    public static  double modelErrorMargin = 1; // Goutte
    //public static  double modelErrorMargin = 1; // Carré


    /* REGRESSION — local-model update weights and noise injection */
    public static  double noiseRange = 0.0;
    public static  double exogenousLearningWeight = 0.1;
    public static  double endogenousLearningWeight = 0.1;

    public static double perceptionsGenerationCoefficient = 0.1;

    public static double modelSimilarityThreshold = 0.001;


    // Derived: number of points kept for the regression (inverse of the learning weight).
    public static  int regressionPoints = (int)(1/ exogenousLearningWeight);

    /* XP */
    public static  int nbOfModels = 2	;
    public static  int normType = 2	;

    /* EXPLORATION — how the studied system samples the input space */
    public static  boolean continousExploration = false;
    // Derived: random exploration is the complement of continuous exploration.
    public static  boolean randomExploration = !continousExploration;
    public static  boolean limitedToSpaceZone = true;
    public static  double explorationIncrement = 2.0	;
    public static  double explorationWidht = 0.75	;

    // Number of bootstrap cycles before normal operation starts.
    public static  int setbootstrapCycle = 50;


    /* LEARNING — mode switches */
    /*public static  boolean setActiveLearning = false	;
    public static  boolean setSelfLearning = true;
    public static  boolean setLearnFromNeighbors = true ;*/

    public static  boolean setActiveLearning = false;
    public static  boolean setSelfLearning = false;
    public static  boolean setCooperativeNeighborhoodLearning = false ;



    /* NCS — Non-Cooperative Situation detection switches */

    public static  boolean setModelAmbiguityDetection = false ;
    public static  boolean setConflictDetection = false ;
    public static  boolean setConcurrenceDetection = false ;
    public static  boolean setIncompetenceDetection = false ;
    public static  boolean setCompleteRedundancyDetection = false ;
    public static  boolean setPartialRedundancyDetection = false ;
    public static  boolean setRangeAmbiguityDetection = false ; // ONLY ACTIVE LEARNING


    public static  boolean setisCreationWithNeighbor = false;

    public static boolean isAllContextSearchAllowedForLearning = true;
    public static boolean isAllContextSearchAllowedForExploitation = true;

    // Derived: resolution follows the corresponding detection switch.
    public static  boolean setConflictResolution = setConflictDetection ;
    public static  boolean setConcurrenceResolution = setConcurrenceDetection ;
    public static  boolean setSubIncompetencedDetection = false ;


    // Dream phase (offline self-learning) switch and its launch cycle.
    public static  boolean setDream = true ;
    public static  int setDreamCycleLaunch = 5000 ;






    // Neighbor counts used by the various NCS mechanisms.
    public static  int nbOfNeighborForLearningFromNeighbors = 1;
    public static  int nbOfNeighborForContexCreationWithouOracle = 7;
    public static  int nbOfNeighborForVoidDetectionInSelfLearning = 7;

    public static double probabilityOfRangeAmbiguity = 0.1;

    public static   boolean setAutonomousMode = true;


    // Minimum trace level forwarded to the ELLSA environment.
    public static TRACE_LEVEL traceLevel = TRACE_LEVEL.CYCLE;



    /* UI */
    public static boolean STOP_UI = true;
//    public static int STOP_UI_cycle = (int) (nbLearningCycle -  (nbLearningCycle*transferCyclesRatio));
//    public static int STOP_UI_cycle = setDreamCycleLaunch;
    public static int STOP_UI_cycle = nbLearningCycle;


}
diff --git a/ELLSAonAMAK/src/experiments/simpleLauncherExternalSource/WRITER.java b/ELLSAonAMAK/src/experiments/simpleLauncherExternalSource/WRITER.java
new file mode 100644
index 0000000000000000000000000000000000000000..e2e6aba40e70e4afb656a15f3f9ec2e1a5d3d2a7
--- /dev/null
+++ b/ELLSAonAMAK/src/experiments/simpleLauncherExternalSource/WRITER.java
@@ -0,0 +1,364 @@
+package experiments.simpleLauncherExternalSource;
+
+import agents.head.REQUEST;
+import agents.head.SITUATION;
+import agents.percept.Percept;
+import kernel.ELLSA;
+import utils.CSVWriter;
+import utils.Pair;
+
+import java.text.DecimalFormat;
+import java.text.DecimalFormatSymbols;
+import java.util.*;
+
+public class WRITER {
+
+
+
+
+    public static Pair<ArrayList<List<String>>,HashMap<String, ArrayList<Double>>> getData() {
+
+        HashMap<String, ArrayList<Double>> data = new HashMap<>();
+
+
+        List<String> dataStringsVolumes = Arrays.asList("mappingScore", "imprecisionScore", "conflictVol", "concurrenceVol", "voidVol");
+
+        List<String> dataStringsPrediction = Arrays.asList("predictionError", "predictionErrorDeviation");
+
+        List<String> dataStringsEndoRequests = Arrays.asList("rdmLearning", "rdmExploitation",
+                "activeLearning", "activeExploitation",
+                "exogenousExploitation","endogenousExploitation",
+                "exogenousLearning","endogenousLearning",
+                "conflictRequests", "concurrenceRequests", "frontierRequests", "voidRequests","subvoidRequests", "modelRequests", "dreamRequests",
+                "neighborsRequest","fusionRequests","restructureRequests",
+
+                "NCS_BAD_PREDICTION","NCS_USELESSNESS","NCS_CONFLICT","NCS_CONCURRENCY","NCS_UNPRODUCTIVITY","NCS_EXPANSION","NCS_CREATION");
+
+
+
+        //List<String> dataStringsNCS =
+
+        List<String> dataStringsTimeExecution = Arrays.asList("learningCycleExecutionTime","exploitationCycleExecutionTime", "learningCycleExecutionTimeDeviation","exploitationCycleExecutionTimeDeviation",
+                "perceptsTimeExecution", "contextsTimeExecution" , "headTimeExecution", "NCSTimeExecution"
+                , "NCS_UselessnessTimeExecution", "NCS_IncompetendHeadTimeExecution", "NCS_ConcurrenceAndConflictTimeExecution", "NCS_Create_New_ContextTimeExecution", "NCS_OvermappingTimeExecution", "NCS_ChildContextTimeExecution", "NCS_PotentialRequestTimeExecution", "NCS_DreamPotentialRequestTimeExecution");
+
+        List<String> dataStringsOther = Arrays.asList("localMinima","nbAgents","generalizationScore","neighborsCounts",
+                "minPerceptionsExperiencedAverage","maxPerceptionsExperiencedAverage","minPerceptionsExperiencedDeviation","maxPerceptionsExperiencedDeviation",
+                "rangeExperienceAverage", "rangeExperienceDeviation","creationsWithNeighbor");
+
+
+        ArrayList<List<String>> dataStrings = new ArrayList<>(Arrays.asList(dataStringsVolumes, dataStringsEndoRequests, dataStringsTimeExecution, dataStringsOther, dataStringsPrediction ));
+
+        for(List<String> dataString : dataStrings){
+            for (String dataName : dataString){
+                data.put(dataName, new ArrayList<>());
+            }
+        }
+
+
+        return new Pair<>(dataStrings,data);
+    }
+
+
+    public static void writeData(CSVWriter xpCSV,HashMap<String, ArrayList<Double>> data, ArrayList<List<String>> dataStrings, double total, double mean) {
+        writeParams(xpCSV);
+
+        xpCSV.write(new ArrayList<>(Arrays.asList("meanTime", ""+mean/1000000)));
+        xpCSV.write(new ArrayList<>(Arrays.asList("totalTime",""+total/1000000 )));
+        xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+        for(List<String> dataString : dataStrings){
+
+            xpCSV.write(new ArrayList<>(Arrays.asList("#")));
+            xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+            for (String dataName : dataString){
+
+                OptionalDouble averageScore = data.get(dataName).stream().mapToDouble(a->a).average();
+                Double deviationScore = data.get(dataName).stream().mapToDouble(a->Math.pow((a-averageScore.getAsDouble()),2)).sum();
+
+                OptionalDouble minScore = data.get(dataName).stream().mapToDouble(a->a).min();
+                OptionalDouble maxScore = data.get(dataName).stream().mapToDouble(a->a).max();
+
+                xpCSV.write(new ArrayList<>(Arrays.asList(dataName+"_Average",averageScore.getAsDouble()+"")));
+                xpCSV.write(new ArrayList<>(Arrays.asList(dataName+"_Min" ,"" + minScore.getAsDouble())));
+                xpCSV.write(new ArrayList<>(Arrays.asList(dataName+"_Max" ,"" + maxScore.getAsDouble())));
+                xpCSV.write(new ArrayList<>(Arrays.asList(dataName+"_Deviation" ,"" + Math.sqrt(deviationScore/data.get(dataName).size()))));
+                xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+            }
+
+            xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+        }
+
+        /*for (String dataName : dataStringsVolumes){
+            OptionalDouble averageScore = data.get(dataName).stream().mapToDouble(a->a).average();
+            Double deviationScore = data.get(dataName).stream().mapToDouble(a->Math.pow((a-averageScore.getAsDouble()),2)).sum();
+            //.println(dataName +" [AVERAGE] " + averageScore.getAsDouble()*100 + " - " + "[DEVIATION] " +100*Math.sqrt(deviationScore/data.get(dataName).size()));
+            xpCSV.write(new ArrayList<>(Arrays.asList(dataName+"Average",averageScore.getAsDouble()*100+"")));
+            xpCSV.write(new ArrayList<>(Arrays.asList(dataName+"Deviation" ,"" + 100*Math.sqrt(deviationScore/data.get(dataName).size()))));
+
+
+
+        }*/
+
+
+        xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+        //Create the formatter for round the values of scores
+        Locale currentLocale = Locale.getDefault();
+        DecimalFormatSymbols otherSymbols = new DecimalFormatSymbols(currentLocale);
+        otherSymbols.setDecimalSeparator('.');
+        DecimalFormat df = new DecimalFormat("##.##", otherSymbols);
+        //System.out.println("ROUNDED");
+        xpCSV.write(new ArrayList<>(Arrays.asList("ROUNDED")));
+
+        for(List<String> dataString : dataStrings){
+
+            xpCSV.write(new ArrayList<>(Arrays.asList("#")));
+            xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+
+            for (String dataName : dataString){
+
+                OptionalDouble averageScore = data.get(dataName).stream().mapToDouble(a->a).average();
+                Double deviationScore = data.get(dataName).stream().mapToDouble(a->Math.pow((a-averageScore.getAsDouble()),2)).sum();
+                OptionalDouble minScore = data.get(dataName).stream().mapToDouble(a->a).min();
+                OptionalDouble maxScore = data.get(dataName).stream().mapToDouble(a->a).max();
+
+                xpCSV.write(new ArrayList<>(Arrays.asList(dataName+"_Average_Rounded",df.format(averageScore.getAsDouble())+"")));
+                xpCSV.write(new ArrayList<>(Arrays.asList(dataName+"_Min" ,"" + df.format(minScore.getAsDouble()))));
+                xpCSV.write(new ArrayList<>(Arrays.asList(dataName+"_Max" ,"" + df.format(maxScore.getAsDouble()))));
+                xpCSV.write(new ArrayList<>(Arrays.asList(dataName+"_Deviation_Rounded" , df.format(Math.sqrt(deviationScore/data.get(dataName).size())))));
+                xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+            }
+        }
+
+
+        /*for (String dataName : dataStringsVolumes){
+            OptionalDouble averageScore = data.get(dataName).stream().mapToDouble(a->a).average();
+            Double deviationScore = data.get(dataName).stream().mapToDouble(a->Math.pow((a-averageScore.getAsDouble()),2)).sum();
+            //System.out.println(dataName +" [AVERAGE] " + df.format(averageScore.getAsDouble()*100) + " - " + "[DEVIATION] " +df.format(100*Math.sqrt(deviationScore/data.get(dataName).size())));
+            xpCSV.write(new ArrayList<>(Arrays.asList(dataName+"Average",df.format(averageScore.getAsDouble()*100)+"")));
+            xpCSV.write(new ArrayList<>(Arrays.asList(dataName+"Deviation" , df.format(100*Math.sqrt(deviationScore/data.get(dataName).size())))));
+
+
+        }*/
+        xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+
+
+        xpCSV.close();
+    }
+
+    private static void writeParams(CSVWriter xpCSV) {
+        xpCSV.write(new ArrayList<>(Arrays.asList("PARAMS")));
+        xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("SET")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("dimension", PARAMS.dimension+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("model", PARAMS.model)));
+        xpCSV.write(new ArrayList<>(Arrays.asList("learningCycles", PARAMS.nbLearningCycle +"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("exploitatingCycles", PARAMS.nbExploitationCycle +"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("episodes", PARAMS.nbEpisodes +"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("activeExploitationCycles", PARAMS.nbEndoExploitationCycle +"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("transferCycles", PARAMS.transferCyclesRatio +"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("spaceSize", PARAMS.spaceSize*4+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("validityRangesPrecision", PARAMS.validityRangesPrecision +"")));
+
+
+        xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+        xpCSV.write(new ArrayList<>(Arrays.asList("LEARNING")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isActiveLearning", PARAMS.setActiveLearning+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isSelfLearning", PARAMS.setSelfLearning+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+        xpCSV.write(new ArrayList<>(Arrays.asList("LEARNING_WEIGHT_ACCURACY", PARAMS.LEARNING_WEIGHT_ACCURACY+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("LEARNING_WEIGHT_PROXIMITY", PARAMS.LEARNING_WEIGHT_PROXIMITY+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("LEARNING_WEIGHT_EXPERIENCE", PARAMS.LEARNING_WEIGHT_EXPERIENCE+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("LEARNING_WEIGHT_GENERALIZATION", PARAMS.LEARNING_WEIGHT_GENERALIZATION+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+        xpCSV.write(new ArrayList<>(Arrays.asList("EXPLOITATION_WEIGHT_PROXIMITY", PARAMS.EXPLOITATION_WEIGHT_PROXIMITY+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("EXPLOITATION_WEIGHT_EXPERIENCE", PARAMS.EXPLOITATION_WEIGHT_EXPERIENCE+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("EXPLOITATION_WEIGHT_GENERALIZATION", PARAMS.EXPLOITATION_WEIGHT_GENERALIZATION+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+        xpCSV.write(new ArrayList<>(Arrays.asList("goalXYError")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("errorMargin", PARAMS.modelErrorMargin +"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+        xpCSV.write(new ArrayList<>(Arrays.asList("REGRESSION")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("noise", PARAMS.noiseRange +"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("exogenousLearningWeight", PARAMS.exogenousLearningWeight +"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("endogenousLearningWeight", PARAMS.endogenousLearningWeight +"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("perceptionsGenerationCoefficient", PARAMS.perceptionsGenerationCoefficient+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("regressionPoints", PARAMS.regressionPoints+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("modelSimilarityThreshold", PARAMS.modelSimilarityThreshold+"")));
+
+
+        xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+        xpCSV.write(new ArrayList<>(Arrays.asList("EXPLORATION")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isRandomExploration", PARAMS.randomExploration+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isContinuousExploration", PARAMS.continousExploration+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isLimitedToSpaceZone", PARAMS.limitedToSpaceZone+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("explorationIncrement", PARAMS.explorationIncrement+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("explorationWidth", PARAMS.explorationWidht+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("bootstrapCycle", PARAMS.setbootstrapCycle+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+        xpCSV.write(new ArrayList<>(Arrays.asList("RANGES")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("neighborhoodRadiusCoefficient", PARAMS.neighborhoodRadiusCoefficient+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("influenceRadiusCoefficient", PARAMS.influenceRadiusCoefficient+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("maxRangeRadiusCoefficient", PARAMS.maxRangeRadiusCoefficient+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("rangeSimilarityCoefficient", PARAMS.rangeSimilarityCoefficient+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("minimumRangeCoefficient", PARAMS.minimumRangeCoefficient+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+        xpCSV.write(new ArrayList<>(Arrays.asList("NCS")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isModelNCS", PARAMS.setModelAmbiguityDetection +"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isConflictNCS", PARAMS.setConflictDetection+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isConcurenceNCS", PARAMS.setConcurrenceDetection+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isIncompetenceNCS", PARAMS.setIncompetenceDetection +"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isSubVoidDetection", PARAMS.setSubIncompetencedDetection +"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isFusionResolution", PARAMS.setCompleteRedundancyDetection +"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isRetructureResolution", PARAMS.setPartialRedundancyDetection +"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isAmbiguityNCS", PARAMS.setRangeAmbiguityDetection +"")));
+
+        xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isLearnFromNeighbors", PARAMS.setCooperativeNeighborhoodLearning +"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isDream", PARAMS.setDream+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isActiveExploitation", PARAMS.setActiveExploitation+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+
+        xpCSV.write(new ArrayList<>(Arrays.asList("NCS PARAMS")));
+
+        xpCSV.write(new ArrayList<>(Arrays.asList("dreamLaunch", PARAMS.setDreamCycleLaunch+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("nbOfNeighborForLearningFromNeighbors", PARAMS.nbOfNeighborForLearningFromNeighbors+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("nbOfNeighborForContexCreationWithouOracle", PARAMS.nbOfNeighborForContexCreationWithouOracle+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("nbOfNeighborForVoidDetectionInSelfLearning", PARAMS.nbOfNeighborForVoidDetectionInSelfLearning+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isCreationFromNeighbor", PARAMS.setisCreationWithNeighbor+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isAllContextSearchAllowedForLearning", PARAMS.isAllContextSearchAllowedForLearning+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("isAllContextSearchAllowedForExploitation", PARAMS.isAllContextSearchAllowedForExploitation+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList("probabilityOfRangeAmbiguity", PARAMS.probabilityOfRangeAmbiguity+"")));
+        xpCSV.write(new ArrayList<>(Arrays.asList(" ")));
+    }
+
+    public static void setData(HashMap<String, ArrayList<Double>> data, ELLSA ellsa, HashMap<String, Double> mappingScores, HashMap<REQUEST, Integer> requestCounts, HashMap<SITUATION, Integer> situationCounts, double[] executionTimes, double predictionError, double predictionDispersion, double averageLearningCycleTimeDouble, double learningcycleTimeDispersionDouble, double averageExploitationCycleTimeDouble, double exploitationcycleTimeDispersionDouble) {
+        // Volumes
+        data.get("mappingScore").add(mappingScores.get("CTXT"));
+        data.get("imprecisionScore").add(mappingScores.get("CONF") + mappingScores.get("CONC") + mappingScores.get("VOIDS"));
+        data.get("conflictVol").add(mappingScores.get("CONF"));
+        data.get("concurrenceVol").add(mappingScores.get("CONC"));
+        data.get("voidVol").add(mappingScores.get("VOIDS"));
+
+        // Predictions: per-episode average error and its dispersion (computed above, outside this view)
+        data.get("predictionError").add(predictionError);
+        data.get("predictionErrorDeviation").add(predictionDispersion);
+
+
+
+
+        // Situations
+        // NOTE(review): auto-unboxing of get(...) throws NPE if a SITUATION/REQUEST key was
+        // never initialized in the count maps — assumes every enum constant is pre-registered; confirm.
+        data.get("rdmLearning").add((double)situationCounts.get(SITUATION.RDM_LEARNING));
+        data.get("rdmExploitation").add((double)situationCounts.get(SITUATION.RDM_EXPLOITATION));
+
+        data.get("activeLearning").add((double)situationCounts.get(SITUATION.ACTIVE_LEARNING));
+        data.get("activeExploitation").add((double)situationCounts.get(SITUATION.ACTIVE_EXPLOITATION));
+
+        data.get("exogenousExploitation").add((double)situationCounts.get(SITUATION.EXOGENOUS_EXPLOITATION));
+        data.get("endogenousExploitation").add((double)situationCounts.get(SITUATION.ENDOGENOUS_EXPLOITATION));
+
+        data.get("exogenousLearning").add((double)situationCounts.get(SITUATION.EXOGENOUS_LEARNING));
+        data.get("endogenousLearning").add((double)situationCounts.get(SITUATION.ENDOGENOUS_LEARNING));
+
+        // Endo Requests
+        data.get("conflictRequests").add((double)requestCounts.get(REQUEST.CONFLICT));
+        data.get("concurrenceRequests").add((double)requestCounts.get(REQUEST.CONCURRENCE));
+        data.get("frontierRequests").add((double)requestCounts.get(REQUEST.FRONTIER));
+        data.get("voidRequests").add((double)requestCounts.get(REQUEST.VOID));
+        data.get("subvoidRequests").add((double)requestCounts.get(REQUEST.SUBVOID));
+        data.get("modelRequests").add((double)requestCounts.get(REQUEST.MODEL));
+        data.get("dreamRequests").add((double)requestCounts.get(REQUEST.DREAM));
+        // NOTE(review): key is singular "neighborsRequest" while siblings use plural "...Requests" — confirm intended.
+        data.get("neighborsRequest").add((double)requestCounts.get(REQUEST.NEIGHBOR));
+        data.get("fusionRequests").add((double)requestCounts.get(REQUEST.FUSION));
+        data.get("restructureRequests").add((double)requestCounts.get(REQUEST.RESTRUCTURE));
+
+        // Non-Cooperative Situation (NCS) counters, stored under their raw enum-style names
+        data.get("NCS_BAD_PREDICTION").add((double)requestCounts.get(REQUEST.NCS_BAD_PREDICTION));
+        data.get("NCS_USELESSNESS").add((double)requestCounts.get(REQUEST.NCS_USELESSNESS));
+        data.get("NCS_CONFLICT").add((double)requestCounts.get(REQUEST.NCS_CONFLICT));
+        data.get("NCS_CONCURRENCY").add((double)requestCounts.get(REQUEST.NCS_CONCURRENCY));
+        data.get("NCS_UNPRODUCTIVITY").add((double)requestCounts.get(REQUEST.NCS_UNPRODUCTIVITY));
+        data.get("NCS_EXPANSION").add((double)requestCounts.get(REQUEST.NCS_EXPANSION));
+        data.get("NCS_CREATION").add((double)requestCounts.get(REQUEST.NCS_CREATION));
+
+
+
+
+
+        // Executions times
+        data.get("learningCycleExecutionTime").add(averageLearningCycleTimeDouble);
+        data.get("exploitationCycleExecutionTime").add(averageExploitationCycleTimeDouble);
+        data.get("learningCycleExecutionTimeDeviation").add(learningcycleTimeDispersionDouble);
+        data.get("exploitationCycleExecutionTimeDeviation").add(exploitationcycleTimeDispersionDouble);
+
+        // NOTE(review): indices 0 and 4-7 of executionTimes are skipped here — presumably unused or
+        // logged elsewhere; verify against where executionTimes is filled.
+        data.get("perceptsTimeExecution").add(executionTimes[1]);
+        data.get("contextsTimeExecution").add(executionTimes[2]);
+        data.get("headTimeExecution").add(executionTimes[3]);
+
+        data.get("NCSTimeExecution").add(executionTimes[8]);
+        data.get("NCS_UselessnessTimeExecution").add(executionTimes[9]);
+        data.get("NCS_IncompetendHeadTimeExecution").add(executionTimes[10]);
+        data.get("NCS_ConcurrenceAndConflictTimeExecution").add(executionTimes[11]);
+        data.get("NCS_Create_New_ContextTimeExecution").add(executionTimes[12]);
+        data.get("NCS_OvermappingTimeExecution").add(executionTimes[13]);
+        data.get("NCS_ChildContextTimeExecution").add(executionTimes[14]);
+        data.get("NCS_PotentialRequestTimeExecution").add(executionTimes[15]);
+        data.get("NCS_DreamPotentialRequestTimeExecution").add(executionTimes[16]);
+
+        // Other
+        data.get("nbAgents").add((double) ellsa.getContexts().size());
+        // NOTE(review): if mappingScores.get("CTXT") is an Integer, the division below is integer
+        // division before the cast (fractions truncated) — confirm the map's value type.
+        data.get("generalizationScore").add((double) (mappingScores.get("CTXT") / ellsa.getContexts().size()));
+        data.get("localMinima").add((double) ellsa.data.countLocalMinina);
+        // Average neighbor count per cycle over the whole run
+        data.get("neighborsCounts").add((double)ellsa.data.neighborsCounts/ellsa.getCycle());
+        data.get("creationsWithNeighbor").add((double)ellsa.data.requestCounts.get(REQUEST.CREATION_WITH_NEIGHBOR));
+
+
+        // Collect, per percept, the min/max values experienced after bootstrap and the resulting range.
+        ArrayList<Double> allPerceptMin = new ArrayList<>();
+        ArrayList<Double> allPerceptMax = new ArrayList<>();
+        ArrayList<Double> allPerceptRangeExp = new ArrayList<>();
+        for(Percept pct : ellsa.getPercepts()){
+            double min = ellsa.data.minMaxPerceptsStatesAfterBoostrap.get(pct).getA();
+            double max = ellsa.data.minMaxPerceptsStatesAfterBoostrap.get(pct).getB();
+            // NOTE(review): new Double(...) is deprecated since Java 9 — autoboxing (or Double.valueOf)
+            // would suffice; left unchanged here.
+            allPerceptMin.add(new Double(min));
+            allPerceptMax.add(new Double(max));
+            allPerceptRangeExp.add(new Double(Math.abs(max-min)));
+        }
+
+        // Population standard deviation of the per-percept minima: sqrt(sum((x - mean)^2) / n).
+        // NOTE(review): averageMin.getAsDouble() throws NoSuchElementException if getPercepts() is empty — confirm non-empty.
+        OptionalDouble averageMin = allPerceptMin.stream().mapToDouble(a->a).average();
+        Double minDispersion = allPerceptMin.stream().mapToDouble(a->Math.pow((a- averageMin.getAsDouble()),2)).sum();
+        double minAverageValue = averageMin.getAsDouble();
+        double minDispersionValue = Math.sqrt(minDispersion /allPerceptMin.size());
+
+        // Same statistic for the per-percept maxima.
+        OptionalDouble averageMax = allPerceptMax.stream().mapToDouble(a->a).average();
+        Double MaxDispersion = allPerceptMax.stream().mapToDouble(a->Math.pow((a- averageMax.getAsDouble()),2)).sum();
+        double MaxAverageValue = averageMax.getAsDouble();
+        double MaxDispersionValue = Math.sqrt(MaxDispersion /allPerceptMax.size());
+
+        // Same statistic for the per-percept experienced ranges |max - min|.
+        OptionalDouble averageRange = allPerceptRangeExp.stream().mapToDouble(a->a).average();
+        Double rangDispersion = allPerceptRangeExp.stream().mapToDouble(a->Math.pow((a- averageRange.getAsDouble()),2)).sum();
+        double rangeAverageValue = averageRange.getAsDouble();
+        double rangeDispersionValue = Math.sqrt(rangDispersion /allPerceptRangeExp.size());
+
+        data.get("minPerceptionsExperiencedAverage").add(minAverageValue);
+        data.get("maxPerceptionsExperiencedAverage").add(MaxAverageValue);
+        data.get("minPerceptionsExperiencedDeviation").add(minDispersionValue);
+        data.get("maxPerceptionsExperiencedDeviation").add(MaxDispersionValue);
+        data.get("rangeExperienceAverage").add(rangeAverageValue);
+        data.get("rangeExperienceDeviation").add(rangeDispersionValue);
+
+    }
+
+}