Skip to content
Snippets Groups Projects
Commit a1077939 authored by root's avatar root
Browse files

rajout des classes psc.hpp/cpp (vide) et modification du main pour ajouter...

rajout des classes psc.hpp/cpp (vide) et modification du main pour ajouter l'algo psc, fait par Karim/Adem
parent 687b653f
Branches
No related tags found
No related merge requests found
Pipeline #8492 failed
{
"nb_res": 3,
"jobs": [
{
"id": "job_0",
"type": "carbon_co2",
"subtime": 150,
"timestamp": 150,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 65,
"dcB": 643,
"dcC": 380
}
},
{
"id": "job_1",
"type": "carbon_co2",
"subtime": 300,
"timestamp": 300,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 65,
"dcB": 641,
"dcC": 379
}
},
{
"id": "job_2",
"type": "carbon_co2",
"subtime": 450,
"timestamp": 450,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 73,
"dcB": 643,
"dcC": 377
}
},
{
"id": "job_3",
"type": "carbon_co2",
"subtime": 600,
"timestamp": 600,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 66,
"dcB": 643,
"dcC": 373
}
},
{
"id": "job_4",
"type": "carbon_co2",
"subtime": 750,
"timestamp": 750,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 70,
"dcB": 627,
"dcC": 380
}
},
{
"id": "job_5",
"type": "carbon_co2",
"subtime": 900,
"timestamp": 900,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 74,
"dcB": 630,
"dcC": 374
}
},
{
"id": "job_6",
"type": "carbon_co2",
"subtime": 1050,
"timestamp": 1050,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 73,
"dcB": 632,
"dcC": 364
}
},
{
"id": "job_7",
"type": "carbon_co2",
"subtime": 1200,
"timestamp": 1200,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 74,
"dcB": 633,
"dcC": 376
}
},
{
"id": "job_8",
"type": "carbon_co2",
"subtime": 1350,
"timestamp": 1350,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 59,
"dcB": 641,
"dcC": 377
}
},
{
"id": "job_9",
"type": "carbon_co2",
"subtime": 1500,
"timestamp": 1500,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 63,
"dcB": 640,
"dcC": 374
}
},
{
"id": "job_10",
"type": "carbon_co2",
"subtime": 1650,
"timestamp": 1650,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 65,
"dcB": 634,
"dcC": 374
}
},
{
"id": "job_11",
"type": "carbon_co2",
"subtime": 1800,
"timestamp": 1800,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 70,
"dcB": 633,
"dcC": 372
}
},
{
"id": "job_12",
"type": "carbon_co2",
"subtime": 1950,
"timestamp": 1950,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 68,
"dcB": 637,
"dcC": 364
}
},
{
"id": "job_13",
"type": "carbon_co2",
"subtime": 2100,
"timestamp": 2100,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 61,
"dcB": 632,
"dcC": 370
}
},
{
"id": "job_14",
"type": "carbon_co2",
"subtime": 2250,
"timestamp": 2250,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 67,
"dcB": 642,
"dcC": 380
}
},
{
"id": "job_15",
"type": "carbon_co2",
"subtime": 2400,
"timestamp": 2400,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 72,
"dcB": 627,
"dcC": 366
}
},
{
"id": "job_16",
"type": "carbon_co2",
"subtime": 2550,
"timestamp": 2550,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 68,
"dcB": 634,
"dcC": 369
}
},
{
"id": "job_17",
"type": "carbon_co2",
"subtime": 2700,
"timestamp": 2700,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 67,
"dcB": 641,
"dcC": 374
}
},
{
"id": "job_18",
"type": "carbon_co2",
"subtime": 2850,
"timestamp": 2850,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 74,
"dcB": 638,
"dcC": 370
}
},
{
"id": "job_19",
"type": "carbon_co2",
"subtime": 3000,
"timestamp": 3000,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": {
"dcA": 62,
"dcB": 638,
"dcC": 375
}
}
],
"profiles": {
"carbon_co2": {
"type": "delay",
"delay": 0.0001
}
}
}
\ No newline at end of file
{"type": "carbon_co2", "timestamp": 150, "some_field_dcA": 65, "another_field_dcB": 643, "additional_field_dcC": 380}
{"type": "carbon_co2", "timestamp": 300, "some_field_dcA": 65, "another_field_dcB": 641, "additional_field_dcC": 379}
{"type": "carbon_co2", "timestamp": 450, "some_field_dcA": 73, "another_field_dcB": 643, "additional_field_dcC": 377}
{"type": "carbon_co2", "timestamp": 600, "some_field_dcA": 66, "another_field_dcB": 643, "additional_field_dcC": 373}
{"type": "carbon_co2", "timestamp": 750, "some_field_dcA": 70, "another_field_dcB": 627, "additional_field_dcC": 380}
{"type": "carbon_co2", "timestamp": 900, "some_field_dcA": 74, "another_field_dcB": 630, "additional_field_dcC": 374}
{"type": "carbon_co2", "timestamp": 1050, "some_field_dcA": 73, "another_field_dcB": 632, "additional_field_dcC": 364}
{"type": "carbon_co2", "timestamp": 1200, "some_field_dcA": 74, "another_field_dcB": 633, "additional_field_dcC": 376}
{"type": "carbon_co2", "timestamp": 1350, "some_field_dcA": 59, "another_field_dcB": 641, "additional_field_dcC": 377}
{"type": "carbon_co2", "timestamp": 1500, "some_field_dcA": 63, "another_field_dcB": 640, "additional_field_dcC": 374}
{"type": "carbon_co2", "timestamp": 1650, "some_field_dcA": 65, "another_field_dcB": 634, "additional_field_dcC": 374}
{"type": "carbon_co2", "timestamp": 1800, "some_field_dcA": 70, "another_field_dcB": 633, "additional_field_dcC": 372}
{"type": "carbon_co2", "timestamp": 1950, "some_field_dcA": 68, "another_field_dcB": 637, "additional_field_dcC": 364}
{"type": "carbon_co2", "timestamp": 2100, "some_field_dcA": 61, "another_field_dcB": 632, "additional_field_dcC": 370}
{"type": "carbon_co2", "timestamp": 2250, "some_field_dcA": 67, "another_field_dcB": 642, "additional_field_dcC": 380}
{"type": "carbon_co2", "timestamp": 2400, "some_field_dcA": 72, "another_field_dcB": 627, "additional_field_dcC": 366}
{"type": "carbon_co2", "timestamp": 2550, "some_field_dcA": 68, "another_field_dcB": 634, "additional_field_dcC": 369}
{"type": "carbon_co2", "timestamp": 2700, "some_field_dcA": 67, "another_field_dcB": 641, "additional_field_dcC": 374}
{"type": "carbon_co2", "timestamp": 2850, "some_field_dcA": 74, "another_field_dcB": 638, "additional_field_dcC": 370}
{"type": "carbon_co2", "timestamp": 3000, "some_field_dcA": 62, "another_field_dcB": 638, "additional_field_dcC": 375}
import json import json
import random import random
#Author : Karim /Adem
data = { # Fonction pour générer une valeur aléatoire de carbone dans une plage spécifique
"nb_res": 3, def generate_random_carbon(base, margin=8):
"jobs": [ return random.randint(base - margin, base + margin)
{"id": 1, "subtime": 0, "walltime": 100, "res": 1, "profile": "France_2018"},
{"id": 2, "subtime": 100, "walltime": 200, "res": 1, "profile": "France_2019"},
{"id": 3, "subtime": 200, "walltime": 300, "res": 1, "profile": "France_2020"},
{"id": 4, "subtime": 300, "walltime": 400, "res": 1, "profile": "France_2021"},
{"id": 5, "subtime": 400, "walltime": 500, "res": 2, "profile": "Morocco_2018"},
{"id": 6, "subtime": 500, "walltime": 600, "res": 2, "profile": "Morocco_2019"},
{"id": 7, "subtime": 600, "walltime": 700, "res": 2, "profile": "Morocco_2020"},
{"id": 8, "subtime": 700, "walltime": 800, "res": 2, "profile": "Morocco_2021"},
{"id": 9, "subtime": 800, "walltime": 900, "res": 3, "profile": "USA_2018"},
{"id": 10, "subtime": 900, "walltime": 1000, "res": 3, "profile": "USA_2019"},
{"id": 11, "subtime": 1000, "walltime": 1100, "res": 3, "profile": "USA_2020"},
{"id": 12, "subtime": 1100, "walltime": 1200, "res": 3, "profile": "USA_2021"}
],
"profiles": {
} # Données de base pour chaque datacenter
} base_values = {"dcA": 67, "dcB": 635, "dcC": 372}
margin = 8 # Marge pour les variations aléatoires
# On génère des valeurs aléatoires avec une marge de 2 en partant sur de "vrai" données
def generate_random_carbon(base, margin=2):
return f"{random.randint(base - margin, base + margin)}kg"
# Profils avec les émissions de carbone spécifiques et la marge de ±2kg # Initialisation de la liste des jobs
specific_carbon_emissions = { jobs = []
"France_2018": 67, "France_2019": 69, "France_2020": 67, "France_2021": 65,
"Morocco_2018": 613, "Morocco_2019": 627, "Morocco_2020": 642, "Morocco_2021": 631,
"USA_2018": 412, "USA_2019": 393, "USA_2020": 369, "USA_2021": 379
}
# Générer les jobs carbon_co2
for i in range(20): # 20 itérations
subtime = timestamp = 150 * (i + 1) # Utilisation de la même valeur pour subtime et timestamp
carbon_data = {
"dcA": generate_random_carbon(base_values["dcA"], margin),
"dcB": generate_random_carbon(base_values["dcB"], margin),
"dcC": generate_random_carbon(base_values["dcC"], margin)
}
job = {
"id": f"job_{i}",
"type": "carbon_co2",
"subtime": subtime,
"timestamp": timestamp,
"walltime": 60,
"res": 1,
"profile": "carbon_co2",
"carbon_data": carbon_data
}
jobs.append(job)
for profile, base_emission in specific_carbon_emissions.items(): # Structure globale du fichier
data["profiles"][profile] = { data = {
"type": "delay", "nb_res": 3,
"delay": 1, "jobs": jobs,
"carbon_emission": generate_random_carbon(base_emission) "profiles": {
"carbon_co2": {
"type": "delay",
"delay": 0.0001
}
} }
}
# nom du fichier en sortie # Définir le nom de fichier
filename = 'jsonInput.json' filename = 'datacenterBatsim.json'
# On écrit les données dans le fichier Json # Écriture des données JSON dans le fichier
with open(filename, 'w') as file: with open(filename, 'w') as file:
json.dump(data, file, indent=4) json.dump(data, file, indent=4)
print(filename) print(f"Le fichier JSON a été généré : {filename}")
batsim_version,consumed_joules,makespan,max_slowdown,max_turnaround_time,max_waiting_time,mean_slowdown,mean_turnaround_time,mean_waiting_time,nb_computing_machines,nb_grouped_switches,nb_jobs,nb_jobs_finished,nb_jobs_killed,nb_jobs_rejected,nb_jobs_success,nb_machine_switches,scheduling_time,simulation_time,success_rate,time_computing,time_idle,time_sleeping,time_switching_off,time_switching_on,time_unavailable
commit a30f93c840f13d3ffd17afcc67b67dc9e8385821 (built by Nix from master branch),0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,1536,0,20,0,0,20,0,0,3.187254,3.223722,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000
This diff is collapsed.
import json

# Convert the Batsim workload file into a JSON-Lines event feed:
# one flattened event per job, one JSON object per output line.

# Load the workload produced by the generator script.
with open('datacenterBatsim.json', 'r') as source:
    workload = json.load(source)

# Emit one event line per job into the external-events file.
with open('events.txt', 'w') as sink:
    for job in workload["jobs"]:
        carbon = job["carbon_data"]
        record = {
            "type": job["type"],                  # event type (carbon_co2)
            "timestamp": job["timestamp"],        # when the event fires
            "some_field_dcA": carbon["dcA"],      # datacenter A value
            "another_field_dcB": carbon["dcB"],   # datacenter B value
            "additional_field_dcC": carbon["dcC"] # datacenter C value
        }
        sink.write(json.dumps(record) + "\n")

print("Le fichier d'événements a été généré : events.txt")
#include <iostream>
#include "psc.hpp"
//Author : Karim/Adem
/**
 * @brief Constructs the Psc scheduling algorithm.
 *
 * All dependencies are forwarded unchanged to the ISchedulingAlgorithm
 * base class; Psc adds no state of its own yet.
 *
 * @param workload        The workload the algorithm schedules.
 * @param decision        Object used to emit scheduling decisions.
 * @param queue           The job queue handled by the algorithm.
 * @param selector        Strategy used to pick resources for jobs.
 * @param rjms_delay      Simulated delay of the RJMS.
 * @param variant_options Algorithm-specific options (JSON document).
 */
Psc::Psc(Workload *workload, SchedulingDecision *decision, Queue *queue,
         ResourceSelector *selector, double rjms_delay,
         rapidjson::Document *variant_options)
    : ISchedulingAlgorithm(workload, decision, queue, selector, rjms_delay, variant_options)
{
    // Nothing to initialise here: the base class stores every dependency.
}
/// Destructor: Psc owns no resources of its own, so the default suffices.
Psc::~Psc() = default;
#pragma once
#include "../isalgorithm.hpp"
#include "../locality.hpp"
#include <intervalset.hpp>
//Author : Karim/Adem
class Workload;
class SchedulingDecision;
/**
 * @brief Skeleton "psc" scheduling algorithm.
 *
 * Currently an empty shell: the class declares no members or overrides,
 * so all behaviour is inherited from ISchedulingAlgorithm. The actual
 * algorithm is still to be implemented.
 */
class Psc : public ISchedulingAlgorithm
{
public:
    /// Forwards every dependency to the ISchedulingAlgorithm base class.
    Psc(Workload * workload, SchedulingDecision * decision, Queue * queue, ResourceSelector * selector,
        double rjms_delay, rapidjson::Document * variant_options);

    /// Virtual destructor (instances are used through the base-class pointer).
    virtual ~Psc();
};
...@@ -23,12 +23,14 @@ ...@@ -23,12 +23,14 @@
#include "algo/fcfs.hpp" #include "algo/fcfs.hpp"
#include "algo/rejecter.hpp" #include "algo/rejecter.hpp"
#include "algo/sequencer.hpp" #include "algo/sequencer.hpp"
#include "algo/psc.hpp" // Inclusion de l'algorithme Psc
using namespace std; using namespace std;
using namespace boost; using namespace boost;
namespace n = network; namespace n = network;
namespace r = rapidjson; namespace r = rapidjson;
//Modification : Karim/Adem
void run(Network & n, ISchedulingAlgorithm * algo, SchedulingDecision &d, void run(Network & n, ISchedulingAlgorithm * algo, SchedulingDecision &d,
Workload &workload, bool call_make_decisions_on_single_nop = true); Workload &workload, bool call_make_decisions_on_single_nop = true);
...@@ -54,12 +56,13 @@ void run(Network & n, ISchedulingAlgorithm * algo, SchedulingDecision &d, ...@@ -54,12 +56,13 @@ void run(Network & n, ISchedulingAlgorithm * algo, SchedulingDecision &d,
int main(int argc, char ** argv) int main(int argc, char ** argv)
{ {
const set<string> variants_set = {"easy_bf", "fcfs", "rejecter", "sequencer" }; const set<string> variants_set = {"easy_bf", "fcfs", "rejecter", "sequencer", "psc" }; // Ajout de Psc
const set<string> policies_set = {"basic", "contiguous"}; const set<string> policies_set = {"basic", "contiguous"};
const set<string> queue_orders_set = {"fcfs", "lcfs", "desc_bounded_slowdown", "desc_slowdown", const set<string> queue_orders_set = {"fcfs", "lcfs", "desc_bounded_slowdown", "desc_slowdown",
"asc_size", "desc_size", "asc_walltime", "desc_walltime"}; "asc_size", "desc_size", "asc_walltime", "desc_walltime"};
const set<string> verbosity_levels_set = {"debug", "info", "quiet", "silent"}; const set<string> verbosity_levels_set = {"debug", "info", "quiet", "silent"};
const string variants_string = "{" + boost::algorithm::join(variants_set, ", ") + "}"; const string variants_string = "{" + boost::algorithm::join(variants_set, ", ") + "}";
const string policies_string = "{" + boost::algorithm::join(policies_set, ", ") + "}"; const string policies_string = "{" + boost::algorithm::join(policies_set, ", ") + "}";
const string queue_orders_string = "{" + boost::algorithm::join(queue_orders_set, ", ") + "}"; const string queue_orders_string = "{" + boost::algorithm::join(queue_orders_set, ", ") + "}";
...@@ -240,6 +243,8 @@ int main(int argc, char ** argv) ...@@ -240,6 +243,8 @@ int main(int argc, char ** argv)
algo = new Rejecter(&w, &decision, queue, selector, rjms_delay, &json_doc_variant_options); algo = new Rejecter(&w, &decision, queue, selector, rjms_delay, &json_doc_variant_options);
else if (scheduling_variant == "sequencer") else if (scheduling_variant == "sequencer")
algo = new Sequencer(&w, &decision, queue, selector, rjms_delay, &json_doc_variant_options); algo = new Sequencer(&w, &decision, queue, selector, rjms_delay, &json_doc_variant_options);
else if (scheduling_variant == "psc") // Ajout de psc
algo = new Psc(&w, &decision, queue, selector, rjms_delay, &json_doc_variant_options);
// Network // Network
Network n; Network n;
......
import json
import random
# Fonction pour générer une valeur aléatoire de carbone dans une plage spécifique
def generate_random_carbon(base, margin=8):
    """Return a pseudo-random carbon value in [base - margin, base + margin] (inclusive)."""
    low, high = base - margin, base + margin
    return random.randint(low, high)
# Baseline carbon values per country; each generated sample varies
# around its baseline by +/- margin.
base_values = {"France": 67, "Morocco": 635, "USA": 372}
margin = 8  # random spread applied around each baseline

# Build the carbon_co2 events: 20 of them, one every 150 time units.
# Note: carbon values are drawn in dcA, dcB, dcC order for each event.
events = [
    {
        "type": "carbon_co2",
        "timestamp": 150 * (step + 1),
        "carbon_data": {
            "dcA": generate_random_carbon(base_values["France"], margin),
            "dcB": generate_random_carbon(base_values["Morocco"], margin),
            "dcC": generate_random_carbon(base_values["USA"], margin)
        }
    }
    for step in range(20)
]

# Write the event list to the Batsim input file.
filename = 'datacenterBatsim.json'
with open(filename, 'w') as file:
    json.dump(events, file, indent=4)
print(filename)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment