Commit 4f517a40 authored by Maël Madon

refac: simplified the repo only for campaign2. Rename and delete files

parent 0af586ce
Merge request !1: Extension europar2022
@@ -2,9 +2,9 @@
 This repository contains the scripts and files needed to reproduce the experiments presented in the paper "Characterization of different user behaviors for demand response in data centers" submitted to [Euro-Par conference 2022](https://2022.euro-par.org/).

 ## Description of the main files

-- `0_prepare_workload.ipynb`: Jupyter notebook downloading and preparing the workload trace used in the experiments
+- `prepare_workload.ipynb`: Jupyter notebook downloading and preparing the workload trace used in the experiments
-- `campaign2.py`: Python script preparing and launching in parallel the 105 experiments. Each experiment corresponds to one instance of `instance2.py`.
+- `campaign.py`: Python script preparing and launching the 105 experiments in parallel. Each experiment corresponds to one instance of `instance.py`.
-- `analyse_campaign2.ipynb`: Jupyter notebook analysing the results and plotting the graphs shown in the article
+- `analyse_campaign.ipynb`: Jupyter notebook analysing the results and plotting the graphs shown in the article

 (campaign1 is another set of experiments not discussed in the article. We kept it here, but beware that some paths might be broken.)
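
The three files are meant to be used in that order: prepare the workload once, run `campaign.py`, then open the analysis notebook. As an illustrative sketch only (not from the repository), a single experiment could also be launched by hand, assuming the simulation environment described in the Nix file further down is active, the trace produced by the preparation notebook is in place, and the module is importable as `instance` after the rename:

```python
# Hypothetical single-experiment driver; names and dates are illustrative only.
from time import mktime, strptime
from instance import start_instance  # module name assumed after the rename

begin_trace = 1356994806  # start time of the original SWF trace (from its header)
# start_date is an offset in seconds from the trace start; campaign.py samples
# 50 such offsets between Jun 1 and Nov 30 2014, leaving room for a 72h window
start = int(mktime(strptime('Mon Jul 14 00:00:00 2014')) - begin_trace)
start_instance(expe_num=0, start_date=start, prepare_workload=True, clean_log=False)
```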
File moved
File moved
#!/usr/bin/env python3
import random
from time import mktime, strptime
import concurrent.futures

from instance1 import start_instance

# Prepare the start date samples (offsets in seconds from the start of the trace)
begin_trace = 1356994806  # according to original SWF header
jun1_unix_time = mktime(strptime('Sun Jun 1 00:00:00 2014'))
nov30_unix_time = mktime(strptime('Sun Nov 30 23:59:59 2014'))
jun1 = int(jun1_unix_time - begin_trace)
nov30 = int(nov30_unix_time - begin_trace)

random.seed(1997)
nb_expe = 50
# Draw start dates that leave room for a full 72h window before Nov 30
start_dates = [random.randint(jun1, nov30 - 72 * 3600)
               for _ in range(nb_expe)]

# xp_a_relancer = [7, 26]  # experiments to relaunch, if needed

with concurrent.futures.ProcessPoolExecutor() as executor:
    instances = []
    for i in range(nb_expe):
    # for i in xp_a_relancer:
        print(f"Submit expe {i}")
        # start_instance(expe_num, start_date, prepare_workload, clean_log)
        instances.append(executor.submit(start_instance, i, start_dates[i], False, True))

    for instance in concurrent.futures.as_completed(instances):
        print(f"Expe {instance.result()} terminated")
{ kapack ? import (fetchTarball "https://github.com/oar-team/nur-kapack/archive/master.tar.gz") {} }:

with kapack.pkgs;

let self = rec {
  my-simgrid = kapack.simgrid-light.overrideAttrs (old: rec {
    version = "33035e7eeaee7a34efd75d675555eec32d34856a";
    src = kapack.pkgs.fetchgit rec {
      url = "https://framagit.org/simgrid/simgrid.git";
      rev = version;
      sha256 = "0a2w8qzw5sa7rccn6m0r6ifjcvzyadijfg8p3g2j6nhpav2rm2l7";
    };
    patches = [];
    doCheck = false;
  });

  my-batsim = (kapack.batsim.override { simgrid = my-simgrid; })
    .overrideAttrs (old: rec {
      version = "ccd208711985af2b10d726a29bd9396e4da8d6a6";
      src = kapack.pkgs.fetchgit rec {
        url = "https://framagit.org/batsim/batsim.git";
        rev = version;
        sha256 = "0sf6i70qh8jfl2vhmrvm9bjrwda4cf15v5xbpnq5gfb57fq87n8v";
      };
      patches = [];
    });

  my_batsched = kapack.batsched.overrideAttrs (attr: rec {
    name = "batmen";
    version = "b63e95fad46962572b1f1d6db5bda979ca20dae0"; # commit "clean nix file and update README"
    src = kapack.pkgs.fetchgit rec {
      url = "https://gitlab.irit.fr/sepia-pub/mael/batmen.git";
      rev = version;
      sha256 = "1q2s1f68j4wd8vzzzirpjp7851l342hq6dvd9ky5g62zdps5mqlj";
    };
  });

  exp_env = mkShell rec {
    name = "exp_env";
    buildInputs = [
      # simulator
      my-batsim
      # scheduler implementations
      my_batsched
      # misc. tools to execute instances
      kapack.batexpe
      kapack.evalys
      (python3.withPackages
        (ps: with ps; with python3Packages; [jupyter ipython pandas numpy matplotlib
          plotly pip tabulate pytz isodate ordered-set yattag])
      )
      curl
    ];
  };
};
in
self
File moved
#!/usr/bin/env python3
import time
# import sys
import os
import subprocess
import argparse
import json
# sys.path.insert(0, '/scripts')
import scripts.swf_to_batsim_split_by_user as split_user
from scripts.util import *
#import swf_moulinette
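
# Overview: one experiment number corresponds to 17 robin-orchestrated
# simulations on the same 72h workload excerpt: 1 rigid replay run plus
# 4 demand-response behaviors x 4 window sizes (see start_instance below).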
def prepare_input_data(expe_num, start_date):
    """Cut the original trace to extract 72h starting from this start date"""
    end_date = start_date + 72 * 3600
    to_keep = f"submit_time >= {start_date} and submit_time <= {end_date}"

    if not os.path.exists(f'{WL_DIR}/expe{expe_num}'):
        os.makedirs(f'{WL_DIR}/expe{expe_num}')

    split_user.generate_workload(
        input_swf=f'{WL_DIR}/MC_selection_article.swf',
        output_folder=f'{WL_DIR}/expe{expe_num}',
        keep_only=to_keep,
        job_grain=10,
        job_walltime_factor=8)
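
# run_expe launches a single Batsim + batsched simulation through robin
# (the batsched build is the "batmen" fork pinned in the Nix environment).
# The demand-response window always opens at 12:00 on day 2 of the excerpt,
# i.e. dm_window = [36h, 36h + window_size] expressed in seconds.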
def run_expe(expe_num, user_category, window_size, clean_log):
    """Run batmen with the given behavior and demand response window.

    expe_num should be a small integer (e.g. < 100)."""
    # Useful vars and output folder
    if window_size == 0.5:
        w_size = '05'
    else:
        w_size = f"{window_size}"
    EXPE_DIR = f"{ROOT_DIR}/out/expe{expe_num}/{user_category}_window{w_size}"
    create_dir_rec_if_needed(EXPE_DIR)
    create_dir_rec_if_needed(f"{EXPE_DIR}/cmd")

    EXPE_FILE = f"{EXPE_DIR}/cmd/robinfile.yaml"
    wl_folder = f'{WL_DIR}/expe{expe_num}'
    pf = f"{ROOT_DIR}/platform/average_metacentrum.xml"
    wl = f"{WL_DIR}/empty_workload.json"
    uf = f"{EXPE_DIR}/cmd/user_description_file.json"

    # Demand response window, from 12 to (12 + window_size) on day 2
    dm_window = [(24 + 12) * 3600, int((24 + 12 + window_size) * 3600)]

    # User description file
    def user_description(user):
        return {
            "name": user,
            "category": user_category,
            "param": {"input_json": f"{wl_folder}/{user}.json"}
        }

    user_names = [user_file.split('.')[0] for user_file in os.listdir(wl_folder)]
    data = {}
    data["dm_window"] = dm_window
    data["log_user_stats"] = True
    data["log_folder"] = EXPE_DIR
    data["users"] = [user_description(user) for user in user_names]
    with open(uf, 'w') as user_description_file:
        json.dump(data, user_description_file)

    # Generate and run robin instance
    socket_batsim = f"tcp://localhost:280{expe_num:02d}"
    socket_batsched = f"tcp://*:280{expe_num:02d}"
    batcmd = gen_batsim_cmd(
        pf, wl, EXPE_DIR, f"--socket-endpoint={socket_batsim} --energy --enable-compute-sharing --enable-dynamic-jobs --acknowledge-dynamic-jobs --enable-profile-reuse")
    schedcmd = f"batsched --socket-endpoint={socket_batsched} -v bin_packing_energy --queue_order=desc_size --variant_options_filepath={uf}"

    instance = RobinInstance(output_dir=EXPE_DIR,
                             batcmd=batcmd,
                             schedcmd=schedcmd,
                             simulation_timeout=604800, ready_timeout=10,
                             success_timeout=3600, failure_timeout=5)

    instance.to_file(EXPE_FILE)

    print(f"Run robin {EXPE_FILE}")
    ret = run_robin(EXPE_FILE)
    print(f"Robin {EXPE_FILE} finished")

    # Remove the log files that can quickly become heavy...
    if clean_log:
        os.remove(f"{EXPE_DIR}/log/batsim.log")
        os.remove(f"{EXPE_DIR}/log/sched.err.log")
        os.remove(f"{EXPE_DIR}/log/sched.out.log")
def start_instance(expe_num, start_date, prepare_workload=True, clean_log=False):
    # Prepare workload
    if prepare_workload:
        prepare_input_data(expe_num, start_date)

    # Create expe folder
    create_dir_rec_if_needed(f"{ROOT_DIR}/out/expe{expe_num}")

    # Run with Rigid behavior (the demand response window has no influence here)
    run_expe(expe_num=expe_num,
             user_category="replay_user_rigid",
             window_size=1, clean_log=clean_log)

    # 4*4 = 16 expe
    for behavior in ["dm_user_reconfig", "dm_user_degrad",
                     "dm_user_renonce", "dm_user_delay"]:
        for window_size in [0.5, 1, 2, 4]:
            run_expe(expe_num, behavior, window_size, clean_log=clean_log)

    ###### Output data treatment ######
    # Produce the utilisation viz?
    return expe_num
def main():
    parser = argparse.ArgumentParser(
        description='One expe instance. To launch for example with `oarsub -l walltime=2 "./1_one_instance arg1 arg2 arg3"`')
    parser.add_argument('expe_num', type=int, help='The expe ID')
    parser.add_argument('start_date', type=int,
                        help='Start of the 3-day window (in seconds since the start of the original trace)')
    args = parser.parse_args()

    start_instance(args.expe_num, args.start_date)


if __name__ == "__main__":
    main()
File moved