Skip to content
Snippets Groups Projects
Commit 1e9ec643 authored by Millian Poquet's avatar Millian Poquet
Browse files

script: compute max observed watts & instance gen

parent 4ba9b319
No related branches found
No related tags found
No related merge requests found
#!/usr/bin/env python3
import argparse
import os
import pandas as pd
def main():
    """CLI entry point: report peak aggregated power draw per month and overall.

    For each requested month, loads '<prefix><month>_power_total.parquet'
    (columns 'timestamp' and 'value'), sums 'value' per timestamp and
    prints the peak of that sum, then prints the peak across all months.
    """
    cli = argparse.ArgumentParser()
    cli.add_argument("input_power_timeseries_prefix", help="filepath prefix to the location of the parquet files that contain node power consumption time series")
    cli.add_argument("month", nargs='+', help="the month to aggregate. example value: '22-07'")
    options = cli.parse_args()

    monthly_peaks = []
    for current_month in options.month:
        parquet_path = f'{options.input_power_timeseries_prefix}{current_month}_power_total.parquet'
        samples = pd.read_parquet(parquet_path)
        # Peak of the per-timestamp total: sum 'value' over all rows sharing
        # a timestamp (one row per node), then take the maximum over time.
        peak = samples.groupby('timestamp')['value'].sum().max()
        print(f'max power reached during month {current_month}: {peak} W')
        monthly_peaks.append(peak)
    # -1 mirrors the original sentinel used before any month is processed.
    print(f'max power reached over all months: {max([-1, *monthly_peaks])} W')
#!/usr/bin/env python3
import argparse
import itertools
import json
import sys
from functools import reduce
from hashlib import sha1
def main():
    """Generate the cartesian product of experiment instances as a JSON object.

    Combines predictor, powercap, powercap-duration and workload parameter
    dictionaries into flat experiment-instance dicts, keys each instance by
    the SHA-1 of its canonical (sorted-keys) JSON encoding, and writes the
    whole mapping as one JSON object to stdout or to --output_file.

    Raises:
        ValueError: if two distinct instances hash to the same SHA-1
            (previously an `assert`, which is stripped under `python -O`).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("workload_params", type=str, help='path to the workload params JSON file')
    parser.add_argument("-o", "--output_file", type=str, help="if set, write output to this file instead of stdout")
    args = parser.parse_args()

    # Platform power model constants — presumably derived from observed
    # traces of the 980-node machine (TODO confirm against the trace source).
    max_observed_total_power = 955080   # W, max total power observed
    max_power_per_node = 2100.0         # W (NOTE: currently unused, kept for reference)
    min_power_per_node = 240.0          # W, idle draw per node
    nb_nodes = 980
    # Dynamic power is whatever exceeds the fleet-wide idle floor.
    max_dynamic_power = max_observed_total_power - min_power_per_node * nb_nodes

    # Powercap scenarios at 20%, 40%, 60%, 80% and 100% of max dynamic power.
    powercaps = [{
        'powercap_dynamic_value_ratio': f'{i * 0.1:.1f}',
        'powercap_dynamic_watts': int(i * 0.1 * max_dynamic_power),
        'normal_dynamic_watts': max_dynamic_power,
        'idle_watts': nb_nodes * min_power_per_node,
    } for i in range(2, 11, 2)]
    powercap_durations = [
        {'powercap_end_time_seconds': 60*60*3},
    ]
    algo_name = 'easypower'
    predictors = [
        {'algo_name': algo_name, 'predictor_name': 'zero', 'job_power_estimation_field': 'zero_power_estimation'},
        {'algo_name': algo_name, 'predictor_name': 'mean', 'job_power_estimation_field': 'mean_power_estimation'},
        {'algo_name': algo_name, 'predictor_name': 'max', 'job_power_estimation_field': 'max_power_estimation'},
        {'algo_name': algo_name, 'predictor_name': 'upper_bound', 'job_power_estimation_field': 'upper_bound_power_estimation'},
        {'algo_name': algo_name, 'predictor_name': 'real_mean', 'job_power_estimation_field': 'real_mean_power_estimation'},
        {'algo_name': algo_name, 'predictor_name': 'real_max', 'job_power_estimation_field': 'real_max_power_estimation'},
    ]

    with open(args.workload_params) as wf:
        wl_params = json.load(wf)

    instances = dict()
    nb_instances = 0
    for instance_t in itertools.product(predictors, powercaps, powercap_durations, wl_params):
        # Merge the partial dicts of the tuple into one flat instance dict;
        # later dicts win on key conflicts (none expected here).
        instance = reduce(lambda a, b: {**a, **b}, instance_t)
        encoded_without_hash = json.dumps(instance, sort_keys=True).encode('utf-8')
        instance_hash = sha1(encoded_without_hash).hexdigest()
        instances[instance_hash] = instance
        nb_instances += 1
    # A hash collision would silently overwrite an instance; fail loudly.
    if nb_instances != len(instances):
        raise ValueError('collision: two instances have the same hash')

    # BUGFIX: the output file handle was previously never closed; close it
    # in a finally block (but never close sys.stdout).
    f = sys.stdout
    if args.output_file is not None:
        f = open(args.output_file, 'w')
    try:
        print(json.dumps(instances, sort_keys=True), file=f)
    finally:
        if f is not sys.stdout:
            f.close()
......@@ -19,3 +19,5 @@ m100-generate-batsim-workload = "expe_energumen.m100_generate_batsim_workload:ma
m100-compute-gantt-power-consumption = "expe_energumen.m100_compute_gantt_power_consumption:main"
m100-generate-expe-workload-params = "expe_energumen.m100_generate_expe_workload_params:main"
m100-generate-expe-workloads = "expe_energumen.m100_generate_expe_workloads:main"
m100-generate-expe-params = "expe_energumen.m100_generate_expe_params:main"
m100-find-max-power = "expe_energumen.m100_find_max_power:main"
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment