#!/usr/bin/env python3
"""Generate simulation instances as the cartesian product of predictors,
power caps, cap durations, platforms and workload parameters, keyed by the
SHA-1 hash of each instance's JSON description."""
import argparse
import itertools
import json
import sys
from functools import reduce
from hashlib import sha1


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("workload_params", type=str,
                        help='path to the workload params JSON file')
    parser.add_argument("platform_file", type=str,
                        help='path to the SimGrid platform file')
    parser.add_argument("-o", "--output_file", type=str,
                        help="if set, write output to this file instead of stdout")
    args = parser.parse_args()

    # Platform power characteristics.
    max_observed_total_power = 955080
    max_power_per_node = 2100.0  # unused below, kept for reference
    min_power_per_node = 240.0
    nb_nodes = 980
    # Dynamic power budget: total observed power minus the idle power of all nodes.
    max_dynamic_power = max_observed_total_power - min_power_per_node * nb_nodes

    # Power caps from 10 % to 70 % of the dynamic power budget, in 5 % steps.
    powercaps = [{
        'powercap_dynamic_value_ratio': f'{i * 0.01:.2f}',
        'powercap_dynamic_watts': int(i * 0.01 * max_dynamic_power),
        'normal_dynamic_watts': max_dynamic_power,
        'idle_watts': min_power_per_node,
    } for i in range(10, 71, 5)]

    powercap_durations = [
        {'powercap_end_time_seconds': 60 * 60 * 3},  # 3 hours
    ]

    # Job power predictors to evaluate with the scheduling algorithm.
    algo_name = 'easypower'
    predictors = [
        {'algo_name': algo_name, 'predictor_name': 'zero', 'job_power_estimation_field': 'zero_power_estimation'},
        {'algo_name': algo_name, 'predictor_name': 'mean', 'job_power_estimation_field': 'mean_power_estimation'},
        {'algo_name': algo_name, 'predictor_name': 'max', 'job_power_estimation_field': 'max_power_estimation'},
        {'algo_name': algo_name, 'predictor_name': 'upper_bound', 'job_power_estimation_field': 'upper_bound_power_estimation'},
        {'algo_name': algo_name, 'predictor_name': 'real_mean', 'job_power_estimation_field': 'real_mean_power_estimation'},
        {'algo_name': algo_name, 'predictor_name': 'real_max', 'job_power_estimation_field': 'real_max_power_estimation'},
    ]

    platforms = [
        {'platform_filepath': args.platform_file},
    ]

    # Workload params: a JSON array of objects, one per workload configuration.
    with open(args.workload_params) as wf:
        wl_params = json.load(wf)

    # Build one instance per combination by merging its parameter dicts,
    # keyed by the SHA-1 hash of the instance's sorted JSON encoding.
    nb_instances = 0
    instances = dict()
    for instance_t in itertools.product(predictors, powercaps, powercap_durations, platforms, wl_params):
        instance = reduce(lambda a, b: {**a, **b}, instance_t)
        encoded_without_hash = json.dumps(instance, sort_keys=True).encode('utf-8')
        instance_hash = sha1(encoded_without_hash).hexdigest()
        instances[instance_hash] = instance
        nb_instances += 1
    # Fires on a hash collision, or if two combinations merge into identical dicts.
    assert nb_instances == len(instances), 'collision: two instances have the same hash'

    output = json.dumps(instances, sort_keys=True, indent=2)
    if args.output_file is not None:
        # Use a context manager so the output file is properly closed.
        with open(args.output_file, 'w') as f:
            print(output, file=f)
    else:
        print(output, file=sys.stdout)


if __name__ == '__main__':
    main()
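# Example invocation (script and file names are hypothetical):
#   ./generate_instances.py workload_params.json platform.xml -o instances.json
# The workload params file must contain a JSON array of objects, e.g.
# [{"workload_filepath": "w0.json"}, ...] (field names hypothetical), since
# each element is dict-merged into the generated instances above.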