diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..d5fc5f8 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +*.tar.* filter=lfs diff=lfs merge=lfs -text diff --git a/.gitignore b/.gitignore index ed7863a..3c0ad8b 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,43 @@ # Added by cargo /target +/data/ + + + +## Python +# Byte-compiled / optimized / DLL files +analysis/__pycache__/ +analysis/*.py[codz] +analysis/*$py.class + +# C extensions +*.so + +# Distribution / packaging +analysis/.Python +analysis/build/ +analysis/develop-eggs/ +analysis/dist/ +analysis/downloads/ +analysis/eggs/ +analysis/.eggs/ +analysis/lib/ +analysis/bin/ +analysis/lib64 +analysis/share +analysis/parts/ +analysis/sdist/ +analysis/var/ +analysis/wheels/ +analysis/share/python-wheels/ +analysis/*.egg-info/ +analysis/.installed.cfg +analysis/*.egg +analysis/MANIFEST + +.idea/ +jobs.yaml +results*.d/ +scripts*.d/ +inventories*.d/ diff --git a/analysis/after.pdf b/analysis/after.pdf new file mode 100644 index 0000000..6024c1b Binary files /dev/null and b/analysis/after.pdf differ diff --git a/analysis/baseline_consumption_clusters.pdf b/analysis/baseline_consumption_clusters.pdf new file mode 100644 index 0000000..70c0177 Binary files /dev/null and b/analysis/baseline_consumption_clusters.pdf differ diff --git a/analysis/baseline_consumption_clusters.png b/analysis/baseline_consumption_clusters.png new file mode 100644 index 0000000..19f472e Binary files /dev/null and b/analysis/baseline_consumption_clusters.png differ diff --git a/analysis/baseline_consumption_clusters_processors.pdf b/analysis/baseline_consumption_clusters_processors.pdf new file mode 100644 index 0000000..98f20da Binary files /dev/null and b/analysis/baseline_consumption_clusters_processors.pdf differ diff --git a/analysis/cv_per_tool_per_cluster.pdf b/analysis/cv_per_tool_per_cluster.pdf new file mode 100644 index 0000000..8427091 Binary files /dev/null and b/analysis/cv_per_tool_per_cluster.pdf differ diff --git a/analysis/cv_per_tool_per_cluster.png b/analysis/cv_per_tool_per_cluster.png new file mode 100644 index 0000000..a680489 Binary files /dev/null and b/analysis/cv_per_tool_per_cluster.png differ diff --git a/analysis/cv_per_tool_per_cluster_b.pdf b/analysis/cv_per_tool_per_cluster_b.pdf new file mode 100644 index 0000000..4cb1b14 Binary files /dev/null and b/analysis/cv_per_tool_per_cluster_b.pdf differ diff --git a/analysis/cv_per_tool_per_cluster_cores.pdf b/analysis/cv_per_tool_per_cluster_cores.pdf new file mode 100644 index 0000000..31fcbb8 Binary files /dev/null and b/analysis/cv_per_tool_per_cluster_cores.pdf differ diff --git a/analysis/cv_per_tool_per_cluster_pkg.pdf b/analysis/cv_per_tool_per_cluster_pkg.pdf new file mode 100644 index 0000000..83e23ba Binary files /dev/null and b/analysis/cv_per_tool_per_cluster_pkg.pdf differ diff --git a/analysis/cv_per_tool_per_cluster_ram.pdf b/analysis/cv_per_tool_per_cluster_ram.pdf new file mode 100644 index 0000000..b68ce77 Binary files /dev/null and b/analysis/cv_per_tool_per_cluster_ram.pdf differ diff --git a/analysis/cv_per_tool_per_domain.pdf b/analysis/cv_per_tool_per_domain.pdf new file mode 100644 index 0000000..6fa235f Binary files /dev/null and b/analysis/cv_per_tool_per_domain.pdf differ diff --git a/analysis/cv_per_tool_per_proc_.pdf b/analysis/cv_per_tool_per_proc_.pdf new file mode 100644 index 0000000..80ad457 Binary files /dev/null and b/analysis/cv_per_tool_per_proc_.pdf differ diff --git a/analysis/cv_per_tool_per_proc_cores.pdf 
b/analysis/cv_per_tool_per_proc_cores.pdf new file mode 100644 index 0000000..79b063c Binary files /dev/null and b/analysis/cv_per_tool_per_proc_cores.pdf differ diff --git a/analysis/cv_per_tool_per_proc_pkg.pdf b/analysis/cv_per_tool_per_proc_pkg.pdf new file mode 100644 index 0000000..9bbab95 Binary files /dev/null and b/analysis/cv_per_tool_per_proc_pkg.pdf differ diff --git a/analysis/cv_per_tool_per_proc_ram.pdf b/analysis/cv_per_tool_per_proc_ram.pdf new file mode 100644 index 0000000..c93e622 Binary files /dev/null and b/analysis/cv_per_tool_per_proc_ram.pdf differ diff --git a/analysis/data_analysis.py b/analysis/data_analysis.py index 5d658b7..d6ea538 100644 --- a/analysis/data_analysis.py +++ b/analysis/data_analysis.py @@ -1,16 +1,33 @@ # IMPORTS import os +import argparse import sys import polars as pl +import numpy as np +import gc + import schemas -import extract import load import rq1 import rq2 import rq3 import rq34 +import utils import visualization +import matplotlib.pyplot as plt +import seaborn as sns +from pprint import pprint +import re +import test_file_load +TOOLS = ["hwpc", "codecarbon", "alumet", "scaphandre", "vjoule"] +palette_for_tools = { + "hwpc": "#ef5552", + "codecarbon": "#c9fb36", + "alumet": "#00cdfe", + "scaphandre": "#fcaf3f", + "vjoule": "#9f2281", +} vendor_generation_map = { "E5-2620 v4": { @@ -18,269 +35,403 @@ "vendor": "Intel", "generation": 6, "launch_date": "Q1 2016", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "E5-2630L v4": { "architecture": "Broadwell-E", "vendor": "Intel", "generation": 6, "launch_date": "Q1 2016", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "E5-2698 v4": { "architecture": "Broadwell-E", "vendor": "Intel", "generation": 6, "launch_date": "Q1 2016", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "E5-2630 v3": { "architecture": "Haswell-E", "vendor": "Intel", "generation": 5, "launch_date": "Q3 2014", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "Gold 5220": { "architecture": "Cascade Lake-SP", "vendor": "Intel", "generation": 10, "launch_date": "Q2 2019", + "numa_nodes_number": "1", + "numa_nodes_first_cpus": [0], }, "Gold 5218": { "architecture": "Cascade Lake-SP", "vendor": "Intel", "generation": 10, "launch_date": "Q2 2019", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "i7-9750H": { "architecture": "Coffee Lake", "vendor": "Intel", "generation": 9, "launch_date": "Q2 2019", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "Silver 4314": { "architecture": "Ice Lake-SP", "vendor": "Intel", "generation": 10, "launch_date": "Q2 2021", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "Gold 5320": { "architecture": "Ice Lake-SP", "vendor": "Intel", "generation": 10, "launch_date": "Q2 2021", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "Gold 6126": { "architecture": "Skylake-SP", "vendor": "Intel", "generation": 6, "launch_date": "Q3 2017", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "Gold 6130": { "architecture": "Skylake-SP", "vendor": "Intel", "generation": 6, "launch_date": "Q3 2017", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "E5-2620": { "architecture": "Sandy Bridge-EP", "vendor": "Intel", "generation": 3, "launch_date": "Q1 2012", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "E5-2630": { "architecture": "Sandy Bridge-EP", "vendor": "Intel", "generation": 3, "launch_date": "Q1 2012", + 
"numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "E5-2630L": { "architecture": "Sandy Bridge-EP", "vendor": "Intel", "generation": 3, "launch_date": "Q1 2012", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "E5-2660": { "architecture": "Sandy Bridge-EP", "vendor": "Intel", "generation": 3, "launch_date": "Q1 2012", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "X5670": { + "architecture": "Westmere-EP", + "vendor": "Intel", + "generation": 1, + "launch_date": "Q1 2010", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "7301": { "architecture": "Zen", "vendor": "AMD", "generation": 1, "launch_date": "Q2 2017", + "numa_nodes_number": "8", + "numa_nodes_first_cpus": [0, 1, 2, 3, 4, 5, 6, 7], }, "7352": { "architecture": "Zen 2", "vendor": "AMD", "generation": 2, "launch_date": "Q3 2019", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "7452": { "architecture": "Zen 2", "vendor": "AMD", "generation": 2, "launch_date": "Q3 2019", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "7642": { "architecture": "Zen 2", "vendor": "AMD", "generation": 2, "launch_date": "Q3 2019", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, "7742": { "architecture": "Zen 2", "vendor": "AMD", "generation": 2, "launch_date": "Q3 2019", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "250": { + "architecture": "Opteron", + "vendor": "AMD", + "generation": 1, + "launch_date": "Q4 2004", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "99xx": { + "architecture": "ThunderX2", + "vendor": "Cavium", + "generation": 1, + "launch_date": "Q2 2016", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], }, } -def main(): - - test = sys.argv[1] - if test == "test": - test = True - else: - test = False - - debian11_energy_stats_df = energy_for_os( - "debian11-5.10-0", - r"batches/debian11-5\.10-0\.d/results-debian11-5\.10-0\.d/([^/]+)/([^/]+)/([^/]+)/[^_]*_([^_]+).*", - test, - ) - ubuntu2404_energy_stats_df = energy_for_os( - "ubuntu2404nfs-6.8-0", - r"batches/ubuntu2404nfs-6\.8-0\.d/results-ubuntu2404nfs-6\.8-0\.d/([^/]+)/([^/]+)/([^/]+)/[^_]*_([^_]+).*", - test, +def main(batch_identifier=""): + print("Starting") + test_file_load.test_all_files( + results_dir="../data/ubuntu2404nfs-6.8-6.d/results-ubuntu2404nfs-6.8-6.d/rennes/parasilo/parasilo-24", + nb_core=32, + nb_ops=25_000, ) - powerapi_energy_stats_df = energy_for_os( - "powerapi", - r"batches/powerapi\.d/results-powerapi\.d/([^/]+)/([^/]+)/([^/]+)/[^_]*_([^_]+).*", - test, + inventories_directory = ( + f"../data/{batch_identifier}.d/inventories-{batch_identifier}.d" + ) + results_directory = f"../data/{batch_identifier}.d/results-{batch_identifier}.d" + ( + perf_frequency, + hwpc_frequency, + codecarbon_frequency, + alumet_frequency, + scaphandre_frequency, + vjoule_frequency, + ) = load.load_frequency( + batch_identifier=batch_identifier, results_directory=results_directory ) - - rq3.correlation_perf_perf_hwpc_hwpc_cv_os(ubuntu2404_energy_stats_df, debian11_energy_stats_df, "alone") - rq1.correlation_perf_hwpc_cv(debian11_energy_stats_df, "alone", "debian11 Kernel 5.10") - rq1.correlation_perf_hwpc_cv(debian11_energy_stats_df, "not_alone", "debian11 Kernel 5.10") - rq1.correlation_perf_hwpc_cv(ubuntu2404_energy_stats_df, "alone", "ubuntu2404 Kernel 6.8") - rq1.correlation_perf_hwpc_cv(ubuntu2404_energy_stats_df, "not_alone", "ubuntu2404 Kernel 6.8") - - 
rq2.boxplots_perf_hwpc_cv_processor(debian11_energy_stats_df, "processor_detail", "pkg_coefficient_of_variation", "job", "25 000 Operations")
+    print("Alumet frequency", alumet_frequency.describe())
+    print("Alumet columns", alumet_frequency.columns)
+    print("Perf columns", perf_frequency.columns)
+    perf_and_alumet = perf_frequency.sql("SELECT * FROM self WHERE tool = 'alumet'")
+    perf_and_alumet = perf_and_alumet.join(
+        other=alumet_frequency,
+        left_on=["node", "g5k_cluster", "frequency", "iteration"],
+        right_on=["node", "g5k_cluster", "frequency", "iteration"],
+        how="left",
+        validate="1:1",
+    )
+    print("Joined perf and alumet", perf_and_alumet.describe())
-    concatenated_dfs = pl.concat([debian11_energy_stats_df, ubuntu2404_energy_stats_df])
-    concatenated_dfs = concatenated_dfs.sql(
-        "SELECT * FROM self WHERE nb_ops_per_core > 25"
+    baseline_consumptions = load.load_baseline(
+        batch_identifier=batch_identifier, results_directory=results_directory
     )
-    joined_df = ubuntu2404_energy_stats_df.join(
-        debian11_energy_stats_df,
-        on=["node", "nb_ops_per_core", "nb_core", "job"],
-        suffix="_debian",
+    # Bucket temperatures into 5 °C ranges, e.g. floor(53.4 / 5.0) * 5.0 == 50.0
+    baseline_consumptions = baseline_consumptions.sql(
+        """SELECT
+            g5k_cluster,
+            floor(average_temperature / 5.0)*5.0 as range_temperature_low,
+            floor(average_temperature / 5.0)*5.0 + 5.0 as range_temperature_high,
+            avg(pkg) as average_pkg,
+            avg(ram) as average_ram
+        FROM
+            self
+        GROUP BY
+            g5k_cluster,
+            floor(average_temperature / 5.0)*5.0
+        ORDER BY
+            g5k_cluster
+        """
+    )
+    node = "parasilo-24"
+    separator = "-"
+    cluster = node.split(separator)[0]
+    temperature = 53.4
+    print(
+        f"Average consumptions of cluster containing {node} : ",
+        baseline_consumptions.sql(
+            f"""SELECT g5k_cluster, range_temperature_high, range_temperature_low, average_pkg, average_ram
+            FROM self
+            WHERE g5k_cluster = '{cluster}'
+            """
+        ),
+    )
+    print(
+        f"Average consumptions of cluster containing {node} at {temperature}°C : ",
+        baseline_consumptions.sql(
+            f"""
+            SELECT
+                g5k_cluster,
+                average_pkg,
+                average_ram
+            FROM self
+            WHERE
+                g5k_cluster = '{cluster}'
+                AND {temperature} between range_temperature_low and range_temperature_high """
+        ),
+    )
+    sns.scatterplot(
+        data=baseline_consumptions, x="average_temperature", y="pkg", hue="g5k_cluster"
+    )
+    plt.show()
-    # Get rid of 25 OPS as it may be unrelevant
-    joined_df = joined_df.sql("SELECT * FROM self WHERE nb_ops_per_core > 25")
+    # TODO
+    # 1 Missing references, reasoning and methodology
+    # TODO
+    ## a. Consensus search and comparison of the approaches
+    # TODO
+    ## b. Inventory of the tools and their characteristics
+    # TODO
+    #### Table 1.b.1 Table of the tools: approach, language, philosophy, maturity, hardware coverage
+    # TODO
+    # 2 What influence does the environment have on the studied tools?
+    # TODO
+    ## a. Environment parameters
+    # TODO
+    ### i. Hardware
+    # TODO
+    ### ii. Distribution & kernel
+    # TODO
+    ### iii. Governor
+    # TODO
+    ### iv. Turbo-boost
+    # TODO
+    #### v. Pinning?
+    # TODO
+    ## b. Evaluated criteria
+    # TODO
+    ### i. Coefficient of variation
+    # TODO
+    ### ii. Deployability
-    # RQ3/4
-    rq34.os_comparison_boxplots_processor_versions_pkg_all(
-        [debian11_energy_stats_df, ubuntu2404_energy_stats_df]
+    # TODO
+    # 3 What influence does the measurement frequency have on the studied tools?
+    # TODO
+    ## a. Changes in source code
+    # TODO
+    ## b. Reference
+    # TODO
+    ## c. Reached frequency
+    # TODO
+    ### i. Evaluated criteria
+    # TODO
+    ### ii. Reached frequency
+    # DONE
+    #### Figure 3.a.ii.1 (lineplot f(target_frequency) = reached_frequency + optional: interval distribution?)
+    target_vs_reached_frequency(
+        hwpc_frequency, [1, 10, 100, 1000], {"tool": "hwpc", "unit": "milliseconds"}
     )
-    rq34.os_comparison_boxplots_processor_versions_ram_all(
-        [debian11_energy_stats_df, ubuntu2404_energy_stats_df]
+    target_vs_reached_frequency(
+        codecarbon_frequency,
+        [1, 10, 100, 1000],
+        {"tool": "codecarbon", "unit": "seconds"},
     )
-    print("Heatmaps pkg perf alone")
-    rq34.os_comparison_heatmap_processor_versions_pkg_nb_ops(joined_df.sql("SELECT * FROM self WHERE job = 'perf_alone'"), "PERF")
-    print("Heatmaps pkg hwpc alone")
-    rq34.os_comparison_heatmap_processor_versions_pkg_nb_ops(joined_df.sql("SELECT * FROM self WHERE job = 'hwpc_alone'"), "HWPC")
-    print("Heatmaps ram perf alone")
-    rq34.os_comparison_heatmap_processor_versions_ram_nb_ops(joined_df.sql("SELECT * FROM self WHERE job = 'perf_alone'"), "PERF")
-    print("Heatmaps ram hwpc alone")
-    rq34.os_comparison_heatmap_processor_versions_ram_nb_ops(joined_df.sql("SELECT * FROM self WHERE job = 'hwpc_alone'"), "HWPC")
-    rq34.os_comparison_heatmap_processor_versions_pkg_percent_used(joined_df)
-    rq34.os_comparison_heatmap_processor_versions_ram_percent_used(joined_df)
-    rq34.debian_facetgrid_processor_versions_pkg_cv_nb_ops(debian11_energy_stats_df.sql("SELECT * FROM self WHERE nb_ops_per_core > 25"))
-    rq34.debian_facetgrid_processor_versions_ram_cv_nb_ops(debian11_energy_stats_df.sql("SELECT * FROM self WHERE nb_ops_per_core > 25"))
-    rq34.ubuntu_facetgrid_processor_versions_pkg_cv_nb_ops(ubuntu2404_energy_stats_df.sql("SELECT * FROM self WHERE nb_ops_per_core > 25"))
-    rq34.ubuntu_facetgrid_processor_versions_ram_cv_nb_ops(ubuntu2404_energy_stats_df.sql("SELECT * FROM self WHERE nb_ops_per_core > 25"))
+    target_vs_reached_frequency(
+        alumet_frequency, [1, 10, 100, 1000], {"tool": "alumet", "unit": "seconds"}
+    )
+    target_vs_reached_frequency(
+        scaphandre_frequency,
+        [1, 10, 100, 1000],
+        {"tool": "scaphandre", "unit": "seconds"},
+    )
+    target_vs_reached_frequency(
+        vjoule_frequency, [1, 10, 100, 1000], {"tool": "vjoule", "unit": "seconds"}
+    )
+    # TODO
+    #### Figure 3.a.ii.2 Heatmap ratio |(perf - tool)/((perf+tool)/2)|
+    # TODO
+    ## d. Influence on energy consumption
+    # TODO
+    ### i. Baseline measurement protocol
+    # TODO
+    # Use of this work
-def energy_for_os(os_flavor, results_directory_match, test):
-    if test:
-        energy_stats_csv_file = (
-            f"batches/{os_flavor}.d/{os_flavor}_energy_stats_sample.csv"
-        )
-    else:
-        energy_stats_csv_file = f"batches/{os_flavor}.d/{os_flavor}_energy_stats.csv"
-    if os.path.exists(energy_stats_csv_file):
-        return pl.read_csv(energy_stats_csv_file)
-    results_directory: str = f"batches/{os_flavor}.d/results-{os_flavor}.d/"
-    inventories_directory: str = f"batches/{os_flavor}.d/inventories-{os_flavor}.d/"
-    (hwpc_files, perf_files) = extract.extract_csv_files(results_directory)
+    # TODO
+    # Discussions
-    nodes_df = extract.extract_json_files(
-        directory=inventories_directory, schema=schemas.nodes_configuration_columns
-    )
+    # TODO
+    # Conclusion
-    nodes_df = nodes_df.with_columns(
-        [
-            # (pl.col("processor_version").map_elements(lambda x: f"{x}\nGen: {vendor_generation_map[x]['architecture']}\nRelease: {vendor_generation_map[x]['launch_date']}", return_dtype=pl.String).alias("processor_detail")),
-            (
-                pl.col("processor_version")
-                .map_elements(
-                    lambda x: f"{x}\n{vendor_generation_map[x]['architecture']}",
-                    return_dtype=pl.String,
-                )
-                .alias("processor_detail")
-            ),
-            (
-                pl.col("processor_version")
-                .map_elements(
-                    lambda x: vendor_generation_map[x]["generation"],
-                    return_dtype=pl.String,
-                )
-                .alias("processor_generation")
-            ),
-            (
-                pl.col("processor_version")
-                .map_elements(
-                    lambda x: vendor_generation_map[x]["vendor"], return_dtype=pl.String
-                )
-                .alias("processor_vendor")
-            ),
-        ]
-    )
-    print("Nodes Configuration glimpse:\n", nodes_df.head())
+def target_vs_reached_frequency(frequency_df, frequencies, metadatada):
+    target_frequencies = []
+    reached_frequencies = []
+    for frequency in frequencies:
+        timestamps = frequency_df.sql(
+            f"SELECT timestamp FROM self WHERE frequency = {frequency} AND iteration = 1 AND node = 'parasilo-24'"
+        ).to_numpy()
+        shapes = timestamps.shape
+        timestamps = np.sort(timestamps.reshape(1, shapes[0]))
+        # t2 - t1
+        intervals = timestamps[0, 1:] - timestamps[0, :-1]
+        print(f"Intervals {metadatada['tool']}: \n", intervals[:5])
+        if metadatada["unit"] == "milliseconds":
+            instant_frequencies = 1_000 / intervals
+        elif metadatada["unit"] == "seconds":
+            instant_frequencies = 1 / intervals
+        print(f"Instant frequencies {metadatada['tool']}: \n", instant_frequencies[:5])
+        reached_frequencies += instant_frequencies.tolist()
+        target_frequencies += [frequency] * len(instant_frequencies)
+        del timestamps
+        gc.collect()
-    # Data Exploration
-    (hwpc_results, perf_results) = load.load_results(
-        hwpc_files, perf_files, results_directory_match, test
-    )
     print(
-        "HWPC Results glimpse:\n",
-        hwpc_results.head(),
-        "\nHWPC Results stats:\n",
-        hwpc_results.describe(),
+        "target_frequencies : ",
+        target_frequencies[:10],
+        target_frequencies[100:110],
+        target_frequencies[1000:1010],
+        target_frequencies[4000:4010],
     )
-    print(hwpc_results.sql("select energy_pkg from self").describe())
     print(
-        "Perf Results glimpse:\n",
-        perf_results.head(),
-        "\nPerf Results stats:\n",
-        perf_results.describe(),
+        "reached_frequencies : ",
+        reached_frequencies[:10],
+        reached_frequencies[100:110],
+        reached_frequencies[1000:1010],
+        reached_frequencies[4000:4010],
     )
+    sns.lineplot(x=target_frequencies, y=reached_frequencies, errorbar="pi")
+    sns.lineplot(x=[1, 1_000], y=[1, 1_000], label="f(x)=x", linestyle="dashed")
+    plt.xscale("log")
+    plt.xlabel("Target frequency (Hz)")
+    plt.yscale("log")
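+    # Worked example of the interval-to-frequency computation above (made-up numbers,
+    # assuming millisecond timestamps): timestamps [0.0, 10.0, 21.0] give intervals
+    # [10.0, 11.0], hence instantaneous frequencies 1_000 / 10.0 = 100.0 Hz and
+    # 1_000 / 11.0 ≈ 90.9 Hz against a 100 Hz target, i.e. points just under f(x)=x.
+    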
plt.ylabel("Reached frequency (Hz)") - energy_stats_df = load.load_energy(hwpc_results, perf_results, nodes_df, os_flavor) - energy_stats_df.write_csv(energy_stats_csv_file, separator=",") - - return energy_stats_df + plt.title(f"Target vs Reached Frequencies for {metadatada['tool']}") + plt.show() if __name__ == "__main__": - main() + main(batch_identifier="ubuntu2404nfs-6.8-6") diff --git a/analysis/execution_time.py b/analysis/execution_time.py deleted file mode 100644 index e110f8d..0000000 --- a/analysis/execution_time.py +++ /dev/null @@ -1,71 +0,0 @@ -import os -import glob -import pandas as pd - -def compute_mean_std(directory, nb_ops): - # Define the file pattern to search for - pattern = os.path.join(directory, f"**/perf_*_{nb_ops}.csv") - files = glob.glob(pattern, recursive=True) - - if not files: - print(f"No files found for NB_OPS={nb_ops}") - return - - time_elapsed_values = [] - - # Loop through all matching files - for file in files: - try: - # Read the CSV file - df = pd.read_csv(file) - # Append the time_elapsed column to the list - time_elapsed_values.extend(df["time_elapsed"].dropna()) - except Exception as e: - print(f"Error reading file {file}: {e}") - - if not time_elapsed_values: - print(f"No valid time_elapsed values found in files for NB_OPS={nb_ops}") - return - - # Compute mean and standard deviation - mean_time = sum(time_elapsed_values) / len(time_elapsed_values) - std_dev_time = (sum((x - mean_time) ** 2 for x in time_elapsed_values) / len(time_elapsed_values)) ** 0.5 - - print(f"Results for NB_OPS={nb_ops}:") - print(f" Mean time_elapsed: {mean_time:.6f} seconds") - print(f" Standard deviation: {std_dev_time:.6f} seconds") - -# Example usage -# Replace "your_directory_path" with the actual path to the directory containing the files -print("For Ubuntu") -directory = "./batches/ubuntu2404nfs-6.8-0.d/results-ubuntu2404nfs-6.8-0.d/" -nb_ops = 25 # Change this to 250, 2500, or 25000 as needed -compute_mean_std(directory, nb_ops) -nb_ops = 250 # Change this to 250, 2500, or 25000 as needed -compute_mean_std(directory, nb_ops) -nb_ops = 2500 # Change this to 250, 2500, or 25000 as needed -compute_mean_std(directory, nb_ops) -nb_ops = 25000 # Change this to 250, 2500, or 25000 as needed -compute_mean_std(directory, nb_ops) - -print("For Debian") -directory = "./batches/debian11-5.10-0.d/results-debian11-5.10-0.d/" -nb_ops = 25 # Change this to 250, 2500, or 25000 as needed -compute_mean_std(directory, nb_ops) -nb_ops = 250 # Change this to 250, 2500, or 25000 as needed -compute_mean_std(directory, nb_ops) -nb_ops = 2500 # Change this to 250, 2500, or 25000 as needed -compute_mean_std(directory, nb_ops) -nb_ops = 25000 # Change this to 250, 2500, or 25000 as needed -compute_mean_std(directory, nb_ops) - -print("For Powerapi") -directory = "./results_powerapi2u" -nb_ops = 25 # Change this to 250, 2500, or 25000 as needed -compute_mean_std(directory, nb_ops) -nb_ops = 250 # Change this to 250, 2500, or 25000 as needed -compute_mean_std(directory, nb_ops) -nb_ops = 2500 # Change this to 250, 2500, or 25000 as needed -compute_mean_std(directory, nb_ops) -nb_ops = 25000 # Change this to 250, 2500, or 25000 as needed -compute_mean_std(directory, nb_ops) diff --git a/analysis/extract.py b/analysis/extract.py deleted file mode 100644 index 25abefd..0000000 --- a/analysis/extract.py +++ /dev/null @@ -1,147 +0,0 @@ -import os -import json -import csv -import re -from typing import Tuple, List -import polars as pl - - -# Extract CSV to Polars DataFrames -# Extract HWPC & PERF 
CSVs -def extract_csv_files(directory: str) -> Tuple[List[str], List[str]]: - hwpc_files = [] - perf_files = [] - for site in os.scandir(directory): - for cluster in os.scandir(site.path): - for node in os.scandir(cluster.path): - if node.is_dir(): - for filename in os.scandir(node.path): - - if filename.path.endswith(".csv"): - if filename.name.startswith("hwpc"): - hwpc_files.append(filename.path) - elif filename.name.startswith("perf"): - perf_files.append(filename.path) - return hwpc_files, perf_files - - -def read_hwpc_csv(file_path: str, results_directory_match: str): - (site, cluster, node, task) = re.match(results_directory_match, file_path).groups() - with_perf = False - if task == "and": - with_perf = True - rows = [] - with open(file_path, "r") as csv_file: - reader = csv.reader(csv_file) - next(reader) # Skip header - for row in reader: - parsed_row = ( - int(row[0]), - row[1], - row[2], - int(row[3]), - int(row[4]), - int(row[5]) if row[5] else None, - int(row[6]) if row[6] else None, - int(row[7]) if row[7] else None, - int(row[8]), - int(row[9]), - int(row[10]), - int(row[11]), - int(row[12]), - with_perf, - site, - cluster, - node, - ) - rows.append(parsed_row) - - return rows - - -def read_perf_csv(file_path: str, results_directory_match: str): - (site, clstr, node, task) = re.match(results_directory_match, file_path).groups() - with_hwpc = False - if task == "and": - with_hwpc = True - rows = [] - with open(file_path, "r") as csv_file: - reader = csv.DictReader(csv_file) - for row in reader: - try: - power_energy_pkg = float(row["power_energy_pkg"]) - except ValueError: - power_energy_pkg = 0.0 - try: - power_energy_ram = float(row["power_energy_ram"]) - except ValueError: - power_energy_ram = 0.0 - try: - power_energy_cores = (float(row["power_energy_cores"]),) - except ValueError: - power_energy_cores = 0.0 - parsed_row = ( - float(power_energy_pkg), - float(power_energy_ram), - float(power_energy_cores), - float(row["time_elapsed"]), - int(row["nb_core"]), - int(row["nb_ops_per_core"]), - int(row["iteration"]), - bool(with_hwpc), - site, - clstr, - node, - ) - rows.append(parsed_row) - return rows - - -# Extract JSON nodes information - - -def extract_json_files(directory: str, schema: str): - - nodes_df = pl.DataFrame(schema=schema, strict=True) - - for site in os.scandir(directory): - for cluster in os.scandir(site.path): - - for node in os.scandir(cluster.path): - if node.name.endswith(".json"): - with open(node.path, "r") as json_file: - data = json.load(json_file) - # Assuming proper parsing and casting here - node = ( - data["uid"], - data["cluster"], - bool(data["exotic"]), - int(data["architecture"]["nb_cores"]), - int(data["architecture"]["nb_threads"]), - data["processor"]["vendor"], - int(data["processor"]["clock_speed"]), - data["processor"]["instruction_set"], - bool(data["processor"]["ht_capable"]), - data["processor"]["microarchitecture"], - data["processor"]["microcode"], - data["processor"]["model"], - data["processor"]["version"], - data["operating_system"]["cstate_driver"], - data["operating_system"]["cstate_governor"], - data["operating_system"]["pstate_driver"], - data["operating_system"]["pstate_governor"], - bool(data["operating_system"]["turboboost_enabled"]), - ) - - nodes_df = pl.concat( - [ - nodes_df, - pl.from_records( - schema=schema, - data=[node], - strict=True, - orient="row", - ), - ] - ) - return nodes_df diff --git a/analysis/frequency_measurements_diff.pdf b/analysis/frequency_measurements_diff.pdf new file mode 100644 index 
0000000..30ae060 Binary files /dev/null and b/analysis/frequency_measurements_diff.pdf differ diff --git a/analysis/frequency_measurements_diff.png b/analysis/frequency_measurements_diff.png new file mode 100644 index 0000000..7aa9fee Binary files /dev/null and b/analysis/frequency_measurements_diff.png differ diff --git a/analysis/frequency_measurements_diff_pkg.pdf b/analysis/frequency_measurements_diff_pkg.pdf new file mode 100644 index 0000000..d50553b Binary files /dev/null and b/analysis/frequency_measurements_diff_pkg.pdf differ diff --git a/analysis/frequency_measurements_diff_ram.pdf b/analysis/frequency_measurements_diff_ram.pdf new file mode 100644 index 0000000..23f9e6c Binary files /dev/null and b/analysis/frequency_measurements_diff_ram.pdf differ diff --git a/analysis/load.py b/analysis/load.py index 76449d7..30e566a 100644 --- a/analysis/load.py +++ b/analysis/load.py @@ -1,114 +1,565 @@
 import schemas
-import extract
 from typing import *
 import polars as pl
 from tqdm import tqdm
 from math import ldexp
+import numpy as np
+import os
+import json
+import csv
+import re
+from typing import Tuple, List
+import utils
+from datetime import datetime
+import polars.selectors as cs
+TOOLS = ["hwpc", "codecarbon", "alumet", "scaphandre", "vjoule"]
-def load_hwpc_results(hwpc_df):
-    print(hwpc_df.sql("select rapl_energy_pkg from self").describe())
-    hwpc_results = pl.sql(
-        """
-        SELECT node, nb_core, nb_ops_per_core, iteration, alone,
-        SUM(rapl_energy_pkg) as energy_pkg_int,
-        SUM(rapl_energy_cores) as energy_cores_int,
-        SUM(rapl_energy_dram) as energy_ram_int
-        FROM hwpc_df
-        GROUP BY sensor, target, socket, cpu, node, nb_core,
-        nb_ops_per_core, iteration, alone
+
+# Extract CSV to Polars DataFrames
+# Extract HWPC, Perf, Codecarbon, alumet, vjoule and scaphandre files
+
+
+def extract_csv_files(directory: str) -> Tuple[List[str], ...]:
+    """Collect the per-tool CSV result file paths found under a results directory."""
-    ).collect()
+    hwpc_files = []
+    perf_files = []
+    codecarbon_files = []
+    alumet_files = []
+    vjoule_files = []
+    scaphandre_files = []
+    for site in os.scandir(directory):
+        for g5k_cluster in os.scandir(site.path):
+            for node in os.scandir(g5k_cluster.path):
+                if node.is_dir():
+                    for filename in os.scandir(node.path):
+                        if filename.path.endswith(".csv"):
+                            if filename.name.startswith("hwpc"):
+                                hwpc_files.append(filename.path)
+                            elif filename.name.startswith("perf"):
+                                perf_files.append(filename.path)
+                            elif filename.name.startswith("codecarbon"):
+                                codecarbon_files.append(filename.path)
+                            elif filename.name.startswith("alumet"):
+                                alumet_files.append(filename.path)
+                            elif filename.name.startswith("vjoule"):
+                                vjoule_files.append(filename.path)
+                            elif filename.name.startswith("scaphandre"):
+                                scaphandre_files.append(filename.path)
+    return (
+        hwpc_files,
+        perf_files,
+        codecarbon_files,
+        alumet_files,
+        scaphandre_files,
+        vjoule_files,
+    )
+
+
+# Parse HWPC files; the PKG, cores or RAM counters can be missing, in which case we store 0.
+# Conversion happens later because the measurements are 32.32 fixed-point values (each one needs ldexp(x, -32)).
+
+
+def read_hwpc_csv(file_path: str, results_directory_match: str):
+    """Parse one HWPC CSV file into rows tagged with the metadata encoded in its path."""
+    # Paths follow this format (placeholders reconstructed from the capture groups):
+    # {site}/{g5k_cluster}/{node}/{tool}_{task}_{nb_cores}_{nb_ops_per_core}.csv
+    (site, g5k_cluster, node, task, nb_cores, nb_ops_per_core) = re.match(
+        results_directory_match, file_path
+    ).groups()
+    parsed_and_converted_rows = []
+    with open(file_path, "r") as csv_file:
+        reader = csv.DictReader(csv_file)
+        for raw_row in reader:
+            parsed_row = (
+                int(raw_row["timestamp"]),
+                raw_row["sensor"],
+                raw_row["target"],
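+                # Note: the RAPL counters stay as raw 32.32 fixed-point integers at
+                # this stage; load_hwpc_results converts them later via ldexp, e.g.
+                # math.ldexp(6_442_450_944, -32) == 1.5 (illustrative value, in joules).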
+                int(raw_row["socket"]),
+                int(raw_row["cpu"]),
+                int(raw_row["rapl_energy_pkg"]) if raw_row["rapl_energy_pkg"] else 0,
+                int(raw_row["rapl_energy_dram"]) if raw_row["rapl_energy_dram"] else 0,
+                int(raw_row["rapl_energy_cores"])
+                if raw_row["rapl_energy_cores"]
+                else 0,
+                int(raw_row["time_enabled"]),
+                int(raw_row["time_running"]),
+                int(raw_row["nb_core"]),
+                int(raw_row["nb_ops_per_core"]),
+                int(raw_row["iteration"]),
+                task,
+                site,
+                g5k_cluster,
+                node,
+            )
+            parsed_and_converted_rows.append(parsed_row)
+    return parsed_and_converted_rows
+
+
+def load_hwpc_results(hwpc_df):
+    """Deduplicate, aggregate and convert raw HWPC reports into per-run energy rows."""
+    # HWPC, by default, produces reports with one row for each (socket, cpu) combination.
+    # Since some system counters (such as RAPL PKG) are shared by all CPUs of a given
+    # socket, we have to filter out redundant values to avoid counting the same energy
+    # twice: we keep only the first CPU of each NUMA node (the numa_nodes_first_cpus list).
+    hwpc_df = hwpc_df.filter(pl.col("cpu").is_in(pl.col("numa_nodes_first_cpus")))
+    print(
+        "HWPC rows :",
+        hwpc_df.sql(
+            "SELECT node, cpu, numa_nodes_first_cpus, rapl_energy_pkg, rapl_energy_cores, rapl_energy_dram FROM self"
+        ).head(),
+    )
+    hwpc_results = hwpc_df.sql("""
+        SELECT
+            SUM(rapl_energy_cores) AS energy_cores_int,
+            SUM(rapl_energy_pkg) AS energy_pkg_int,
+            SUM(rapl_energy_dram) AS energy_ram_int,
+            nb_core,
+            nb_ops_per_core,
+            iteration,
+            task,
+            site,
+            g5k_cluster,
+            node,
+            exotic,
+            architecture_nb_cores,
+            architecture_nb_threads,
+            processor_vendor,
+            processor_clock_speed,
+            processor_instruction_set,
+            processor_ht_capable,
+            processor_microarchitecture,
+            processor_microcode,
+            processor_model,
+            processor_version,
+            os_cstate_driver,
+            os_cstate_governor,
+            os_pstate_driver,
+            os_pstate_governor,
+            os_turboboost_enabled,
+            processor_detail,
+            processor_generation
+        FROM self
+        GROUP BY
+            nb_core,
+            nb_ops_per_core,
+            iteration,
+            task,
+            site,
+            g5k_cluster,
+            node,
+            exotic,
+            architecture_nb_cores,
+            architecture_nb_threads,
+            processor_vendor,
+            processor_clock_speed,
+            processor_instruction_set,
+            processor_ht_capable,
+            processor_microarchitecture,
+            processor_microcode,
+            processor_model,
+            processor_version,
+            os_cstate_driver,
+            os_cstate_governor,
+            os_pstate_driver,
+            os_pstate_governor,
+            os_turboboost_enabled,
+            processor_detail,
+            processor_generation
+    """)
     hwpc_results = hwpc_results.with_columns(
         pl.col("energy_pkg_int")
-        .map_elements(lambda x: ldexp(x, -32) * 10e6, return_dtype=pl.Float64)
+        .map_elements(lambda x: ldexp(x, -32), return_dtype=pl.Float64)
         .alias("energy_pkg"),
     )
     hwpc_results = hwpc_results.with_columns(
         pl.col("energy_cores_int")
-        .map_elements(lambda x: ldexp(x, -32) * 10e6, return_dtype=pl.Float64)
+        .map_elements(lambda x: ldexp(x, -32), return_dtype=pl.Float64)
        .alias("energy_cores"),
     )
     hwpc_results = hwpc_results.with_columns(
         pl.col("energy_ram_int")
-        .map_elements(lambda x: ldexp(x, -32) * 10e6, return_dtype=pl.Float64)
+        .map_elements(lambda x: ldexp(x, -32), return_dtype=pl.Float64)
         .alias("energy_ram"),
     )
     hwpc_results = hwpc_results.drop(
         ["energy_pkg_int", "energy_cores_int", "energy_ram_int"]
     )
+    return hwpc_results.sql("""
+        SELECT
+            energy_cores,
+            energy_pkg,
+            energy_ram,
+            nb_core,
+            nb_ops_per_core,
+            iteration,
+            task,
+            site,
+            g5k_cluster,
+            node,
+            exotic,
+            architecture_nb_cores,
+            architecture_nb_threads,
+            processor_vendor,
+            processor_clock_speed,
+            processor_instruction_set,
+            processor_ht_capable,
+            processor_microarchitecture,
+            processor_microcode,
+            processor_model,
+            processor_version,
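+            -- same column set and order as load_perf_results and the raw_energy
+            -- frames, so all the per-tool frames can be concatenated in load_energy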
+            os_cstate_driver,
+            os_cstate_governor,
+            os_pstate_driver,
+            os_pstate_governor,
+            os_turboboost_enabled,
+            processor_detail,
+            processor_generation
+        FROM self
+    """)
-    task = pl.Series("task", ["hwpc" for i in range(hwpc_results.shape[0])])
-    hwpc_results.insert_column(1, task)
-    return hwpc_results
+# Parse Perf files; again, the PKG, cores or RAM values can be missing.
+def read_perf_csv(file_path: str, results_directory_match: str):
+    # Paths follow this format (placeholders reconstructed from the capture groups):
+    # {site}/{g5k_cluster}/{node}/{tool}_{task}_{nb_cores}_{nb_ops_per_core}.csv
+    (site, g5k_cluster, node, task, nb_cores, nb_ops_per_core) = re.match(
+        results_directory_match, file_path
+    ).groups()
+    parsed_and_converted_rows = []
+    with open(file_path, "r") as csv_file:
+        reader = csv.DictReader(csv_file)
+        for raw_row in reader:
+            parsed_row = (
+                float(raw_row["power_energy_pkg"])
+                if raw_row["power_energy_pkg"]
+                else 0.0,
+                float(raw_row["power_energy_ram"])
+                if raw_row["power_energy_ram"]
+                else 0.0,
+                float(raw_row["power_energy_cores"])
+                if raw_row["power_energy_cores"]
+                else 0.0,
+                int(raw_row["nb_core"]),
+                int(raw_row["nb_ops_per_core"]),
+                int(raw_row["iteration"]),
+                task,
+                site,
+                g5k_cluster,
+                node,
+            )
+            print("\tparsed_row :", parsed_row)
+            parsed_and_converted_rows.append(parsed_row)
+    return parsed_and_converted_rows
 def load_perf_results(perf_df):
-    perf_results = pl.sql(
-        """
-        SELECT node, nb_core, nb_ops_per_core, iteration, alone,
-        power_energy_pkg as energy_pkg,
-        power_energy_cores as energy_cores,
-        power_energy_ram as energy_ram FROM perf_df
-        """
-    ).collect()
-    perf_results = perf_results.with_columns(pl.col("energy_pkg") * 10e6)
-    perf_results = perf_results.with_columns(pl.col("energy_cores") * 10e6)
-    perf_results = perf_results.with_columns(pl.col("energy_ram") * 10e6)
-    task = pl.Series("task", ["perf" for i in range(perf_results.shape[0])])
-    perf_results.insert_column(1, task)
+    return perf_df.sql("""
+        SELECT
+            energy_cores,
+            energy_pkg,
+            energy_ram,
+            nb_core,
+            nb_ops_per_core,
+            iteration,
+            task,
+            site,
+            g5k_cluster,
+            node,
+            exotic,
+            architecture_nb_cores,
+            architecture_nb_threads,
+            processor_vendor,
+            processor_clock_speed,
+            processor_instruction_set,
+            processor_ht_capable,
+            processor_microarchitecture,
+            processor_microcode,
+            processor_model,
+            processor_version,
+            os_cstate_driver,
+            os_cstate_governor,
+            os_pstate_driver,
+            os_pstate_governor,
+            os_turboboost_enabled,
+            processor_detail,
+            processor_generation
+        FROM self
+    """)
-    return perf_results
+def read_codecarbon_csv(file_path: str, results_directory_match: str):
+    # Paths follow this format (placeholders reconstructed from the capture groups):
+    # {site}/{g5k_cluster}/{node}/{tool}_{task}_{nb_cores}_{nb_ops_per_core}.csv
+    (site, g5k_cluster, node, task, nb_cores, nb_ops_per_core) = re.match(
+        results_directory_match, file_path
+    ).groups()
+    parsed_and_converted_rows = []
+    with open(file_path, "r") as csv_file:
+        reader = csv.DictReader(csv_file)
+        for raw_row in reader:
+            # codecarbon reports energy in kWh; * 3_600_000 converts it to joules.
+            parsed_row = (
+                float(raw_row["energy_cores"]) * 3_600_000
+                if raw_row["energy_cores"]
+                else 0.0,
+                float(raw_row["energy_pkg"]) * 3_600_000
+                if raw_row["energy_pkg"]
+                else 0.0,
+                float(raw_row["energy_ram"]) * 3_600_000
+                if raw_row["energy_ram"]
+                else 0.0,
+                int(raw_row["nb_core"]),
+                int(raw_row["nb_ops_per_core"]),
+                int(raw_row["iteration"]),
+                task,
+                site,
+                g5k_cluster,
+                node,
+            )
+            parsed_and_converted_rows.append(parsed_row)
+    return parsed_and_converted_rows
-def load_results(hwpc_files, perf_files, results_directory_match, test):
-    hwpc_df = pl.DataFrame(schema=schemas.hwpc_columns, strict=True)
-    perf_df = pl.DataFrame(schema=schemas.perf_columns,
strict=True)
+def read_alumet_csv(file_path: str, results_directory_match: str):
+    # Paths follow this format (placeholders reconstructed from the capture groups):
+    # {site}/{g5k_cluster}/{node}/{tool}_{task}_{nb_cores}_{nb_ops_per_core}.csv
+    (site, g5k_cluster, node, task, nb_cores, nb_ops_per_core) = re.match(
+        results_directory_match, file_path
+    ).groups()
+    parsed_and_converted_rows = []
+    with open(file_path, "r") as csv_file:
+        reader = csv.DictReader(csv_file)
+        for raw_row in reader:
+            parsed_row = (
+                float(raw_row["energy_cores"]) if raw_row["energy_cores"] else 0.0,
+                float(raw_row["energy_pkg"]) if raw_row["energy_pkg"] else 0.0,
+                float(raw_row["energy_ram"]) if raw_row["energy_ram"] else 0.0,
+                int(raw_row["nb_core"]),
+                int(raw_row["nb_ops_per_core"]),
+                int(raw_row["iteration"]),
+                task,
+                site,
+                g5k_cluster,
+                node,
+            )
+            parsed_and_converted_rows.append(parsed_row)
+    return parsed_and_converted_rows
+
+
+def read_scaphandre_csv(file_path: str, results_directory_match: str):
+    # Paths follow this format (placeholders reconstructed from the capture groups):
+    # {site}/{g5k_cluster}/{node}/{tool}_{task}_{nb_cores}_{nb_ops_per_core}.csv
+    (site, g5k_cluster, node, task, nb_cores, nb_ops_per_core) = re.match(
+        results_directory_match, file_path
+    ).groups()
+    parsed_and_converted_rows = []
+    with open(file_path, "r") as csv_file:
+        reader = csv.DictReader(csv_file)
+        for raw_row in reader:
+            parsed_row = (
+                float(raw_row["energy_cores"]) if raw_row["energy_cores"] else 0.0,
+                float(raw_row["energy_pkg"]) if raw_row["energy_pkg"] else 0.0,
+                float(raw_row["energy_ram"]) if raw_row["energy_ram"] else 0.0,
+                int(raw_row["nb_core"]),
+                int(raw_row["nb_ops_per_core"]),
+                int(raw_row["iteration"]),
+                task,
+                site,
+                g5k_cluster,
+                node,
+            )
+            parsed_and_converted_rows.append(parsed_row)
+    return parsed_and_converted_rows
+
+
+def read_vjoule_csv(file_path: str, results_directory_match: str):
+    # Paths follow this format (placeholders reconstructed from the capture groups):
+    # {site}/{g5k_cluster}/{node}/{tool}_{task}_{nb_cores}_{nb_ops_per_core}.csv
+    (site, g5k_cluster, node, task, nb_cores, nb_ops_per_core) = re.match(
+        results_directory_match, file_path
+    ).groups()
+    parsed_and_converted_rows = []
+    with open(file_path, "r") as csv_file:
+        reader = csv.DictReader(csv_file)
+        for raw_row in reader:
+            parsed_row = (
+                float(raw_row["energy_cores"]) if raw_row["energy_cores"] else 0.0,
+                float(raw_row["energy_pkg"]) if raw_row["energy_pkg"] else 0.0,
+                float(raw_row["energy_ram"]) if raw_row["energy_ram"] else 0.0,
+                int(raw_row["nb_core"]),
+                int(raw_row["nb_ops_per_core"]),
+                int(raw_row["iteration"]),
+                task,
+                site,
+                g5k_cluster,
+                node,
+            )
+            parsed_and_converted_rows.append(parsed_row)
+    return parsed_and_converted_rows
+
+
+def load_results(
+    hwpc_files,
+    perf_files,
+    codecarbon_files,
+    alumet_files,
+    scaphandre_files,
+    vjoule_files,
+    results_directory_match,
+    nodes_df,
+):
+    perf_df = pl.DataFrame(schema=schemas.raw_perf_columns, strict=True)
+    hwpc_df = pl.DataFrame(schema=schemas.hwpc_columns, strict=True)
+    codecarbon_df = pl.DataFrame(schema=schemas.raw_energy_columns, strict=True)
+    alumet_df = pl.DataFrame(schema=schemas.raw_energy_columns, strict=True)
+    scaphandre_df = pl.DataFrame(schema=schemas.raw_energy_columns, strict=True)
+    vjoule_df = pl.DataFrame(schema=schemas.raw_energy_columns, strict=True)
-    if test:
-        count = 0
-    for hwpc_file, perf_file in tqdm(zip(hwpc_files, perf_files)):
-        if test:
-            count += 1
-            if count == 100:
-                break
+    for hwpc_file in hwpc_files:
         hwpc_df = pl.concat(
             [
                 hwpc_df,
                 pl.from_records(
                     schema=schemas.hwpc_columns,
-                    data=extract.read_hwpc_csv(hwpc_file, results_directory_match),
+                    data=read_hwpc_csv(hwpc_file, results_directory_match),
                     strict=True,
                    orient="row",
                 ),
             ]
         )
+
+    hwpc_df = hwpc_df.join(
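+        # Enrich each HWPC row with its node's inventory record (an m:1 left join on
+        # (node, g5k_cluster)); load_hwpc_results relies on the joined
+        # numa_nodes_first_cpus list to drop the duplicated, socket-shared RAPL rows.
+        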
other=nodes_df, + left_on=["node", "g5k_cluster"], + right_on=["uid", "g5k_cluster"], + how="left", + validate="m:1", + ) + hwpc_df = load_hwpc_results(hwpc_df) + for perf_file in perf_files: perf_df = pl.concat( [ perf_df, pl.from_records( - schema=schemas.perf_columns, - data=extract.read_perf_csv(perf_file, results_directory_match), + schema=schemas.raw_perf_columns, + data=read_perf_csv(perf_file, results_directory_match), strict=True, orient="row", ), ] ) + print("perf", perf_df.head()) - hwpc_results = load_hwpc_results(hwpc_df) - perf_results = load_perf_results(perf_df) + for codecarbon_file in codecarbon_files: + codecarbon_df = pl.concat( + [ + codecarbon_df, + pl.from_records( + schema=schemas.raw_energy_columns, + data=read_codecarbon_csv(codecarbon_file, results_directory_match), + strict=True, + orient="row", + ), + ] + ) + for alumet_file in alumet_files: + alumet_df = pl.concat( + [ + alumet_df, + pl.from_records( + schema=schemas.raw_energy_columns, + data=read_alumet_csv(alumet_file, results_directory_match), + strict=True, + orient="row", + ), + ] + ) + for scaphandre_file in scaphandre_files: + scaphandre_df = pl.concat( + [ + scaphandre_df, + pl.from_records( + schema=schemas.raw_energy_columns, + data=read_scaphandre_csv(scaphandre_file, results_directory_match), + strict=True, + orient="row", + ), + ] + ) + for vjoule_file in vjoule_files: + vjoule_df = pl.concat( + [ + vjoule_df, + pl.from_records( + schema=schemas.raw_energy_columns, + data=read_vjoule_csv(vjoule_file, results_directory_match), + strict=True, + orient="row", + ), + ] + ) - return (hwpc_results, perf_results) + perf_df = perf_df.join( + other=nodes_df, + left_on=["node", "g5k_cluster"], + right_on=["uid", "g5k_cluster"], + how="left", + validate="m:1", + ) + perf_df = perf_df.drop(["numa_nodes_first_cpus"]) + perf_df = load_perf_results(perf_df) + print("Perf columns :", perf_df.columns) + codecarbon_df = codecarbon_df.join( + other=nodes_df, + left_on=["node", "g5k_cluster"], + right_on=["uid", "g5k_cluster"], + how="left", + validate="m:1", + ) + codecarbon_df = codecarbon_df.drop(["numa_nodes_first_cpus"]) + print("codecarbon columns :", codecarbon_df.columns) + alumet_df = alumet_df.join( + other=nodes_df, + left_on=["node", "g5k_cluster"], + right_on=["uid", "g5k_cluster"], + how="left", + validate="m:1", + ) + alumet_df = alumet_df.drop(["numa_nodes_first_cpus"]) + print("alumet columns :", alumet_df.columns) + scaphandre_df = scaphandre_df.join( + other=nodes_df, + left_on=["node", "g5k_cluster"], + right_on=["uid", "g5k_cluster"], + how="left", + validate="m:1", + ) + scaphandre_df = scaphandre_df.drop(["numa_nodes_first_cpus"]) + print("scaphandre columns :", scaphandre_df.columns) + vjoule_df = vjoule_df.join( + other=nodes_df, + left_on=["node", "g5k_cluster"], + right_on=["uid", "g5k_cluster"], + how="left", + validate="m:1", + ) + vjoule_df = vjoule_df.drop(["numa_nodes_first_cpus"]) + print("vjoule columns :", vjoule_df.columns) + # perf_df = load_perf_results(perf_df) + return (hwpc_df, perf_df, codecarbon_df, alumet_df, scaphandre_df, vjoule_df) -def load_energy(hwpc_results, perf_results, nodes_df, os): - energy_df = pl.concat([hwpc_results, perf_results]) + +def load_energy(hwpc_df, perf_df, codecarbon_df, alumet_df, scaphandre_df, vjoule_df): + energy_df = pl.concat( + [hwpc_df, perf_df, codecarbon_df, alumet_df, scaphandre_df, vjoule_df] + ) energy_df = pl.DataFrame(schema=schemas.energy_columns, data=energy_df) energy_stats_df = energy_df.sql( @@ -118,7 +569,6 @@ def 
load_energy(hwpc_results, perf_results, nodes_df, os):
             task, nb_core, nb_ops_per_core,
-            alone,
             avg(energy_pkg) as pkg_average,
             median(energy_pkg) as pkg_median,
             min(energy_pkg) as pkg_minimum,
@@ -143,63 +593,723 @@ def load_energy(hwpc_results, perf_results, nodes_df, os):
             quantile_cont(energy_ram, 0.25) as ram_quantile_25,
             quantile_cont(energy_ram, 0.75) as ram_quantile_75,
             (stddev(energy_ram) / avg(energy_ram)) as ram_coefficient_of_variation,
+            exotic,
+            architecture_nb_cores,
+            architecture_nb_threads,
+            processor_vendor,
+            processor_clock_speed,
+            processor_instruction_set,
+            processor_ht_capable,
+            processor_microarchitecture,
+            processor_microcode,
+            processor_model,
+            processor_version,
+            os_cstate_driver,
+            os_cstate_governor,
+            os_pstate_driver,
+            os_pstate_governor,
+            os_turboboost_enabled,
+            processor_detail,
+            processor_generation
         FROM self
-        GROUP BY node, task, nb_core, nb_ops_per_core, alone
+        GROUP BY
+            node,
+            task,
+            nb_core,
+            nb_ops_per_core,
+            exotic,
+            architecture_nb_cores,
+            architecture_nb_threads,
+            processor_vendor,
+            processor_clock_speed,
+            processor_instruction_set,
+            processor_ht_capable,
+            processor_microarchitecture,
+            processor_microcode,
+            processor_model,
+            processor_version,
+            os_cstate_driver,
+            os_cstate_governor,
+            os_pstate_driver,
+            os_pstate_governor,
+            os_turboboost_enabled,
+            processor_detail,
+            processor_generation
         """
     )
     energy_stats_df = pl.DataFrame(energy_stats_df, schema=schemas.stats_columns)
-    energy_stats_df = energy_stats_df.join(
-        other=nodes_df, left_on="node", right_on="uid", how="left", validate="m:1"
+
+    return energy_df, energy_stats_df
+
+
+# Extract JSON nodes information
+def extract_inventory_json_files(directory: str, schema: str):
+    nodes_df = pl.DataFrame(schema=schema, strict=True)
+    for site in os.scandir(directory):
+        for g5k_cluster in os.scandir(site.path):
+            for node in os.scandir(g5k_cluster.path):
+                if node.name.endswith(".json"):
+                    with open(node.path, "r") as json_file:
+                        data = json.load(json_file)
+                        node = (
+                            data["uid"],
+                            data["cluster"],
+                            bool(data["exotic"]),
+                            int(data["architecture"]["nb_cores"]),
+                            int(data["architecture"]["nb_threads"]),
+                            data["processor"]["vendor"],
+                            int(data["processor"]["clock_speed"]),
+                            data["processor"]["instruction_set"],
+                            bool(data["processor"]["ht_capable"]),
+                            data["processor"]["microarchitecture"],
+                            data["processor"]["microcode"],
+                            data["processor"]["model"],
+                            data["processor"]["version"],
+                            data["operating_system"]["cstate_driver"],
+                            data["operating_system"]["cstate_governor"],
+                            data["operating_system"]["pstate_driver"],
+                            data["operating_system"]["pstate_governor"],
+                            bool(data["operating_system"]["turboboost_enabled"]),
+                        )
+                        nodes_df = pl.concat(
+                            [
+                                nodes_df,
+                                pl.from_records(
+                                    schema=schema,
+                                    data=[node],
+                                    strict=True,
+                                    orient="row",
+                                ),
+                            ]
+                        )
+    return nodes_df
+
+
+def frequency_file_metadata(filename):
+    frequency, tool1, _and_, tool2 = filename.split("/")[-1].split("_")[1:5]
+    site, g5k_cluster, node = filename.split("/")[4:7]
+    return site, g5k_cluster, node, int(frequency), tool1, tool2.split(".")[0]
+
+
+def load_frequency(batch_identifier="", results_directory=""):
+    print("Loading Frequency Results")
+    # No aggregate frequency.csv cache here: a single cached frame could not be
+    # unpacked into the six per-tool frames returned below, so caching is left
+    # to each load_*_frequency loader.
+
+    perf_frequency_df = load_perf_frequency(
+        batch_identifier=batch_identifier, results_directory=results_directory
+    )
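+    # Every load_*_frequency call below is cache-or-parse: it returns a previously
+    # written per-tool CSV when one exists, and otherwise walks the raw result files,
+    # builds the frame, writes the cache, then returns it.
+    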
hwpc_frequency_df = load_hwpc_frequency(
+        batch_identifier=batch_identifier, results_directory=results_directory
+    )
+    codecarbon_frequency_df = load_codecarbon_frequency(
+        batch_identifier=batch_identifier, results_directory=results_directory
+    )
+    alumet_frequency_df = load_alumet_frequency(
+        batch_identifier=batch_identifier, results_directory=results_directory
+    )
+    scaphandre_frequency_df = load_scaphandre_frequency(
+        batch_identifier=batch_identifier, results_directory=results_directory
+    )
-    energy_stats_df = energy_stats_df.with_columns([
-        (pl.col("nb_core") / pl.col("architecture_nb_cores")).alias("percent_cores_used"),
-        (pl.col("nb_core") / pl.col("architecture_nb_threads")).alias("percent_threads_used"),
-
-    ])
-    print("New columns :", energy_stats_df.sql("SELECT percent_cores_used, percent_threads_used FROM self").describe())
+    vjoule_frequency_df = load_vjoule_frequency(
+        batch_identifier=batch_identifier, results_directory=results_directory
+    )
+
+    return (
+        perf_frequency_df,
+        hwpc_frequency_df,
+        codecarbon_frequency_df,
+        alumet_frequency_df,
+        scaphandre_frequency_df,
+        vjoule_frequency_df,
+    )
+
+
+def load_perf_frequency(batch_identifier="", results_directory=""):
+    print("Loading Perf Frequency Results")
+    perf_frequency_csv_file = f"../data/{batch_identifier}.d/perf_frequency.csv"
+    if os.path.exists(perf_frequency_csv_file):
+        print("Returning content from :", perf_frequency_csv_file)
+        perf_df = pl.read_csv(perf_frequency_csv_file)
+        return perf_df
+    else:
+        regex = "frequency.*perf_and.*csv"
+        print("No import found, will load from raw files matching regex : ", regex)
+        perf_frequency_raw_files = utils.find_files(
+            root_dir=results_directory, regex=regex
+        )
+        perf_dfs = []
+        for file in perf_frequency_raw_files:
+            print("Reading perf file :", file)
+            site, g5k_cluster, node, frequency, tool1, tool2 = frequency_file_metadata(
+                file
+            )
+            matching_temperature_file = f"{results_directory}/{site}/{g5k_cluster}/{node}/temperatures_frequency_{frequency}_perf_and_{tool2}.csv"
+            perf_df = pl.read_csv(file).with_columns(
+                tool=pl.lit(tool2),
+                node=pl.lit(node),
+                g5k_cluster=pl.lit(g5k_cluster),
+                target_frequency=pl.lit(frequency),
+            )
+            temperature_df = pl.read_csv(matching_temperature_file)
+            perf_df = pl.sql(
+                "SELECT * FROM perf_df JOIN temperature_df ON perf_df.iteration = temperature_df.iteration"
+            ).collect()
+            perf_dfs.append(perf_df)
+        perf_df = pl.concat(perf_dfs)
+        perf_df = perf_df.sql(
+            "SELECT g5k_cluster, node, tool, power_energy_cores as cores, power_energy_pkg as pkg, power_energy_ram as ram, target_frequency, temperature_start, temperature_stop, iteration FROM self"
+        )
+        # Project before writing, so the cached CSV matches what the fresh path returns.
+        perf_df.write_csv(perf_frequency_csv_file)
+        return perf_df
+
+
+def load_hwpc_frequency(batch_identifier="", results_directory=""):
+    print("Loading HWPC Frequency Results")
+    hwpc_frequency_csv_file = f"../data/{batch_identifier}.d/hwpc_frequency.csv"
+    if os.path.exists(hwpc_frequency_csv_file):
+        print("Returning content from :", hwpc_frequency_csv_file)
+        hwpc_df = pl.read_csv(hwpc_frequency_csv_file)
+        return hwpc_df
+    else:
+        regex = "frequency.*hwpc_and.*csv"
+        print("No import found, will load from raw files matching regex : ", regex)
+        hwpc_frequency_raw_files = utils.find_files(
+            root_dir=results_directory, regex=regex
+        )
+        hwpc_dfs = []
+        for file in hwpc_frequency_raw_files:
+            site, g5k_cluster, node, frequency, _tool1, _tool2 = (
+                frequency_file_metadata(file)
+            )
+            hwpc_df = pl.read_csv(file)
+            hwpc_df = hwpc_df.with_columns(
+                node=pl.lit(node),
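+                # pl.lit broadcasts one constant per raw file: every row gets tagged
+                # with the node / cluster metadata parsed from the file path by
+                # frequency_file_metadata.
+                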
g5k_cluster=pl.lit(g5k_cluster), + ) + hwpc_dfs.append(hwpc_df) + hwpc_df = pl.concat(hwpc_dfs) + hwpc_df = hwpc_df.drop(["sensor", "target", "time_enabled", "time_running"]) + hwpc_df = hwpc_df.sql(""" + SELECT g5k_cluster, node, timestamp, SUM(rapl_energy_cores) as cores, SUM(rapl_energy_pkg) as pkg, SUM(rapl_energy_dram) as ram, iteration, frequency + FROM self + GROUP BY timestamp, frequency, iteration, node, g5k_cluster + """) + hwpc_df.write_csv(hwpc_frequency_csv_file) + return hwpc_df + + +def load_codecarbon_frequency(batch_identifier="", results_directory=""): + print("Loading Codecarbon Frequency Results") + codecarbon_frequency_csv_file = ( + f"../data/{batch_identifier}.d/codecarbon_frequency.csv" + ) + if os.path.exists(codecarbon_frequency_csv_file): + print("Returning content from :", codecarbon_frequency_csv_file) + codecarbon_df = pl.read_csv(codecarbon_frequency_csv_file) + return codecarbon_df + else: + regex = "frequency.*codecarbon_and.*csv" + print("No import found, will load from raw files matching regex : ", regex) + codecarbon_frequency_raw_files = utils.find_files( + root_dir=results_directory, regex=regex + ) + + codecarbon_dfs = [] + for file in codecarbon_frequency_raw_files: + site, g5k_cluster, node, frequency, _tool1, _tool2 = ( + frequency_file_metadata(file) + ) + codecarbon_df = pl.read_csv( + source=file, + ) + codecarbon_df = codecarbon_df.unique(keep="any") + codecarbon_df = codecarbon_df.with_columns( + [ + ( + pl.col("timestamp") + .map_elements( + lambda x: datetime.timestamp(datetime.fromisoformat(x)), + return_dtype=pl.Float64, + ) + .alias("timestamp") + ) + ] + ) + codecarbon_df = codecarbon_df.pivot( + on="domain", + index="timestamp", + values=["energy", "iteration"], + aggregate_function="sum", + ) + codecarbon_df = codecarbon_df.with_columns( + g5k_cluster=pl.lit(g5k_cluster), + node=pl.lit(node), + frequency=pl.lit(frequency), + pkg=pl.lit(0.0), + ) + codecarbon_dfs.append( + codecarbon_df.sql( + "SELECT g5k_cluster, node, timestamp, energy_CPU as cores, pkg, energy_RAM as ram, iteration_CPU as iteration, frequency FROM self" + ) + ) + + codecarbon_df = pl.concat(codecarbon_dfs) + codecarbon_df.write_csv(codecarbon_frequency_csv_file) + return codecarbon_df + + +def load_alumet_frequency(batch_identifier="", results_directory=""): + print("Loading alumet Frequency Results") + alumet_frequency_csv_file = f"../data/{batch_identifier}.d/alumet_frequency.csv" + if os.path.exists(alumet_frequency_csv_file): + print("Returning content from :", alumet_frequency_csv_file) + alumet_df = pl.read_csv(alumet_frequency_csv_file) + return alumet_df + else: + regex = "frequency.*alumet_and.*csv" + print("No import found, will load from raw files matching regex : ", regex) + alumet_frequency_raw_files = utils.find_files( + root_dir=results_directory, regex=regex + ) + alumet_dfs = [] + for file in alumet_frequency_raw_files: + site, g5k_cluster, node, frequency, _tool1, _tool2 = ( + frequency_file_metadata(file) + ) + alumet_df = pl.read_csv( + source=file, + ) + alumet_df = alumet_df.with_columns( + [ + ( + pl.col("timestamp") + .map_elements( + lambda x: datetime.timestamp( + datetime.fromisoformat(clamp_date(x)) + ), + return_dtype=pl.Float64, + ) + .alias("timestamp") + ) + ] + ) + alumet_df = alumet_df.unique(keep="any") + alumet_df = alumet_df.sql( + "SELECT domain, timestamp, SUM(energy) as energy, iteration FROM self GROUP BY domain, timestamp, iteration" + ) + alumet_df = alumet_df.pivot( + on="domain", index="timestamp", values=["energy", 
"iteration"] + ) + alumet_df = alumet_df.with_columns( + g5k_cluster=pl.lit(g5k_cluster), + node=pl.lit(node), + frequency=pl.lit(frequency), + cores=pl.lit(0.0), + ) + alumet_dfs.append( + alumet_df.sql( + "SELECT g5k_cluster, node, timestamp, cores, energy_package as pkg, energy_dram as ram, iteration_package as iteration, frequency FROM self" + ) + ) + alumet_df = pl.concat(alumet_dfs) + alumet_df.write_csv(alumet_frequency_csv_file) + return alumet_df + + +def clamp_date(date): + if len(date) > 26: + return date[:26] + elif len(date) > 23: + return date[:23] + else: + return date - ranges = { - "10%": (0, 0.1), - "25": (0.1, 0.25), - "50": (0.25, 0.5), - "75": (0.5, 0.75), - "90": (0.75, 0.9), - "100": (0.9, 1.0), - "110": (1.0, 1.1) - } - def assign_category(value): - for label, (low, high) in ranges.items(): - if low <= value < high: - return int(label) - return None +def load_scaphandre_frequency(batch_identifier="", results_directory=""): + print("Loading scaphandre Frequency Results") + scaphandre_frequency_csv_file = ( + f"../data/{batch_identifier}.d/scaphandre_frequency.csv" + ) + if os.path.exists(scaphandre_frequency_csv_file): + print("Returning content from :", scaphandre_frequency_csv_file) + scaphandre_df = pl.read_csv(scaphandre_frequency_csv_file) + return scaphandre_df + else: + regex = "frequency.*scaphandre_and.*csv" + print("No import found, will load from raw files matching regex : ", regex) + scaphandre_frequency_raw_files = utils.find_files( + root_dir=results_directory, regex=regex + ) + scaphandre_dfs = [] + for file in scaphandre_frequency_raw_files: + print("Reading scaphandre frequency file", file) + site, g5k_cluster, node, frequency, _tool1, _tool2 = ( + frequency_file_metadata(file) + ) + scaphandre_df = pl.read_csv( + source=file, + ) + scaphandre_df = scaphandre_df.unique(keep="any") + scaphandre_df = scaphandre_df.pivot( + on="domain", index="timestamp", values=["energy", "iteration"] + ) + scaphandre_df = scaphandre_df.with_columns( + g5k_cluster=pl.lit(g5k_cluster), + node=pl.lit(node), + cores=pl.lit(0.0), + ram=pl.lit(0.0), + frequency=pl.lit(frequency), + ) + scaphandre_dfs.append( + scaphandre_df.sql( + "SELECT g5k_cluster, node, timestamp, cores, energy_package as pkg, ram, iteration_package as iteration, frequency FROM self" + ) + ) + scaphandre_df = pl.concat(scaphandre_dfs) + scaphandre_df.write_csv(scaphandre_frequency_csv_file) + return scaphandre_df + + +def load_vjoule_frequency(batch_identifier="", results_directory=""): + print("Loading vjoule Frequency Results") + vjoule_frequency_csv_file = f"../data/{batch_identifier}.d/vjoule_frequency.csv" + if os.path.exists(vjoule_frequency_csv_file): + print("Returning content from :", vjoule_frequency_csv_file) + vjoule_df = pl.read_csv(vjoule_frequency_csv_file) + return vjoule_df + else: + regex = "frequency.*vjoule_and.*csv" + print("No import found, will load from raw files matching regex : ", regex) + vjoule_dfs = [] + vjoule_frequency_raw_files = utils.find_files( + root_dir=results_directory, regex=regex + ) + for file in vjoule_frequency_raw_files: + site, g5k_cluster, node, frequency, _tool1, _tool2 = ( + frequency_file_metadata(file) + ) + vjoule_df = pl.read_csv( + source=file, + ) + vjoule_df = vjoule_df.unique(keep="any") + vjoule_df = vjoule_df.with_columns( + [ + (pl.col("timestamp").str.strip_chars().alias("timestamp")), + (pl.col("energy").str.strip_chars().alias("energy")), + ] + ) + vjoule_df = vjoule_df.cast( + { + "energy": pl.Float64, + } + ) + + vjoule_df = 
vjoule_df.pivot( + on="domain", + index="timestamp", + values=["energy", "iteration"], + aggregate_function="sum", + ) + vjoule_df = vjoule_df.with_columns( + g5k_cluster=pl.lit(g5k_cluster), + node=pl.lit(node), + cores=pl.lit(0.0), + frequency=pl.lit(frequency), + ) + vjoule_dfs.append( + vjoule_df.sql( + "SELECT g5k_cluster, node, timestamp, cores, energy_CPU as pkg, energy_RAM as ram, iteration_CPU as iteration, frequency FROM self" + ) + ) + vjoule_df = pl.concat(vjoule_dfs) + vjoule_df.write_csv(vjoule_frequency_csv_file) + return vjoule_df - energy_stats_df = energy_stats_df.with_columns( - pl.col("percent_cores_used") - .map_elements(lambda x : assign_category(x)) - .alias("percent_cores_used_category") +def load_vjoule_frequency_agg(batch_identifier="", results_directory=""): + print("Loading vjoule Frequency Results") + vjoule_frequency_csv_file = f"../data/{batch_identifier}.d/vjoule_frequency_agg.csv" + + if os.path.exists(vjoule_frequency_csv_file): + print("Returning content from :", vjoule_frequency_csv_file) + return pl.read_csv(vjoule_frequency_csv_file) + + # Else: load raw files + regex = "frequency.*vjoule_and.*csv" + print("No import found, will load from raw files matching regex : ", regex) + + vjoule_dfs = [] + vjoule_frequency_raw_files = utils.find_files( + root_dir=results_directory, + regex=regex ) - energy_stats_df = energy_stats_df.with_columns( - pl.col("percent_threads_used") - .map_elements(lambda x : assign_category(x)) - .alias("percent_threads_used_category") + for file in vjoule_frequency_raw_files: + site, g5k_cluster, node, frequency, _tool1, _tool2 = frequency_file_metadata(file) + + vjoule_df = pl.read_csv(file).unique(keep="any") + + # Clean columns + vjoule_df = vjoule_df.with_columns( + [ + pl.col("timestamp").str.strip_chars().alias("timestamp"), + pl.col("energy").str.strip_chars().alias("energy"), + ] + ) + + # Cast types + vjoule_df = vjoule_df.cast({"energy": pl.Float64}) + + # ---- NEW PART: keep only last energy per (iteration, domain) ---- + vjoule_df = ( + vjoule_df + .sort("timestamp") + .group_by(["iteration", "domain"]) + .tail(1) # keep latest record for each pair + ) + + # Pivot + vjoule_df = vjoule_df.pivot( + on="domain", + index="timestamp", + values=["energy", "iteration"], + aggregate_function="sum", + ) + + # Add metadata + vjoule_df = vjoule_df.with_columns( + g5k_cluster=pl.lit(g5k_cluster), + node=pl.lit(node), + cores=pl.lit(0.0), + frequency=pl.lit(frequency), + ) + + # Reorder/select columns via SQL + vjoule_dfs.append( + vjoule_df.sql( + """ + SELECT + g5k_cluster, node, timestamp, cores, + energy_CPU AS pkg, + energy_RAM AS ram, + iteration_CPU AS iteration, + frequency + FROM self + """ + ) + ) + + # Concatenate across files + vjoule_df = pl.concat(vjoule_dfs) + + # Save cache + vjoule_df.write_csv(vjoule_frequency_csv_file) + + return vjoule_df + + +def load_codecarbon_frequency_agg(batch_identifier="", results_directory=""): + print("Loading codecarbon Frequency Results") + codecarbon_frequency_csv_file = f"../data/{batch_identifier}.d/codecarbon_frequency_agg.csv" + + if os.path.exists(codecarbon_frequency_csv_file): + print("Returning content from :", codecarbon_frequency_csv_file) + return pl.read_csv(codecarbon_frequency_csv_file) + + # Else: load raw files + regex = "frequency.*codecarbon_and.*csv" + print("No import found, will load from raw files matching regex : ", regex) + + codecarbon_dfs = [] + codecarbon_frequency_raw_files = utils.find_files( + root_dir=results_directory, + regex=regex ) + for file in 
codecarbon_frequency_raw_files: + site, g5k_cluster, node, frequency, _tool1, _tool2 = frequency_file_metadata(file) + + codecarbon_df = pl.read_csv(file).unique(keep="any") + + # Clean columns + codecarbon_df = codecarbon_df.with_columns( + [ + pl.col("timestamp").str.strip_chars().alias("timestamp"), + ] + ) + + # Cast types + codecarbon_df = codecarbon_df.cast({"energy": pl.Float64}) + + # ---- NEW PART: keep only last energy per (iteration, domain) ---- + codecarbon_df = ( + codecarbon_df + .sort("timestamp") + .group_by(["iteration", "domain"]) + .tail(1) # keep latest record for each pair + ) + + # Pivot + codecarbon_df = codecarbon_df.pivot( + on="domain", + index="timestamp", + values=["energy", "iteration"], + aggregate_function="sum", + ) + + # Add metadata + codecarbon_df = codecarbon_df.with_columns( + g5k_cluster=pl.lit(g5k_cluster), + node=pl.lit(node), + cores=pl.lit(0.0), + frequency=pl.lit(frequency), + ) + + # Reorder/select columns via SQL + codecarbon_dfs.append( + codecarbon_df.sql( + """ + SELECT + g5k_cluster, node, timestamp, cores, + energy_CPU AS pkg, + energy_RAM AS ram, + iteration_CPU AS iteration, + frequency + FROM self + """ + ) + ) + + # Concatenate across files + codecarbon_df = pl.concat(codecarbon_dfs) + + # Save cache + codecarbon_df.write_csv(codecarbon_frequency_csv_file) + + return codecarbon_df + + + + +def polish_frequency(frequency_df): + print("Polishing Frequency Results") + print("No operations yet") + return frequency_df + - jobs = { - "hwpc_true": "hwpc_alone", - "hwpc_false": "hwpc_with_perf", - "perf_true": "perf_alone", - "perf_false": "perf_with_hwpc", - } +def frequency_validation(frequency_df): + print("Frequency Validation Results") + print("No operations yet") + return True - energy_stats_df = energy_stats_df.with_columns( - pl.concat_str(["task", "alone"], separator="_").alias("job") + +def load_inventory(batch_identifier=""): + print("Loading Inventory Results") + inventories_directory = ( + f"../data/{batch_identifier}/inventories-{batch_identifier}.d" + ) + inventory_csv_file = f"../data/{batch_identifier}/inventory.csv" + if os.path.exists(inventory_csv_file): + print("Returning content from :", inventory_csv_file) + inventory_df = pd.read_csv(inventory_csv_file) + return inventory_df + inventory_df = extract_inventory_json_files( + directory=inventories_directory, schema=schemas.nodes_configuration_columns ) - energy_stats_df = energy_stats_df.with_columns(pl.col("job").replace_strict(jobs)) + inventory_df = inventory_df.with_columns( + [ + # (pl.col("processor_version").map_elements(lambda x: f"{x}\nGen: {vendor_generation_map[x]['architecture']}\nRelease: {vendor_generation_map[x]['launch_date']}", return_dtype=pl.String).alias("processor_detail")), + ( + pl.col("processor_version") + .map_elements( + lambda x: f"{x}\n{vendor_generation_map[x]['architecture']}", + return_dtype=pl.String, + ) + .alias("processor_detail") + ), + ( + pl.col("processor_version") + .map_elements( + lambda x: f"{vendor_generation_map[x]['generation']}", + return_dtype=pl.String, + ) + .alias("processor_generation") + ), + ( + pl.col("processor_version") + .map_elements( + lambda x: f"{vendor_generation_map[x]['vendor']}", + return_dtype=pl.String, + ) + .alias("processor_vendor") + ), + ( + pl.col("processor_version") + .map_elements( + lambda x: vendor_generation_map[x]["numa_nodes_first_cpus"], + return_dtype=pl.List(pl.Int64), + ) + .alias("numa_nodes_first_cpus") + ), + ] + ) + return inventory_df + + +def load_energy(batch_identifier=""): + 
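+    # Cache-first loader, following the same pattern as the tool-specific
+    # loaders above: return the cached CSV when it exists, otherwise fall
+    # back to the raw extraction path (still a stub, see the TODO below).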
print("Loading Energy Results") + energy_csv_file = f"../data/{batch_identifier}/energy.csv" + if os.path.exists(energy_csv_file): + print("Returning content from :", energy_csv_file) + energy_df = pd.read_csv(energy_csv_file) + return energy_df + # TODO + return energy_df - print("New columns :", energy_stats_df.sql("SELECT percent_cores_used, percent_threads_used, percent_cores_used_category, percent_threads_used_category FROM self").describe()) +def load_energy_stats(batch_identifier=""): + print("Loading Energy Stats Results") + energy_stats_csv_file = f"../data/{batch_identifier}/energy_stats.csv" + if os.path.exists(energy_stats_csv_file): + print("Returning content from :", energy_stats_csv_file) + energy_stats_df = pd.read_csv(energy_stats_csv_file) + return energy_stats_df + # TODO return energy_stats_df + + +def baseline_file_metadata(filename): + g5k_cluster, node = filename.split("/")[5:7] + return g5k_cluster, node + + +def load_baseline(batch_identifier="", results_directory=""): + print("Loading Baseline Results") + baseline_csv_file = f"../data/{batch_identifier}.d/baseline_consumption.csv" + if os.path.exists(baseline_csv_file): + print("Returning content from :", baseline_csv_file) + baseline_df = pl.read_csv(baseline_csv_file) + return baseline_df + else: + regex = "baseline_consumption.csv" + print("No import found, will load from raw files matching regex : ", regex) + baseline_raw_files = utils.find_files(root_dir=results_directory, regex=regex) + baseline_dfs = [] + for file in baseline_raw_files: + g5k_cluster, node = baseline_file_metadata(file) + baseline_df = pl.read_csv(file) + if baseline_df.shape[0] == 0: + print("No Baseline data found for ", file) + continue + baseline_df = baseline_df.cast({cs.numeric(): pl.Float32}) + baseline_df = baseline_df.with_columns( + g5k_cluster=pl.lit(g5k_cluster), node=pl.lit(node) + ) + baseline_dfs.append(baseline_df) + + baseline_df = pl.concat(baseline_dfs) + baseline_df.write_csv(baseline_csv_file) + return baseline_df diff --git a/analysis/marimo_data_analysis.py b/analysis/marimo_data_analysis.py new file mode 100644 index 0000000..d5c4498 --- /dev/null +++ b/analysis/marimo_data_analysis.py @@ -0,0 +1,1507 @@ +# /// script +# requires-python = ">=3.13" +# dependencies = [ +# "altair==6.0.0", +# "matplot2tikz==0.5.1", +# "matplotlib==3.10.7", +# "mplcyberpunk==0.7.6", +# "numpy==2.3.5", +# "polars==1.34.0", +# "pyarrow", +# "ruff==0.14.4", +# "seaborn==0.13.2", +# "tikzplotlib==0.10.1", +# "tqdm==4.67.1", +# "vegafusion==2.0.3", +# "vl-convert-python==1.8.0", +# ] +# /// + +import marimo + +__generated_with = "0.17.2" +app = marimo.App(width="full") + + +@app.cell +def _(): + # IMPORTS + import os # open files + import sys + import argparse + import random + import polars as pl # Dataframes for data manipulation + import pandas as pd + import numpy as np # Statistics + import gc + import math + + + + import schemas # Dataframes schemas + import load # Lib for data loading + + import matplotlib.pyplot as plt # Viz package 1 + import matplotlib.patheffects as path_effects + from matplotlib.colors import LogNorm + import matplotlib.ticker as ticker + import seaborn as sns # Viz package 2 + plt.style.use("seaborn-v0_8-paper") + + + from pprint import pprint # Pretty print for data structures + import re # regex + import marimo as mo + import test_file_load + + import json + from pathlib import Path + return Path, json, load, math, mo, np, pd, pl, plt, re, sns, test_file_load + + +@app.cell +def _(mo): + mo.md(r"""# Vendor 
generation map with informations about processors""") + return + + +@app.cell(hide_code=True) +def vendor_generation_map_1(): + vendor_generation_map = { + "E5-2620 v4": { + "architecture": "Broadwell-E", + "vendor": "Intel", + "generation": 6, + "launch_date": "Q1 2016", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "E5-2630L v4": { + "architecture": "Broadwell-E", + "vendor": "Intel", + "generation": 6, + "launch_date": "Q1 2016", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "E5-2698 v4": { + "architecture": "Broadwell-E", + "vendor": "Intel", + "generation": 6, + "launch_date": "Q1 2016", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "E5-2630 v3": { + "architecture": "Haswell-E", + "vendor": "Intel", + "generation": 5, + "launch_date": "Q3 2014", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "Gold 5220": { + "architecture": "Cascade Lake-SP", + "vendor": "Intel", + "generation": 10, + "launch_date": "Q2 2019", + "numa_nodes_number": "1", + "numa_nodes_first_cpus": [0], + }, + "Gold 5218": { + "architecture": "Cascade Lake-SP", + "vendor": "Intel", + "generation": 10, + "launch_date": "Q2 2019", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "i7-9750H": { + "architecture": "Coffee Lake", + "vendor": "Intel", + "generation": 9, + "launch_date": "Q2 2019", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "Silver 4314": { + "architecture": "Ice Lake-SP", + "vendor": "Intel", + "generation": 10, + "launch_date": "Q2 2021", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "Gold 5320": { + "architecture": "Ice Lake-SP", + "vendor": "Intel", + "generation": 10, + "launch_date": "Q2 2021", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "Gold 6126": { + "architecture": "Skylake-SP", + "vendor": "Intel", + "generation": 6, + "launch_date": "Q3 2017", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "Gold 6130": { + "architecture": "Skylake-SP", + "vendor": "Intel", + "generation": 6, + "launch_date": "Q3 2017", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "E5-2620": { + "architecture": "Sandy Bridge-EP", + "vendor": "Intel", + "generation": 3, + "launch_date": "Q1 2012", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "E5-2630": { + "architecture": "Sandy Bridge-EP", + "vendor": "Intel", + "generation": 3, + "launch_date": "Q1 2012", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "E5-2630L": { + "architecture": "Sandy Bridge-EP", + "vendor": "Intel", + "generation": 3, + "launch_date": "Q1 2012", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "E5-2660": { + "architecture": "Sandy Bridge-EP", + "vendor": "Intel", + "generation": 3, + "launch_date": "Q1 2012", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "X5670": { + "architecture": "Westmere-EP", + "vendor": "Intel", + "generation": 1, + "launch_date": "Q1 2010", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "7301": { + "architecture": "Zen", + "vendor": "AMD", + "generation": 1, + "launch_date": "Q2 2017", + "numa_nodes_number": "8", + "numa_nodes_first_cpus": [0, 1, 2, 3, 4, 5, 6, 7], + }, + "7352": { + "architecture": "Zen 2", + "vendor": "AMD", + "generation": 2, + "launch_date": "Q3 2019", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "7452": { + "architecture": "Zen 2", + 
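+            # 7452, 7642 and 7742 below share the same Zen 2 / Q3 2019
+            # metadata as 7352 above; only the model key differs.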
"vendor": "AMD", + "generation": 2, + "launch_date": "Q3 2019", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "7642": { + "architecture": "Zen 2", + "vendor": "AMD", + "generation": 2, + "launch_date": "Q3 2019", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "7742": { + "architecture": "Zen 2", + "vendor": "AMD", + "generation": 2, + "launch_date": "Q3 2019", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "250": { + "architecture": "Opteron", + "vendor": "AMD", + "generation": 1, + "launch_date": "Q4 2004", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + "99xx": { + "architecture": "ThunderX2", + "vendor": "Cavium", + "generation": 1, + "launch_date": "Q2 2016", + "numa_nodes_number": "2", + "numa_nodes_first_cpus": [0, 1], + }, + } + return (vendor_generation_map,) + + +@app.cell +def globals(sns): + TOOLS = ["hwpc", "codecarbon", "alumet", "scaphandre", "vjoule"] + palette_for_tools = { + "hwpc": "#4878CF", + "codecarbon": "#6ACC65", + "alumet": "#D65F5F", + "scaphandre": "#B47CC7", + "vjoule": "#C4AD66", + } + sns_palette_for_tools = sns.color_palette(list(palette_for_tools.values())) + + batch_identifier = "ubuntu2404nfs-6.8-6"#input("Enter the batch identifier : e.g ubuntu2404nfs-6.10-6") + results_directory = f"../data/{batch_identifier}.d/results-{batch_identifier}.d" + inventories_directory = (f"../data/{batch_identifier}.d/inventories-{batch_identifier}.d") + return ( + batch_identifier, + inventories_directory, + palette_for_tools, + results_directory, + ) + + +@app.cell +def _(Path, inventories_directory, json, pl, vendor_generation_map): + def load_inventory(directory: str) -> pl.DataFrame: + data = [] + print("Loading inventory from:", directory) + + for file_path in Path(directory).rglob("*.json"): + try: + with open(file_path, "r") as f: + content = json.load(f) + + cluster = content.get("cluster") + cores = content.get("architecture", {}).get("nb_cores") + microarch = content.get("processor", {}).get("microarchitecture") + vendor = content.get("processor", {}).get("vendor") + version = content.get("processor", {}).get("version") + other_desc = content.get("processor", {}).get("other_description") + + data.append({ + "cluster": cluster, + "cores_per_node": cores, + "microarchitecture": microarch, + "vendor": vendor, + "version": version, + "other_description": other_desc + }) + + except Exception as e: + print(f"Error reading {file_path}: {e}") + + df = pl.DataFrame(data) + + # Count nodes & compute total cores per cluster + return ( + df.group_by("cluster") + .agg([ + pl.len().alias("node_count"), + pl.first("cores_per_node"), + pl.first("microarchitecture"), + pl.first("vendor"), + pl.first("version"), + pl.first("other_description") + ]) + .with_columns( + (pl.col("node_count") * pl.col("cores_per_node")).alias("total_cores") + ) + ) + + inventory = load_inventory(directory=inventories_directory) + + # Step 1: Convert vendor_generation_map to a Polars DataFrame + map_data = [ + {"version": version, **info} + for version, info in vendor_generation_map.items() + ] + + vendor_map_df = pl.DataFrame(map_data) + + # Step 2: Join with inventory on "version" + inventory = inventory.join( + vendor_map_df, + left_on="version", + right_on="version", + how="left" + ) + + # Step 3: Optional: create processor_description column + inventory = inventory.with_columns([ + (pl.col("architecture").cast(str) + " (" + pl.col("launch_date") + ")").alias("processor_description") + ]) + return 
(inventory,) + + +@app.cell +def _(): + return + + +@app.cell +def _(mo): + mo.md(r"""# Testing loading""") + return + + +@app.cell +def _(results_directory, test_file_load): + test_file_load.test_all_files( + results_dir=f"{results_directory}/rennes/parasilo/parasilo-24", + nb_core=32, + nb_ops=25_000, + ) + return + + +@app.cell +def _(mo): + mo.md(r"""# Baseline Consumption analysis""") + return + + +@app.cell +def _(batch_identifier, inventory, load, pl, plt, results_directory, sns): + + + baseline_consumptions = load.load_baseline( + batch_identifier=batch_identifier, + results_directory=results_directory) + + baseline_consumptions = baseline_consumptions.with_columns([ + # Compute lower bound of the 5°C bin + (pl.col("average_temperature") // 5 * 5).alias("temp_lower"), + # Compute upper bound + ((pl.col("average_temperature") // 5 * 5) + 4).alias("temp_upper") + ]) + + # Combine into formatted interval strings like "45-50°C" + baseline_consumptions = baseline_consumptions.with_columns([ + (pl.col("temp_lower").cast(pl.Float64).cast(pl.Utf8) + "-" + + pl.col("temp_upper").cast(pl.Float64).cast(pl.Utf8) + "°C" + ).alias("temperature_range") + ]) + + baseline_consumptions = baseline_consumptions.sql( + """ + SELECT + g5k_cluster, + avg(pkg) AS average_pkg, + avg(ram) AS average_ram, + stddev(pkg) AS std_pkg, + stddev(ram) AS std_ram, + temperature_range + FROM self + GROUP BY + g5k_cluster, + temperature_range + ORDER BY g5k_cluster + """ + ) + + + + baseline = baseline_consumptions.join( + other=inventory, + left_on=["g5k_cluster"], + right_on=["cluster"], + how="left", + #validate="1:m" + ) + + baseline = baseline.with_columns([ + (pl.col("average_pkg") / pl.col("cores_per_node")).alias("pkg_per_core"), + (pl.col("average_ram") / pl.col("cores_per_node")).alias("ram_per_core"), + (pl.col("std_pkg") / pl.col("cores_per_node")).alias("pkg_per_core_std"), + (pl.col("std_ram") / pl.col("cores_per_node")).alias("ram_per_core_std"), + ]) + + + + + df_baseline_facetgrid = baseline.to_pandas() + + df_baseline_facetgrid = df_baseline_facetgrid.sort_values( + ["temperature_range"] + ) + + + unique_temps = sorted(df_baseline_facetgrid["temperature_range"].unique()) + n_colors = len(unique_temps) + cmap = sns.color_palette("coolwarm", n_colors=n_colors) # you can also try "viridis" or "Spectral" + + # Map each temperature range low to a specific color + color_map = dict(zip(unique_temps, cmap)) + + + # --- Custom plotting function with color and error bars --- + def plot_with_errorbars(data, **kwargs): + # Sort values for consistent bar order + data = data.sort_values("temperature_range") + + colors = [color_map[val] for val in data["temperature_range"]] + + + ax = sns.barplot( + data=data, + x="temperature_range", + y="pkg_per_core", + estimator="median", + hue="temperature_range", + palette=color_map, + errorbar=("pi", 50), capsize=.2, + err_kws={"color": ".3", "linewidth": 1.2}, + **kwargs + ) + #ax.set_title(ax.get_title().split("processor_description = ")[1]) + every_nth = 2 + for n, label in enumerate(ax.xaxis.get_ticklabels()): + if n % every_nth != 0: + label.set_visible(False) + plt.xticks(rotation=45, ha="right", fontsize=8) + + + + sns.set_theme(context="paper", style="whitegrid") + selected_clusters = [ + "chiclet", + "chuc", + "gros", + "parasilo", + "paradoxe", + "taurus", + "chifflot", + "fleckenstein", + "montcalm", + "econome", + "ecotype", + "nova" + ] + df_baseline_facetgrid_filtered_temperatures = 
df_baseline_facetgrid[df_baseline_facetgrid["g5k_cluster"].isin(selected_clusters)] + + # Compute a paper-friendly figure size (7in wide total) + g = sns.FacetGrid( + df_baseline_facetgrid_filtered_temperatures, + col="g5k_cluster", + col_wrap=5, + height=2.2, # Adjust to make it compact + aspect=1.0, + sharex=True, + sharey=True, + ) + + g.map_dataframe(plot_with_errorbars) + + # Axis labels and main title + g.set_axis_labels("Temperature Range", "Baseline Package \nConsumption per Core (W)", fontsize=8) + g.fig.suptitle("", y=1.03, fontsize=10) + g.set_titles(col_template="{col_name}", fontsize=12) + + + g.savefig("baseline_consumption_clusters.png", bbox_inches="tight", dpi=600) + plt.show() + return baseline, selected_clusters + + +@app.cell +def _(baseline): + + baseline.describe() + return + + +@app.cell +def _(baseline, pl, plt, sns): + baseline_cluster_mean = ( + baseline + .group_by("g5k_cluster") + .agg([ + pl.mean("pkg_per_core").alias("Package domain consumption per core"), + pl.mean("ram_per_core").alias("RAM domain consumption per core"), + ]) + ) + baseline_long = baseline_cluster_mean.melt( + id_vars=["g5k_cluster"], + value_vars=["Package domain consumption per core", "RAM domain consumption per core"], + variable_name="domain", + value_name="consumption" + ) + df_plot_domains = baseline_long.to_pandas() + # --- Compute cluster order by pkg consumption --- + cluster_order = ( + df_plot_domains[df_plot_domains["domain"] == "Package domain consumption per core"] + .sort_values("consumption")["g5k_cluster"] + .tolist() + ) + + # Merge processor info into df_plot_domains + processor_info = baseline.select([ + pl.col("g5k_cluster"), + pl.col("architecture"), + pl.col("version"), + pl.col("launch_date") + ]).unique().to_pandas() + + df_plot_domains = df_plot_domains.merge( + processor_info, + on="g5k_cluster", + how="left" + ) + + # Create formatted labels for the x-axis + df_plot_domains["cluster_label"] = ( + #df_plot_domains["g5k_cluster"] + #+ "\n" + df_plot_domains["architecture"].fillna("Unknown") + ", " + + df_plot_domains["version"].fillna("Unknown") + ", " + + df_plot_domains["launch_date"].fillna("Unknown") + ) + + # --- Sort clusters by pkg consumption --- + cluster_order = ( + df_plot_domains[df_plot_domains["domain"] == "Package domain consumption per core"] + .sort_values("consumption")["cluster_label"] + .tolist() + ) + + + + # --- FacetGrid: stacked vertically --- + grid = sns.FacetGrid( + df_plot_domains, + row="domain", + sharex=True, + sharey=True, + height=4.5, + aspect=2 + ) + + grid.map_dataframe( + sns.barplot, + x="cluster_label", + y="consumption", + order=cluster_order, + palette="muted" + ) + + # --- Improve tick labels --- + for axe in grid.axes.flat: + axe.tick_params(axis="x", labelrotation=45, labelsize=9) + for label_processor in axe.get_xticklabels(): + label_processor.set_ha("right") + label_processor.set_rotation_mode('anchor') # ensures text doesn't get cut off + axe.set_xlabel("Processor (Architecture, Version, Launch Date)") + axe.set_ylabel("Mean Baseline Consumption per Core (W)") + + # --- Adjust figure spacing so ticks are visible --- + grid.fig.subplots_adjust(hspace=0.25, bottom=0.3, top=0.9) + grid.fig.suptitle("Average Baseline Consumption per Core by Cluster and Domain", y=0.95) + grid.savefig("baseline_consumption_clusters_processors.pdf", bbox_inches="tight", pad_inches=0.01) + + plt.show() + return + + +@app.cell +def _(): + return + + +@app.cell +def _(mo): + mo.md(r"""#Frequency analysis""") + return + + +@app.cell +def 
_(batch_identifier, load, results_directory): + + ( + perf_frequency, + hwpc_frequency, + codecarbon_frequency, + alumet_frequency, + scaphandre_frequency, + vjoule_frequency, + ) = load.load_frequency( + batch_identifier=batch_identifier, results_directory=results_directory + ) + + vjoule_frequency_agg_raw = load.load_vjoule_frequency_agg(batch_identifier, results_directory) + codecarbon_frequency_agg_raw = load.load_codecarbon_frequency_agg(batch_identifier, results_directory) + return ( + alumet_frequency, + codecarbon_frequency, + codecarbon_frequency_agg_raw, + hwpc_frequency, + scaphandre_frequency, + vjoule_frequency, + vjoule_frequency_agg_raw, + ) + + +@app.cell +def _( + alumet_frequency, + codecarbon_frequency, + hwpc_frequency, + np, + pl, + scaphandre_frequency, + vjoule_frequency, +): + def collect_frequency_data(frequency_df, frequencies, metadatada): + tool = metadatada["tool"] + unit = metadatada["unit"] + + target = [] + reached = [] + + for frequency in frequencies: + df_ts = frequency_df.sql( + f""" + SELECT node, iteration, timestamp + FROM self + WHERE frequency = {frequency} + ORDER BY node, iteration, timestamp + """ + ).to_pandas() + + for (node, iteration), group in df_ts.groupby(["node", "iteration"]): + arr = group["timestamp"].values.astype(float) + + if arr.size < 2: + continue + + # computes (n+1 - n) interval and get inverse + arr = np.sort(arr) + intervals = arr[1:] - arr[:-1] + + if unit == "milliseconds": + inst_freqs = 1000.0 / intervals + else: + inst_freqs = 1.0 / intervals + + reached.extend(inst_freqs.tolist()) + target.extend([frequency] * len(inst_freqs)) + + return pl.DataFrame({ + "tool": tool, + "target_frequency": target, + "reached_frequency": reached, + }) + + + + df_all = pl.concat([ + collect_frequency_data(hwpc_frequency, [1, 10, 100, 1000], {"tool": "hwpc", "unit": "milliseconds"}), + collect_frequency_data(codecarbon_frequency, [1, 10, 100, 1000], {"tool": "codecarbon", "unit": "seconds"}), + collect_frequency_data(alumet_frequency, [1, 10, 100, 1000], {"tool": "alumet", "unit": "seconds"}), + collect_frequency_data(scaphandre_frequency, [1, 10, 100, 1000], {"tool": "scaphandre", "unit": "seconds"}), + collect_frequency_data(vjoule_frequency, [1, 10, 100, 1000], {"tool": "vjoule", "unit": "seconds"}), + ]) + return (df_all,) + + +@app.cell +def _(df_all): + sampled = df_all.sample(fraction=0.10).sort("tool") + return (sampled,) + + +@app.cell +def _(palette_for_tools, plt, sampled, sns): + plt.figure(figsize=(8,8)) + sns.lineplot( + data=sampled, x="target_frequency", y="reached_frequency", hue="tool", err_style="bars", errorbar=("ci"), palette=palette_for_tools + ) + sns.lineplot( + x=[0,1000], y=[0,1000], dashes=(2, 2), legend="auto" + ) + + plt.xscale("log") + plt.yscale("log") + + plt.xlabel("Target Frequency (Hz)", fontsize=12) + plt.ylabel("Reached Frequency (Hz)", fontsize=12) + plt.tick_params(axis="x", labelsize=12) + plt.tick_params(axis="y", labelsize=12) + plt.title("", pad=6, fontsize=12) + plt.xlim(0,10000) + plt.ylim(0,10000) + plt.legend(frameon=False, fontsize=12, title_fontsize=10, loc="upper left") + plt.grid(True, which="major", linestyle="--", linewidth=0.4, alpha=0.6) + + plt.savefig("reached_vs_target_frequency.png", bbox_inches="tight", dpi=600) + plt.show() + return + + +@app.cell +def _(mo): + mo.md(r"""##Overhead function of frequency""") + return + + +@app.cell +def _(Path, pl, re, results_directory): + # Define the root directory to search + temperatures_root_dir = Path(results_directory) # change this to 
your directory + + # Define the filename pattern (regex to extract frequency and tool) + temperatures_pattern = re.compile(r"temperatures_frequency_(\d+)_perf_and_(\w+)\.csv") + + # List of tools of interest + temperatures_valid_tools = {"hwpc", "codecarbon", "scaphandre", "alumet", "vjoule"} + + # Collect all matching CSV files + temperatures_csv_files = [] + for temperatures_file_path in temperatures_root_dir.rglob("temperatures_frequency_*_perf_and_*.csv"): + temperatures_match = temperatures_pattern.match(temperatures_file_path.name) + if temperatures_match: + temperatures_frequency, temperatures_tested_tool = temperatures_match.groups() + if temperatures_tested_tool in temperatures_valid_tools: + temperatures_node = temperatures_file_path.parent.name + temperatures_g5k_cluster = temperatures_node.split("-")[0] if "-" in temperatures_node else temperatures_node + temperatures_csv_files.append((temperatures_file_path, int(temperatures_frequency), temperatures_tested_tool, temperatures_node, temperatures_g5k_cluster)) + + # Load all CSVs into a list of DataFrames + temperatures_overhead_dfs = [] + for temperatures_file_path, temperatures_frequency, temperatures_valid_tool, temperatures_node, temperatures_g5k_cluster in temperatures_csv_files: + temperatures_df = ( + pl.read_csv(temperatures_file_path) + .with_columns([ + pl.lit(temperatures_frequency).alias("target_frequency"), + pl.lit(temperatures_valid_tool).alias("tool"), + pl.lit(temperatures_node).alias("node"), + pl.lit(temperatures_g5k_cluster).alias("g5k_cluster"), + ]) + ) + temperatures_overhead_dfs.append(temperatures_df) + + # Concatenate all into one big Polars DataFrame + if temperatures_overhead_dfs: + temperatures_all_data = pl.concat(temperatures_overhead_dfs, how="vertical") + else: + temperatures_all_data = pl.DataFrame() + + temperatures_all_data = temperatures_all_data.with_columns([ + ((pl.col("temperature_start") + pl.col("temperature_stop")) / 2).alias("average_temperature").cast(pl.Int64) + ]) + print(temperatures_all_data.head()) + return (temperatures_all_data,) + + +@app.cell +def _( + Path, + baseline, + inventory, + pl, + re, + results_directory, + temperatures_all_data, +): + + # Define the root directory to search + root_dir = Path(results_directory) # change this to your directory + + # Define the filename pattern (regex to extract frequency and tool) + pattern = re.compile(r"frequency_(\d+)_perf_and_(\w+)\.csv") + + # List of tools of interest + valid_tools = {"hwpc", "codecarbon", "scaphandre", "alumet", "vjoule"} + + # Collect all matching CSV files + csv_files = [] + for file_path in root_dir.rglob("frequency_*_perf_and_*.csv"): + match = pattern.match(file_path.name) + if match: + frequency, tested_tool = match.groups() + if tested_tool in valid_tools: + node = file_path.parent.name # e.g. 
"paravance-5" + g5k_cluster = node.split("-")[0] if "-" in node else node + csv_files.append((file_path, int(frequency), tested_tool, node, g5k_cluster)) + + # Load all CSVs into a list of DataFrames + overhead_dfs = [] + for file_path, frequency, valid_tool, node, g5k_cluster in csv_files: + df = ( + pl.read_csv(file_path) + .with_columns([ + pl.lit(frequency).alias("target_frequency"), + pl.lit(valid_tool).alias("tool"), + pl.lit(node).alias("node"), + pl.lit(g5k_cluster).alias("g5k_cluster"), + ]) + ) + overhead_dfs.append(df) + + # Concatenate all into one big Polars DataFrame + if overhead_dfs: + all_data = pl.concat(overhead_dfs, how="vertical") + else: + all_data = pl.DataFrame() + + all_data = all_data.join( + other=temperatures_all_data, + left_on=["node", "target_frequency", "tool", "iteration"], + right_on=["node", "target_frequency", "tool", "iteration"], + how="left", + validate="1:1" + ) + + all_data = all_data.with_columns([ + # Compute lower bound of the 5°C bin + (pl.col("average_temperature") // 5 * 5).alias("temp_lower"), + # Compute upper bound + ((pl.col("average_temperature") // 5 * 5) + 4).alias("temp_upper") + ]) + + # Combine into formatted interval strings like "45-50°C" + all_data = all_data.with_columns([ + (pl.col("temp_lower").cast(pl.Float64).cast(pl.Utf8) + "-" + + pl.col("temp_upper").cast(pl.Float64).cast(pl.Utf8) + "°C" + ).alias("temperature_range") + ]) + + + stats = ( + all_data + .group_by(["g5k_cluster", "tool", "target_frequency", "temperature_range"]) + .agg([ + # Package domain + pl.col("power_energy_pkg").median().alias("median_pkg"), + pl.col("power_energy_pkg").mean().alias("mean_pkg"), + pl.col("power_energy_pkg").std().alias("std_pkg"), + + # RAM domain + pl.col("power_energy_ram").median().alias("median_ram"), + pl.col("power_energy_ram").mean().alias("mean_ram"), + pl.col("power_energy_ram").std().alias("std_ram"), + + ]) + .sort(["g5k_cluster", "tool", "target_frequency"]) + ) + stats = stats.with_columns([ + pl.col("g5k_cluster").cast(pl.Utf8), + pl.col("temperature_range").cast(pl.Utf8), + ]) + stats = stats.join( + other=inventory, + left_on=["g5k_cluster"], + right_on=["cluster"], + how="left", + #validate="1:m" + ) + + + stats = stats.with_columns([ + (pl.col("median_pkg") / pl.col("cores_per_node")).alias("median_pkg_per_core"), + (pl.col("median_ram") / pl.col("cores_per_node")).alias("median_ram_per_core") + ]) + + stats = stats.join( + other=baseline.with_columns([ + pl.col("g5k_cluster").cast(pl.Utf8), + pl.col("temperature_range").cast(pl.Utf8), + ]).sql("SELECT g5k_cluster, pkg_per_core, ram_per_core, pkg_per_core_std, ram_per_core_std, temperature_range FROM self"), + left_on=["g5k_cluster", "temperature_range"], + right_on=["g5k_cluster", "temperature_range"], + how="left", + #validate="1:m" + ) + + stats = stats.with_columns([ + (pl.col("median_pkg_per_core") - pl.col("pkg_per_core")).alias("pkg_overhead_per_core"), + (pl.col("median_ram_per_core") - pl.col("ram_per_core")).alias("ram_overhead_per_core"), + ]) + + stats.describe() + return all_data, stats + + +@app.cell +def _(pd, stats): + # Convert from Polars if needed + overhead_df = stats.to_pandas() + + # Ensure numeric and categorical order + overhead_df["target_frequency"] = overhead_df["target_frequency"].astype(int) + overhead_df["tool"] = pd.Categorical(overhead_df["tool"], ordered=True) + overhead_df = overhead_df.sort_values("tool") + return (overhead_df,) + + +@app.cell +def _(overhead_df, palette_for_tools, plt, sns): + def draw_barplot(data, **kwargs): + """ 
+ Draw grouped barplot of mean pkg_overhead_per_core per frequency, with error bars per tool. + """ + sns.barplot( + data=data, + x="target_frequency", + y="pkg_overhead_per_core", + hue="tool", + estimator="median", + palette=palette_for_tools, + errorbar=("pi", 50), capsize=.2, + err_kws={"color": ".3", "linewidth": 1.2}, + **kwargs + ) + + # Facet by cluster + g_bar = sns.FacetGrid( + overhead_df, + col="processor_description", + col_wrap=3, + margin_titles=True, + height=3.7, + aspect=1.5 + ) + + plt.suptitle("", fontsize=18) + g_bar.map_dataframe(draw_barplot) + + # Beautify + g_bar.set_axis_labels("Frequency (Hz)", "Package domain overhead\nper core with IQR (W)", fontsize=14) + g_bar.set_titles(col_template="{col_name}", fontsize=12) + g_bar.add_legend(fontsize=16, ncol=5, bbox_to_anchor=(0.0, -0.3175, 0.5, 0.5)) + + for axis in g_bar.axes: + axis.tick_params(axis="x", labelsize=14) + axis.tick_params(axis="y", labelsize=14) + axis.set_title(label=axis.get_title(), fontsize=16) + plt.savefig("package_overhead.png", bbox_inches="tight", dpi=600) + plt.show() + return + + +@app.cell +def _( + alumet_frequency, + codecarbon_frequency_agg_raw, + hwpc_frequency, + math, + pl, + scaphandre_frequency, + vjoule_frequency_agg_raw, +): + hwpc_frequency_agg = ( + hwpc_frequency.group_by(["iteration", "node", "frequency"]) + .agg([ + pl.sum("cores").alias("cores_raw"), + pl.sum("pkg").alias("pkg_raw"), + pl.sum("ram").alias("ram_raw"), + ]) + .with_columns([ + pl.col("cores_raw").map_elements(lambda x: math.ldexp(x, -32), return_dtype=pl.Float64).alias("cores_total"), + pl.col("pkg_raw").map_elements(lambda x: math.ldexp(x, -32),return_dtype=pl.Float64).alias("pkg_total"), + pl.col("ram_raw").map_elements(lambda x: math.ldexp(x, -32),return_dtype=pl.Float64).alias("ram_total"), + + pl.lit("hwpc").alias("tool") + ]) + .select([ + "node", "cores_total", "pkg_total", "ram_total","iteration", "frequency", "tool" + ]).cast({"pkg_total": pl.Float32, "cores_total": pl.Float32, "ram_total": pl.Float32, "iteration": pl.Int32, "frequency": pl.Int32}) + ) + + alumet_frequency_agg = ( + alumet_frequency.group_by(["iteration", "node", "frequency"]) + .agg([ + pl.sum("cores").alias("cores_total"), + pl.sum("pkg").alias("pkg_total"), + pl.sum("ram").alias("ram_total"), + ]) + .with_columns([ + pl.lit("alumet").alias("tool") + ]) + .select([ + "node", "cores_total", "pkg_total", "ram_total","iteration", "frequency", "tool" + ]).cast({"pkg_total": pl.Float32, "cores_total": pl.Float32, "ram_total": pl.Float32, "iteration": pl.Int32, "frequency": pl.Int32}) + ) + + scaphandre_frequency_agg = ( + scaphandre_frequency.group_by(["iteration", "node", "frequency"]) + .agg([ + pl.sum("cores").alias("cores_raw"), + pl.sum("pkg").alias("pkg_raw"), + pl.sum("ram").alias("ram_raw"), + ]).with_columns([ + (pl.col("cores_raw") / (pl.col("frequency") * 1_000_000)).alias("cores_total"), + (pl.col("pkg_raw")/ (pl.col("frequency") * 1_000_000)).alias("pkg_total"), + (pl.col("ram_raw")/ (pl.col("frequency") * 1_000_000)).alias("ram_total"), + pl.lit("scaphandre").alias("tool") + ]) + .select([ + "node", "cores_total", "pkg_total", "ram_total","iteration", "frequency", "tool" + ]).cast({"pkg_total": pl.Float32, "cores_total": pl.Float32, "ram_total": pl.Float32, "iteration": pl.Int32, "frequency": pl.Int32}) + ) + + vjoule_frequency_agg = vjoule_frequency_agg_raw.with_columns([ + pl.col("cores").alias("cores_total"), + pl.col("pkg").alias("pkg_total"), + pl.col("ram").alias("ram_total"), + pl.lit("vjoule").alias("tool") + 
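+        # vjoule totals are used as reported (no unit rescaling here, unlike
+        # the ldexp(x, -32) conversion for hwpc above or the 1e6 factors
+        # applied to scaphandre and codecarbon in this cell).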
]).select(["node", "cores_total", "pkg_total", "ram_total","iteration", "frequency", "tool"]).cast({"pkg_total": pl.Float32, "cores_total": pl.Float32, "ram_total": pl.Float32, "iteration": pl.Int32, "frequency": pl.Int32}) + + + codecarbon_frequency_agg = codecarbon_frequency_agg_raw.with_columns([ + (pl.col("cores") * 1_000_000).alias("cores_total"), + (pl.col("pkg") * 1_000_000).alias("pkg_total"), + (pl.col("ram") * 1_000_000).alias("ram_total"), + pl.lit("codecarbon").alias("tool") + ]).select(["node", "cores_total", "pkg_total", "ram_total","iteration", "frequency", "tool"]).cast({"pkg_total": pl.Float32, "cores_total": pl.Float32, "ram_total": pl.Float32, "iteration": pl.Int32, "frequency": pl.Int32}) + + frequency_agg = pl.concat([alumet_frequency_agg, hwpc_frequency_agg, scaphandre_frequency_agg, vjoule_frequency_agg, codecarbon_frequency_agg]) + return ( + alumet_frequency_agg, + codecarbon_frequency_agg, + frequency_agg, + hwpc_frequency_agg, + scaphandre_frequency_agg, + vjoule_frequency_agg, + ) + + +@app.cell +def _( + alumet_frequency_agg, + codecarbon_frequency_agg, + frequency_agg, + hwpc_frequency_agg, + scaphandre_frequency_agg, + vjoule_frequency_agg, +): + print(alumet_frequency_agg.columns) + print(hwpc_frequency_agg.columns) + print(scaphandre_frequency_agg.columns) + print(vjoule_frequency_agg.columns) + print(codecarbon_frequency_agg.columns) + print(frequency_agg.columns) + return + + +@app.cell +def _(all_data, frequency_agg, pl): + merged_frequency_measurements_df = frequency_agg.join( + all_data, + left_on=["node", "tool", "frequency", "iteration"], + right_on=["node", "tool", "frequency", "iteration"], + how="left" + ) + + merged_frequency_measurements_df = merged_frequency_measurements_df.cast({"cores_total": pl.Float32, "power_energy_cores": pl.Float32}) + merged_frequency_measurements_df = merged_frequency_measurements_df.with_columns([ + (((pl.col("power_energy_pkg") - pl.col("pkg_total")).abs()) / pl.col("power_energy_pkg")).alias("pkg_diff"), + (((pl.col("power_energy_cores") - pl.col("cores_total")).abs()) /pl.col("power_energy_cores")).alias("cores_diff"), + (((pl.col("power_energy_ram") - pl.col("ram_total")).abs()) /pl.col("power_energy_ram")).alias("ram_diff")]) + return (merged_frequency_measurements_df,) + + +@app.cell +def _(merged_frequency_measurements_df, palette_for_tools, plt, sns): + fig_diff, axes = plt.subplots(nrows=2, ncols=1, sharex=True, figsize=(10,6)) + from matplotlib.patches import Rectangle + + # --- Add rectangle to axes[0] --- + # Coordinates are in data space by default; you can switch to axes fraction if preferred. 
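+    # The two white rectangles below act as label boxes for bars clipped by
+    # the 0-1 y-limit; the "y=1.4" and "y=1.8E4" texts drawn further down
+    # give the off-scale values.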
+
+    rect_x_100 = 1.65  # rectangle lower-left x
+    rect_y_100 = 0.8  # rectangle lower-left y
+    rect_width_100 = 0.35
+    rect_height_100 = 0.1
+
+    # Add rectangle patch
+    rect_100 = Rectangle(
+        (rect_x_100, rect_y_100),
+        rect_width_100,
+        rect_height_100,
+        linewidth=0.5,
+        edgecolor='black',
+        facecolor='white',
+        fill=True
+    )
+
+    rect_x_1000 = 2.625  # rectangle lower-left x
+    rect_y_1000 = 0.8  # rectangle lower-left y
+    rect_width_1000 = 0.35
+    rect_height_1000 = 0.1
+    rect_1000 = Rectangle(
+        (rect_x_1000, rect_y_1000),
+        rect_width_1000,
+        rect_height_1000,
+        linewidth=0.5,
+        edgecolor='black',
+        facecolor='white',
+        fill=True
+    )
+
+
+    sns.barplot(
+        data=merged_frequency_measurements_df.sort("tool"),
+        x="frequency",
+        y="pkg_diff",
+        hue="tool",
+        palette=palette_for_tools,
+        ax=axes[0],
+        errorbar=("pi", 50)
+    )
+    sns.barplot(
+        data=merged_frequency_measurements_df.sql("SELECT * FROM self WHERE tool != 'scaphandre'").sort("tool"),
+        x="frequency",
+        y="ram_diff",
+        hue="tool",
+        palette=palette_for_tools,
+        ax=axes[1]
+    )
+    fig_diff.suptitle("", fontsize=10)
+    for axe_diff in axes:
+        axe_diff.tick_params(axis="x", labelsize=14)
+        axe_diff.tick_params(axis="y", labelsize=14)
+
+        axe_diff.set_xlabel("Sampling frequency (Hz)", fontsize=14)
+        axe_diff.set_ylim(0, top=1.0)
+        axe_diff.set_ylabel("")
+    axes[1].get_legend().remove()
+    axes[0].legend(title="", fontsize=14, bbox_to_anchor=(.475, -2, .5, .5), frameon=False, ncols=5)
+    axes[0].set_ylabel("", fontsize=10)
+    #axes[1].set_ylabel("Package domain ratio", fontsize=10)
+    axes[1].text(x=-1, y=0.1, s="RAM domain ratio", fontsize=14, rotation="vertical")
+
+    axes[0].text(x=-1, y=0.0, s="Package domain ratio", fontsize=14, rotation="vertical")
+    axes[0].add_patch(rect_100)
+    axes[0].add_patch(rect_1000)
+
+    # Add text inside the rectangles
+    axes[0].text(
+        rect_x_100 + rect_width_100/2,
+        rect_y_100 + rect_height_100/2,
+        "y=1.4",
+        ha='center',
+        va='center',
+        fontsize=12
+    )
+
+    axes[0].text(
+        rect_x_1000 + rect_width_1000/2,
+        rect_y_1000 + rect_height_1000/2,
+        "y=1.8E4",
+        ha='center',
+        va='center',
+        fontsize=12
+    )
+
+
+    plt.savefig("frequency_measurements_diff.png", bbox_inches="tight", dpi=600)
+    plt.show()
+    return
+
+
+@app.cell
+def _(plt):
+    # plt.figure() starts with an empty axes list, so fig.axes[0] raised an
+    # IndexError here; create the axes explicitly instead.
+    fig_ram_freq, ax_ram_freq = plt.subplots(figsize=(8, 6))
+    ax_ram_freq.set_ylim(0, top=1.0)
+    plt.title("")
+    plt.xlabel("Sampling frequency (Hz)")
+    plt.ylabel("Relative difference of perf and tools over RAM RAPL domain (1.0 = 100%)")
+    plt.tight_layout(pad=0.1)
+    plt.savefig("frequency_measurements_diff_ram.pdf", bbox_inches="tight")
+    plt.show()
+    return
+
+
+@app.cell
+def _(mo):
+    mo.md(r"""# Stability of measurement tools""")
+    return
+
+
+@app.cell
+def _(Path, inventory, math, pl, re, results_directory):
+    def load_tool_csvs(base_directory: str):
+        """
+        Load all TOOL_and_perf_*.csv and perf_and_TOOL_*.csv files recursively
+        into per-tool DataFrames with added columns 'node' and 'g5k_cluster'.
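+        hwpc rows are summed per iteration and converted from raw RAPL
+        counter units to joules with math.ldexp(x, -32), i.e. x * 2**-32
+        (so math.ldexp(2**32, -32) == 1.0); the other tools' energy_*
+        columns are used as read.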
+ + Returns a dict: {tool_name: DataFrame} + """ + # Define the supported tools + tools = ["hwpc", "alumet", "codecarbon", "vjoule", "scaphandre"] + pattern = re.compile(rf"({'|'.join(tools)})_and_perf_\d+_\d+\.csv") + + # Prepare results dict + dfs = {tool: [] for tool in tools} + + base_path = Path(base_directory) + print(f"Loading CSVs from: {base_path.resolve()}") + + for file_path in base_path.rglob("*.csv"): + filename = file_path.name + + # Match the file pattern + if not pattern.match(filename): + continue + + # Extract node and cluster + node = file_path.parent.name + g5k_cluster = node.split("-")[0] if "-" in node else node + + # Determine tool name + tool = None + for t in tools: + if t in filename: + tool = t + break + if tool is None: + continue + print("Reading file:", file_path) + + # Choose schema depending on the tool + try: + df = pl.read_csv(file_path) + + if tool == "hwpc": + try: + df = ( + df.group_by(["iteration"]) + .agg([ + pl.sum("rapl_energy_cores").alias("energy_cores_raw"), + pl.sum("rapl_energy_pkg").alias("energy_pkg_raw"), + pl.sum("rapl_energy_dram").alias("energy_ram_raw"), + pl.first("nb_core"), + pl.first("nb_ops_per_core"), + ]) + .with_columns([ + pl.col("energy_cores_raw").map_elements(lambda x: math.ldexp(x, -32), return_dtype=pl.Float64).alias("energy_cores"), + pl.col("energy_pkg_raw").map_elements(lambda x: math.ldexp(x, -32),return_dtype=pl.Float64).alias("energy_pkg"), + pl.col("energy_ram_raw").map_elements(lambda x: math.ldexp(x, -32),return_dtype=pl.Float64).alias("energy_ram"), + ]) + .select([ + "energy_cores", "energy_pkg", "energy_ram", + "nb_core", "nb_ops_per_core", "iteration" + ]) + .with_columns([ + pl.lit(node).alias("node"), + pl.lit(g5k_cluster).alias("g5k_cluster") + ]) + ) + + dfs[tool].append(df) + continue + + except Exception as e: + print(f"❌ Error reading HWPC file {file_path}: {e}") + else: + expected_columns = [ + "energy_cores", "energy_pkg", "energy_ram", + "nb_core", "nb_ops_per_core", "iteration" + ] + + # Keep only expected columns (if file has extras) + df = df.select([col for col in expected_columns if col in df.columns]) + + # Add metadata columns + df = df.with_columns([ + pl.lit(node).alias("node"), + pl.lit(g5k_cluster).alias("g5k_cluster") + ]) + dfs[tool].append(df) + + except Exception as e: + print(f"❌ Error reading {file_path}: {e}") + + + merged_dfs = {} + for tool in tools: + + merged_df = pl.DataFrame(schema=[ + "energy_cores", "energy_pkg", "energy_ram", + "nb_core", "nb_ops_per_core", "iteration", "node", "g5k_cluster" + ]) + for df in dfs[tool]: + try: + merged_df = pl.concat([merged_df, df], how="vertical_relaxed") + except Exception as e: + print("Failed for:", df.describe(), "because:", e, "with tool", tool) + try: + merged_df = merged_df.join( + other=inventory, + left_on=["g5k_cluster"], + right_on=["cluster"], + how="left", + #validate="1:m" + ) + except Exception as e: + print(f"❌ Error merging {tool}: {e}") + merged_dfs[tool] = merged_df + return merged_dfs + + dfs = load_tool_csvs(results_directory) + return (dfs,) + + +@app.cell +def _(dfs, np, pl, plt, selected_clusters, sns): + def compute_cv_per_tool(tool_dfs, filler=np.nan): + """ + Compute coefficient of variation (std/mean) across iterations for each tool, node, and cluster. + If a field is missing or has only nulls, fill with a filler value. 
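+        Here cv = std(field) / mean(field), taken over iterations per node
+        (plus g5k_cluster when that column is present), so values are
+        dimensionless and comparable across tools and domains.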
+ """ + all_domains = ["energy_cores", "energy_pkg", "energy_ram"] + results = [] + + for tool_name, df in tool_dfs.items(): + df = df.lazy() + available_cols = set(df.columns) + + # Identify grouping columns dynamically (some datasets may not have g5k_cluster) + group_cols = ["node"] + if "g5k_cluster" in available_cols: + group_cols.append("g5k_cluster") + + for field in all_domains: + if field in available_cols: + # Check if at least one non-null value exists + has_data = df.select(pl.col(field).drop_nulls().count()).collect().item() > 0 + else: + has_data = False + + if has_data: + # Compute CV across iterations for each node (+ cluster if present) + cv_df = ( + df.group_by(group_cols) + .agg([ + (pl.std(field) / pl.mean(field)).alias("cv") + ]) + .with_columns([ + pl.lit(tool_name).alias("tool"), + pl.lit(field).alias("domain") + ]) + .collect() + ) + + else: + # Fill with the filler value for each node/cluster + nodes = df.select(group_cols).unique().collect() + cv_df = nodes.with_columns([ + pl.lit(filler).alias("cv"), + pl.lit(tool_name).alias("tool"), + pl.lit(field).alias("domain") + ]) + + results.append(cv_df) + + all_cv = pl.concat(results) + return all_cv.to_pandas() + + + + # --- Compute CVs --- + cv_df = compute_cv_per_tool(dfs) + + # Clean up domain names for display + cv_df["domain"] = cv_df["domain"].str.replace("energy_", "").str.capitalize() + + + + colors_domain= { + "Cores": "#4878CF", + "Ram": "#6ACC65", + "Pkg": "#D65F5F", + } + + def fancy_boxplot(data, color=None, **kwargs): + _ = sns.boxplot( + data=data.sort_values("tool"), + x="tool", + y="cv", + palette=colors_domain, + showmeans=False, + showfliers=False, + hue="domain", + ) + + # Assume cv_df also has a column 'g5k_cluster' + domains = cv_df["domain"].unique() + + # --- pick 6 clusters --- + cv_df_filtered = cv_df[cv_df["g5k_cluster"].isin(selected_clusters)] + + #for domain in domains: + # df_sub = cv_df_filtered[cv_df_filtered["domain"] == domain] + g_cv = sns.FacetGrid( + cv_df_filtered, + col="g5k_cluster", + col_wrap=5, + sharey=True, + sharex=True + ) + + g_cv.map_dataframe(fancy_boxplot) + for ax_cv in g_cv.axes.flat: + ax_cv.set_facecolor("#F8FAFC") + ax_cv.grid(True, color="#E2E8F0") + ax_cv.set_xlabel("Tools", fontsize=12) + ax_cv.set_ylabel("Coefficient of Variation", fontsize=12) + + # Title per domain + g_cv.fig.suptitle( + f"", + fontsize=12, + fontweight="bold", + color="#1E293B", + x=0.425, + y=1.1 + ) + + # Beautify + g_cv.set_axis_labels("Tools", "Coefficient of Variation", fontsize=12) + g_cv.set_titles(col_template="{col_name}", fontsize=12) + for axis_stability in g_cv.axes: + axis_stability.tick_params(axis="x", labelsize=12, labelrotation=45) + axis_stability.tick_params(axis="y", labelsize=12) + axis_stability.set_title(label=axis_stability.get_title(), fontsize=12) + if axis_stability.get_legend(): + axis_stability.get_legend().remove() + g_cv.add_legend(fontsize=12, ncols=3, bbox_to_anchor=(0.02, -0.3875, 0.5, 0.5)) + + sns.despine(left=True, bottom=True) + #plt.subplots_adjust(top=0.88, hspace=0.25) + + + # Save figure for that domain + g_cv.savefig(f"cv_per_tool_per_cluster.pdf") + g_cv.savefig(f"cv_per_tool_per_cluster.png", bbox_inches="tight", dpi=600) + plt.show() + return + + +@app.cell +def _(): + return + + +if __name__ == "__main__": + app.run() diff --git a/analysis/package_overhead.pdf b/analysis/package_overhead.pdf new file mode 100644 index 0000000..2f4b9bd Binary files /dev/null and b/analysis/package_overhead.pdf differ diff --git a/analysis/package_overhead.png 
b/analysis/package_overhead.png new file mode 100644 index 0000000..8e95b8b Binary files /dev/null and b/analysis/package_overhead.png differ diff --git a/analysis/reached_vs_target_frequency.pdf b/analysis/reached_vs_target_frequency.pdf new file mode 100644 index 0000000..35f55ab Binary files /dev/null and b/analysis/reached_vs_target_frequency.pdf differ diff --git a/analysis/reached_vs_target_frequency.png b/analysis/reached_vs_target_frequency.png new file mode 100644 index 0000000..686a915 Binary files /dev/null and b/analysis/reached_vs_target_frequency.png differ diff --git a/analysis/rq1.py b/analysis/rq1.py deleted file mode 100644 index 64a3810..0000000 --- a/analysis/rq1.py +++ /dev/null @@ -1,57 +0,0 @@ -import visualization -import matplotlib.pyplot as plt -import seaborn as sns -import pandas as pd -import polars as pl - -def correlation_perf_hwpc_cv(df, job, os): - - if job == "alone": - df_perf = df.sql(f"SELECT * FROM self WHERE job = 'perf_{job}'") - df_hwpc = df.sql(f"SELECT * FROM self WHERE job = 'hwpc_{job}'") - title = f"Scatterplot of PERF coefficient of variation related to HWPC, PKG domain, measurement tools isolated\n{os}" - else: - df_perf = df.sql(f"SELECT * FROM self WHERE job = 'perf_with_hwpc'") - df_hwpc = df.sql(f"SELECT * FROM self WHERE job = 'hwpc_with_perf'") - title = f"Scatterplot of PERF coefficient of variation related to HWPC, PKG domain, measurement tools running together\n{os}" - - joined = df_hwpc.join( - other=df_perf, on=["node", "nb_ops_per_core", "nb_core", "alone"], how="left", validate="1:1", suffix="_perf" - ) - - sns.set_theme(style="whitegrid") - f, ax = plt.subplots(figsize=(12,8)) - sns.despine(f, left=True, bottom=True) - plotted_df = joined.sql("SELECT * FROM self WHERE nb_ops_per_core = 25000 and processor_version != 'Gold 5320'").drop_nulls(subset=["pkg_coefficient_of_variation", "pkg_coefficient_of_variation_perf"]).drop_nans(subset=["pkg_coefficient_of_variation", "pkg_coefficient_of_variation_perf"]) - - max_perf = plotted_df["pkg_coefficient_of_variation_perf"].max() - max_hwpc = plotted_df["pkg_coefficient_of_variation"].max() - max_both = max(max_perf, max_hwpc) - - corr = plotted_df.select(pl.corr("pkg_coefficient_of_variation_perf", "pkg_coefficient_of_variation")).item() - correlations = ( - plotted_df.group_by("processor_detail") - .agg(pl.corr("pkg_coefficient_of_variation_perf", "pkg_coefficient_of_variation").alias("corr")) - ) - corr_dict = dict(zip(correlations["processor_detail"], correlations["corr"])) - scatter = sns.scatterplot(data=plotted_df, - x="pkg_coefficient_of_variation_perf", - y="pkg_coefficient_of_variation", - hue="processor_detail", - style="processor_vendor" - ) - sns.lineplot(x=[0, max_both], y=[0, max_both], color="red", linestyle="dashed", label="f(x) = x") - plt.title(title) - plt.xlabel("Coefficient of variation of PERF for PKG domain") - plt.ylabel("Coefficient of variation of HWPC for PKG domain") - plt.text(0.05, 0.95, f"Correlation: {corr:.2f}", transform=plt.gca().transAxes, - fontsize=12, verticalalignment='top', bbox=dict(boxstyle="round,pad=0.3", - edgecolor='black', - facecolor='white') - ) - handles, labels = scatter.get_legend_handles_labels() - new_labels = [f"{label} (corr: {corr_dict.get(label, 'N/A'):.2f})" for label in labels if label in corr_dict] - plt.legend(handles, new_labels, loc="lower right") - plt.tight_layout() - plt.show() - diff --git a/analysis/rq2.py b/analysis/rq2.py deleted file mode 100644 index c9923d6..0000000 --- a/analysis/rq2.py +++ /dev/null @@ -1,33 
+0,0 @@ -import visualization -import matplotlib.pyplot as plt -import seaborn as sns -import pandas as pd -import polars as pl -import re - - -def boxplots_perf_hwpc_cv_processor(df, x, y, hue, prefix, save=True, show=True): - plt.figure(figsize=(12, 6)) - df = df.sql("SELECT * FROM self WHERE nb_ops_per_core = 25000") - sns.boxplot( - data=df, - x=x, - y=y, - hue=hue, - showfliers=False - ) - - title = f"{prefix} - PKG Coefficient of Variation by {hue} and {x}" - plt.title(title) - plt.xticks(rotation=90, ha="right") - plt.xlabel("Processor version and generation") - plt.ylabel("PKG Coefficient of Variation") - safe_title = re.sub(r'[^\w\s-]', '', title) # Remove invalid characters - safe_title = safe_title.replace(" ", "_") - safe_title = safe_title.replace("\n", "_") - plt.tight_layout() - if save: - plt.savefig(f'{safe_title}.png', dpi=500) - if show: - plt.show() - diff --git a/analysis/rq3.py b/analysis/rq3.py deleted file mode 100644 index e48826d..0000000 --- a/analysis/rq3.py +++ /dev/null @@ -1,68 +0,0 @@ -import visualization -import matplotlib.pyplot as plt -import seaborn as sns -import pandas as pd -import polars as pl - -def correlation_perf_perf_hwpc_hwpc_cv_os(df1, df2, job): - - if job == "alone": - df1_perf = df1.sql(f"SELECT * FROM self WHERE job = 'perf_{job}'") - df2_perf = df2.sql(f"SELECT * FROM self WHERE job = 'perf_{job}'") - df1_hwpc = df1.sql(f"SELECT * FROM self WHERE job = 'hwpc_{job}'") - df2_hwpc = df2.sql(f"SELECT * FROM self WHERE job = 'hwpc_{job}'") - title = f"Scatterplot of Ubuntu PERF coefficient of variation related to Debian, PKG domain, measurement tools isolated" - else: - df1_perf = df1.sql(f"SELECT * FROM self WHERE job = 'perf_with_hwpc'") - df2_perf = df2.sql(f"SELECT * FROM self WHERE job = 'perf_with_hwpc'") - df1_hwpc = df1.sql(f"SELECT * FROM self WHERE job = 'hwpc_with_perf'") - df2_hwpc = df2.sql(f"SELECT * FROM self WHERE job = 'hwpc_with_perf'") - title = f"Scatterplot of Ubuntu PERF coefficient of variation related to Debian, PKG domain, measurement tools running together" - - joined_perf = df1_perf.join( - other=df2_perf, on=["node", "nb_ops_per_core", "nb_core", "alone"], how="left", validate="1:1", suffix="_debian" - ) - joined_hwpc = df1_hwpc.join( - other=df2_hwpc, on=["node", "nb_ops_per_core", "nb_core", "alone"], how="left", validate="1:1", suffix="_debian" - ) - - sns.set_theme(style="whitegrid") - f, ax = plt.subplots(figsize=(12,8)) - sns.despine(f, left=True, bottom=True) - plotted_df_perf = joined_perf.sql("SELECT * FROM self WHERE nb_ops_per_core = 25000 and processor_version != 'Gold 5320'").drop_nulls(subset=["pkg_coefficient_of_variation_debian", "pkg_coefficient_of_variation"]).drop_nans(subset=["pkg_coefficient_of_variation_debian", "pkg_coefficient_of_variation"]) - plotted_df_hwpc = joined_hwpc.sql("SELECT * FROM self WHERE nb_ops_per_core = 25000 and processor_version != 'Gold 5320'").drop_nulls(subset=["pkg_coefficient_of_variation_debian", "pkg_coefficient_of_variation"]).drop_nans(subset=["pkg_coefficient_of_variation_debian", "pkg_coefficient_of_variation"]) - - max_perf_1 = plotted_df_perf["pkg_coefficient_of_variation"].max() - max_perf_2 = plotted_df_perf["pkg_coefficient_of_variation_debian"].max() - max_perf_both = max(max_perf_1, max_perf_2) - max_hwpc_1 = plotted_df_hwpc["pkg_coefficient_of_variation"].max() - max_hwpc_2 = plotted_df_hwpc["pkg_coefficient_of_variation_debian"].max() - max_hwpc_both = max(max_hwpc_1, max_hwpc_2) - - corr = plotted_df_perf.select(pl.corr("pkg_coefficient_of_variation", 
"pkg_coefficient_of_variation_debian")).item() - correlations = ( - plotted_df_perf.group_by("processor_detail") - .agg(pl.corr("pkg_coefficient_of_variation", "pkg_coefficient_of_variation_debian").alias("corr")) - ) - corr_dict = dict(zip(correlations["processor_detail"], correlations["corr"])) - scatter = sns.scatterplot(data=plotted_df_perf, - x="pkg_coefficient_of_variation", - y="pkg_coefficient_of_variation_debian", - hue="node", - style="processor_vendor" - ) - sns.lineplot(x=[0, max_perf_both], y=[0, max_perf_both], color="red", linestyle="dashed", label="f(x) = x") - plt.title(title) - plt.xlabel("Coefficient of variation of PERF for PKG domain - Ubuntu2404 - Kernel 6.8") - plt.ylabel("Coefficient of variation of HWPC for PKG domain - Debian11 - Kernel 5.10") - plt.text(0.05, 0.95, f"Correlation: {corr:.2f}", transform=plt.gca().transAxes, - fontsize=12, verticalalignment='top', bbox=dict(boxstyle="round,pad=0.3", - edgecolor='black', - facecolor='white') - ) - #handles, labels = scatter.get_legend_handles_labels() - #new_labels = [f"{label} (corr: {corr_dict.get(label, 'N/A'):.2f})" for label in labels if label in corr_dict] - #plt.legend(handles, new_labels, loc="lower right") - plt.tight_layout() - plt.show() - diff --git a/analysis/rq34.py b/analysis/rq34.py deleted file mode 100644 index faf0a24..0000000 --- a/analysis/rq34.py +++ /dev/null @@ -1,42 +0,0 @@ -import visualization - - -def os_comparison_boxplots_processor_versions_pkg_all(dfs, save=False, show=False): - visualization.plot_boxplots( - dfs, - "processor_detail", - "pkg_coefficient_of_variation", - "job", - "All Measurements", - ) - -def os_comparison_boxplots_processor_versions_ram_all(dfs, save=False, show=False): - visualization.plot_boxplots( - dfs, - "processor_detail", - "ram_coefficient_of_variation", - "job", - "All Measurements", - ) - - -def os_comparison_heatmap_processor_versions_pkg_nb_ops(joined_df, tool, save=False, show=False): - visualization.plot_os_degradation_nb_ops(joined_df, "pkg", tool) - -def os_comparison_heatmap_processor_versions_ram_nb_ops(joined_df, tool, save=False, show=False): - visualization.plot_os_degradation_nb_ops(joined_df, "ram", tool) - -def os_comparison_heatmap_processor_versions_pkg_percent_used(joined_df, save=False, show=False): - visualization.plot_os_degradation_percent_used(joined_df, "pkg") - -def os_comparison_heatmap_processor_versions_ram_percent_used(joined_df, save=False, show=False): - visualization.plot_os_degradation_percent_used(joined_df, "ram") - -def debian_facetgrid_processor_versions_pkg_cv_nb_ops(debian_df, save=True, show=True): - visualization.plot_facet_grid_nb_ops_per_core_versions_domain_cv(debian_df, "pkg", "debian11 5.10") -def debian_facetgrid_processor_versions_ram_cv_nb_ops(debian_df, save=True, show=True): - visualization.plot_facet_grid_nb_ops_per_core_versions_domain_cv(debian_df, "ram", "debian11 5.10") -def ubuntu_facetgrid_processor_versions_pkg_cv_nb_ops(ubuntu_df, save=True, show=True): - visualization.plot_facet_grid_nb_ops_per_core_versions_domain_cv(ubuntu_df, "pkg", "ubuntu2404 6.8") -def ubuntu_facetgrid_processor_versions_ram_cv_nb_ops(ubuntu_df, save=True, show=True): - visualization.plot_facet_grid_nb_ops_per_core_versions_domain_cv(ubuntu_df, "ram", "ubuntu2404 6.8") diff --git a/analysis/schemas.py b/analysis/schemas.py index 8250db4..00f0218 100644 --- a/analysis/schemas.py +++ b/analysis/schemas.py @@ -1,99 +1,194 @@ -# Schemas declaration -hwpc_columns = [ - ("timestamp", int), - ("sensor", str), - ("target", str), - 
("socket", int), - ("cpu", int), - ("rapl_energy_pkg", int), - ("rapl_energy_dram", int), - ("rapl_energy_cores", int), - ("time_enabled", int), - ("time_running", int), - ("nb_core", int), - ("nb_ops_per_core", int), - ("iteration", int), - ("alone", bool), - ("site", str), - ("clstr", str), - ("node", str), -] +from typing import Dict -perf_columns = [ - ("power_energy_pkg", float), - ("power_energy_ram", float), - ("power_energy_cores", float), - ("time_elapsed", float), - ("nb_core", int), - ("nb_ops_per_core", int), - ("iteration", int), - ("alone", bool), - ("site", str), - ("clstr", str), - ("node", str), -] +hwpc_columns: Dict[str, type] = { + "timestamp": int, + "sensor": str, + "target": str, + "socket": int, + "cpu": int, + "rapl_energy_pkg": int, + "rapl_energy_dram": int, + "rapl_energy_cores": int, + "time_enabled": int, + "time_running": int, + "nb_core": int, + "nb_ops_per_core": int, + "iteration": int, + "task": str, + "site": str, + "g5k_cluster": str, + "node": str, +} -energy_columns = [ - ("node", str), - ("task", str), - ("nb_core", int), - ("nb_ops_per_core", int), - ("iteration", int), - ("alone", bool), - ("energy_pkg", float), - ("energy_cores", float), - ("energy_ram", float), -] +perf_columns: Dict[str, type] = { + "energy_pkg": float, + "energy_ram": float, + "energy_cores": float, + "time_elapsed": float, + "nb_core": int, + "nb_ops_per_core": int, + "iteration": int, + "task": str, + "site": str, + "g5k_cluster": str, + "node": str, +} -stats_columns = [ - ("node", str), - ("task", str), - ("nb_core", int), - ("nb_ops_per_core", int), - ("alone", bool), - ("pkg_minimum", float), - ("pkg_maximum", float), - ("pkg_average", float), - ("pkg_median", float), - ("pkg_standard_deviation", float), - ("pkg_quantile_25", float), - ("pkg_quantile_75", float), - ("pkg_coefficient_of_variation", float), - ("cores_minimum", float), - ("cores_maximum", float), - ("cores_average", float), - ("cores_median", float), - ("cores_standard_deviation", float), - ("cores_quantile_25", float), - ("cores_quantile_75", float), - ("cores_coefficient_of_variation", float), - ("ram_minimum", float), - ("ram_maximum", float), - ("ram_average", float), - ("ram_median", float), - ("ram_standard_deviation", float), - ("ram_quantile_25", float), - ("ram_quantile_75", float), - ("ram_coefficient_of_variation", float), -] +raw_perf_columns: Dict[str, type] = { + "energy_pkg": float, + "energy_ram": float, + "energy_cores": float, + "nb_core": int, + "nb_ops_per_core": int, + "iteration": int, + "task": str, + "site": str, + "g5k_cluster": str, + "node": str, +} -nodes_configuration_columns = [ - ("uid", str), - ("clstr", str), - ("exotic", bool), - ("architecture_nb_cores", int), - ("architecture_nb_threads", int), - ("processor_vendor", str), - ("processor_clock_speed", int), - ("processor_instruction_set", str), - ("processor_ht_capable", bool), - ("processor_microarchitecture", str), - ("processor_microcode", str), - ("processor_model", str), - ("processor_version", str), - ("os_cstate_driver", str), - ("os_cstate_governor", str), - ("os_pstate_driver", str), - ("os_pstate_governor", str), - ("os_turboboost_enabled", bool), -] +perf_columns: Dict[str, type] = { + "energy_pkg": float, + "energy_ram": float, + "energy_cores": float, + "nb_core": int, + "nb_ops_per_core": int, + "iteration": int, + "task": str, + "site": str, + "g5k_cluster": str, + "node": str, + "exotic": bool, + "architecture_nb_cores": int, + "architecture_nb_threads": int, + "processor_vendor": str, + 
"processor_clock_speed": int, + "processor_instruction_set": str, + "processor_ht_capable": bool, + "processor_microarchitecture": str, + "processor_microcode": str, + "processor_model": str, + "processor_version": str, + "os_cstate_driver": str, + "os_cstate_governor": str, + "os_pstate_driver": str, + "os_pstate_governor": str, + "os_turboboost_enabled": bool, + "processor_detail": str, + "processor_generation": str, +} + + +raw_energy_columns: Dict[str, type] = { + "energy_cores": float, + "energy_pkg": float, + "energy_ram": float, + "nb_core": int, + "nb_ops_per_core": int, + "iteration": int, + "task": str, + "site": str, + "g5k_cluster": str, + "node": str, +} + +energy_columns: Dict[str, type] = { + "energy_cores": float, + "energy_pkg": float, + "energy_ram": float, + "nb_core": int, + "nb_ops_per_core": int, + "iteration": int, + "task": str, + "site": str, + "g5k_cluster": str, + "node": str, + "exotic": bool, + "architecture_nb_cores": int, + "architecture_nb_threads": int, + "processor_vendor": str, + "processor_clock_speed": int, + "processor_instruction_set": str, + "processor_ht_capable": bool, + "processor_microarchitecture": str, + "processor_microcode": str, + "processor_model": str, + "processor_version": str, + "os_cstate_driver": str, + "os_cstate_governor": str, + "os_pstate_driver": str, + "os_pstate_governor": str, + "os_turboboost_enabled": bool, + "processor_detail": str, + "processor_generation": str, +} + +stats_columns: Dict[str, type] = { + "node": str, + "task": str, + "nb_core": int, + "nb_ops_per_core": int, + "pkg_minimum": float, + "pkg_maximum": float, + "pkg_average": float, + "pkg_median": float, + "pkg_standard_deviation": float, + "pkg_quantile_25": float, + "pkg_quantile_75": float, + "pkg_coefficient_of_variation": float, + "cores_minimum": float, + "cores_maximum": float, + "cores_average": float, + "cores_median": float, + "cores_standard_deviation": float, + "cores_quantile_25": float, + "cores_quantile_75": float, + "cores_coefficient_of_variation": float, + "ram_minimum": float, + "ram_maximum": float, + "ram_average": float, + "ram_median": float, + "ram_standard_deviation": float, + "ram_quantile_25": float, + "ram_quantile_75": float, + "ram_coefficient_of_variation": float, + "exotic": bool, + "architecture_nb_cores": int, + "architecture_nb_threads": int, + "processor_vendor": str, + "processor_clock_speed": int, + "processor_instruction_set": str, + "processor_ht_capable": bool, + "processor_microarchitecture": str, + "processor_microcode": str, + "processor_model": str, + "processor_version": str, + "os_cstate_driver": str, + "os_cstate_governor": str, + "os_pstate_driver": str, + "os_pstate_governor": str, + "os_turboboost_enabled": bool, + "processor_detail": str, + "processor_generation": str, +} + +nodes_configuration_columns: Dict[str, type] = { + "uid": str, + "g5k_cluster": str, + "exotic": bool, + "architecture_nb_cores": int, + "architecture_nb_threads": int, + "processor_vendor": str, + "processor_clock_speed": int, + "processor_instruction_set": str, + "processor_ht_capable": bool, + "processor_microarchitecture": str, + "processor_microcode": str, + "processor_model": str, + "processor_version": str, + "os_cstate_driver": str, + "os_cstate_governor": str, + "os_pstate_driver": str, + "os_pstate_governor": str, + "os_turboboost_enabled": bool, +} diff --git a/analysis/test.tex b/analysis/test.tex new file mode 100644 index 0000000..c79bbc6 --- /dev/null +++ b/analysis/test.tex @@ -0,0 +1,3515 @@ +% This file was created with 
diff --git a/analysis/test.tex b/analysis/test.tex
new file mode 100644
index 0000000..c79bbc6
--- /dev/null
+++ b/analysis/test.tex
@@ -0,0 +1,3515 @@
+% This file was created with matplot2tikz v0.5.1.
+\begin{tikzpicture}
+% [Generated TikZ body elided (3,515 lines): a "group size=5 by 2" groupplot of
+% boxplots of the coefficient of variation per tool and per RAPL domain, one
+% panel per cluster: taurus, gros, montcalm, paradoxe, econome, nova, parasilo.
+% x-axis "Tools" with ticks {hwpc, alumet, codecarbon, vjoule, scaphandre};
+% y-axis "Coefficient of Variation" shared across panels; legend entries
+% Cores, Pkg, Ram; green triangle markers on each box for the means.]
style={lavender226232240}, +ymajorgrids, +ymajorticks=false, +ymin=-0.00308891354445311, ymax=0.0895344256438666, +ytick style={color=darkslategray38}, +yticklabels={} +] +\draw[draw=black23,fill=darkslategray363741,line width=0.32pt] (axis cs:0,0) rectangle (axis cs:0,0); +\draw[draw=black23,fill=darkslategray6277102,line width=0.32pt] (axis cs:0,0) rectangle (axis cs:0,0); +\draw[draw=black23,fill=steelblue88116163,line width=0.32pt] (axis cs:0,0) rectangle (axis cs:0,0); +\path [draw=black23, fill=darkslategray363741] +(axis cs:1.65,0.0177490255784838) +--(axis cs:1.88333333333333,0.0177490255784838) +--(axis cs:1.88333333333333,0.0210455207946131) +--(axis cs:1.65,0.0210455207946131) +--(axis cs:1.65,0.0177490255784838) +--cycle; +\addplot [black23] +table {% +1.76666666666667 0.0177490255784838 +1.76666666666667 0.0155962628560522 +}; +\addplot [black23] +table {% +1.76666666666667 0.0210455207946131 +1.76666666666667 0.0219854633939925 +}; +\addplot [black23] +table {% +1.70833333333333 0.0155962628560522 +1.825 0.0155962628560522 +}; +\addplot [black23] +table {% +1.70833333333333 0.0219854633939925 +1.825 0.0219854633939925 +}; +\path [draw=black23, fill=darkslategray363741] +(axis cs:2.65,0.014148673347164) +--(axis cs:2.88333333333333,0.014148673347164) +--(axis cs:2.88333333333333,0.0156941414476309) +--(axis cs:2.65,0.0156941414476309) +--(axis cs:2.65,0.014148673347164) +--cycle; +\addplot [black23] +table {% +2.76666666666667 0.014148673347164 +2.76666666666667 0.0135504817746938 +}; +\addplot [black23] +table {% +2.76666666666667 0.0156941414476309 +2.76666666666667 0.017179669427719 +}; +\addplot [black23] +table {% +2.70833333333333 0.0135504817746938 +2.825 0.0135504817746938 +}; +\addplot [black23] +table {% +2.70833333333333 0.017179669427719 +2.825 0.017179669427719 +}; +\path [draw=black23, fill=darkslategray6277102] +(axis cs:-0.116666666666667,0.0198533362621389) +--(axis cs:0.116666666666667,0.0198533362621389) +--(axis cs:0.116666666666667,0.0227640402859541) +--(axis cs:-0.116666666666667,0.0227640402859541) +--(axis cs:-0.116666666666667,0.0198533362621389) +--cycle; +\addplot [black23] +table {% +0 0.0198533362621389 +0 0.018691423718433 +}; +\addplot [black23] +table {% +0 0.0227640402859541 +0 0.0260132990122414 +}; +\addplot [black23] +table {% +-0.0583333333333333 0.018691423718433 +0.0583333333333333 0.018691423718433 +}; +\addplot [black23] +table {% +-0.0583333333333333 0.0260132990122414 +0.0583333333333333 0.0260132990122414 +}; +\path [draw=black23, fill=darkslategray6277102] +(axis cs:0.883333333333333,0.00566531001194348) +--(axis cs:1.11666666666667,0.00566531001194348) +--(axis cs:1.11666666666667,0.0351537651210891) +--(axis cs:0.883333333333333,0.0351537651210891) +--(axis cs:0.883333333333333,0.00566531001194348) +--cycle; +\addplot [black23] +table {% +1 0.00566531001194348 +1 0.00405153618402764 +}; +\addplot [black23] +table {% +1 0.0351537651210891 +1 0.0508960009616419 +}; +\addplot [black23] +table {% +0.941666666666667 0.00405153618402764 +1.05833333333333 0.00405153618402764 +}; +\addplot [black23] +table {% +0.941666666666667 0.0508960009616419 +1.05833333333333 0.0508960009616419 +}; +\path [draw=black23, fill=darkslategray6277102] +(axis cs:3.88333333333333,0.01473077806894) +--(axis cs:4.11666666666667,0.01473077806894) +--(axis cs:4.11666666666667,0.0177306889306717) +--(axis cs:3.88333333333333,0.0177306889306717) +--(axis cs:3.88333333333333,0.01473077806894) +--cycle; +\addplot [black23] +table {% +4 0.01473077806894 +4 
0.0144100261472384 +}; +\addplot [black23] +table {% +4 0.0177306889306717 +4 0.0192330155650769 +}; +\addplot [black23] +table {% +3.94166666666667 0.0144100261472384 +4.05833333333333 0.0144100261472384 +}; +\addplot [black23] +table {% +3.94166666666667 0.0192330155650769 +4.05833333333333 0.0192330155650769 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:0.116666666666667,0.033107538005393) +--(axis cs:0.35,0.033107538005393) +--(axis cs:0.35,0.0367073106619577) +--(axis cs:0.116666666666667,0.0367073106619577) +--(axis cs:0.116666666666667,0.033107538005393) +--cycle; +\addplot [black23] +table {% +0.233333333333333 0.033107538005393 +0.233333333333333 0.0303918706128732 +}; +\addplot [black23] +table {% +0.233333333333333 0.0367073106619577 +0.233333333333333 0.0400136798605116 +}; +\addplot [black23] +table {% +0.175 0.0303918706128732 +0.291666666666667 0.0303918706128732 +}; +\addplot [black23] +table {% +0.175 0.0400136798605116 +0.291666666666667 0.0400136798605116 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:1.11666666666667,0.0183928805992049) +--(axis cs:1.35,0.0183928805992049) +--(axis cs:1.35,0.0497070075236213) +--(axis cs:1.11666666666667,0.0497070075236213) +--(axis cs:1.11666666666667,0.0183928805992049) +--cycle; +\addplot [black23] +table {% +1.23333333333333 0.0183928805992049 +1.23333333333333 0.0106478795126601 +}; +\addplot [black23] +table {% +1.23333333333333 0.0497070075236213 +1.23333333333333 0.0715643572418956 +}; +\addplot [black23] +table {% +1.175 0.0106478795126601 +1.29166666666667 0.0106478795126601 +}; +\addplot [black23] +table {% +1.175 0.0715643572418956 +1.29166666666667 0.0715643572418956 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:2.11666666666667,0.0150727882417651) +--(axis cs:2.35,0.0150727882417651) +--(axis cs:2.35,0.0363664900405348) +--(axis cs:2.11666666666667,0.0363664900405348) +--(axis cs:2.11666666666667,0.0150727882417651) +--cycle; +\addplot [black23] +table {% +2.23333333333333 0.0150727882417651 +2.23333333333333 0.0138702238575627 +}; +\addplot [black23] +table {% +2.23333333333333 0.0363664900405348 +2.23333333333333 0.0558474181627221 +}; +\addplot [black23] +table {% +2.175 0.0138702238575627 +2.29166666666667 0.0138702238575627 +}; +\addplot [black23] +table {% +2.175 0.0558474181627221 +2.29166666666667 0.0558474181627221 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:3.11666666666667,0.0311663514599197) +--(axis cs:3.35,0.0311663514599197) +--(axis cs:3.35,0.0342847857354693) +--(axis cs:3.11666666666667,0.0342847857354693) +--(axis cs:3.11666666666667,0.0311663514599197) +--cycle; +\addplot [black23] +table {% +3.23333333333333 0.0311663514599197 +3.23333333333333 0.027740312795887 +}; +\addplot [black23] +table {% +3.23333333333333 0.0342847857354693 +3.23333333333333 0.0364167597286288 +}; +\addplot [black23] +table {% +3.175 0.027740312795887 +3.29166666666667 0.027740312795887 +}; +\addplot [black23] +table {% +3.175 0.0364167597286288 +3.29166666666667 0.0364167597286288 +}; +\addplot [black23] +table {% +1.65 0.0187648577912866 +1.88333333333333 0.0187648577912866 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +1.76666666666667 0.0194431822473113 +}; +\addplot [black23] +table {% +2.65 0.0145572942982637 +2.88333333333333 0.0145572942982637 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +2.76666666666667 0.0150173870492487 +}; 
+\addplot [black23] +table {% +-0.116666666666667 0.0215391328011997 +0.116666666666667 0.0215391328011997 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +0 0.0213087053312567 +}; +\addplot [black23] +table {% +0.883333333333333 0.00694295257374511 +1.11666666666667 0.00694295257374511 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +1 0.0164959767207243 +}; +\addplot [black23] +table {% +3.88333333333333 0.0170520074553997 +4.11666666666667 0.0170520074553997 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +4 0.0166887318186482 +}; +\addplot [black23] +table {% +0.116666666666667 0.0354430742543215 +0.35 0.0354430742543215 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +0.233333333333333 0.0353869663019502 +}; +\addplot [black23] +table {% +1.11666666666667 0.0205819037043831 +1.35 0.0205819037043831 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +1.23333333333333 0.0302635048655805 +}; +\addplot [black23] +table {% +2.11666666666667 0.0165308997184618 +2.35 0.0165308997184618 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +2.23333333333333 0.0260421104391887 +}; +\addplot [black23] +table {% +3.11666666666667 0.0329282637314746 +3.35 0.0329282637314746 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +3.23333333333333 0.0325267133738428 +}; + +\nextgroupplot[ +axis background/.style={fill=ghostwhite248250252}, +axis line style={lightgray204}, +scaled y ticks=manual:{}{\pgfmathparse{#1}}, +tick align=outside, +title={ecotype}, +x grid style={lavender226232240}, +xlabel=\textcolor{darkslategray38}{Tools}, +xmajorgrids, +xmajorticks=false, +xmin=-0.5, xmax=4.5, +xtick style={color=darkslategray38}, +xtick={0,1,2,3,4}, +xticklabels={hwpc,alumet,codecarbon,vjoule,scaphandre}, +y grid style={lavender226232240}, +ymajorgrids, +ymajorticks=false, +ymin=-0.00308891354445311, ymax=0.0895344256438666, +ytick style={color=darkslategray38}, +yticklabels={} +] +\draw[draw=black23,fill=darkslategray363741,line width=0.32pt] (axis cs:0,0) rectangle (axis cs:0,0); +\draw[draw=black23,fill=darkslategray6277102,line width=0.32pt] (axis cs:0,0) rectangle (axis cs:0,0); +\draw[draw=black23,fill=steelblue88116163,line width=0.32pt] (axis cs:0,0) rectangle (axis cs:0,0); +\path [draw=black23, fill=darkslategray363741] +(axis cs:1.65,0.0197633451379412) +--(axis cs:1.88333333333333,0.0197633451379412) +--(axis cs:1.88333333333333,0.0220059715834638) +--(axis cs:1.65,0.0220059715834638) +--(axis cs:1.65,0.0197633451379412) +--cycle; +\addplot [black23] +table {% +1.76666666666667 0.0197633451379412 +1.76666666666667 0.0184828960821614 +}; +\addplot [black23] +table {% +1.76666666666667 0.0220059715834638 +1.76666666666667 0.0223687688490104 +}; +\addplot [black23] +table {% +1.70833333333333 0.0184828960821614 +1.825 0.0184828960821614 +}; +\addplot [black23] +table {% +1.70833333333333 0.0223687688490104 +1.825 0.0223687688490104 +}; +\path [draw=black23, fill=darkslategray363741] +(axis cs:2.65,0.0175439733992873) +--(axis cs:2.88333333333333,0.0175439733992873) +--(axis cs:2.88333333333333,0.0193843972939282) +--(axis cs:2.65,0.0193843972939282) +--(axis 
cs:2.65,0.0175439733992873) +--cycle; +\addplot [black23] +table {% +2.76666666666667 0.0175439733992873 +2.76666666666667 0.015777155082526 +}; +\addplot [black23] +table {% +2.76666666666667 0.0193843972939282 +2.76666666666667 0.0210003098472026 +}; +\addplot [black23] +table {% +2.70833333333333 0.015777155082526 +2.825 0.015777155082526 +}; +\addplot [black23] +table {% +2.70833333333333 0.0210003098472026 +2.825 0.0210003098472026 +}; +\path [draw=black23, fill=darkslategray6277102] +(axis cs:-0.116666666666667,0.0219214922431388) +--(axis cs:0.116666666666667,0.0219214922431388) +--(axis cs:0.116666666666667,0.0243131361839211) +--(axis cs:-0.116666666666667,0.0243131361839211) +--(axis cs:-0.116666666666667,0.0219214922431388) +--cycle; +\addplot [black23] +table {% +0 0.0219214922431388 +0 0.020758975983803 +}; +\addplot [black23] +table {% +0 0.0243131361839211 +0 0.0266014807066632 +}; +\addplot [black23] +table {% +-0.0583333333333333 0.020758975983803 +0.0583333333333333 0.020758975983803 +}; +\addplot [black23] +table {% +-0.0583333333333333 0.0266014807066632 +0.0583333333333333 0.0266014807066632 +}; +\path [draw=black23, fill=darkslategray6277102] +(axis cs:0.883333333333333,0.00378591980376797) +--(axis cs:1.11666666666667,0.00378591980376797) +--(axis cs:1.11666666666667,0.00633603816159365) +--(axis cs:0.883333333333333,0.00633603816159365) +--(axis cs:0.883333333333333,0.00378591980376797) +--cycle; +\addplot [black23] +table {% +1 0.00378591980376797 +1 0.00330860279631931 +}; +\addplot [black23] +table {% +1 0.00633603816159365 +1 0.00828185545622236 +}; +\addplot [black23] +table {% +0.941666666666667 0.00330860279631931 +1.05833333333333 0.00330860279631931 +}; +\addplot [black23] +table {% +0.941666666666667 0.00828185545622236 +1.05833333333333 0.00828185545622236 +}; +\path [draw=black23, fill=darkslategray6277102] +(axis cs:3.88333333333333,0.0155863861323463) +--(axis cs:4.11666666666667,0.0155863861323463) +--(axis cs:4.11666666666667,0.018424880322468) +--(axis cs:3.88333333333333,0.018424880322468) +--(axis cs:3.88333333333333,0.0155863861323463) +--cycle; +\addplot [black23] +table {% +4 0.0155863861323463 +4 0.0142846657299233 +}; +\addplot [black23] +table {% +4 0.018424880322468 +4 0.0201128986263684 +}; +\addplot [black23] +table {% +3.94166666666667 0.0142846657299233 +4.05833333333333 0.0142846657299233 +}; +\addplot [black23] +table {% +3.94166666666667 0.0201128986263684 +4.05833333333333 0.0201128986263684 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:0.116666666666667,0.0368156768989646) +--(axis cs:0.35,0.0368156768989646) +--(axis cs:0.35,0.0399507415012361) +--(axis cs:0.116666666666667,0.0399507415012361) +--(axis cs:0.116666666666667,0.0368156768989646) +--cycle; +\addplot [black23] +table {% +0.233333333333333 0.0368156768989646 +0.233333333333333 0.0342489349593942 +}; +\addplot [black23] +table {% +0.233333333333333 0.0399507415012361 +0.233333333333333 0.0423923427718258 +}; +\addplot [black23] +table {% +0.175 0.0342489349593942 +0.291666666666667 0.0342489349593942 +}; +\addplot [black23] +table {% +0.175 0.0423923427718258 +0.291666666666667 0.0423923427718258 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:1.11666666666667,0.0204343101145014) +--(axis cs:1.35,0.0204343101145014) +--(axis cs:1.35,0.0237942144485318) +--(axis cs:1.11666666666667,0.0237942144485318) +--(axis cs:1.11666666666667,0.0204343101145014) +--cycle; +\addplot [black23] +table {% +1.23333333333333 0.0204343101145014 +1.23333333333333 
0.0165338689786514 +}; +\addplot [black23] +table {% +1.23333333333333 0.0237942144485318 +1.23333333333333 0.0242578117108351 +}; +\addplot [black23] +table {% +1.175 0.0165338689786514 +1.29166666666667 0.0165338689786514 +}; +\addplot [black23] +table {% +1.175 0.0242578117108351 +1.29166666666667 0.0242578117108351 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:2.11666666666667,0.0175886211513566) +--(axis cs:2.35,0.0175886211513566) +--(axis cs:2.35,0.0306340360899585) +--(axis cs:2.11666666666667,0.0306340360899585) +--(axis cs:2.11666666666667,0.0175886211513566) +--cycle; +\addplot [black23] +table {% +2.23333333333333 0.0175886211513566 +2.23333333333333 0.0148490513277294 +}; +\addplot [black23] +table {% +2.23333333333333 0.0306340360899585 +2.23333333333333 0.0461805393111917 +}; +\addplot [black23] +table {% +2.175 0.0148490513277294 +2.29166666666667 0.0148490513277294 +}; +\addplot [black23] +table {% +2.175 0.0461805393111917 +2.29166666666667 0.0461805393111917 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:3.11666666666667,0.0338155251276284) +--(axis cs:3.35,0.0338155251276284) +--(axis cs:3.35,0.0382251606640576) +--(axis cs:3.11666666666667,0.0382251606640576) +--(axis cs:3.11666666666667,0.0338155251276284) +--cycle; +\addplot [black23] +table {% +3.23333333333333 0.0338155251276284 +3.23333333333333 0.0319315793564778 +}; +\addplot [black23] +table {% +3.23333333333333 0.0382251606640576 +3.23333333333333 0.0417044678944827 +}; +\addplot [black23] +table {% +3.175 0.0319315793564778 +3.29166666666667 0.0319315793564778 +}; +\addplot [black23] +table {% +3.175 0.0417044678944827 +3.29166666666667 0.0417044678944827 +}; +\addplot [black23] +table {% +1.65 0.0207924980560906 +1.88333333333333 0.0207924980560906 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +1.76666666666667 0.0214539534766337 +}; +\addplot [black23] +table {% +2.65 0.0185605447864298 +2.88333333333333 0.0185605447864298 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +2.76666666666667 0.0184933512178107 +}; +\addplot [black23] +table {% +-0.116666666666667 0.023131923707058 +0.116666666666667 0.023131923707058 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +0 0.0229575045474648 +}; +\addplot [black23] +table {% +0.883333333333333 0.00442810982342283 +1.11666666666667 0.00442810982342283 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +1 0.00771368441570493 +}; +\addplot [black23] +table {% +3.88333333333333 0.0169515829651561 +4.11666666666667 0.0169515829651561 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +4 0.0170726780723358 +}; +\addplot [black23] +table {% +0.116666666666667 0.0383004538840632 +0.35 0.0383004538840632 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +0.233333333333333 0.0384341595795949 +}; +\addplot [black23] +table {% +1.11666666666667 0.021614896873114 +1.35 0.021614896873114 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +1.23333333333333 0.0242709523816156 +}; +\addplot [black23] +table {% +2.11666666666667 0.0188676963381849 +2.35 0.0188676963381849 +}; +\addplot [mediumseagreen85168104, mark=triangle*, 
mark size=3, mark options={solid}, only marks] +table {% +2.23333333333333 0.0249553267081074 +}; +\addplot [black23] +table {% +3.11666666666667 0.0365661467422812 +3.35 0.0365661467422812 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +3.23333333333333 0.0360487672296435 +}; + +\nextgroupplot[ +axis background/.style={fill=ghostwhite248250252}, +axis line style={lightgray204}, +scaled y ticks=manual:{}{\pgfmathparse{#1}}, +tick align=outside, +title={chifflot}, +x grid style={lavender226232240}, +xlabel=\textcolor{darkslategray38}{Tools}, +xmajorgrids, +xmajorticks=false, +xmin=-0.5, xmax=4.5, +xtick style={color=darkslategray38}, +xtick={0,1,2,3,4}, +xticklabels={hwpc,alumet,codecarbon,vjoule,scaphandre}, +y grid style={lavender226232240}, +ymajorgrids, +ymajorticks=false, +ymin=-0.00308891354445311, ymax=0.0895344256438666, +ytick style={color=darkslategray38}, +yticklabels={} +] +\draw[draw=black23,fill=darkslategray363741,line width=0.32pt] (axis cs:0,0) rectangle (axis cs:0,0); +\draw[draw=black23,fill=darkslategray6277102,line width=0.32pt] (axis cs:0,0) rectangle (axis cs:0,0); +\draw[draw=black23,fill=steelblue88116163,line width=0.32pt] (axis cs:0,0) rectangle (axis cs:0,0); +\path [draw=black23, fill=darkslategray363741] +(axis cs:1.65,0.0190682059985593) +--(axis cs:1.88333333333333,0.0190682059985593) +--(axis cs:1.88333333333333,0.0288666189711277) +--(axis cs:1.65,0.0288666189711277) +--(axis cs:1.65,0.0190682059985593) +--cycle; +\addplot [black23] +table {% +1.76666666666667 0.0190682059985593 +1.76666666666667 0.017838723136689 +}; +\addplot [black23] +table {% +1.76666666666667 0.0288666189711277 +1.76666666666667 0.0413869644263308 +}; +\addplot [black23] +table {% +1.70833333333333 0.017838723136689 +1.825 0.017838723136689 +}; +\addplot [black23] +table {% +1.70833333333333 0.0413869644263308 +1.825 0.0413869644263308 +}; +\path [draw=black23, fill=darkslategray363741] +(axis cs:2.65,0.0169699294233387) +--(axis cs:2.88333333333333,0.0169699294233387) +--(axis cs:2.88333333333333,0.0180655933419397) +--(axis cs:2.65,0.0180655933419397) +--(axis cs:2.65,0.0169699294233387) +--cycle; +\addplot [black23] +table {% +2.76666666666667 0.0169699294233387 +2.76666666666667 0.0165137234726801 +}; +\addplot [black23] +table {% +2.76666666666667 0.0180655933419397 +2.76666666666667 0.0183294546308646 +}; +\addplot [black23] +table {% +2.70833333333333 0.0165137234726801 +2.825 0.0165137234726801 +}; +\addplot [black23] +table {% +2.70833333333333 0.0183294546308646 +2.825 0.0183294546308646 +}; +\path [draw=black23, fill=darkslategray6277102] +(axis cs:-0.116666666666667,0.0178217307007335) +--(axis cs:0.116666666666667,0.0178217307007335) +--(axis cs:0.116666666666667,0.0215757264201188) +--(axis cs:-0.116666666666667,0.0215757264201188) +--(axis cs:-0.116666666666667,0.0178217307007335) +--cycle; +\addplot [black23] +table {% +0 0.0178217307007335 +0 0.0163271957394624 +}; +\addplot [black23] +table {% +0 0.0215757264201188 +0 0.0228138580693771 +}; +\addplot [black23] +table {% +-0.0583333333333333 0.0163271957394624 +0.0583333333333333 0.0163271957394624 +}; +\addplot [black23] +table {% +-0.0583333333333333 0.0228138580693771 +0.0583333333333333 0.0228138580693771 +}; +\path [draw=black23, fill=darkslategray6277102] +(axis cs:0.883333333333333,0.011680714525917) +--(axis cs:1.11666666666667,0.011680714525917) +--(axis cs:1.11666666666667,0.0152098085655009) +--(axis cs:0.883333333333333,0.0152098085655009) 
+--(axis cs:0.883333333333333,0.011680714525917) +--cycle; +\addplot [black23] +table {% +1 0.011680714525917 +1 0.00817542333003638 +}; +\addplot [black23] +table {% +1 0.0152098085655009 +1 0.0162317297858658 +}; +\addplot [black23] +table {% +0.941666666666667 0.00817542333003638 +1.05833333333333 0.00817542333003638 +}; +\addplot [black23] +table {% +0.941666666666667 0.0162317297858658 +1.05833333333333 0.0162317297858658 +}; +\path [draw=black23, fill=darkslategray6277102] +(axis cs:3.88333333333333,0.0181916933543309) +--(axis cs:4.11666666666667,0.0181916933543309) +--(axis cs:4.11666666666667,0.0191371107693679) +--(axis cs:3.88333333333333,0.0191371107693679) +--(axis cs:3.88333333333333,0.0181916933543309) +--cycle; +\addplot [black23] +table {% +4 0.0181916933543309 +4 0.018186274621061 +}; +\addplot [black23] +table {% +4 0.0191371107693679 +4 0.0197973763060732 +}; +\addplot [black23] +table {% +3.94166666666667 0.018186274621061 +4.05833333333333 0.018186274621061 +}; +\addplot [black23] +table {% +3.94166666666667 0.0197973763060732 +4.05833333333333 0.0197973763060732 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:0.116666666666667,0.0264358711824983) +--(axis cs:0.35,0.0264358711824983) +--(axis cs:0.35,0.0338626249604217) +--(axis cs:0.116666666666667,0.0338626249604217) +--(axis cs:0.116666666666667,0.0264358711824983) +--cycle; +\addplot [black23] +table {% +0.233333333333333 0.0264358711824983 +0.233333333333333 0.0247421461593989 +}; +\addplot [black23] +table {% +0.233333333333333 0.0338626249604217 +0.233333333333333 0.0362377532424588 +}; +\addplot [black23] +table {% +0.175 0.0247421461593989 +0.291666666666667 0.0247421461593989 +}; +\addplot [black23] +table {% +0.175 0.0362377532424588 +0.291666666666667 0.0362377532424588 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:1.11666666666667,0.0167441187830964) +--(axis cs:1.35,0.0167441187830964) +--(axis cs:1.35,0.0202398150817574) +--(axis cs:1.11666666666667,0.0202398150817574) +--(axis cs:1.11666666666667,0.0167441187830964) +--cycle; +\addplot [black23] +table {% +1.23333333333333 0.0167441187830964 +1.23333333333333 0.0159865203993794 +}; +\addplot [black23] +table {% +1.23333333333333 0.0202398150817574 +1.23333333333333 0.0225331511950646 +}; +\addplot [black23] +table {% +1.175 0.0159865203993794 +1.29166666666667 0.0159865203993794 +}; +\addplot [black23] +table {% +1.175 0.0225331511950646 +1.29166666666667 0.0225331511950646 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:2.11666666666667,0.0171765034653706) +--(axis cs:2.35,0.0171765034653706) +--(axis cs:2.35,0.0609062113671219) +--(axis cs:2.11666666666667,0.0609062113671219) +--(axis cs:2.11666666666667,0.0171765034653706) +--cycle; +\addplot [black23] +table {% +2.23333333333333 0.0171765034653706 +2.23333333333333 0.0134355528202829 +}; +\addplot [black23] +table {% +2.23333333333333 0.0609062113671219 +2.23333333333333 0.0853242738625793 +}; +\addplot [black23] +table {% +2.175 0.0134355528202829 +2.29166666666667 0.0134355528202829 +}; +\addplot [black23] +table {% +2.175 0.0853242738625793 +2.29166666666667 0.0853242738625793 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:3.11666666666667,0.0268579667991101) +--(axis cs:3.35,0.0268579667991101) +--(axis cs:3.35,0.0292280187923076) +--(axis cs:3.11666666666667,0.0292280187923076) +--(axis cs:3.11666666666667,0.0268579667991101) +--cycle; +\addplot [black23] +table {% +3.23333333333333 0.0268579667991101 +3.23333333333333 0.0246201403676913 +}; 
+\addplot [black23] +table {% +3.23333333333333 0.0292280187923076 +3.23333333333333 0.0307541690660631 +}; +\addplot [black23] +table {% +3.175 0.0246201403676913 +3.29166666666667 0.0246201403676913 +}; +\addplot [black23] +table {% +3.175 0.0307541690660631 +3.29166666666667 0.0307541690660631 +}; +\addplot [black23] +table {% +1.65 0.023905045727962 +1.88333333333333 0.023905045727962 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +1.76666666666667 0.0258353636377586 +}; +\addplot [black23] +table {% +2.65 0.0175716310355586 +2.88333333333333 0.0175716310355586 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +2.76666666666667 0.017497674972999 +}; +\addplot [black23] +table {% +-0.116666666666667 0.0193294612821428 +0.116666666666667 0.0193294612821428 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +0 0.0195739352799166 +}; +\addplot [black23] +table {% +0.883333333333333 0.0119619960102334 +1.11666666666667 0.0119619960102334 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +1 0.0127017519863506 +}; +\addplot [black23] +table {% +3.88333333333333 0.0185834659308829 +4.11666666666667 0.0185834659308829 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +4 0.0179896203505085 +}; +\addplot [black23] +table {% +0.116666666666667 0.0286396643460771 +0.35 0.0286396643460771 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +0.233333333333333 0.0299273522311979 +}; +\addplot [black23] +table {% +1.11666666666667 0.0184116816398865 +1.35 0.0184116816398865 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +1.23333333333333 0.0187300820445718 +}; +\addplot [black23] +table {% +2.11666666666667 0.033471373032302 +2.35 0.033471373032302 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +2.23333333333333 0.041249768417098 +}; +\addplot [black23] +table {% +3.11666666666667 0.0277741164015668 +3.35 0.0277741164015668 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +3.23333333333333 0.0278646297929556 +}; + +\nextgroupplot[ +axis background/.style={fill=ghostwhite248250252}, +axis line style={lightgray204}, +scaled y ticks=manual:{}{\pgfmathparse{#1}}, +tick align=outside, +title={fleckenstein}, +x grid style={lavender226232240}, +xlabel=\textcolor{darkslategray38}{Tools}, +xmajorgrids, +xmajorticks=false, +xmin=-0.5, xmax=4.5, +xtick style={color=darkslategray38}, +xtick={0,1,2,3,4}, +xticklabels={hwpc,alumet,codecarbon,vjoule,scaphandre}, +y grid style={lavender226232240}, +ymajorgrids, +ymajorticks=false, +ymin=-0.00308891354445311, ymax=0.0895344256438666, +ytick style={color=darkslategray38}, +yticklabels={} +] +\draw[draw=black23,fill=darkslategray363741,line width=0.32pt] (axis cs:0,0) rectangle (axis cs:0,0); +\draw[draw=black23,fill=darkslategray6277102,line width=0.32pt] (axis cs:0,0) rectangle (axis cs:0,0); +\draw[draw=black23,fill=steelblue88116163,line width=0.32pt] (axis cs:0,0) rectangle (axis cs:0,0); +\path [draw=black23, fill=darkslategray363741] +(axis cs:1.65,0.0137050949749271) +--(axis 
cs:1.88333333333333,0.0137050949749271) +--(axis cs:1.88333333333333,0.0249129676718076) +--(axis cs:1.65,0.0249129676718076) +--(axis cs:1.65,0.0137050949749271) +--cycle; +\addplot [black23] +table {% +1.76666666666667 0.0137050949749271 +1.76666666666667 0.00977124940192543 +}; +\addplot [black23] +table {% +1.76666666666667 0.0249129676718076 +1.76666666666667 0.0377403157451261 +}; +\addplot [black23] +table {% +1.70833333333333 0.00977124940192543 +1.825 0.00977124940192543 +}; +\addplot [black23] +table {% +1.70833333333333 0.0377403157451261 +1.825 0.0377403157451261 +}; +\path [draw=black23, fill=darkslategray363741] +(axis cs:2.65,0.00809897614361256) +--(axis cs:2.88333333333333,0.00809897614361256) +--(axis cs:2.88333333333333,0.0101185878177105) +--(axis cs:2.65,0.0101185878177105) +--(axis cs:2.65,0.00809897614361256) +--cycle; +\addplot [black23] +table {% +2.76666666666667 0.00809897614361256 +2.76666666666667 0.00751503493563591 +}; +\addplot [black23] +table {% +2.76666666666667 0.0101185878177105 +2.76666666666667 0.0105102009316589 +}; +\addplot [black23] +table {% +2.70833333333333 0.00751503493563591 +2.825 0.00751503493563591 +}; +\addplot [black23] +table {% +2.70833333333333 0.0105102009316589 +2.825 0.0105102009316589 +}; +\path [draw=black23, fill=darkslategray6277102] +(axis cs:-0.116666666666667,0.00691897561569275) +--(axis cs:0.116666666666667,0.00691897561569275) +--(axis cs:0.116666666666667,0.00890019722792456) +--(axis cs:-0.116666666666667,0.00890019722792456) +--(axis cs:-0.116666666666667,0.00691897561569275) +--cycle; +\addplot [black23] +table {% +0 0.00691897561569275 +0 0.00639027366149581 +}; +\addplot [black23] +table {% +0 0.00890019722792456 +0 0.00926582926663565 +}; +\addplot [black23] +table {% +-0.0583333333333333 0.00639027366149581 +0.0583333333333333 0.00639027366149581 +}; +\addplot [black23] +table {% +-0.0583333333333333 0.00926582926663565 +0.0583333333333333 0.00926582926663565 +}; +\path [draw=black23, fill=darkslategray6277102] +(axis cs:0.883333333333333,0.00922464793495222) +--(axis cs:1.11666666666667,0.00922464793495222) +--(axis cs:1.11666666666667,0.0103295145158681) +--(axis cs:0.883333333333333,0.0103295145158681) +--(axis cs:0.883333333333333,0.00922464793495222) +--cycle; +\addplot [black23] +table {% +1 0.00922464793495222 +1 0.00843073122864526 +}; +\addplot [black23] +table {% +1 0.0103295145158681 +1 0.0104290811301621 +}; +\addplot [black23] +table {% +0.941666666666667 0.00843073122864526 +1.05833333333333 0.00843073122864526 +}; +\addplot [black23] +table {% +0.941666666666667 0.0104290811301621 +1.05833333333333 0.0104290811301621 +}; +\path [draw=black23, fill=darkslategray6277102] +(axis cs:3.88333333333333,0.00711005539803623) +--(axis cs:4.11666666666667,0.00711005539803623) +--(axis cs:4.11666666666667,0.0164691710247863) +--(axis cs:3.88333333333333,0.0164691710247863) +--(axis cs:3.88333333333333,0.00711005539803623) +--cycle; +\addplot [black23] +table {% +4 0.00711005539803623 +4 0.00319992947491277 +}; +\addplot [black23] +table {% +4 0.0164691710247863 +4 0.0207699257170522 +}; +\addplot [black23] +table {% +3.94166666666667 0.00319992947491277 +4.05833333333333 0.00319992947491277 +}; +\addplot [black23] +table {% +3.94166666666667 0.0207699257170522 +4.05833333333333 0.0207699257170522 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:0.116666666666667,0.011049466431293) +--(axis cs:0.35,0.011049466431293) +--(axis cs:0.35,0.0144206492654592) +--(axis 
cs:0.116666666666667,0.0144206492654592) +--(axis cs:0.116666666666667,0.011049466431293) +--cycle; +\addplot [black23] +table {% +0.233333333333333 0.011049466431293 +0.233333333333333 0.00916723079642868 +}; +\addplot [black23] +table {% +0.233333333333333 0.0144206492654592 +0.233333333333333 0.0150407063209625 +}; +\addplot [black23] +table {% +0.175 0.00916723079642868 +0.291666666666667 0.00916723079642868 +}; +\addplot [black23] +table {% +0.175 0.0150407063209625 +0.291666666666667 0.0150407063209625 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:1.11666666666667,0.0100357051392326) +--(axis cs:1.35,0.0100357051392326) +--(axis cs:1.35,0.0113442498696197) +--(axis cs:1.11666666666667,0.0113442498696197) +--(axis cs:1.11666666666667,0.0100357051392326) +--cycle; +\addplot [black23] +table {% +1.23333333333333 0.0100357051392326 +1.23333333333333 0.00998705229286427 +}; +\addplot [black23] +table {% +1.23333333333333 0.0113442498696197 +1.23333333333333 0.011419101920634 +}; +\addplot [black23] +table {% +1.175 0.00998705229286427 +1.29166666666667 0.00998705229286427 +}; +\addplot [black23] +table {% +1.175 0.011419101920634 +1.29166666666667 0.011419101920634 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:2.11666666666667,0.0231511255928527) +--(axis cs:2.35,0.0231511255928527) +--(axis cs:2.35,0.0453488343917392) +--(axis cs:2.11666666666667,0.0453488343917392) +--(axis cs:2.11666666666667,0.0231511255928527) +--cycle; +\addplot [black23] +table {% +2.23333333333333 0.0231511255928527 +2.23333333333333 0.0142378177143495 +}; +\addplot [black23] +table {% +2.23333333333333 0.0453488343917392 +2.23333333333333 0.0592687088075356 +}; +\addplot [black23] +table {% +2.175 0.0142378177143495 +2.29166666666667 0.0142378177143495 +}; +\addplot [black23] +table {% +2.175 0.0592687088075356 +2.29166666666667 0.0592687088075356 +}; +\path [draw=black23, fill=steelblue88116163] +(axis cs:3.11666666666667,0.0132415321263485) +--(axis cs:3.35,0.0132415321263485) +--(axis cs:3.35,0.0178551450878508) +--(axis cs:3.11666666666667,0.0178551450878508) +--(axis cs:3.11666666666667,0.0132415321263485) +--cycle; +\addplot [black23] +table {% +3.23333333333333 0.0132415321263485 +3.23333333333333 0.0128327305114503 +}; +\addplot [black23] +table {% +3.23333333333333 0.0178551450878508 +3.23333333333333 0.01870304193302 +}; +\addplot [black23] +table {% +3.175 0.0128327305114503 +3.29166666666667 0.0128327305114503 +}; +\addplot [black23] +table {% +3.175 0.01870304193302 +3.29166666666667 0.01870304193302 +}; +\addplot [black23] +table {% +1.65 0.0230387003701784 +1.88333333333333 0.0230387003701784 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +1.76666666666667 0.0216200970838226 +}; +\addplot [black23] +table {% +2.65 0.0091605129440447 +2.88333333333333 0.0091605129440447 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +2.76666666666667 0.00908822306796419 +}; +\addplot [black23] +table {% +-0.116666666666667 0.00768786465410083 +0.116666666666667 0.00768786465410083 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +0 0.00828107683165625 +}; +\addplot [black23] +table {% +0.883333333333333 0.0099829822424258 +1.11666666666667 0.0099829822424258 +}; +\addplot [mediumseagreen85168104, mark=triangle*, mark size=3, mark options={solid}, only marks] +table {% +1 0.0114118913473355 +}; 
+\end{groupplot}
+
+\draw ({$(current bounding box.south west)!0.5!(current bounding box.south east)$}|-{$(current bounding box.south west)!1.05!(current bounding box.north west)$}) node[
+  scale=1.6,
+  anchor=north,
+  text=darkslategray304159,
+  rotate=0.0
+]{\bfseries Coefficient of Variation of measurement per tool and per domain};
+\end{tikzpicture}
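As a point of reference, a figure of this shape can be regenerated from the raw measurements, since each box summarizes the coefficient of variation (standard deviation divided by mean) of repeated runs. A minimal sketch, assuming a hypothetical long-format table with cluster, tool, config and energy columns (all names illustrative, not taken from this patch):

import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt

# Hypothetical input: repeated energy measurements per (cluster, tool, config).
runs = pd.read_csv("measurements.csv")

# CV = std / mean over the repetitions of one configuration; one CV value per
# (cluster, tool, config) feeds the per-tool box of each cluster panel.
cv = (
    runs.groupby(["cluster", "tool", "config"])["energy"]
    .agg(lambda s: s.std() / s.mean())
    .rename("cv")
    .reset_index()
)

# One panel per cluster, one box per tool, shared y-axis, as in the figure above.
g = sns.catplot(data=cv, kind="box", col="cluster", x="tool", y="cv", sharey=True)
g.set_axis_labels("Tools", "Coefficient of Variation")
g.figure.suptitle("Coefficient of Variation of measurement per tool and per domain")
plt.show()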
diff --git a/analysis/test_ada.py b/analysis/test_ada.py
new file mode 100644
index 0000000..5cae1b0
--- /dev/null
+++ b/analysis/test_ada.py
@@ -0,0 +1,155 @@
+import click
+import pickle
+import os
+from pathlib import Path
+import pandas as pd
+import numpy as np
+import matplotlib.pyplot as plt
+from adastop import MultipleAgentsComparator
+
+LITTER_FILE = ".adastop_comparator.pkl"
+
+
+def compare(
+    ctx,
+    input_file,
+    n_groups,
+    size_group,
+    n_permutations,
+    alpha,
+    beta,
+    seed,
+    comparisons,
+    compare_to_first,
+):
+    """
+    Perform one step of the adaptive stopping algorithm using the csv file input_file.
+    The csv file must contain exactly `size_group` scores.
+    On the first call, the comparator is initialized with the arguments passed and
+    saved to `.adastop_comparator.pkl`; subsequent calls reload it from that file.
+    """
+    path_lf = Path(input_file).parent.absolute() / LITTER_FILE
+    df = pd.read_csv(input_file, index_col=0)
+
+    assert len(df) == size_group, (
+        "The csv file does not contain the right number of scores. It must contain "
+        "`size_group` scores. Either change the argument `size_group` or give a csv "
+        "file with {} scores".format(size_group)
+    )
+
+    n_fits_per_group = len(df)
+    n_agents = len(df.columns)
+
+    if compare_to_first:
+        comparisons = [(0, i) for i in range(1, n_agents)]
+    else:
+        comparisons = None
+
+    # If this is not the first group, load the saved state of the comparator.
+    if os.path.isfile(path_lf):
+        with open(path_lf, "rb") as fp:
+            comparator = pickle.load(fp)
+
+        names = []
+        for i in range(len(comparator.agent_names)):
+            if i in comparator.current_comparisons.ravel():
+                names.append(comparator.agent_names[i])
+
+        Z = [np.hstack([comparator.eval_values[agent], df[agent]]) for agent in names]
+        if len(Z[0]) > comparator.K * n_fits_per_group:
+            raise ValueError(
+                "Error: you tried to use more groups than were initially declared; this is not allowed by the theory."
+            )
+        assert "continue" in list(comparator.decisions.values()), (
+            "Test finished at last iteration."
+        )
+
+    else:
+        comparator = MultipleAgentsComparator(
+            n=n_fits_per_group,
+            K=n_groups,
+            B=n_permutations,
+            comparisons=comparisons,
+            alpha=alpha,
+            beta=beta,
+            seed=seed,
+        )
+        names = df.columns
+
+        Z = [df[agent].values for agent in names]
+
+    data = {names[i]: Z[i] for i in range(len(names))}
+    # Also recover the data of agents whose comparisons were already decided.
+    if comparator.agent_names is not None:
+        for agent in comparator.agent_names:
+            if agent not in data.keys():
+                data[agent] = comparator.eval_values[agent]
+
+    comparator.partial_compare(data, False)
+    if "continue" not in list(comparator.decisions.values()):
+        click.echo("")
+        click.echo("Test is finished, decisions are")
+        click.echo(comparator.get_results().to_markdown())
+
+    else:
+        still_here = []
+        for c in comparator.comparisons:
+            if comparator.decisions[str(c)] == "continue":
+                still_here.append(comparator.agent_names[c[0]])
+                still_here.append(comparator.agent_names[c[1]])
+        still_here = np.unique(still_here)
+        click.echo("Still undecided about " + " ".join(still_here))
+        click.echo("")
+
+    with open(path_lf, "wb") as fp:
+        pickle.dump(comparator, fp)
+    click.echo("Comparator Saved")
+
+
+compare(
+    ctx=None,
+    input_file="test_csv_domains_1.csv",
+    n_groups=4,
+    size_group=4,
+    n_permutations=0,
+    alpha=0.01,
+    beta=0,
+    comparisons=[(0, 3), (1, 4), (2, 5), (8, 6), (9, 7)],
+    seed=None,
+    compare_to_first=False,
+)
+compare(
+    ctx=None,
+    input_file="test_csv_domains_2.csv",
+    n_groups=4,
+    size_group=4,
+    n_permutations=0,
+    alpha=0.01,
+    beta=0,
+    comparisons=[(0, 3), (1, 4), (2, 5), (8, 6), (9, 7)],
+    seed=None,
+    compare_to_first=False,
+)
+compare(
+    ctx=None,
+    input_file="test_csv_domains_3.csv",
+    n_groups=4,
+    size_group=4,
+    n_permutations=0,
+    alpha=0.01,
+    beta=0,
+    comparisons=[(0, 3), (1, 4), (2, 5), (8, 6), (9, 7)],
+    seed=None,
+    compare_to_first=False,
+)
+compare(
+    ctx=None,
+    input_file="test_csv_domains_4.csv",
+    n_groups=4,
+    size_group=4,
+    n_permutations=0,
+    alpha=0.01,
+    beta=0,
+    comparisons=[(0, 3), (1, 4), (2, 5), (8, 6), (9, 7)],
+    seed=None,
+    compare_to_first=False,
+)
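The test_csv_domains_*.csv inputs are not included in the patch; compare() expects each one to carry `size_group` rows (4 here), an index column (it reads with index_col=0), and one column per agent. The comparison pairs above reference agent indices 0 through 9, which implies ten columns. A sketch that fabricates one such group file, with made-up scores purely for illustration:

import numpy as np
import pandas as pd

# Ten illustrative agent columns, enough to cover the pairs above (indices 0..9);
# the real column names must stay identical across all group files.
agents = [f"agent_{i}" for i in range(10)]

rng = np.random.default_rng(42)
scores = {name: rng.normal(loc=0.02, scale=0.005, size=4) for name in agents}

# Written with the default index so that pd.read_csv(..., index_col=0) round-trips.
pd.DataFrame(scores).to_csv("test_csv_domains_1.csv")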
describe_file(path=f"{results_dir}/frequency_10_alumet_and_perf.csv") + describe_file(path=f"{results_dir}/frequency_100_alumet_and_perf.csv") + describe_file(path=f"{results_dir}/frequency_1000_alumet_and_perf.csv") + describe_file(path=f"{results_dir}/frequency_1_hwpc_and_perf.csv") + describe_file(path=f"{results_dir}/frequency_10_hwpc_and_perf.csv") + describe_file(path=f"{results_dir}/frequency_100_hwpc_and_perf.csv") + describe_file(path=f"{results_dir}/frequency_1000_hwpc_and_perf.csv") + describe_file(path=f"{results_dir}/temperatures_frequency_1_perf_and_vjoule.csv") + describe_file(path=f"{results_dir}/temperatures_frequency_10_perf_and_vjoule.csv") + describe_file(path=f"{results_dir}/temperatures_frequency_100_perf_and_vjoule.csv") + describe_file(path=f"{results_dir}/temperatures_frequency_1000_perf_and_vjoule.csv") + describe_file( + path=f"{results_dir}/temperatures_frequency_1_perf_and_scaphandre.csv" + ) + describe_file( + path=f"{results_dir}/temperatures_frequency_10_perf_and_scaphandre.csv" + ) + describe_file( + path=f"{results_dir}/temperatures_frequency_100_perf_and_scaphandre.csv" + ) + describe_file( + path=f"{results_dir}/temperatures_frequency_1000_perf_and_scaphandre.csv" + ) + describe_file( + path=f"{results_dir}/temperatures_frequency_1_perf_and_codecarbon.csv" + ) + describe_file( + path=f"{results_dir}/temperatures_frequency_10_perf_and_codecarbon.csv" + ) + describe_file( + path=f"{results_dir}/temperatures_frequency_100_perf_and_codecarbon.csv" + ) + describe_file( + path=f"{results_dir}/temperatures_frequency_1000_perf_and_codecarbon.csv" + ) + describe_file(path=f"{results_dir}/temperatures_frequency_1_perf_and_alumet.csv") + describe_file(path=f"{results_dir}/temperatures_frequency_10_perf_and_alumet.csv") + describe_file(path=f"{results_dir}/temperatures_frequency_100_perf_and_alumet.csv") + describe_file(path=f"{results_dir}/temperatures_frequency_1000_perf_and_alumet.csv") + describe_file(path=f"{results_dir}/temperatures_frequency_1_perf_and_hwpc.csv") + describe_file(path=f"{results_dir}/temperatures_frequency_10_perf_and_hwpc.csv") + describe_file(path=f"{results_dir}/temperatures_frequency_100_perf_and_hwpc.csv") + describe_file(path=f"{results_dir}/temperatures_frequency_1000_perf_and_hwpc.csv") + describe_file(path=f"{results_dir}/baseline_consumption.csv") + describe_file(path=f"{results_dir}/alumet_and_perf_{nb_core}_{nb_ops}.csv") + describe_file(path=f"{results_dir}/perf_and_alumet_{nb_core}_{nb_ops}.csv") + describe_file( + path=f"{results_dir}/perf_and_alumet_{nb_core}_{nb_ops}_temperatures.csv" + ) + describe_file(path=f"{results_dir}/hwpc_and_perf_{nb_core}_{nb_ops}.csv") + describe_file(path=f"{results_dir}/perf_and_hwpc_{nb_core}_{nb_ops}.csv") + describe_file( + path=f"{results_dir}/perf_and_hwpc_{nb_core}_{nb_ops}_temperatures.csv" + ) + describe_file(path=f"{results_dir}/codecarbon_and_perf_{nb_core}_{nb_ops}.csv") + describe_file(path=f"{results_dir}/perf_and_codecarbon_{nb_core}_{nb_ops}.csv") + describe_file( + path=f"{results_dir}/perf_and_codecarbon_{nb_core}_{nb_ops}_temperatures.csv" + ) + describe_file(path=f"{results_dir}/vjoule_and_perf_{nb_core}_{nb_ops}.csv") + describe_file(path=f"{results_dir}/perf_and_vjoule_{nb_core}_{nb_ops}.csv") + describe_file( + path=f"{results_dir}/perf_and_vjoule_{nb_core}_{nb_ops}_temperatures.csv" + ) + describe_file(path=f"{results_dir}/scaphandre_and_perf_{nb_core}_{nb_ops}.csv") + describe_file(path=f"{results_dir}/perf_and_scaphandre_{nb_core}_{nb_ops}.csv") + describe_file( + 
path=f"{results_dir}/perf_and_scaphandre_{nb_core}_{nb_ops}_temperatures.csv" + ) + + +def describe_file(separator=",", path=""): + print("Testing file" + path) + try: + df = pl.read_csv(source=path, separator=separator) + with pl.Config(tbl_cols=-1): + print(df.describe()) + print("[OK] : File " + path + " ok") + except Exception as e: + print("[KO] : File " + path + " failed : " + str(e)) diff --git a/analysis/utils.py b/analysis/utils.py new file mode 100644 index 0000000..0505f54 --- /dev/null +++ b/analysis/utils.py @@ -0,0 +1,13 @@ +import os +import re + + +def find_files(root_dir="", regex=""): + found_files = [] + regex = re.compile(regex) + for root, dirs, files in os.walk(root_dir): + for file in files: + if regex.match(file): + found_files.append(os.path.join(root, file)) + + return found_files diff --git a/analysis/visualization.py b/analysis/visualization.py index 7d70a1f..da85c53 100644 --- a/analysis/visualization.py +++ b/analysis/visualization.py @@ -5,11 +5,10 @@ import polars as pl palette = { - "hwpc_alone": "#1f77b4", - "hwpc_with_perf": "#17becf", - "perf_alone": "#d62728", - "perf_with_hwpc": "#ff7f0e", - } + "hwpc_with_perf": "#17becf", + "perf_with_hwpc": "#ff7f0e", +} + def plot_violinplot(dfs, x, y, hue, save=True, show=True): fig, axs = plt.subplots(nrows=1, ncols=2, sharey=True) @@ -27,26 +26,29 @@ def plot_violinplot(dfs, x, y, hue, save=True, show=True): def plot_boxplot(df, x, y, hue, prefix, save=True, show=True): plt.figure(figsize=(12, 6)) - plt.ylim(0, .1) + plt.ylim(0, 0.1) df = df.sql("SELECT * FROM self WHERE nb_ops_per_core > 25") sns.boxplot(data=df, x=x, y=y, hue=hue) - # sns.boxplot( - # data=df, - # x=x, - # y=y, - # hue=hue, - # ) + # sns.boxplot( + # data=df, + # x=x, + # y=y, + # hue=hue, + # ) title = f"{prefix} - HWPC Coefficient of Variation\n{y} for {x} by {hue}" - safe_title = re.sub(r'[^\w\s-]', '', title) # Remove invalid characters + safe_title = re.sub(r"[^\w\s-]", "", title) # Remove invalid characters safe_title = safe_title.replace(" ", "_") safe_title = safe_title.replace("\n", "_") if save: - plt.savefig(f'{safe_title}.png', dpi=600) + plt.savefig(f"{safe_title}.png", dpi=600) if show: plt.show() -def plot_facet_grid_nb_ops_per_core_versions_domain_cv(df, domain, os, save=True, show=True): + +def plot_facet_grid_nb_ops_per_core_versions_domain_cv( + df, domain, os, save=True, show=True +): df = df.to_pandas() df = df.sort_values(by=["processor_vendor", "processor_generation"]) g = sns.FacetGrid( @@ -73,27 +75,30 @@ def plot_facet_grid_nb_ops_per_core_versions_domain_cv(df, domain, os, save=True g.set_axis_labels("Processor Detail", f"{domain} coefficient of variation") g.set_titles(col_template="Ops per Core: {col_name}") g.add_legend(title="Job") - g.legend.set_bbox_to_anchor((0.85, 0.75)) # (x, y) coordinates relative to the first subplot + g.legend.set_bbox_to_anchor( + (0.85, 0.75) + ) # (x, y) coordinates relative to the first subplot g.legend.set_frame_on(True) # Rotate x-axis labels for better readability for ax in g.axes.flat: ax.tick_params(axis="x", rotation=90) title = f"Boxplots of {domain} measurements CV by nb_ops_per_core and processor versions - {os}" - safe_title = re.sub(r'[^\w\s-]', '', title) # Remove invalid characters + safe_title = re.sub(r"[^\w\s-]", "", title) # Remove invalid characters safe_title = safe_title.replace(" ", "_") safe_title = safe_title.replace("\n", "_") plt.suptitle(title) plt.tight_layout() if save: - plt.savefig(f'{safe_title}.png', dpi=600) + plt.savefig(f"{safe_title}.png", dpi=600) 
diff --git a/analysis/visualization.py b/analysis/visualization.py
index 7d70a1f..da85c53 100644
--- a/analysis/visualization.py
+++ b/analysis/visualization.py
@@ -5,11 +5,10 @@
 import polars as pl
 
 palette = {
-    "hwpc_alone": "#1f77b4",
-    "hwpc_with_perf": "#17becf",
-    "perf_alone": "#d62728",
-    "perf_with_hwpc": "#ff7f0e",
-    }
+    "hwpc_with_perf": "#17becf",
+    "perf_with_hwpc": "#ff7f0e",
+}
+
 
 def plot_violinplot(dfs, x, y, hue, save=True, show=True):
     fig, axs = plt.subplots(nrows=1, ncols=2, sharey=True)
@@ -27,26 +26,29 @@ def plot_violinplot(dfs, x, y, hue, save=True, show=True):
 def plot_boxplot(df, x, y, hue, prefix, save=True, show=True):
     plt.figure(figsize=(12, 6))
-    plt.ylim(0, .1)
+    plt.ylim(0, 0.1)
     df = df.sql("SELECT * FROM self WHERE nb_ops_per_core > 25")
     sns.boxplot(data=df, x=x, y=y, hue=hue)
-    # sns.boxplot(
-    #     data=df,
-    #     x=x,
-    #     y=y,
-    #     hue=hue,
-    # )
+    # sns.boxplot(
+    #     data=df,
+    #     x=x,
+    #     y=y,
+    #     hue=hue,
+    # )
     title = f"{prefix} - HWPC Coefficient of Variation\n{y} for {x} by {hue}"
-    safe_title = re.sub(r'[^\w\s-]', '', title)  # Remove invalid characters
+    safe_title = re.sub(r"[^\w\s-]", "", title)  # Remove invalid characters
     safe_title = safe_title.replace(" ", "_")
     safe_title = safe_title.replace("\n", "_")
     if save:
-        plt.savefig(f'{safe_title}.png', dpi=600)
+        plt.savefig(f"{safe_title}.png", dpi=600)
     if show:
         plt.show()
 
-def plot_facet_grid_nb_ops_per_core_versions_domain_cv(df, domain, os, save=True, show=True):
+
+def plot_facet_grid_nb_ops_per_core_versions_domain_cv(
+    df, domain, os, save=True, show=True
+):
     df = df.to_pandas()
     df = df.sort_values(by=["processor_vendor", "processor_generation"])
     g = sns.FacetGrid(
@@ -73,27 +75,30 @@ def plot_facet_grid_nb_ops_per_core_versions_domain_cv(df, domain, os, save=True
     g.set_axis_labels("Processor Detail", f"{domain} coefficient of variation")
     g.set_titles(col_template="Ops per Core: {col_name}")
     g.add_legend(title="Job")
-    g.legend.set_bbox_to_anchor((0.85, 0.75))  # (x, y) coordinates relative to the first subplot
+    g.legend.set_bbox_to_anchor(
+        (0.85, 0.75)
+    )  # (x, y) coordinates relative to the first subplot
     g.legend.set_frame_on(True)
 
     # Rotate x-axis labels for better readability
     for ax in g.axes.flat:
         ax.tick_params(axis="x", rotation=90)
     title = f"Boxplots of {domain} measurements CV by nb_ops_per_core and processor versions - {os}"
-    safe_title = re.sub(r'[^\w\s-]', '', title)  # Remove invalid characters
+    safe_title = re.sub(r"[^\w\s-]", "", title)  # Remove invalid characters
     safe_title = safe_title.replace(" ", "_")
     safe_title = safe_title.replace("\n", "_")
     plt.suptitle(title)
     plt.tight_layout()
     if save:
-        plt.savefig(f'{safe_title}.png', dpi=600)
+        plt.savefig(f"{safe_title}.png", dpi=600)
     if show:
         plt.show()
 
+
 def plot_boxplots(dfs, x, y, hue, prefix, save=True, show=True):
-    fig, axs = plt.subplots(nrows=1, ncols=2,figsize=(16,7) ,sharey=True)
+    fig, axs = plt.subplots(nrows=1, ncols=2, figsize=(16, 7), sharey=True)
     dfs[0] = dfs[0].sort(x)
     dfs[1] = dfs[1].sort(x)
-    
+
     plt.ylim(0, 1)
 
     sns.boxplot(
@@ -121,16 +126,17 @@
     axs[1].set_title("Ubuntu2404nfs - Kernel 6.8 - HWPC Coefficient of Variation")
     axs[1].set_xticklabels(axs[1].get_xticklabels(), rotation=90, ha="right")
     title = f"{prefix}\n{y} for {x} by {hue}"
-    safe_title = re.sub(r'[^\w\s-]', '', title)  # Remove invalid characters
+    safe_title = re.sub(r"[^\w\s-]", "", title)  # Remove invalid characters
     safe_title = safe_title.replace(" ", "_")
     safe_title = safe_title.replace("\n", "_")
     plt.title(title)
     plt.tight_layout()
     if save:
-        plt.savefig(f'{safe_title}.png', dpi=600)
+        plt.savefig(f"{safe_title}.png", dpi=600)
     if show:
         plt.show()
 
+
 def plot_os_degradation_nb_ops(joined_df, domain, tool, save=True, show=True):
     joined_df = joined_df.with_columns(
         (
@@ -150,7 +156,6 @@
         )
     )
 
-
     aggregated = joined_df.group_by(["processor_detail", "nb_ops_per_core"]).agg(
         pl.col(f"{domain}_ratio").median().alias(f"{domain}_median_ratio"),
         pl.col(f"{domain}_diff").median().alias(f"{domain}_median_diff"),
@@ -178,7 +183,7 @@
         vmax=2,
     )
     title = f"Heatmap of median ratio of {domain} measurements CV (ubuntu/debian) by vendor\nfor {tool} tool"
-    safe_title = re.sub(r'[^\w\s-]', '', title)  # Remove invalid characters
+    safe_title = re.sub(r"[^\w\s-]", "", title)  # Remove invalid characters
     safe_title = safe_title.replace(" ", "_")
     safe_title = safe_title.replace("\n", "_")
     plt.title(title)
@@ -186,7 +191,7 @@
     plt.ylabel("Number of operations per core")
     plt.tight_layout()
     if save:
-        plt.savefig(f'{safe_title}.png', dpi=600)
+        plt.savefig(f"{safe_title}.png", dpi=600)
     if show:
         plt.show()
@@ -212,15 +217,15 @@
     plt.xlabel("Processor Details")
     plt.xticks(rotation=90, ha="right")
     plt.ylabel("Number of Operations Per Core")
-    
+
     title = f"Heatmap of median diff for {domain} measurements CV (ubuntu - debian) by vendor\nfor {tool} tool"
-    safe_title = re.sub(r'[^\w\s-]', '', title)  # Remove invalid characters
+    safe_title = re.sub(r"[^\w\s-]", "", title)  # Remove invalid characters
     safe_title = safe_title.replace(" ", "_")
     safe_title = safe_title.replace("\n", "_")
     plt.title(title)
     plt.tight_layout()
     if save:
-        plt.savefig(f'{safe_title}.png', dpi=600)
+        plt.savefig(f"{safe_title}.png", dpi=600)
     if show:
         plt.show()
 
@@ -244,7 +249,9 @@ def plot_os_degradation_percent_used(joined_df, domain, save=True, show=True):
         )
     )
 
-    aggregated = joined_df.group_by(["processor_detail", "percent_cores_used_category"]).agg(
+    aggregated = joined_df.group_by(
+        ["processor_detail", "percent_cores_used_category"]
+    ).agg(
         pl.col(f"{domain}_ratio").median().alias(f"{domain}_median_ratio"),
         pl.col(f"{domain}_diff").median().alias(f"{domain}_median_diff"),
     )
@@ -269,13 +276,13 @@
         vmax=2,
    )
     title = f"Heatmap of median ratio of HWPC {domain} measurements CV (ubuntu/debian) by vendor"
-    safe_title = re.sub(r'[^\w\s-]', '', title)  # Remove invalid characters
+    safe_title = re.sub(r"[^\w\s-]", "", title)  # Remove invalid characters
     safe_title = safe_title.replace(" ", "_")
     safe_title = safe_title.replace("\n", "_")
     plt.title(title)
     plt.tight_layout()
     if save:
-        plt.savefig(f'{safe_title}.png', dpi=600)
+        plt.savefig(f"{safe_title}.png", dpi=600)
     if show:
         plt.show()
@@ -303,13 +310,12 @@
     plt.ylabel("Percent core used")
 
     title = f"Heatmap of median diff for HWPC {domain} measurements CV (ubuntu - debian) by vendor"
-    safe_title = re.sub(r'[^\w\s-]', '', title)  # Remove invalid characters
+    safe_title = re.sub(r"[^\w\s-]", "", title)  # Remove invalid characters
     safe_title = safe_title.replace(" ", "_")
     safe_title = safe_title.replace("\n", "_")
     plt.title(title)
     plt.tight_layout()
     if save:
-        plt.savefig(f'{safe_title}.png', dpi=600)
+        plt.savefig(f"{safe_title}.png", dpi=600)
     if show:
         plt.show()
-
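Nearly every plotting function above repeats the same sanitize-then-save dance on `title`; a small hypothetical helper (not part of the patch) could absorb that duplication:

import re
import matplotlib.pyplot as plt


def save_figure(title, save=True, dpi=600):
    """Hypothetical helper: turn `title` into a safe filename and save the current figure."""
    safe_title = re.sub(r"[^\w\s-]", "", title)  # Remove invalid characters
    safe_title = safe_title.replace(" ", "_").replace("\n", "_")
    if save:
        plt.savefig(f"{safe_title}.png", dpi=dpi)

Each plot_* function could then end with save_figure(title, save=save) followed by its existing show check.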
Remove invalid characters + safe_title = re.sub(r"[^\w\s-]", "", title) # Remove invalid characters safe_title = safe_title.replace(" ", "_") safe_title = safe_title.replace("\n", "_") plt.title(title) plt.tight_layout() if save: - plt.savefig(f'{safe_title}.png', dpi=600) + plt.savefig(f"{safe_title}.png", dpi=600) if show: plt.show() @@ -303,13 +310,12 @@ def plot_os_degradation_percent_used(joined_df, domain, save=True, show=True): plt.ylabel("Percent core used") title = f"Heatmap of median diff for HWPC {domain} measurements CV (ubuntu - debian) by vendor" - safe_title = re.sub(r'[^\w\s-]', '', title) # Remove invalid characters + safe_title = re.sub(r"[^\w\s-]", "", title) # Remove invalid characters safe_title = safe_title.replace(" ", "_") safe_title = safe_title.replace("\n", "_") plt.title(title) plt.tight_layout() if save: - plt.savefig(f'{safe_title}.png', dpi=600) + plt.savefig(f"{safe_title}.png", dpi=600) if show: plt.show() - diff --git a/src/configs.rs b/src/configs.rs index 6d492ef..51d4893 100644 --- a/src/configs.rs +++ b/src/configs.rs @@ -1,5 +1,4 @@ use crate::HwpcEvents; -use rand::Rng; use serde::Serialize; use std::collections::HashMap; use std::fmt; @@ -88,21 +87,19 @@ impl fmt::Display for HwpcSystemCore { } } -pub fn generate_core_values(n: usize, max: u32) -> Vec { - let mut rng = rand::thread_rng(); - let mut values = Vec::new(); - - for _ in 0..n { - let mut value = 1 + rng.gen_range(1..=max); - while value.is_power_of_two() { - value = 1 + rng.gen_range(1..=max); - } - values.push(value); - } - - values.sort_unstable(); - values.push(max); - values.dedup(); +pub fn generate_core_values(max: u32) -> Vec { + let values = vec!(max*2); + + // for _ in 0..n { + // let mut value = 1 + rng.gen_range(1..=max); + // while value.is_power_of_two() { + // value = 1 + rng.gen_range(1..=max); + // } + // values.push(value); + // } + // + // values.sort_unstable(); + // values.dedup(); values } @@ -151,7 +148,10 @@ pub fn generate_hwpc_configs( .iter() .map(|&core_value| { let name = format!("{}_sensor_{}", prefix, core_value); - (core_value, build_hwpc_config(name, hwpc_system.clone(), os_flavor)) + ( + core_value, + build_hwpc_config(name, hwpc_system.clone(), os_flavor), + ) }) .collect() } @@ -164,7 +164,7 @@ mod tests { #[test] fn test_generate_core_values() { - let values = generate_core_values(NB_VALUE, MAX_VALUE); + let values = generate_core_values(MAX_VALUE); assert!(values.len() > 0); assert!(values .iter() @@ -173,7 +173,7 @@ mod tests { #[test] fn test_max_is_present() { - let values = generate_core_values(NB_VALUE, MAX_VALUE); + let values = generate_core_values(MAX_VALUE); assert!(values.contains(&MAX_VALUE)); } } diff --git a/src/inventories.rs b/src/inventories.rs index c7bb916..14fae3a 100644 --- a/src/inventories.rs +++ b/src/inventories.rs @@ -112,7 +112,7 @@ pub struct Processor { ht_capable: bool, instruction_set: String, pub microarchitecture: String, - microcode: String, + microcode: Option, model: String, other_description: String, pub vendor: String, @@ -125,10 +125,9 @@ pub struct OperatingSystem { cstate_governor: String, pstate_driver: String, pstate_governor: String, - turboboost_enabled: bool + turboboost_enabled: bool, } - #[derive(Deserialize, Debug)] struct Cluster { uid: String, @@ -167,15 +166,16 @@ pub async fn fetch_nodes( cluster_uid: &str, ) -> Result, InventoryError> { if let Ok(response) = get_api_call( - client, - &format!( - "{}/sites/{}/clusters/{}/nodes", - base_url, site_uid, cluster_uid - ), - ) - .await { - let nodes: Vec = 
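Editorial note on the `generate_core_values` rewrite above: with `vec!(max*2)` as the only element, the retained `test_max_is_present` assertion (`values.contains(&MAX_VALUE)`) can no longer pass. A minimal sketch, assuming the single doubled value (e.g. to exercise SMT threads) is intentional, that keeps the existing test green:

```rust
// Hypothetical variant: keep the doubled value but also include `max`
// itself, so `test_max_is_present` still holds after the simplification.
pub fn generate_core_values(max: u32) -> Vec<u32> {
    let mut values = vec![max, max * 2];
    values.sort_unstable();
    values.dedup(); // collapses the duplicate when max == 0
    values
}
```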
serde_json::from_value(response.get("items").unwrap().clone())?; - Ok(nodes) + client, + &format!( + "{}/sites/{}/clusters/{}/nodes", + base_url, site_uid, cluster_uid + ), + ) + .await + { + let nodes: Vec = serde_json::from_value(response.get("items").unwrap().clone())?; + Ok(nodes) } else { Err(InventoryError::Blacklisted) } @@ -196,14 +196,10 @@ pub async fn get_api_call( .basic_auth(username, Some(password)) .send() .await; - let response_json = match response { - Ok(response_body) => { - response_body - .json() - .await - }, - Err(e) => Err(e) - }; + let response_json = match response { + Ok(response_body) => response_body.json().await, + Err(e) => Err(e), + }; match response_json { Ok(json) => Ok(json), @@ -214,7 +210,7 @@ pub async fn get_api_call( pub async fn post_api_call( client: &Client, endpoint: &str, - data: &serde_json::Value + data: &serde_json::Value, ) -> Result, InventoryError> { dotenv::dotenv().ok(); let username = env::var("G5K_USERNAME").expect("G5K_USERNAME must be set"); @@ -229,14 +225,10 @@ pub async fn post_api_call( .basic_auth(username, Some(password)) .send() .await; - let response_json = match response { - Ok(response_body) => { - response_body - .json() - .await - }, - Err(e) => Err(e) - }; + let response_json = match response { + Ok(response_body) => response_body.json().await, + Err(e) => Err(e), + }; match response_json { Ok(json) => Ok(json), diff --git a/src/jobs.rs b/src/jobs.rs index b62b25c..effc0e3 100644 --- a/src/jobs.rs +++ b/src/jobs.rs @@ -1,23 +1,23 @@ use super::EventsByVendor; use crate::configs; use crate::inventories::{self, Node}; +use crate::results; use crate::scripts; use crate::ssh; -use crate::results; +use chrono::{Duration, Local, Timelike}; use log::{debug, error, info, warn}; use serde::{Deserialize, Serialize}; use serde_yaml::{self}; use std::collections::HashMap; use std::fmt::{self, Display}; +use std::path::{Path, PathBuf}; +use std::process::Command; use std::str::{self}; use std::{env, fs}; -use std::path::{Path, PathBuf}; use subprocess::{Popen, PopenConfig, Redirection}; use thiserror::Error; -use std::process::Command; -use chrono::{Local, Timelike, Duration}; -const MAX_CONCURRENT_JOBS: usize = 30; +const MAX_CONCURRENT_JOBS: usize = 20; const G5K_DAY_BOTTOM_BOUNDARY: i64 = 9; const G5K_DAY_UP_BOUNDARY: i64 = 19; @@ -61,7 +61,7 @@ pub enum OARState { UnknownState, Processing, Deployed, - WaitingToBeDeployed + WaitingToBeDeployed, } impl Display for OARState { @@ -83,12 +83,14 @@ impl OARState { OARState::UnknownState => "UnknownState", OARState::Processing => "Processing", OARState::Deployed => "Deployed", - OARState::WaitingToBeDeployed => "WaitingToBeDeployed" + OARState::WaitingToBeDeployed => "WaitingToBeDeployed", } } fn is_terminal(&self) -> bool { - self == &OARState::Terminated || self == &OARState::Failed || self == &OARState::UnknownState + self == &OARState::Terminated + || self == &OARState::Failed + || self == &OARState::UnknownState } } @@ -123,7 +125,7 @@ pub struct Job { pub results_dir: String, pub site: String, pub deployment_id: Option, - pub os_flavor: String + pub os_flavor: String, } impl Job { @@ -147,7 +149,15 @@ impl Job { ) } - fn new(id: usize, node: Node, core_values: Vec, site: String, root_scripts_dir: &str, root_results_dir: &str, os_flavor: String) -> Self { + fn new( + id: usize, + node: Node, + core_values: Vec, + site: String, + root_scripts_dir: &str, + root_results_dir: &str, + os_flavor: String, + ) -> Self { let script_file = Job::build_script_file_path(&node, &site, 
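The `microcode` field of `Processor` became optional in the inventories change above. A minimal serde sketch (toy struct and payload, not the real Grid'5000 schema) showing why the optional type tolerates the API omitting the field:

```rust
use serde::Deserialize;

// Toy reduction of the Processor struct: a missing "microcode" key
// deserializes to None instead of failing the whole node record.
#[derive(Deserialize, Debug)]
struct ProcessorProbe {
    model: String,
    microcode: Option<String>,
}

fn main() {
    let p: ProcessorProbe =
        serde_json::from_str(r#"{ "model": "Example-CPU" }"#).unwrap();
    assert!(p.microcode.is_none()); // would be a hard error with `microcode: String`
}
```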
root_scripts_dir); let results_dir = Job::build_results_dir_path(&node, &site, root_results_dir); @@ -161,7 +171,7 @@ impl Job { results_dir, site, deployment_id: None, - os_flavor + os_flavor, } } @@ -188,7 +198,7 @@ impl Job { let client = reqwest::Client::builder().build()?; let endpoint = format!("{}/sites/{}/jobs", super::BASE_URL, self.site); let data = serde_json::json!({"properties": format!("host={}",self.node.uid), "resources": format!("walltime={}", scripts::WALLTIME), "types": ["deploy"], "command": "sleep 14500"}); - + if let Ok(response) = inventories::post_api_call(&client, &endpoint, &data).await { debug!("Job has been posted on deploy mode"); self.state = OARState::WaitingToBeDeployed; @@ -200,7 +210,6 @@ } } - session.close().await?; Ok(()) @@ -211,10 +220,14 @@ impl Job { client: &reqwest::Client, base_url: &str, ) -> JobResult { - let state: OARState; if self.state == OARState::Processing { - let endpoint = format!("{}/sites/{}/deployments/{}", base_url, self.site, self.deployment_id.clone().unwrap()); + let endpoint = format!( + "{}/sites/{}/deployments/{}", + base_url, + self.site, + self.deployment_id.clone().unwrap() + ); if let Ok(response) = inventories::get_api_call(&client, &endpoint).await { let str_state = response.get("status").unwrap().as_str().unwrap(); if str_state == "terminated" { @@ -227,7 +240,6 @@ } else { state = OARState::Failed; } - } else { let response: HashMap<String, serde_json::Value> = crate::inventories::get_api_call( client, @@ -249,7 +261,7 @@ state = OARState::try_from(str_state.unwrap()).unwrap(); } } - + if state != self.state { self.state_transition(state).await?; } @@ -277,19 +289,20 @@ impl Job { pub async fn job_running(&mut self) -> JobResult { if self.os_flavor == super::DEFAULT_OS_FLAVOR { info!("Starting script on {}", &self.node.uid); - return Ok(()) + return Ok(()); } info!("Deploying new environment on {}", &self.node.uid); // CURL KADEPLOY let client = reqwest::Client::builder().build()?; let endpoint = format!("{}/sites/{}/deployments", super::BASE_URL, self.site); let pub_key_content = fs::read_to_string(".ssh_g5k.pub") - .map_err(|e| format!("Failed to read the SSH public key: {}", e)).unwrap(); + .map_err(|e| format!("Failed to read the SSH public key: {}", e)) + .unwrap(); let pub_key_content = pub_key_content.trim(); let data = serde_json::json!({ - "nodes": [&format!("{}.{}.grid5000.fr",self.node.uid, self.site)], - "environment": self.os_flavor, + "nodes": [&format!("{}.{}.grid5000.fr",self.node.uid, self.site)], + "environment": self.os_flavor, "key": pub_key_content }); @@ -314,7 +327,7 @@ impl Job { let session = ssh::ssh_connect(&self.site).await?; let host = format!("{}.{}.grid5000.fr", self.node.uid, self.site); - if let Ok(script_result) = ssh::run_script(&session, &host, &self.script_file).await { + if let Ok(_script_result) = ssh::run_script(&session, &host, &self.script_file).await { self.state = OARState::Running; } else { self.state = OARState::Failed; @@ -325,17 +338,15 @@ pub async fn job_terminated(&mut self) -> JobResult { info!("Downloading and processing results from {}", &self.node.uid); let root_results_dir = Path::new(&self.results_dir) - .components() + .components() .filter_map(|comp| match comp { std::path::Component::Normal(name) => name.to_str(), _ => None, - }) + }) .next();
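The kadeploy submission in `job_running` above builds its JSON body inline; condensed as a helper for illustration (names are placeholders, the payload shape mirrors the code above):

```rust
// Sketch of the deployment payload posted to {BASE_URL}/sites/{site}/deployments;
// node_uid, site, os_flavor and pub_key stand in for the Job fields used above.
fn kadeploy_payload(node_uid: &str, site: &str, os_flavor: &str, pub_key: &str) -> serde_json::Value {
    serde_json::json!({
        "nodes": [format!("{}.{}.grid5000.fr", node_uid, site)],
        "environment": os_flavor,
        "key": pub_key.trim(), // the key file content is trimmed before sending
    })
}
```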
- if let Err(rsync_result) = rsync_results( - &self.site, - &self.results_dir, - root_results_dir.unwrap(), - ) { + if let Err(_rsync_result) = + rsync_results(&self.site, &self.results_dir, root_results_dir.unwrap()) + { self.state = OARState::UnknownState; } else { if let Ok(_extracted) = extract_tar_xz(&self.results_dir) { @@ -348,20 +359,23 @@ } pub async fn update_node(&mut self, client: &reqwest::Client, base_url: &str) -> JobResult { - let cluster = self.node.cluster.clone().unwrap(); if let Ok(nodes) = inventories::fetch_nodes(&client, base_url, &self.site, &cluster).await { - - let node: Node = nodes.into_iter().find(|node| node.uid == self.node.uid).unwrap(); - - debug!("Cluster : {} ; Node : {} ; os : {:?}", cluster, node.uid, node.operating_system); + let node: Node = nodes + .into_iter() + .find(|node| node.uid == self.node.uid) + .unwrap(); + + debug!( + "Cluster : {} ; Node : {} ; os : {:?}", + cluster, node.uid, node.operating_system + ); self.node = node; } else { warn!("Could not gather nodes"); } Ok(()) } - } #[derive(Debug, Serialize, Deserialize, Clone)] @@ -414,7 +428,7 @@ impl Jobs { for cluster_nodes in clusters_nodes.iter() { // Check if this cluster has a node at the current index - if let Some((site, cluster, node)) = cluster_nodes.get(index) { + if let Some((site, _cluster, node)) = cluster_nodes.get(index) { all_clusters_completed = false; let node_uid = node.uid.clone(); @@ -434,7 +448,7 @@ impl Jobs { self.check_unfinished_jobs(&client, super::BASE_URL, jobs_file) .await?; } - while !within_time_window(scripts::WALLTIME) { + while false {//!within_time_window(scripts::WALLTIME) { info!( "Too close to day|night boundaries for {} WALLTIME", scripts::WALLTIME ); @@ -450,9 +464,16 @@ } // Job creation and submission let core_values = - configs::generate_core_values(5, node.architecture.nb_cores); - let mut job = - Job::new(self.jobs.len(), node.clone(), core_values, site.to_string(), scripts_dir, results_dir, os_flavor.clone()); + configs::generate_core_values(node.architecture.nb_cores); + let mut job = Job::new( + self.jobs.len(), + node.clone(), + core_values, + site.to_string(), + scripts_dir, + results_dir, + os_flavor.clone(), + ); fs::create_dir_all( std::path::Path::new(&job.script_file).parent().unwrap(), )?; @@ -554,7 +575,7 @@ pub fn rsync_results(site: &str, results_dir: &str, root_results_dir: &str) -> J debug!("Rsync with site {} done.\n{:?}", site, out); } else { debug!("Rsync with site {} failed.\n{:?} ; {:?}", site, out, err); - return Err(JobError::UnknownState("Rsync failed".to_string())) + return Err(JobError::UnknownState("Rsync failed".to_string())); } } else { p.terminate()?; @@ -575,7 +596,7 @@ pub fn rsync_results(site: &str, results_dir: &str, root_results_dir: &str) -> J debug!("Checksum success.\n{:?}", out); } else { debug!("Checksum fail.\n{:?} ; {:?}", out, err); - return Err(JobError::UnknownState("Checksum failed".to_string())) + return Err(JobError::UnknownState("Checksum failed".to_string())); } } else { p.terminate()?; @@ -584,7 +605,7 @@ Ok(()) } -fn extract_tar_xz(dir_path: &str) -> Result <(), String> { +fn extract_tar_xz(dir_path: &str) -> Result<(), String> { let dir = Path::new(dir_path); let tar_xz_name = match dir.file_name() { @@ -596,7 +617,10 @@ fn extract_tar_xz(dir_path: &str) -> Result <(), String> { None => return Err("Failed to compute archive name from directory path.".to_string()), }; - let archive_path = dir.parent().unwrap_or_else(|| Path::new(".")).join(&tar_xz_name); + let archive_path = dir + .parent() + .unwrap_or_else(|| Path::new(".")) + .join(&tar_xz_name);
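Note on the scheduling loop above: `while false {//!within_time_window(scripts::WALLTIME) {` switches the day/night guard off by making the condition constant, which is easy to miss in review. A sketch of a more explicit opt-out, assuming a hypothetical `ignore_time_window` CLI flag on `BenchmarkArgs`:

```rust
// Stub standing in for the guard defined later in this diff.
fn within_time_window(_walltime: &str) -> bool {
    true
}

// Hypothetical: surface the bypass as configuration instead of dead code,
// so the guard can be re-enabled without editing the loop condition.
fn must_wait_for_window(ignore_time_window: bool, walltime: &str) -> bool {
    !ignore_time_window && !within_time_window(walltime)
}
```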
if !archive_path.exists() { return Err(format!("Archive not found: {:?}", archive_path)); @@ -609,7 +633,8 @@ fn extract_tar_xz(dir_path: &str) -> Result <(), String> { .arg("-C") .arg(dir.parent().unwrap_or_else(|| Path::new("."))) .output() - .map_err(|e| format!("Failed to execute tar command stripping 5: {}", e)).unwrap(); + .map_err(|e| format!("Failed to execute tar command stripping 5: {}", e)) + .unwrap(); if !output_5.status.success() { let output_3 = Command::new("tar") @@ -622,7 +647,6 @@ fn extract_tar_xz(dir_path: &str) -> Result <(), String> { .map_err(|e| format!("Failed to execute tar command stripping 3: {}", e))?; if !output_3.status.success() { - return Err(format!( "tar command failed with error: {}", String::from_utf8_lossy(&output_3.stderr) @@ -656,11 +680,21 @@ fn within_time_window(walltime: &str) -> bool { let now = Local::now(); let current_hour = now.hour() as i64; let walltime_duration = parse_walltime(walltime).unwrap_or_else(|| Duration::hours(0)); - let adjusted_hour = current_hour + walltime_duration.num_hours(); - - if (G5K_DAY_BOTTOM_BOUNDARY..G5K_DAY_UP_BOUNDARY).contains(&current_hour) { - adjusted_hour < G5K_DAY_UP_BOUNDARY - } else { - adjusted_hour < G5K_DAY_BOTTOM_BOUNDARY || adjusted_hour >= 24 + let adjusted_hour = (current_hour + walltime_duration.num_hours()) % 24; + if adjusted_hour > G5K_DAY_BOTTOM_BOUNDARY + && adjusted_hour < G5K_DAY_UP_BOUNDARY + && current_hour >= G5K_DAY_BOTTOM_BOUNDARY + { + return true; + } + if adjusted_hour > G5K_DAY_UP_BOUNDARY && current_hour > G5K_DAY_UP_BOUNDARY { + return true; + } + if adjusted_hour < G5K_DAY_BOTTOM_BOUNDARY && current_hour < G5K_DAY_BOTTOM_BOUNDARY { + return true; + } + if adjusted_hour < G5K_DAY_BOTTOM_BOUNDARY && current_hour > G5K_DAY_UP_BOUNDARY { + return true; } + return false; } diff --git a/src/main.rs b/src/main.rs index 932ec69..7c8eaf1 100644 --- a/src/main.rs +++ b/src/main.rs @@ -21,7 +21,7 @@ use std::{fmt, fs, time::Duration}; use thiserror::Error; const SUPPORTED_PROCESSOR_VENDOR: &[&str; 3] = &["Intel", "AMD", "Cavium"]; -const SLEEP_CHECK_TIME_IN_SECONDES: u64 = 900; +const SLEEP_CHECK_TIME_IN_SECONDES: u64 = 150; const BASE_URL: &str = "https://api.grid5000.fr/stable"; // Base URL of the API const LOGS_DIRECTORY: &str = "logs.d"; const INVENTORIES_DIRECTORY: &str = "inventories.d"; @@ -72,10 +72,9 @@ struct BenchmarkArgs { /// OS version to deploy first on nodes with kadeploy3 #[arg(long, default_value = "debian11-nfs")] - os_flavor: String + os_flavor: String, } - type BenchmarkResult = Result<(), BenchmarkError>; #[derive(Error, Debug)] pub enum BenchmarkError { @@ -206,7 +205,12 @@ } // Creates all directories if not already existing -fn init_directories(logs_directory: &str, inventories_directory: &str, scripts_directory: &str, results_directory: &str) -> BenchmarkResult { +fn init_directories( + logs_directory: &str, + inventories_directory: &str, + scripts_directory: &str, + results_directory: &str, +) -> BenchmarkResult { let directories = [ logs_directory, inventories_directory, @@ -268,19 +272,17 @@ async fn main() -> Result<(), BenchmarkError> { info!("Starting Benchmarks!"); debug!("LOG_LEVEL is : {:?}", &log_level); - - init_directories( &benchmark_args.logs_directory, &benchmark_args.inventories_directory, &benchmark_args.scripts_directory, &benchmark_args.results_directory, - )?; + )?; let events_by_vendor = load_events_config(&benchmark_args.config_file)?; let mut jobs: Jobs = load_or_init_jobs(&benchmark_args.jobs_file)?; -
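The rewritten within_time_window above accepts four hour configurations; a test sketch of the same arithmetic with the hour passed explicitly, since the real function reads `Local::now()` and resists direct testing (boundaries mirror `G5K_DAY_BOTTOM_BOUNDARY = 9` and `G5K_DAY_UP_BOUNDARY = 19`):

```rust
// Mirror of the four accepted cases, parameterized on the current hour.
fn fits(current_hour: i64, walltime_hours: i64) -> bool {
    let adjusted = (current_hour + walltime_hours) % 24;
    (current_hour >= 9 && adjusted > 9 && adjusted < 19)
        || (current_hour > 19 && adjusted > 19)
        || (current_hour < 9 && adjusted < 9)
        || (current_hour > 19 && adjusted < 9)
}

#[test]
fn day_job_inside_the_window_is_accepted() {
    assert!(fits(10, 4)); // 10h + 4h ends at 14h, inside 9..19
}

#[test]
fn day_job_crossing_the_evening_boundary_is_rejected() {
    assert!(!fits(16, 5)); // 16h + 5h ends at 21h, past the 19h boundary
}
```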
- if ! benchmark_args.inventory_skip { + + if !benchmark_args.inventory_skip { info!("Processing inventory step"); inventories::generate_inventory(&benchmark_args.inventories_directory).await?; /* @@ -295,12 +297,11 @@ async fn main() -> Result<(), BenchmarkError> { } jobs.dump_to_file(JOBS_FILE)?; */ - } else { info!("Skipping inventory scraping as requested"); } - if ! benchmark_args.jobs_skip { + if !benchmark_args.jobs_skip { info!("Processing jobs step"); // If we loaded existing jobs, check their status if jobs.jobs.len() != 0 { @@ -315,7 +316,7 @@ &benchmark_args.scripts_directory, &benchmark_args.results_directory, &events_by_vendor, - benchmark_args.os_flavor.clone() + benchmark_args.os_flavor.clone(), ) .await?; @@ -335,7 +336,6 @@ async fn main() -> Result<(), BenchmarkError> { results::process_results(&job.results_dir)?; } - Ok(()) } diff --git a/src/results.rs b/src/results.rs index 2cd1820..58b61ca 100644 --- a/src/results.rs +++ b/src/results.rs @@ -1,10 +1,10 @@ -use thiserror::Error; -use log::{debug, warn, error}; -use serde::{Serialize, Deserialize}; -use std::path::{Path, PathBuf}; -use std::fs::File; -use std::io::{self, BufRead, BufReader, Write}; +use log::{debug, error, warn}; +use serde::{Deserialize, Serialize}; +use std::fs; +use std::fs::File; +use std::io::{self, BufRead, BufReader}; +use std::path::{Path, PathBuf}; +use thiserror::Error; #[derive(Error, Debug)] pub enum ResultError { @@ -26,11 +26,8 @@ struct HwpcRowRaw { time_running: i64, } - - - #[derive(Debug, Deserialize, Serialize, PartialEq)] -struct HwpcRow { +struct HwpcConsumptionRow { timestamp: i64, sensor: String, target: String, @@ -43,12 +40,17 @@ struct HwpcRow { time_running: i64, nb_core: i32, nb_ops_per_core: i32, - iteration: usize + iteration: usize, } -impl HwpcRow { - fn from_raw_record(raw_record: HwpcRowRaw, nb_core: i32, nb_ops_per_core: i32, iteration: usize) -> Self { - HwpcRow { +impl HwpcConsumptionRow { + fn from_raw_record( + raw_record: HwpcRowRaw, + nb_core: i32, + nb_ops_per_core: i32, + iteration: usize, + ) -> Self { + Self { timestamp: raw_record.timestamp, sensor: raw_record.sensor, target: raw_record.target, @@ -61,13 +63,52 @@ time_enabled: raw_record.time_enabled, time_running: raw_record.time_running, nb_core, nb_ops_per_core, - iteration + iteration, } } } #[derive(Debug, Deserialize, Serialize, PartialEq)] -struct PerfRow { +struct HwpcFrequencyRow { + timestamp: i64, + sensor: String, + target: String, + socket: i32, + cpu: i32, + rapl_energy_pkg: Option<f64>, + rapl_energy_dram: Option<f64>, + rapl_energy_cores: Option<f64>, + time_enabled: i64, + time_running: i64, + frequency: i32, + iteration: usize, +} + +impl HwpcFrequencyRow { + fn from_raw_record( + raw_record: HwpcRowRaw, + frequency: i32, + iteration: usize, + ) -> Self { + Self { + timestamp: raw_record.timestamp, + sensor: raw_record.sensor, + target: raw_record.target, + socket: raw_record.socket, + cpu: raw_record.cpu, + rapl_energy_pkg: raw_record.RAPL_ENERGY_PKG, + rapl_energy_dram: raw_record.RAPL_ENERGY_DRAM, + rapl_energy_cores: raw_record.RAPL_ENERGY_CORES, + time_enabled: raw_record.time_enabled, + time_running: raw_record.time_running, + frequency, + iteration, + } + } +} + +#[derive(Debug, Deserialize, Serialize, PartialEq)] +struct PerfConsumptionRow { power_energy_pkg: Option<f64>, power_energy_ram: Option<f64>, power_energy_cores: Option<f64>, @@ -76,16 +117,26 @@ struct PerfRow { nb_ops_per_core: i32, iteration: usize, } +#[derive(Debug, Deserialize, Serialize, PartialEq)] +struct PerfFrequencyRow { +
power_energy_pkg: Option, + power_energy_ram: Option, + power_energy_cores: Option, + time_elapsed: f64, + frequency: i32, + iteration: usize, +} /// Creates an aggregation of perf___ into corresponding perf_alone__.csv file -fn aggregate_perf(raw_perf_results_file: PathBuf) -> io::Result<()> { - +fn aggregate_perf_consumption(raw_perf_results_file: PathBuf) -> io::Result<()> { let output_path = &format!("{}.csv", raw_perf_results_file.display()); fs::File::create(output_path)?; let mut output_writer = csv::Writer::from_path(output_path)?; - if let Some((nb_core, nb_ops_per_core)) = parse_perf_metadata(raw_perf_results_file.file_name().unwrap().to_str().unwrap()) { + if let Some((nb_core, nb_ops_per_core)) = + parse_perf_consumption_metadata(raw_perf_results_file.file_name().unwrap().to_str().unwrap()) + { let raw_perf_results_file = File::open(raw_perf_results_file)?; let reader = BufReader::new(raw_perf_results_file); let mut iteration = 1; @@ -106,21 +157,20 @@ fn aggregate_perf(raw_perf_results_file: PathBuf) -> io::Result<()> { } } else if line.contains("power/energy-ram/") { if let Some(value) = line.trim().split_whitespace().next() { - ram_joules = Some(value.replace(',', "").parse::().unwrap_or_default()); } } else if line.contains("seconds time elapsed") { if let Some(value) = line.trim().split_whitespace().next() { time_elapsed = Some(value.parse::().unwrap_or_default()); } - let perf_row = PerfRow { + let perf_row = PerfConsumptionRow { power_energy_pkg: pkg_joules, power_energy_ram: ram_joules, power_energy_cores: cores_joules, time_elapsed: time_elapsed.unwrap(), nb_core: nb_core.parse::().unwrap(), nb_ops_per_core: nb_ops_per_core.parse::().unwrap(), - iteration + iteration, }; output_writer.serialize(perf_row)?; iteration += 1; @@ -131,21 +181,90 @@ fn aggregate_perf(raw_perf_results_file: PathBuf) -> io::Result<()> { } } } else { - warn!("Could not parse metadata from file name: {:?}", raw_perf_results_file); + warn!( + "Could not parse metadata from file name: {:?}", + raw_perf_results_file + ); } Ok(()) } +fn aggregate_perf_frequency(raw_perf_results_file: PathBuf) -> io::Result<()> { + let output_path = &format!("{}.csv", raw_perf_results_file.display()); + fs::File::create(output_path)?; + let mut output_writer = csv::Writer::from_path(output_path)?; -fn parse_perf_metadata(file_name: &str) -> Option<(String, String)> { - if let Some(file_name) = Path::new(file_name).file_name().and_then(|os_str| os_str.to_str()) { + if let Some(frequency) = + parse_perf_frequency_metadata(raw_perf_results_file.file_name().unwrap().to_str().unwrap()) + { + let raw_perf_results_file = File::open(raw_perf_results_file)?; + let reader = BufReader::new(raw_perf_results_file); + let mut iteration = 1; + let mut cores_joules = None; + let mut pkg_joules = None; + let mut ram_joules = None; + let mut time_elapsed = None; + + for line in reader.lines() { + let line = line?; + if line.contains("power/energy-cores/") { + if let Some(value) = line.trim().split_whitespace().next() { + cores_joules = Some(value.replace(',', "").parse::().unwrap_or_default()); + } + } else if line.contains("power/energy-pkg/") { + if let Some(value) = line.trim().split_whitespace().next() { + pkg_joules = Some(value.replace(',', "").parse::().unwrap_or_default()); + } + } else if line.contains("power/energy-ram/") { + if let Some(value) = line.trim().split_whitespace().next() { + ram_joules = Some(value.replace(',', "").parse::().unwrap_or_default()); + } + } else if line.contains("seconds time elapsed") { + if let 
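Both aggregation functions above scan raw `perf stat` text output line by line; a minimal sketch of the value extraction they share, with a representative line (exact perf formatting varies with locale and version):

```rust
// Take the leading numeric token of a line such as
// "  1,234.56 Joules power/energy-pkg/" and strip thousands separators,
// mirroring the replace(',', "") + parse done above.
fn parse_joules(line: &str) -> Option<f64> {
    line.trim()
        .split_whitespace()
        .next()
        .and_then(|token| token.replace(',', "").parse::<f64>().ok())
}

fn main() {
    assert_eq!(parse_joules("  1,234.56 Joules power/energy-pkg/"), Some(1234.56));
}
```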
Some(value) = line.trim().split_whitespace().next() { + time_elapsed = Some(value.parse::().unwrap_or_default()); + } + let perf_row = PerfFrequencyRow { + power_energy_pkg: pkg_joules, + power_energy_ram: ram_joules, + power_energy_cores: cores_joules, + time_elapsed: time_elapsed.unwrap(), + frequency: frequency.parse::().unwrap(), + iteration, + }; + output_writer.serialize(perf_row)?; + iteration += 1; + cores_joules = None; + pkg_joules = None; + ram_joules = None; + time_elapsed = None; // Reset for the next iteration + } + } + } else { + warn!( + "Could not parse metadata from file name: {:?}", + raw_perf_results_file + ); + } + + Ok(()) +} + +fn parse_perf_consumption_metadata(file_name: &str) -> Option<(String, String)> { + if let Some(file_name) = Path::new(file_name) + .file_name() + .and_then(|os_str| os_str.to_str()) + { let parts: Vec<&str> = file_name.split('_').collect(); if parts.len() == 4 { - if let (Ok(nb_core), Ok(nb_ops_per_core)) = (parts[2].parse::(), parts[3].parse::()) { + if let (Ok(nb_core), Ok(nb_ops_per_core)) = + (parts[2].parse::(), parts[3].parse::()) + { return Some((nb_core.to_string(), nb_ops_per_core.to_string())); } } else if parts.len() == 5 { - if let (Ok(nb_core), Ok(nb_ops_per_core)) = (parts[3].parse::(), parts[4].parse::()) { + if let (Ok(nb_core), Ok(nb_ops_per_core)) = + (parts[3].parse::(), parts[4].parse::()) + { return Some((nb_core.to_string(), nb_ops_per_core.to_string())); } } @@ -154,16 +273,47 @@ fn parse_perf_metadata(file_name: &str) -> Option<(String, String)> { } None } +fn parse_perf_frequency_metadata(file_name: &str) -> Option { + if let Some(file_name) = Path::new(file_name) + .file_name() + .and_then(|os_str| os_str.to_str()) + { + let parts: Vec<&str> = file_name.split('_').collect(); + if parts.len() == 5 { + if let Ok(frequency) = + parts[1].parse::() + { + return Some(frequency.to_string()); + } + } else { + warn!("Could not parse filename {} to get metadata", file_name); + } + } else { + warn!("Could not parse filename {} to get metadata", file_name); + } + None +} -fn parse_hwpc_metadata(dir_name: &str) -> Option<(i32, i32, usize)> { - if let Some(dir_name) = Path::new(dir_name).file_name().and_then(|os_str| os_str.to_str()) { +fn parse_hwpc_consumption_metadata(dir_name: &str) -> Option<(i32, i32, usize)> { + if let Some(dir_name) = Path::new(dir_name) + .file_name() + .and_then(|os_str| os_str.to_str()) + { let parts: Vec<&str> = dir_name.split('_').collect(); if parts.len() == 5 { - if let (Ok(nb_core), Ok(nb_ops_per_core), Ok(iteration)) = (parts[2].parse::(), parts[3].parse::(), parts[4].parse::()) { + if let (Ok(nb_core), Ok(nb_ops_per_core), Ok(iteration)) = ( + parts[2].parse::(), + parts[3].parse::(), + parts[4].parse::(), + ) { return Some((nb_core, nb_ops_per_core, iteration)); } } else if parts.len() == 6 { - if let (Ok(nb_core), Ok(nb_ops_per_core), Ok(iteration)) = (parts[3].parse::(), parts[4].parse::(), parts[5].parse::()) { + if let (Ok(nb_core), Ok(nb_ops_per_core), Ok(iteration)) = ( + parts[3].parse::(), + parts[4].parse::(), + parts[5].parse::(), + ) { return Some((nb_core, nb_ops_per_core, iteration)); } } @@ -172,28 +322,56 @@ fn parse_hwpc_metadata(dir_name: &str) -> Option<(i32, i32, usize)> { } None } - -fn aggregate_hwpc_file(raw_rapl_file: &Path, output_path: &str, nb_core: i32, nb_ops_per_core: i32, iteration: usize) -> io::Result<()> { - let file_exists = std::fs::metadata(output_path).is_ok(); +fn parse_hwpc_frequency_metadata(dir_name: &str) -> Option<(i32, usize)> { + if let Some(dir_name) 
= Path::new(dir_name) + .file_name() + .and_then(|os_str| os_str.to_str()) + { + let parts: Vec<&str> = dir_name.split('_').collect(); + if parts.len() == 6 { + if let (Ok(frequency), Ok(iteration)) = ( + parts[1].parse::(), + parts[5].parse::(), + ) + { + return Some((frequency,iteration)); + } + } else { + warn!("Could not parse filename {} to get metadata", dir_name); + } + } else { + warn!("Could not parse filename {} to get metadata", dir_name); + } + None +} +fn aggregate_hwpc_consumption_file( + raw_rapl_file: &Path, + output_path: &str, + nb_core: i32, + nb_ops_per_core: i32, + iteration: usize, +) -> io::Result<()> { + let file_exists = std::fs::metadata(output_path).is_ok(); let file = std::fs::OpenOptions::new() .write(true) .create(true) .append(true) .open(output_path)?; - let mut output_writer = csv::WriterBuilder::new().has_headers(!file_exists).from_writer(file); - + let mut output_writer = csv::WriterBuilder::new() + .has_headers(!file_exists) + .from_writer(file); if let Ok(mut reader) = csv::Reader::from_path(raw_rapl_file) { - let iter = reader.deserialize::(); - + let iter = reader.deserialize::(); for hwpc_row_raw in iter { match hwpc_row_raw { Ok(row_raw) => { - let hwpc_raw = HwpcRow::from_raw_record(row_raw, nb_core, nb_ops_per_core, iteration); + let hwpc_raw = + HwpcConsumptionRow::from_raw_record(row_raw, nb_core, nb_ops_per_core, iteration); output_writer.serialize(hwpc_raw)?; - }, + } Err(e) => { warn!("Raw row malformed : {}", e); } @@ -204,12 +382,72 @@ fn aggregate_hwpc_file(raw_rapl_file: &Path, output_path: &str, nb_core: i32, nb } Ok(()) } +fn aggregate_hwpc_frequency_file( + raw_rapl_file: &Path, + output_path: &str, + frequency: i32, + iteration: usize, +) -> io::Result<()> { + let file_exists = std::fs::metadata(output_path).is_ok(); + let file = std::fs::OpenOptions::new() + .write(true) + .create(true) + .append(true) + .open(output_path)?; + + let mut output_writer = csv::WriterBuilder::new() + .has_headers(!file_exists) + .from_writer(file); -fn aggregate_hwpc_subdir(subdir: &fs::DirEntry, output_path: &str) -> io::Result<()> { + if let Ok(mut reader) = csv::Reader::from_path(raw_rapl_file) { + let iter = reader.deserialize::(); + for hwpc_row_raw in iter { + match hwpc_row_raw { + Ok(row_raw) => { + let hwpc_raw = + HwpcFrequencyRow::from_raw_record(row_raw, frequency, iteration); + output_writer.serialize(hwpc_raw)?; + } + Err(e) => { + warn!("Raw row malformed : {}", e); + } + } + } + } else { + warn!("Could not open {}", output_path); + } + Ok(()) +} + +fn aggregate_hwpc_consumption_subdir(subdir: &fs::DirEntry, output_path: &str) -> io::Result<()> { let raw_rapl_file = subdir.path().join("rapl.csv"); - if let Some((nb_core, nb_ops_per_core, iteration)) = parse_hwpc_metadata(subdir.file_name().to_str().unwrap()) { - aggregate_hwpc_file(&raw_rapl_file, output_path, nb_core, nb_ops_per_core, iteration)?; + if let Some((nb_core, nb_ops_per_core, iteration)) = + parse_hwpc_consumption_metadata(subdir.file_name().to_str().unwrap()) + { + aggregate_hwpc_consumption_file( + &raw_rapl_file, + output_path, + nb_core, + nb_ops_per_core, + iteration, + )?; + } else { + warn!("Could not parse metadata from directory name: {:?}", subdir); + } + Ok(()) +} +fn aggregate_hwpc_frequency_subdir(subdir: &fs::DirEntry, output_path: &str) -> io::Result<()> { + let raw_rapl_file = subdir.path().join("rapl.csv"); + if let Some((frequency, iteration)) = + parse_hwpc_frequency_metadata(subdir.file_name().to_str().unwrap()) + { + aggregate_hwpc_frequency_file( + 
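The `aggregate_hwpc_*_file` functions above append to a shared CSV and must emit the header exactly once; a condensed sketch of that pattern (helper name is illustrative):

```rust
use std::path::Path;

// Write the CSV header only when this call creates the file, so repeated
// appends from successive subdirectories keep the output well-formed.
fn open_csv_appender(output_path: &str) -> csv::Result<csv::Writer<std::fs::File>> {
    let file_exists = Path::new(output_path).exists();
    let file = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(output_path)?;
    Ok(csv::WriterBuilder::new()
        .has_headers(!file_exists)
        .from_writer(file))
}
```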
&raw_rapl_file, + output_path, + frequency, + iteration, + )?; } else { warn!("Could not parse metadata from directory name: {:?}", subdir); } @@ -217,12 +455,16 @@ fn aggregate_hwpc_subdir(subdir: &fs::DirEntry, output_path: &str) -> io::Result } /// Creates an aggregation of hwpc___ into corresponding hwpc___.csv file -fn aggregate_hwpc( - raw_results_dir_path: PathBuf, -) -> io::Result<()> { - - let (output_parent, output_basename) = (raw_results_dir_path.parent().unwrap(), raw_results_dir_path.file_name().unwrap()); - let output_path = &format!("{}/{}.csv", output_parent.to_str().unwrap(), output_basename.to_str().unwrap()); +fn aggregate_hwpc_consumption(raw_results_dir_path: PathBuf) -> io::Result<()> { + let (output_parent, output_basename) = ( + raw_results_dir_path.parent().unwrap(), + raw_results_dir_path.file_name().unwrap(), + ); + let output_path = &format!( + "{}/{}.csv", + output_parent.to_str().unwrap(), + output_basename.to_str().unwrap() + ); if Path::new(output_path).exists() { match fs::remove_file(output_path) { @@ -234,18 +476,63 @@ fn aggregate_hwpc( let mut raw_results_subdirs = Vec::new(); if let Ok(entries) = fs::read_dir(&raw_results_dir_path) { - raw_results_subdirs = entries.filter(|entry| entry.as_ref().unwrap().file_type().unwrap().is_dir()).collect(); + raw_results_subdirs = entries + .filter(|entry| entry.as_ref().unwrap().file_type().unwrap().is_dir()) + .collect(); } else { - warn!("Could not find subdirectories in {} directory", output_parent.to_str().unwrap()); + warn!( + "Could not find subdirectories in {} directory", + output_parent.to_str().unwrap() + ); } - assert!(raw_results_subdirs.iter().map(|subdir| aggregate_hwpc_subdir(subdir.as_ref().unwrap(), output_path)).all(|result| result.is_ok())); + assert!(raw_results_subdirs + .iter() + .map(|subdir| aggregate_hwpc_consumption_subdir(subdir.as_ref().unwrap(), output_path)) + .all(|result| result.is_ok())); Ok(()) } +fn aggregate_hwpc_frequency(raw_results_dir_path: PathBuf) -> io::Result<()> { + let (output_parent, output_basename) = ( + raw_results_dir_path.parent().unwrap(), + raw_results_dir_path.file_name().unwrap(), + ); + let output_path = &format!( + "{}/{}.csv", + output_parent.to_str().unwrap(), + output_basename.to_str().unwrap() + ); + if Path::new(output_path).exists() { + match fs::remove_file(output_path) { + Ok(_) => debug!("File '{}' was deleted successfully.", output_path), + Err(e) => error!("Failed to delete file '{}': {}", output_path, e), + } + } + + let mut raw_results_subdirs = Vec::new(); + + if let Ok(entries) = fs::read_dir(&raw_results_dir_path) { + raw_results_subdirs = entries + .filter(|entry| entry.as_ref().unwrap().file_type().unwrap().is_dir()) + .collect(); + } else { + warn!( + "Could not find subdirectories in {} directory", + output_parent.to_str().unwrap() + ); + } + + assert!(raw_results_subdirs + .iter() + .map(|subdir| aggregate_hwpc_frequency_subdir(subdir.as_ref().unwrap(), output_path)) + .all(|result| result.is_ok())); + + Ok(()) +} -fn filter_hwpc_dirs(directory: &str) -> Vec { +fn filter_hwpc_consumption_dirs(directory: &str) -> Vec { let mut filtered_files = Vec::new(); if let Ok(entries) = fs::read_dir(directory) { @@ -265,8 +552,28 @@ fn filter_hwpc_dirs(directory: &str) -> Vec { filtered_files } +fn filter_hwpc_frequency_dirs(directory: &str) -> Vec { + let mut filtered_files = Vec::new(); + + if let Ok(entries) = fs::read_dir(directory) { + for entry in entries { + if let Ok(entry) = entry { + let path = entry.path(); + if path.is_dir() { + if let 
Some(file_name) = path.file_name().and_then(|s| s.to_str()) { + if ["frequency_1_hwpc", "frequency_10_hwpc", "frequency_100_hwpc", "frequency_1000_hwpc"].iter().any(|s| file_name.starts_with(*s)) { + filtered_files.push(path); + } + } + } + } + } + } + + filtered_files +} -fn filter_perf_files(directory: &str) -> Vec { +fn filter_perf_consumption_files(directory: &str) -> Vec { let mut filtered_files = Vec::new(); if let Ok(entries) = fs::read_dir(directory) { @@ -286,14 +593,50 @@ fn filter_perf_files(directory: &str) -> Vec { filtered_files } -pub fn process_results(results_directory: &str) -> io::Result<()> { - let perf_raw_files = filter_perf_files(results_directory); - assert!(perf_raw_files.iter().map(|perf_raw_file| aggregate_perf(perf_raw_file.to_path_buf())).all(|result| result.is_ok())); +fn filter_perf_frequency_files(directory: &str) -> Vec { + let mut filtered_files = Vec::new(); - let hwpc_raw_dirs = filter_hwpc_dirs(results_directory); - assert!(hwpc_raw_dirs.iter().map(|hwpc_raw_results_dir| aggregate_hwpc(hwpc_raw_results_dir.to_path_buf())).all(|result| result.is_ok())); + if let Ok(entries) = fs::read_dir(directory) { + for entry in entries { + if let Ok(entry) = entry { + let path = entry.path(); + if path.is_file() { + if let Some(file_name) = path.file_name().and_then(|s| s.to_str()) { + if !file_name.ends_with(".csv") && ["frequency_1_perf", "frequency_10_perf", "frequency_100_perf", "frequency_1000_perf"].iter().any(|s| file_name.starts_with(*s)) { + filtered_files.push(path); + } + } + } + } + } + } + filtered_files +} +pub fn process_results(results_directory: &str) -> io::Result<()> { + let perf_consumption_raw_files = filter_perf_consumption_files(results_directory); + assert!(perf_consumption_raw_files + .iter() + .map(|perf_raw_file| aggregate_perf_consumption(perf_raw_file.to_path_buf())) + .all(|result| result.is_ok())); + + let perf_frequency_raw_files = filter_perf_frequency_files(results_directory); + assert!(perf_frequency_raw_files + .iter() + .map(|perf_raw_file| aggregate_perf_frequency(perf_raw_file.to_path_buf())) + .all(|result| result.is_ok())); + + let hwpc_consumption_raw_dirs = filter_hwpc_consumption_dirs(results_directory); + assert!(hwpc_consumption_raw_dirs + .iter() + .map(|hwpc_raw_results_dir| aggregate_hwpc_consumption(hwpc_raw_results_dir.to_path_buf())) + .all(|result| result.is_ok())); + + let hwpc_frequency_raw_dirs = filter_hwpc_frequency_dirs(results_directory); + assert!(hwpc_frequency_raw_dirs + .iter() + .map(|hwpc_raw_results_dir| aggregate_hwpc_frequency(hwpc_raw_results_dir.to_path_buf())) + .all(|result| result.is_ok())); Ok(()) } - diff --git a/src/scripts.rs b/src/scripts.rs index 5167f70..64fe752 100644 --- a/src/scripts.rs +++ b/src/scripts.rs @@ -12,19 +12,31 @@ use std::fs::File; use std::io::Write; use thiserror::Error; -pub const WALLTIME: &str = "5:00:00"; +pub const WALLTIME: &str = "4:30:00"; const QUEUE_TYPE: &str = "default"; -const CPU_OPS_PER_CORE_LIST: &[u32] = &[25, 250, 2_500, 25_000]; -const NB_ITERATIONS: usize = 10; +const CPU_OPS_PER_CORE_LIST: &[u32] = &[25_000]; +const NB_ITERATIONS: usize = 12; +const NB_ITERATIONS_FREQUENCIES: usize = 6; const HWPC_HOME_DIRECTORY: &str = "/app"; #[derive(Template)] #[template(path = "benchmark.sh", escape = "none")] struct BenchmarkTemplate { nb_iterations: usize, + nb_iterations_frequencies: usize, + frequencies_benchmark: bool, + target_frequencies: Vec, perf_alone: bool, hwpc_alone: bool, + codecarbon_alone: bool, + alumet_alone: bool, + scaphandre_alone: bool, 
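`process_results` above chains four `assert!(... .all(|result| result.is_ok()))` batches, so one failed aggregation panics and hides the rest. A sketch of an error-collecting alternative (not the current behavior, just an option):

```rust
// Run an aggregation over every input and report all failures at once
// instead of aborting on the first batch containing an error.
fn run_all<T>(
    inputs: &[T],
    aggregate: impl Fn(&T) -> std::io::Result<()>,
) -> Result<(), Vec<std::io::Error>> {
    let errors: Vec<_> = inputs.iter().filter_map(|input| aggregate(input).err()).collect();
    if errors.is_empty() {
        Ok(())
    } else {
        Err(errors)
    }
}
```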
+ vjoule_alone: bool, hwpc_and_perf: bool, + codecarbon_and_perf: bool, + alumet_and_perf: bool, + scaphandre_and_perf: bool, + vjoule_and_perf: bool, docker_hub_username: String, docker_hub_token: String, hwpc_alone_configs: HashMap, @@ -38,16 +50,26 @@ struct BenchmarkTemplate { core_values: Vec, perf_events: PerfEvents, cpu_ops_per_core_list: Vec, - os_flavor: String - + os_flavor: String, } impl BenchmarkTemplate { fn new( nb_iterations: usize, + nb_iterations_frequencies: usize, + frequencies_benchmark: bool, + target_frequencies: Vec, perf_alone: bool, hwpc_alone: bool, + codecarbon_alone: bool, + alumet_alone: bool, + scaphandre_alone: bool, + vjoule_alone: bool, hwpc_and_perf: bool, + codecarbon_and_perf: bool, + alumet_and_perf: bool, + scaphandre_and_perf: bool, + vjoule_and_perf: bool, docker_hub_username: String, docker_hub_token: String, hwpc_alone_configs: HashMap, @@ -65,9 +87,20 @@ impl BenchmarkTemplate { ) -> Self { Self { nb_iterations, + nb_iterations_frequencies, + frequencies_benchmark, + target_frequencies, perf_alone, hwpc_alone, + codecarbon_alone, + alumet_alone, + scaphandre_alone, + vjoule_alone, hwpc_and_perf, + codecarbon_and_perf, + alumet_and_perf, + scaphandre_and_perf, + vjoule_and_perf, docker_hub_username, docker_hub_token, hwpc_alone_configs, @@ -81,7 +114,7 @@ impl BenchmarkTemplate { core_values, perf_events, cpu_ops_per_core_list: cpu_ops_per_core_list.into(), - os_flavor + os_flavor, } } } @@ -108,12 +141,31 @@ pub fn generate_script_file( &job.node.processor.microarchitecture, &job.node.processor.version, ); - let hwpc_alone_configs = - configs::generate_hwpc_configs(&hwpc_events, &job.core_values, "hwpc_alone", &job.os_flavor); - let hwpc_and_perf_configs = - configs::generate_hwpc_configs(&hwpc_events, &job.core_values, "hwpc_and_perf", &job.os_flavor); + let hwpc_alone_configs = configs::generate_hwpc_configs( + &hwpc_events, + &job.core_values, + "hwpc_alone", + &job.os_flavor, + ); + let hwpc_and_perf_configs = configs::generate_hwpc_configs( + &hwpc_events, + &job.core_values, + "hwpc_and_perf", + &job.os_flavor, + ); let benchmark = BenchmarkTemplate::new( NB_ITERATIONS, + NB_ITERATIONS_FREQUENCIES, + true, + vec![1_000, 100, 10, 1], + false, + false, + false, + false, + false, + false, + true, + true, true, true, true, @@ -130,7 +182,7 @@ pub fn generate_script_file( job.core_values.clone(), perf_events, CPU_OPS_PER_CORE_LIST, - job.os_flavor.clone() + job.os_flavor.clone(), ); let benchmark = benchmark.render().unwrap(); file.write_all(benchmark.as_bytes())?; diff --git a/src/ssh.rs b/src/ssh.rs index 6a5ed53..066ebe8 100644 --- a/src/ssh.rs +++ b/src/ssh.rs @@ -3,8 +3,7 @@ use openssh::{KnownHosts, Session, Stdio}; use openssh_sftp_client::Sftp; use regex::Regex; use std::str::{self}; -use thiserror::Error; -use tokio::io::AsyncWriteExt; +use thiserror::Error; #[derive(Error, Debug)] pub enum SshError { @@ -57,11 +56,14 @@ pub async fn make_script_executable(session: &Session, script_file: &str) -> Ssh Ok(()) } -pub async fn run_script(session: &Session, host:&str, script_file: &str) -> SshResult { +pub async fn run_script(session: &Session, host: &str, script_file: &str) -> SshResult { let ssh_command = session .command("ssh") .arg(&format!("root@{}", host)) - .arg(&format!("cd /home/nleblond && (nohup bash {} 1> out1 2> out2 &)", script_file)) + .arg(&format!( + "cd /home/nleblond && (nohup bash {} 1> out1 2> out2 &)", + script_file + )) .output() .await; match ssh_command { @@ -69,15 +71,14 @@ pub async fn run_script(session: &Session, 
host:&str, script_file: &str) -> SshR if ssh_output.status.success() { debug!("Script successfully started"); } else { - error!("Job submission failed: {:?}", ssh_output.stderr); - } - }, - Err(e) => error!("Job command failed: {:?}", e) + error!("Job submission failed: {:?}", ssh_output.stderr); + } + } + Err(e) => error!("Job command failed: {:?}", e), } Ok(()) } - pub async fn run_oarsub(session: &Session, script_file: &str) -> Result, SshError> { let oarsub_output = session .command("oarsub") diff --git a/templates/alumet_alone.sh b/templates/alumet_alone.sh new file mode 100644 index 0000000..eb1a574 --- /dev/null +++ b/templates/alumet_alone.sh @@ -0,0 +1,29 @@ +${SUDO_CMD}apt install -y build-essential libssl-dev pkg-config +cd /tmp +git clone https://github.com/alumet-dev/alumet.git +git config --global --add safe.directory /tmp/alumet +cd alumet +git checkout "v0.7.0" +cd app-agent +cargo build --release --features local_x86 +${SUDO_CMD}ln -s $(realpath ../target/release/alumet-local-agent) /usr/local/bin/alumet +cd /home/nleblond +alumet regen-config + +{% for core_value in core_values %} + {% for cpu_ops_per_core in cpu_ops_per_core_list %} +mkdir -p {{ results_directory }}/alumet_alone_${CORE_VALUE}_${CPU_OPS_PER_CORE} +echo "domain,energy,iteration" > {{ results_directory }}/alumet_alone_${CORE_VALUE}_${CPU_OPS_PER_CORE}.csv +for i in {1..{{ nb_iterations }}}; do +### ALUMET with ${CORE_VALUE} CPU * ${CPU_OPS_PER_CORE} OPS + ${SUDO_CMD}bash -c "alumet --plugins 'csv,rapl' --output '/tmp/alumet_alone_${CORE_VALUE}_${CPU_OPS_PER_CORE}_${i}.csv' & echo \$!" > /tmp/alumet_pid_$i + ALUMET_PID=$(cat /tmp/alumet_pid_$i) + while ! (grep 'rapl' /tmp/alumet_alone_${CORE_VALUE}_${CPU_OPS_PER_CORE}_${i}.csv); do sleep 0.02s ; done + stress-ng --cpu ${CORE_VALUE} --cpu-ops $(( CPU_OPS_PER_CORE * CORE_VALUE )) -q + sleep 1s + ${SUDO_CMD}kill -2 $ALUMET_PID + cat /tmp/alumet_alone_${CORE_VALUE}_${CPU_OPS_PER_CORE}_${i}.csv | grep rapl | awk -v ITER=$i -F';' '{printf("%s,%s,%s\n",$8,$3,ITER)}' >> {{ results_directory }}/alumet_alone_${CORE_VALUE}_${CPU_OPS_PER_CORE}.csv +done + {% endfor %} +{% endfor %} + diff --git a/templates/alumet_and_perf.sh b/templates/alumet_and_perf.sh new file mode 100644 index 0000000..bcfb51c --- /dev/null +++ b/templates/alumet_and_perf.sh @@ -0,0 +1,18 @@ +### ALUMET with ${CORE_VALUE} CPU * ${CPU_OPS_PER_CORE} OPS + TEMPERATURE_START=$(get_average_temperature) + sed -i 's/poll_interval = "[0-9]*m\{0,1\}s"/poll_interval = "1000ms"/' /home/nleblond/alumet-config.toml + ${SUDO_CMD}bash -c "alumet --plugins 'csv,rapl' --output '/tmp/alumet_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}_${i}.csv' & echo \$!" > /tmp/alumet_pid_$i + ALUMET_PID=$(cat /tmp/alumet_pid_$i) + ${SUDO_CMD}bash -c "perf stat -a -o /tmp/perf_and_alumet_${CORE_VALUE}_${CPU_OPS_PER_CORE}_$i {% for perf_event in perf_events.iter() %}-e {{ perf_event }} {% endfor %} & echo \$!" > /tmp/perf_pid_$i + PERF_PID=$(cat /tmp/perf_pid_$i) + while !
(grep 'rapl' /tmp/alumet_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}_${i}.csv); do sleep 0.02s ; done + stress-ng --cpu ${CORE_VALUE} --cpu-ops $(( CPU_OPS_PER_CORE * CORE_VALUE )) -q + sleep 1s + TEMPERATURE_STOP=$(get_average_temperature) + ${SUDO_CMD}kill -2 $ALUMET_PID + cat /tmp/alumet_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}_${i}.csv | grep rapl | awk -v ITER=$i -F';' '{printf("%s,%s,%s\n",$8,$3,ITER)}' >> {{ results_directory }}/alumet_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}.csv + ${SUDO_CMD}kill -2 $PERF_PID + sleep 5s + cat /tmp/perf_and_alumet_${CORE_VALUE}_${CPU_OPS_PER_CORE}_$i >> {{ results_directory }}/perf_and_alumet_${CORE_VALUE}_${CPU_OPS_PER_CORE} + echo "$TEMPERATURE_START, $TEMPERATURE_STOP, $i" >> {{ results_directory }}/perf_and_alumet_${CORE_VALUE}_${CPU_OPS_PER_CORE}_temperatures.csv + diff --git a/templates/baseline_consumption.sh b/templates/baseline_consumption.sh new file mode 100644 index 0000000..596bb23 --- /dev/null +++ b/templates/baseline_consumption.sh @@ -0,0 +1,45 @@ +GLOBAL_BASELINE_MEASUREMENT_DURATION_SECONDS=900 +# Timer for the baseline measurement +START_BASELINE=$(date +%s) +BASELINE_CONSUMPTION_FILE={{ results_directory }}/baseline_consumption.csv +NUMBER_OF_TOP_PROCESSES=10 +OBSERVATION_DURATION_SECONDS=120 +ITERATION_STEP_INTERVAL_SECONDS=5 +PERF_STAT_FILE=/tmp/perf_stat_file +TIMEOUT_STRESS=60 +CPU_LOAD_STRESS=95 + + +echo "timestamp,pkg,ram,average_temperature,cpu_percent,mem_percent,process_1,process_2,process_3,process_4,process_5,process_6,process_7,process_8,process_9,process_10" > "${BASELINE_CONSUMPTION_FILE}" +# Init because of strict variables check on expression evaluation +NOW=$(date +%s) + +#### NOW +while [[ $((NOW - START_BASELINE)) -lt ${GLOBAL_BASELINE_MEASUREMENT_DURATION_SECONDS} ]] ; do + + + stress-ng --cpu 0 --cpu-load ${CPU_LOAD_STRESS} --timeout ${TIMEOUT_STRESS} + + # Observe for this temperature range + OBSERVATION_START=$(date +%s) + while [[ $((NOW - OBSERVATION_START)) -lt ${OBSERVATION_DURATION_SECONDS} ]] ; do + + TEMPERATURE_START=$(get_average_temperature) + PROCESSES=$(ps aux --sort -%cpu | head -$((NUMBER_OF_TOP_PROCESSES + 1)) | tail -${NUMBER_OF_TOP_PROCESSES}) + ${SUDO_CMD} perf stat -a -o "${PERF_STAT_FILE}" {% for perf_event in perf_events.iter() %}-e {{ perf_event }} {% endfor %} sleep ${ITERATION_STEP_INTERVAL_SECONDS} + TEMPERATURE_STOP=$(get_average_temperature) + AVERAGE_TEMPERATURE=$(( (TEMPERATURE_START + TEMPERATURE_STOP) / 2 )) + + PKG_CONSUMPTION=$(grep "Joules" "${PERF_STAT_FILE}" | grep "pkg" | awk '{print $1}' | cut -d',' -f1 || echo "0") + RAM_CONSUMPTION=$(grep "Joules" "${PERF_STAT_FILE}" | grep "ram" | awk '{print $1}' | cut -d',' -f1 || echo "0") + echo "$PROCESSES" | awk -v TIMESTAMP="$(date +%s.%N)" -v TEMPERATURE="${AVERAGE_TEMPERATURE}" -v PKG_CONSUMPTION="${PKG_CONSUMPTION}" -v RAM_CONSUMPTION="${RAM_CONSUMPTION}" '{cpu+=$3; ram+=$4; names=names"\"" $11"\","} END {printf("%s,%s,%s,%s,%s,%s,%s\n",TIMESTAMP,PKG_CONSUMPTION,RAM_CONSUMPTION,TEMPERATURE,cpu,ram,substr(names,1,length(names)-1))}' >> "${BASELINE_CONSUMPTION_FILE}" + + NOW=$(date +%s) + done + +done + +echo "Baseline measurement complete." 
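baseline_consumption.sh above emits one CSV row per observation window; a sketch of a downstream consumer pulling a column by header name (the function and the averaging are illustrative, the header follows the `echo` above):

```rust
// Average the "pkg" column of baseline_consumption.csv; rows whose value
// does not parse as f64 are skipped rather than failing the whole pass.
fn mean_pkg_joules(path: &str) -> csv::Result<f64> {
    let mut reader = csv::Reader::from_path(path)?;
    let pkg_idx = reader
        .headers()?
        .iter()
        .position(|h| h == "pkg")
        .unwrap_or(1); // "pkg" is the second column in the header above
    let (mut sum, mut n) = (0.0, 0usize);
    for record in reader.records() {
        if let Some(v) = record?.get(pkg_idx).and_then(|s| s.parse::<f64>().ok()) {
            sum += v;
            n += 1;
        }
    }
    Ok(if n == 0 { 0.0 } else { sum / n as f64 })
}
```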
+ + + diff --git a/templates/benchmark.sh b/templates/benchmark.sh index f617af6..96fa614 100644 --- a/templates/benchmark.sh +++ b/templates/benchmark.sh @@ -5,45 +5,209 @@ set -ueo pipefail {% include "oar_directives.sh" %} SECONDS=0 -######################## -### INSTALL PACKAGES ### -######################## + + {% include "install_packages.sh" %} +{% include "rust_setup.sh" %} + +{% if codecarbon_alone || codecarbon_and_perf %} +${SUDO_CMD}apt install -y python3.12-venv stress-ng python3-pip +cd /tmp +git clone https://github.com/mlco2/codecarbon.git +python3.12 -m venv codecarbon/ +source codecarbon/bin/activate +sed -i 's/Timer(self.interval, self._run)/Timer(self.interval\/1000, self._run)/' codecarbon/codecarbon/external/scheduler.py +sed -i 's/logging.Formatter(format, datefmt="%H:%M:%S")/logging.Formatter(format, datefmt=None)/' codecarbon/codecarbon/external/logger.py +pip install /tmp/codecarbon +${SUDO_CMD}ln -s /home/nleblond/.local/bin/codecarbon /usr/local/bin/codecarbon +{% endif %} + + +{% if alumet_alone || alumet_and_perf %} +${SUDO_CMD}apt install -y build-essential libssl-dev pkg-config +cd /tmp +git clone https://github.com/alumet-dev/alumet.git +git config --global --add safe.directory /tmp/alumet +cd alumet +git checkout "v0.7.0" +cd app-agent +cargo build --release --features local_x86 +${SUDO_CMD}ln -s $(realpath ../target/release/alumet-local-agent) /usr/local/bin/alumet +cd /home/nleblond +alumet regen-config +{% endif %} + +{% if scaphandre_alone || scaphandre_and_perf %} +${SUDO_CMD}apt install -y build-essential libssl-dev pkg-config +cd /tmp +git clone https://github.com/hubblo-org/scaphandre.git +git clone https://github.com/borntyping/rust-riemann_client.git +git config --global --add safe.directory /tmp/rust-riemann_client +git config --global --add safe.directory /tmp/scaphandre +sed -i 's/\#!\[rustfmt::skip\]//' rust-riemann_client/src/proto/mod_pb.rs | head -10 +cd scaphandre +git checkout "v1.0.1" +sed -i 's/riemann_client = { version = "0.9.0"/riemann_client = { path = "..\/rust-riemann_client"/' Cargo.toml +cargo build --release +${SUDO_CMD}ln -s $(realpath ./target/release/scaphandre) /usr/local/bin/scaphandre +cd /home/nleblond +{% endif %} + +{% if vjoule_alone || vjoule_and_perf %} +cd /tmp +wget https://github.com/davidson-consulting/vjoule/releases/download/v1.3.0/vjoule-tools_1.3.0.deb +dpkg -i vjoule-tools_1.3.0.deb +${SUDO_CMD}systemctl start vjoule_service +cd /home/nleblond + +${SUDO_CMD}systemctl status vjoule_service +sleep 30 +${SUDO_CMD}systemctl status vjoule_service +{% endif %} -################################# -### CREATES RESULTS_DIRECTORY ### -################################# {% include "results_directory_preparation.sh" %} + +get_average_temperature() { + NB_SENSORS=$(sensors | grep "Package id" | wc -l) + SUM_TEMP=$(sensors | grep "Package id" | awk '{print $4}' | cut -d'+' -f2 | cut -d'.' 
-f1 | paste -sd'+' | bc) + AVG_TMP=$(( SUM_TEMP / NB_SENSORS )) + echo $AVG_TMP +} + +{% include "baseline_consumption.sh" %} + +{% for target_frequency in target_frequencies %} + +PERF_AND_HWPC_FREQUENCY_FILE="{{ results_directory }}/frequency_{{ target_frequency }}_perf_and_hwpc" +PERF_AND_HWPC_FREQUENCY_TEMPERATURES_FILE="{{ results_directory }}/temperatures_frequency_{{ target_frequency }}_perf_and_hwpc.csv" +HWPC_AND_PERF_FREQUENCY_DIR="{{ results_directory }}/frequency_{{ target_frequency }}_hwpc_and_perf" +touch $PERF_AND_HWPC_FREQUENCY_FILE +mkdir -p $HWPC_AND_PERF_FREQUENCY_DIR +echo "temperature_start,temperature_stop,iteration" > $PERF_AND_HWPC_FREQUENCY_TEMPERATURES_FILE + +PERF_AND_CODECARBON_FREQUENCY_FILE="{{ results_directory }}/frequency_{{ target_frequency }}_perf_and_codecarbon" +PERF_AND_CODECARBON_FREQUENCY_TEMPERATURES_FILE="{{ results_directory }}/temperatures_frequency_{{ target_frequency }}_perf_and_codecarbon.csv" +CODECARBON_AND_PERF_FREQUENCY_FILE="{{ results_directory }}/frequency_{{ target_frequency }}_codecarbon_and_perf.csv" +touch $PERF_AND_CODECARBON_FREQUENCY_FILE +echo "domain,timestamp,energy,iteration" > $CODECARBON_AND_PERF_FREQUENCY_FILE +echo "temperature_start,temperature_stop,iteration" > $PERF_AND_CODECARBON_FREQUENCY_TEMPERATURES_FILE + +PERF_AND_ALUMET_FREQUENCY_FILE="{{ results_directory }}/frequency_{{ target_frequency }}_perf_and_alumet" +PERF_AND_ALUMET_FREQUENCY_TEMPERATURES_FILE="{{ results_directory }}/temperatures_frequency_{{ target_frequency }}_perf_and_alumet.csv" +ALUMET_AND_PERF_FREQUENCY_FILE="{{ results_directory }}/frequency_{{ target_frequency }}_alumet_and_perf.csv" +ALUMET_AND_PERF_FREQUENCY_DIR="{{ results_directory }}/frequency_{{ target_frequency }}_alumet_and_perf" +touch $PERF_AND_ALUMET_FREQUENCY_FILE +echo "domain,timestamp,energy,iteration" > $ALUMET_AND_PERF_FREQUENCY_FILE +echo "temperature_start,temperature_stop,iteration" > $PERF_AND_ALUMET_FREQUENCY_TEMPERATURES_FILE +mkdir -p $ALUMET_AND_PERF_FREQUENCY_DIR + +PERF_AND_SCAPHANDRE_FREQUENCY_FILE="{{ results_directory }}/frequency_{{ target_frequency }}_perf_and_scaphandre" +PERF_AND_SCAPHANDRE_FREQUENCY_TEMPERATURES_FILE="{{ results_directory }}/temperatures_frequency_{{ target_frequency }}_perf_and_scaphandre.csv" +SCAPHANDRE_AND_PERF_FREQUENCY_FILE="{{ results_directory }}/frequency_{{ target_frequency }}_scaphandre_and_perf.csv" +touch $PERF_AND_SCAPHANDRE_FREQUENCY_FILE +echo "domain,timestamp,energy,iteration" > $SCAPHANDRE_AND_PERF_FREQUENCY_FILE +echo "temperature_start,temperature_stop,iteration" > $PERF_AND_SCAPHANDRE_FREQUENCY_TEMPERATURES_FILE + +PERF_AND_VJOULE_FREQUENCY_FILE="{{ results_directory }}/frequency_{{ target_frequency }}_perf_and_vjoule" +PERF_AND_VJOULE_FREQUENCY_TEMPERATURES_FILE="{{ results_directory }}/temperatures_frequency_{{ target_frequency }}_perf_and_vjoule.csv" +VJOULE_AND_PERF_FREQUENCY_FILE="{{ results_directory }}/frequency_{{ target_frequency }}_vjoule_and_perf.csv" +touch $PERF_AND_VJOULE_FREQUENCY_FILE +echo "domain,timestamp,energy,iteration" > $VJOULE_AND_PERF_FREQUENCY_FILE +echo "temperature_start,temperature_stop,iteration" > $PERF_AND_VJOULE_FREQUENCY_TEMPERATURES_FILE + +for i in {1..{{ nb_iterations_frequencies }}}; do + export i=$i + +{% if frequencies_benchmark %} +{% include "frequencies_benchmark.sh" %} +{% endif %} + +done + +{% endfor %} + + +{% for core_value in core_values %} +CORE_VALUE={{ core_value }} + {% for cpu_ops_per_core in cpu_ops_per_core_list %} +CPU_OPS_PER_CORE={{ cpu_ops_per_core }} + +touch 
{{ results_directory }}/perf_and_hwpc_{{ core_value }}_{{ cpu_ops_per_core }} +mkdir -p {{ results_directory }}/hwpc_and_perf_{{ core_value }}_{{ cpu_ops_per_core }} +echo "temperature_start, temperature_stop, iteration" > {{ results_directory }}/perf_and_hwpc_{{ core_value }}_{{ cpu_ops_per_core }}_temperatures.csv + +echo "domain,energy,iteration" > {{ results_directory }}/codecarbon_and_perf_{{ core_value }}_{{ cpu_ops_per_core }}.csv +echo "temperature_start, temperature_stop, iteration" > {{ results_directory }}/perf_and_codecarbon_{{ core_value }}_{{ cpu_ops_per_core }}_temperatures.csv +touch {{ results_directory }}/perf_and_codecarbon_${CORE_VALUE}_${CPU_OPS_PER_CORE} + +echo "domain,energy,iteration" > {{ results_directory }}/alumet_and_perf_{{ core_value }}_{{ cpu_ops_per_core }}.csv +echo "temperature_start, temperature_stop, iteration" > {{ results_directory }}/perf_and_alumet_{{ core_value }}_{{ cpu_ops_per_core }}_temperatures.csv +mkdir -p {{ results_directory }}/alumet_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE} +touch {{ results_directory }}/perf_and_alumet_${CORE_VALUE}_${CPU_OPS_PER_CORE} + +echo "domain,energy,iteration" > {{ results_directory }}/scaphandre_and_perf_{{ core_value }}_{{ cpu_ops_per_core }}.csv +echo "temperature_start, temperature_stop, iteration" > {{ results_directory }}/perf_and_scaphandre_{{ core_value }}_{{ cpu_ops_per_core }}_temperatures.csv +touch {{ results_directory }}/perf_and_scaphandre_{{ core_value }}_{{ cpu_ops_per_core }} + +touch {{ results_directory }}/vjoule_and_perf_{{ core_value }}_{{ cpu_ops_per_core }}.csv +echo "domain,energy,iteration" > {{ results_directory }}/vjoule_and_perf_{{ core_value }}_{{ cpu_ops_per_core }}.csv +touch {{ results_directory }}/perf_and_vjoule_{{ core_value }}_{{ cpu_ops_per_core }} +echo "temperature_start, temperature_stop, iteration" > {{ results_directory }}/perf_and_vjoule_{{ core_value }}_{{ cpu_ops_per_core }}_temperatures.csv + +for i in {1..{{ nb_iterations }}}; do + {% if perf_alone %} -################# -### ONLY PERF ### -################# {% include "perf_alone.sh" %} {% endif %} {% if hwpc_alone %} -################# -### ONLY HWPC ### -################# {% include "hwpc_alone.sh" %} {% endif %} +{% if codecarbon_alone %} +{% include "codecarbon_alone.sh" %} +{% endif %} + +{% if alumet_alone %} +{% include "alumet_alone.sh" %} +{% endif %} + +{% if scaphandre_alone %} +{% include "scaphandre_alone.sh" %} +{% endif %} + +{% if vjoule_alone %} +{% include "vjoule_alone.sh" %} +{% endif %} + {% if hwpc_and_perf %} -################### -### HWPC & PERF ### -################### {% include "hwpc_and_perf.sh" %} {% endif %} -############################# -### ZIP RESULTS_DIRECTORY ### -############################# +{% if codecarbon_and_perf %} +{% include "codecarbon_and_perf.sh" %} +{% endif %} + +{% if alumet_and_perf %} +{% include "alumet_and_perf.sh" %} +{% endif %} + +{% if scaphandre_and_perf %} +{% include "scaphandre_and_perf.sh" %} +{% endif %} + +{% if vjoule_and_perf %} +{% include "vjoule_and_perf.sh" %} +{% endif %} + +done + + {% endfor %} +{% endfor %} + + {% include "zip_results.sh" %} -############ -### EXIT ### -############ -duration=$SECONDS -echo "$(($diff / 3600)) hours, $((duration / 60)) minutes and $((duration % 60)) seconds elapsed." 
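The elapsed-time echo removed above moves into exit.sh in this diff; note that the minutes component needs a remainder, which the corrected exit.sh below uses. The arithmetic, sketched in Rust:

```rust
// Decompose a duration in seconds; minutes must be (d % 3600) / 60,
// otherwise `d / 60` reports total minutes alongside the hours.
fn split_hms(d: u64) -> (u64, u64, u64) {
    (d / 3600, (d % 3600) / 60, d % 60)
}

fn main() {
    assert_eq!(split_hms(3723), (1, 2, 3)); // 1h 2m 3s
}
```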
+ {% include "exit.sh" %} diff --git a/templates/codecarbon_alone.sh b/templates/codecarbon_alone.sh new file mode 100644 index 0000000..0edf764 --- /dev/null +++ b/templates/codecarbon_alone.sh @@ -0,0 +1,24 @@ +${SUDO_CMD}apt install -y python3.12-venv stress-ng +python3.12 -m venv codecarbon +cd codecarbon/ +source bin/activate +pip install codecarbon +cd /home/nleblond + +{% for core_value in core_values %} + {% for cpu_ops_per_core in cpu_ops_per_core_list %} +echo "domain,energy,iteration" > {{ results_directory }}/codecarbon_alone_{{ core_value }}_{{ cpu_ops_per_core }}.csv +for i in {1..{{ nb_iterations }}}; do +### codecarbon with {{ core_value }} CPU * {{ cpu_ops_per_core }} OPS + ${SUDO_CMD}bash -c "codecarbon monitor 1 --no-api > /tmp/codecarbon_alone_{{ core_value }}_{{ cpu_ops_per_core }}_${i} 2>&1 & echo \$!" > /tmp/codecarbon_pid_$i + CODECARBON_PID=$(cat /tmp/codecarbon_pid_$i) + while ! (grep 'Energy consumed for all CPU' /tmp/codecarbon_alone_{{ core_value }}_{{ cpu_ops_per_core }}_${i}); do sleep 0.02s ; done + stress-ng --cpu {{ core_value }} --cpu-ops {{ core_value * cpu_ops_per_core }} -q + sleep 1s + ${SUDO_CMD}kill -2 $CODECARBON_PID + cat /tmp/codecarbon_alone_{{ core_value }}_{{ cpu_ops_per_core }}_${i} | grep 'Energy consumed for all CPU' | tail -1 | cut -d':' -f4 | awk -v ITER=$i '{printf("%s,%s,%s\n","CPU",$1,ITER)}' >> {{ results_directory }}/codecarbon_alone_{{ core_value }}_{{ cpu_ops_per_core }}.csv + cat /tmp/codecarbon_alone_{{ core_value }}_{{ cpu_ops_per_core }}_${i} | grep 'Energy consumed for RAM' | tail -1 | cut -d':' -f4 | awk -v ITER=$i '{printf("%s,%s,%s\n","RAM",$1,ITER)}' >> {{ results_directory }}/codecarbon_alone_{{ core_value }}_{{ cpu_ops_per_core }}.csv +done + {% endfor %} +{% endfor %} + diff --git a/templates/codecarbon_and_perf.sh b/templates/codecarbon_and_perf.sh new file mode 100644 index 0000000..1e72be2 --- /dev/null +++ b/templates/codecarbon_and_perf.sh @@ -0,0 +1,18 @@ +### codecarbon with ${CORE_VALUE} CPU * ${CPU_OPS_PER_CORE} OPS + TEMPERATURE_START=$(get_average_temperature) + ${SUDO_CMD}bash -c "codecarbon monitor 1000 --no-api > /tmp/codecarbon_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}_${i} 2>&1 & echo \$!" > /tmp/codecarbon_pid_$i + CODECARBON_PID=$(cat /tmp/codecarbon_pid_$i) + ${SUDO_CMD}bash -c "perf stat -a -o /tmp/perf_and_codecarbon_${CORE_VALUE}_${CPU_OPS_PER_CORE}_$i {% for perf_event in perf_events.iter() %}-e {{ perf_event }} {% endfor %} & echo \$!" > /tmp/perf_pid_$i + PERF_PID=$(cat /tmp/perf_pid_$i) + while ! 
(grep 'Energy consumed for all CPU' /tmp/codecarbon_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}_${i}); do sleep 0.02s ; done + stress-ng --cpu ${CORE_VALUE} --cpu-ops $(( CORE_VALUE * CPU_OPS_PER_CORE )) -q + sleep 1s + TEMPERATURE_STOP=$(get_average_temperature) + ${SUDO_CMD}kill -2 $CODECARBON_PID + ${SUDO_CMD}kill -2 $PERF_PID + sleep 5s + cat /tmp/codecarbon_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}_${i} | grep 'Energy consumed for all CPU' | tail -1 | cut -d':' -f4 | awk -v ITER=$i '{printf("%s,%s,%s\n","CPU",$1,ITER)}' >> {{ results_directory }}/codecarbon_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}.csv + cat /tmp/codecarbon_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}_${i} | grep 'Energy consumed for RAM' | tail -1 | cut -d':' -f4 | awk -v ITER=$i '{printf("%s,%s,%s\n","RAM",$1,ITER)}' >> {{ results_directory }}/codecarbon_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}.csv + cat /tmp/perf_and_codecarbon_${CORE_VALUE}_${CPU_OPS_PER_CORE}_${i} >> {{ results_directory }}/perf_and_codecarbon_${CORE_VALUE}_${CPU_OPS_PER_CORE} + echo "$TEMPERATURE_START, $TEMPERATURE_STOP, $i" >> {{ results_directory }}/perf_and_codecarbon_${CORE_VALUE}_${CPU_OPS_PER_CORE}_temperatures.csv + diff --git a/templates/exit.sh b/templates/exit.sh index c7656ee..7c3f2f9 100644 --- a/templates/exit.sh +++ b/templates/exit.sh @@ -1,2 +1,4 @@ +duration=$SECONDS +echo "$(($duration / 3600)) hours, $(((duration % 3600) / 60)) minutes and $((duration % 60)) seconds elapsed." exit 0 diff --git a/templates/frequencies_benchmark.sh b/templates/frequencies_benchmark.sh new file mode 100644 index 0000000..9d84459 --- /dev/null +++ b/templates/frequencies_benchmark.sh @@ -0,0 +1,76 @@ + #HWPC RUN + TEMPERATURE_START=$(get_average_temperature) + docker run --rm -d --net=host --privileged --pid=host --name hwpc_{{ target_frequency }}_$i \ + -v /sys:/sys \ + -v /var/lib/docker/containers:/var/lib/docker/containers:ro \ + -v /tmp/power-api-sensor-reporting:/reporting \ + -v $(pwd):{{ hwpc_home_directory }} \ + powerapi/hwpc-sensor:1.4.0 \ + -n hwpc_{{ target_frequency }}_$i \ + -f {{ 1000 / target_frequency }} \ + -p {{ hwpc_and_perf_configs.get(core_values[0]).unwrap().cgroup_basepath }} \ + -r {{ hwpc_and_perf_configs.get(core_values[0]).unwrap().output.type }} -U {{ hwpc_home_directory }}/${HWPC_AND_PERF_FREQUENCY_DIR}/frequency_{{ target_frequency }}_hwpc_and_perf_$i \ + {% if hwpc_and_perf_configs.get(core_values[0]).unwrap().system.rapl.events.len() > 0 %} -s "rapl" -o {{ hwpc_and_perf_configs.get(core_values[0]).unwrap().system.rapl.monitoring_type }} {%~ for event in hwpc_and_perf_configs.get(core_values[0]).unwrap().system.rapl.events %}-e "{{ event }}" {% endfor %}{% endif %} {% if hwpc_and_perf_configs.get(core_values[0]).unwrap().system.msr.events.len() > 0 %} -s "msr" {%~ for event in hwpc_and_perf_configs.get(core_values[0]).unwrap().system.msr.events %}-e "{{ event }}" {% endfor %} {% endif %} {% if hwpc_and_perf_configs.get(core_values[0]).unwrap().system.core.events.len() > 0 %} -c "core" {%~ for event in hwpc_and_perf_configs.get(core_values[0]).unwrap().system.core.events %}-e "{{ event }}" {% endfor %} {% endif %} + + ${SUDO_CMD}perf stat -a -o /tmp/frequency_{{ target_frequency }}_perf_and_hwpc_$i {% for perf_event in perf_events.iter() %}-e {{ perf_event }} {% endfor %} sleep 40 + TEMPERATURE_STOP=$(get_average_temperature) + docker stop hwpc_{{ target_frequency }}_$i + cat /tmp/frequency_{{ target_frequency }}_perf_and_hwpc_$i >> $PERF_AND_HWPC_FREQUENCY_FILE || true + echo "$TEMPERATURE_START,$TEMPERATURE_STOP,$i" >> 
$PERF_AND_HWPC_FREQUENCY_TEMPERATURES_FILE + + #CODECARBON RUN + TEMPERATURE_START=$(get_average_temperature) + ${SUDO_CMD}bash -c "codecarbon monitor {{ 1000 / target_frequency }} --no-api > /tmp/frequency_{{ target_frequency }}_codecarbon_and_perf_${i} 2>&1 & echo \$!" > /tmp/codecarbon_pid_$i + CODECARBON_PID=$(cat /tmp/codecarbon_pid_$i) + ${SUDO_CMD}perf stat -a -o /tmp/frequency_{{ target_frequency }}_perf_and_codecarbon_$i {% for perf_event in perf_events.iter() %}-e {{ perf_event }} {% endfor %} sleep 40 + TEMPERATURE_STOP=$(get_average_temperature) + ${SUDO_CMD}kill -2 $CODECARBON_PID + sleep 10 + cat /tmp/frequency_{{ target_frequency }}_codecarbon_and_perf_${i} | grep 'Energy consumed for all CPU' | awk -F' ' '{print $4" "$5 $12}' | tr ',' '.' | awk -F']' '{print $1" "$2}' | awk -v ITER=$i '{printf("%s,%s %s,%s,%s\n","CPU",$1,$2,$3,ITER)}' >> $CODECARBON_AND_PERF_FREQUENCY_FILE || true + cat /tmp/frequency_{{ target_frequency }}_codecarbon_and_perf_${i} | grep 'Energy consumed for RAM' | awk -F' ' '{print $4" "$5 $11}' | tr ',' '.' | awk -F']' '{print $1" "$2}' | awk -v ITER=$i '{printf("%s,%s %s,%s,%s\n","RAM",$1,$2,$3,ITER)}' >> $CODECARBON_AND_PERF_FREQUENCY_FILE || true + cat /tmp/frequency_{{ target_frequency }}_perf_and_codecarbon_${i} >> $PERF_AND_CODECARBON_FREQUENCY_FILE || true + echo "$TEMPERATURE_START,$TEMPERATURE_STOP,$i" >> $PERF_AND_CODECARBON_FREQUENCY_TEMPERATURES_FILE + + + + #ALUMET + TEMPERATURE_START=$(get_average_temperature) + sed -i 's/poll_interval = "[0-9]*m\{0,1\}s"/poll_interval = "{{ 1000 / target_frequency }}ms"/' /home/nleblond/alumet-config.toml + ${SUDO_CMD}bash -c "alumet --plugins 'csv,rapl' --output '/tmp/frequency_{{ target_frequency }}_alumet_and_perf_${i}.csv' & echo \$!" > /tmp/alumet_pid_$i + ALUMET_PID=$(cat /tmp/alumet_pid_$i) + ${SUDO_CMD}perf stat -a -o /tmp/frequency_{{ target_frequency }}_perf_and_alumet_$i {% for perf_event in perf_events.iter() %}-e {{ perf_event }} {% endfor %} sleep 40 + TEMPERATURE_STOP=$(get_average_temperature) + ${SUDO_CMD}kill -2 $ALUMET_PID + sleep 10 + cat /tmp/frequency_{{ target_frequency }}_alumet_and_perf_${i}.csv | grep rapl | awk -v ITER=$i -F';' '{printf("%s,%s,%s,%s\n",$8,$2,$3,ITER)}' >> $ALUMET_AND_PERF_FREQUENCY_FILE || true + cat /tmp/frequency_{{ target_frequency }}_perf_and_alumet_$i >> $PERF_AND_ALUMET_FREQUENCY_FILE || true + echo "$TEMPERATURE_START,$TEMPERATURE_STOP,$i" >> $PERF_AND_ALUMET_FREQUENCY_TEMPERATURES_FILE + + #SCAPHANDRE RUN + TEMPERATURE_START=$(get_average_temperature) + ${SUDO_CMD}bash -c "scaphandre json -s 0 --step-nano {{ 1000000000 / target_frequency }} -f /tmp/frequency_{{ target_frequency }}_scaphandre_and_perf_$i & echo \$!" 
> /tmp/scaphandre_pid_$i + SCAPHANDRE_PID=$(cat /tmp/scaphandre_pid_$i) + ${SUDO_CMD}perf stat -a -o /tmp/frequency_{{ target_frequency }}_perf_and_scaphandre_$i {% for perf_event in perf_events.iter() %}-e {{ perf_event }} {% endfor %} sleep 40 + TEMPERATURE_STOP=$(get_average_temperature) + ${SUDO_CMD}kill -2 $SCAPHANDRE_PID + sleep 10 + yq '.[].host | "package" + "," + .timestamp + "," + .consumption + "," + env(i)' /tmp/frequency_{{ target_frequency }}_scaphandre_and_perf_$i >> $SCAPHANDRE_AND_PERF_FREQUENCY_FILE || true + cat /tmp/frequency_{{ target_frequency }}_perf_and_scaphandre_$i >> $PERF_AND_SCAPHANDRE_FREQUENCY_FILE || true + echo "$TEMPERATURE_START,$TEMPERATURE_STOP,$i" >> $PERF_AND_SCAPHANDRE_FREQUENCY_TEMPERATURES_FILE + + + #VJOULE RUN + TEMPERATURE_START=$(get_average_temperature) + ${SUDO_CMD}sed -i "s/freq = [0-9]*/freq = {{ target_frequency }}/" /etc/vjoule/config.toml + ${SUDO_CMD}systemctl restart vjoule_service.service + sleep 10 + ${SUDO_CMD}bash -c "vjoule top --output /tmp/frequency_{{ target_frequency }}_vjoule_and_perf_$i 1>/dev/null & echo \$!" > /tmp/vjoule_pid_$i + VJOULE_PID=$(cat /tmp/vjoule_pid_$i) + ${SUDO_CMD}perf stat -a -o /tmp/frequency_{{ target_frequency }}_perf_and_vjoule_$i {% for perf_event in perf_events.iter() %}-e {{ perf_event }} {% endfor %} sleep 40 + ${SUDO_CMD}kill -2 $VJOULE_PID + sleep 10 + TEMPERATURE_STOP=$(get_average_temperature) + cat /tmp/frequency_{{ target_frequency }}_vjoule_and_perf_$i | tail -n +2 | awk -v ITER=$i -F';' '{printf("%s,%s,%s,%s\n","CPU",$1,$3,ITER)}' >> $VJOULE_AND_PERF_FREQUENCY_FILE || true + cat /tmp/frequency_{{ target_frequency }}_vjoule_and_perf_$i | tail -n +2 | awk -v ITER=$i -F';' '{printf("%s,%s,%s,%s\n","RAM",$1,$4,ITER)}' >> $VJOULE_AND_PERF_FREQUENCY_FILE || true + cat /tmp/frequency_{{ target_frequency }}_perf_and_vjoule_$i >> $PERF_AND_VJOULE_FREQUENCY_FILE || true + echo "$TEMPERATURE_START,$TEMPERATURE_STOP,$i" >> $PERF_AND_VJOULE_FREQUENCY_TEMPERATURES_FILE diff --git a/templates/hwpc_alone.sh b/templates/hwpc_alone.sh index 7807ac1..06eb76a 100644 --- a/templates/hwpc_alone.sh +++ b/templates/hwpc_alone.sh @@ -12,7 +12,7 @@ -n {{ hwpc_alone_configs.get(core_value).unwrap().name }}_{{ cpu_ops_per_core }}_$i \ -p {{ hwpc_alone_configs.get(core_value).unwrap().cgroup_basepath }} \ -r {{ hwpc_alone_configs.get(core_value).unwrap().output.type }} -U {{ hwpc_home_directory}}/{{ results_directory }}/hwpc_alone_{{ core_value }}_{{ cpu_ops_per_core }}/hwpc_alone_{{ core_value }}_{{ cpu_ops_per_core }}_$i \ - {% if hwpc_alone_configs.get(core_value).unwrap().system.rapl.events.len() > 0 %} -s "rapl" {%~ for event in hwpc_alone_configs.get(core_value).unwrap().system.rapl.events %}-e "{{ event }}" {% endfor %} {% endif %} {% if hwpc_alone_configs.get(core_value).unwrap().system.msr.events.len() > 0 %} -s "msr" {%~ for event in hwpc_alone_configs.get(core_value).unwrap().system.msr.events %}-e "{{ event }}" {% endfor %} {% endif %} {% if hwpc_alone_configs.get(core_value).unwrap().system.core.events.len() > 0 %} -c "core" {%~ for event in hwpc_alone_configs.get(core_value).unwrap().system.core.events %}-e "{{ event }}" {% endfor %} {% endif %} + {% if hwpc_alone_configs.get(core_value).unwrap().system.rapl.events.len() > 0 %} -s "rapl" -o {{ hwpc_alone_configs.get(core_value).unwrap().system.rapl.monitoring_type }} {%~ for event in hwpc_alone_configs.get(core_value).unwrap().system.rapl.events %}-e "{{ event }}" {% endfor %} {% endif %} {% if 
hwpc_alone_configs.get(core_value).unwrap().system.msr.events.len() > 0 %} -s "msr" {%~ for event in hwpc_alone_configs.get(core_value).unwrap().system.msr.events %}-e "{{ event }}" {% endfor %} {% endif %} {% if hwpc_alone_configs.get(core_value).unwrap().system.core.events.len() > 0 %} -c "core" {%~ for event in hwpc_alone_configs.get(core_value).unwrap().system.core.events %}-e "{{ event }}" {% endfor %} {% endif %} while ! [[ -e "{{ results_directory }}/hwpc_alone_{{ core_value }}_{{ cpu_ops_per_core }}/hwpc_alone_{{ core_value }}_{{ cpu_ops_per_core }}_$i/rapl.csv" ]]; do sleep 0.02s ; done stress-ng --cpu {{ core_value }} --cpu-ops {{ core_value * cpu_ops_per_core }} -q sleep 1s diff --git a/templates/hwpc_and_perf.sh b/templates/hwpc_and_perf.sh index 9105a26..abe51e0 100644 --- a/templates/hwpc_and_perf.sh +++ b/templates/hwpc_and_perf.sh @@ -1,32 +1,28 @@ -{% for core_value in core_values %} - {% for cpu_ops_per_core in cpu_ops_per_core_list %} -touch {{ results_directory }}/perf_and_hwpc_{{ core_value }}_{{ cpu_ops_per_core }} -mkdir -p {{ results_directory }}/hwpc_and_perf_{{ core_value }}_{{ cpu_ops_per_core }} -for i in {1..{{ nb_iterations }}}; do - ### HWPC sensor dedicated to stress-ng with {{ core_value }} CPU * {{ cpu_ops_per_core }} OPS - docker run --rm -d --net=host --privileged --pid=host --name {{ hwpc_and_perf_configs.get(core_value).unwrap().name }}_{{ cpu_ops_per_core }}_$i \ + ### HWPC sensor dedicated to stress-ng with {{ core_value }} CPU * {{ cpu_ops_per_core }} OPS + TEMPERATURE_START=$(get_average_temperature) + docker run --rm -d --net=host --privileged --pid=host --name {{ hwpc_and_perf_configs.get(core_value).unwrap().name }}_{{ cpu_ops_per_core }}_$i \ -v /sys:/sys \ -v /var/lib/docker/containers:/var/lib/docker/containers:ro \ -v /tmp/power-api-sensor-reporting:/reporting \ -v $(pwd):{{ hwpc_home_directory }} \ powerapi/hwpc-sensor:1.4.0 \ -n {{ hwpc_and_perf_configs.get(core_value).unwrap().name }}_{{ cpu_ops_per_core }}_$i \ + -f 1000 \ -p {{ hwpc_and_perf_configs.get(core_value).unwrap().cgroup_basepath }} \ -r {{ hwpc_and_perf_configs.get(core_value).unwrap().output.type }} -U {{ hwpc_home_directory }}/{{ results_directory }}/hwpc_and_perf_{{ core_value }}_{{ cpu_ops_per_core }}/hwpc_and_perf_{{ core_value }}_{{ cpu_ops_per_core }}_$i \ - {% if hwpc_alone_configs.get(core_value).unwrap().system.rapl.events.len() > 0 %} -s "rapl" {%~ for event in hwpc_alone_configs.get(core_value).unwrap().system.rapl.events %}-e "{{ event }}" {% endfor %}{% endif %} {% if hwpc_alone_configs.get(core_value).unwrap().system.msr.events.len() > 0 %} -s "msr" {%~ for event in hwpc_alone_configs.get(core_value).unwrap().system.msr.events %}-e "{{ event }}" {% endfor %} {% endif %} {% if hwpc_alone_configs.get(core_value).unwrap().system.core.events.len() > 0 %} -c "core" {%~ for event in hwpc_alone_configs.get(core_value).unwrap().system.core.events %}-e "{{ event }}" {% endfor %} {% endif %} + {% if hwpc_and_perf_configs.get(core_value).unwrap().system.rapl.events.len() > 0 %} -s "rapl" -o {{ hwpc_and_perf_configs.get(core_value).unwrap().system.rapl.monitoring_type }} {%~ for event in hwpc_and_perf_configs.get(core_value).unwrap().system.rapl.events %}-e "{{ event }}" {% endfor %}{% endif %} {% if hwpc_and_perf_configs.get(core_value).unwrap().system.msr.events.len() > 0 %} -s "msr" {%~ for event in hwpc_and_perf_configs.get(core_value).unwrap().system.msr.events %}-e "{{ event }}" {% endfor %} {% endif %} {% if 
hwpc_and_perf_configs.get(core_value).unwrap().system.core.events.len() > 0 %} -c "core" {%~ for event in hwpc_and_perf_configs.get(core_value).unwrap().system.core.events %}-e "{{ event }}" {% endfor %} {% endif %} - ${SUDO_CMD}bash -c "perf stat -a -o /tmp/perf_and_hwpc_{{ core_value }}_{{ cpu_ops_per_core }}_$i {% for perf_event in perf_events.iter() %}-e {{ perf_event }} {% endfor %} & echo \$!" > /tmp/perf_pid_$i - PERF_PID=$(cat /tmp/perf_pid_$i) - while ! [[ -e "{{ results_directory }}/hwpc_and_perf_{{ core_value }}_{{ cpu_ops_per_core }}/hwpc_and_perf_{{ core_value }}_{{ cpu_ops_per_core }}_$i/rapl.csv" ]]; do sleep 0.02s ; done - ### PERF with {{ core_value }} CPU * {{ cpu_ops_per_core }} OPS - stress-ng --cpu {{ core_value }} --cpu-ops {{ core_value * cpu_ops_per_core }} -q - sleep 1s + ${SUDO_CMD}bash -c "perf stat -a -o /tmp/perf_and_hwpc_{{ core_value }}_{{ cpu_ops_per_core }}_$i {% for perf_event in perf_events.iter() %}-e {{ perf_event }} {% endfor %} & echo \$!" > /tmp/perf_pid_$i + PERF_PID=$(cat /tmp/perf_pid_$i) + while ! [[ -e "{{ results_directory }}/hwpc_and_perf_{{ core_value }}_{{ cpu_ops_per_core }}/hwpc_and_perf_{{ core_value }}_{{ cpu_ops_per_core }}_$i/rapl.csv" ]]; do sleep 0.02s ; done + ### PERF with {{ core_value }} CPU * {{ cpu_ops_per_core }} OPS + stress-ng --cpu {{ core_value }} --cpu-ops {{ core_value * cpu_ops_per_core }} -q + sleep 1s - ${SUDO_CMD}kill -2 $PERF_PID - docker stop {{ hwpc_and_perf_configs.get(core_value).unwrap().name }}_{{ cpu_ops_per_core }}_$i - cat /tmp/perf_and_hwpc_{{ core_value }}_{{ cpu_ops_per_core }}_$i >> {{ results_directory }}/perf_and_hwpc_{{ core_value }}_{{ cpu_ops_per_core }} - sleep 15 -done - - {% endfor %} -{% endfor %} + ${SUDO_CMD}kill -2 $PERF_PID + docker stop {{ hwpc_and_perf_configs.get(core_value).unwrap().name }}_{{ cpu_ops_per_core }}_$i + sleep 5s + TEMPERATURE_STOP=$(get_average_temperature) + echo "$TEMPERATURE_START, $TEMPERATURE_STOP, $i" >> {{ results_directory }}/perf_and_hwpc_{{ core_value }}_{{ cpu_ops_per_core }}_temperatures.csv + cat /tmp/perf_and_hwpc_{{ core_value }}_{{ cpu_ops_per_core }}_$i >> {{ results_directory }}/perf_and_hwpc_{{ core_value }}_{{ cpu_ops_per_core }} + sleep 15 diff --git a/templates/install_packages.sh b/templates/install_packages.sh index dfce85e..63ffe4a 100644 --- a/templates/install_packages.sh +++ b/templates/install_packages.sh @@ -1,21 +1,25 @@ {% if os_flavor != super::DEFAULT_OS_FLAVOR %} - curl -sSL https://get.docker.com/ | sh - sudo curl -sSL "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose - sudo chmod +x /usr/local/bin/docker-compose - sudo mkdir -p /etc/docker - echo "{ \"registry-mirrors\": [\"http://docker-cache.grid5000.fr\"] }" | sudo tee /etc/docker/daemon.json - sudo systemctl restart docker - sudo chmod o+rw /var/run/docker.sock - SUDO_CMD="" +curl -sSL https://get.docker.com/ | sh +sudo curl -sSL "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose +sudo chmod +x /usr/local/bin/docker-compose +sudo mkdir -p /etc/docker +echo "{ \"registry-mirrors\": [\"http://docker-cache.grid5000.fr\"] }" | sudo tee /etc/docker/daemon.json +sudo systemctl restart docker +sudo chmod o+rw /var/run/docker.sock +SUDO_CMD="" {% else %} - g5k-setup-docker - SUDO_CMD="sudo-g5k " +g5k-setup-docker +SUDO_CMD="sudo-g5k " {% endif %} ${SUDO_CMD}apt-get install -y stress-ng +${SUDO_CMD}apt-get install -y lm-sensors 
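+# lm-sensors provides the temperature readings consumed by the get_average_temperature helper; yq (installed below) parses the JSON reports scaphandre writes during the frequency benchmark.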
${SUDO_CMD}rm -f /etc/apt/sources.list.d/repo.radeon.com-amdgpu.list docker login -u {{ docker_hub_username }} -p {{ docker_hub_token }} docker run --rm -d --name mongo -p 27017:27017 mongo:latest sleep 30 + +${SUDO_CMD}wget https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64 -O /usr/local/bin/yq +${SUDO_CMD}chmod +x /usr/local/bin/yq \ No newline at end of file diff --git a/templates/rust_setup.sh b/templates/rust_setup.sh new file mode 100644 index 0000000..e379d7c --- /dev/null +++ b/templates/rust_setup.sh @@ -0,0 +1,8 @@ +# Installation +curl https://sh.rustup.rs -sSf | sh -s -- -y + +# Add to path +source ~/.cargo/env + +# Check if ok +cargo version || exit 0 diff --git a/templates/scaphandre_alone.sh b/templates/scaphandre_alone.sh new file mode 100644 index 0000000..9032e3d --- /dev/null +++ b/templates/scaphandre_alone.sh @@ -0,0 +1,26 @@ +${SUDO_CMD}apt install -y build-essential libssl-dev pkg-config +cd /tmp +git clone https://github.com/hubblo-org/scaphandre.git +git config --global --add safe.directory /tmp/scaphandre +cd scaphandre +git checkout "v1.0.1" +cargo build --release +${SUDO_CMD}ln -s $(realpath ./target/release/scaphandre) /usr/local/bin/scaphandre +cd /home/nleblond + +{% for core_value in core_values %} + {% for cpu_ops_per_core in cpu_ops_per_core_list %} +echo "domain,energy,iteration" > {{ results_directory }}/scaphandre_alone_{{ core_value }}_{{ cpu_ops_per_core }}.csv +for i in {1..{{ nb_iterations }}}; do +### SCAPHANDRE with {{ core_value }} CPU * {{ cpu_ops_per_core }} OPS + ${SUDO_CMD}bash -c "scaphandre stdout --timeout=-1 -s 1 -p 0 > /tmp/scaphandre_alone_{{ core_value }}_{{ cpu_ops_per_core }}_$i & echo \$!" > /tmp/scaphandre_pid_$i + SCAPHANDRE_PID=$(cat /tmp/scaphandre_pid_$i) + while ! (grep 'consumers' /tmp/scaphandre_alone_{{ core_value }}_{{ cpu_ops_per_core }}_${i}); do sleep 0.02s ; done + stress-ng --cpu {{ core_value }} --cpu-ops {{ core_value * cpu_ops_per_core }} -q + sleep 1s + ${SUDO_CMD}kill -2 $SCAPHANDRE_PID + cat /tmp/scaphandre_alone_{{ core_value }}_{{ cpu_ops_per_core}}_$i | grep "Host" | awk -v ITER=$i '{printf("%s,%s,%s\n","pkg",$2,ITER)}' >> {{ results_directory }}/scaphandre_alone_{{ core_value }}_{{ cpu_ops_per_core }}.csv +done + {% endfor %} +{% endfor %} + diff --git a/templates/scaphandre_and_perf.sh b/templates/scaphandre_and_perf.sh new file mode 100644 index 0000000..e242030 --- /dev/null +++ b/templates/scaphandre_and_perf.sh @@ -0,0 +1,17 @@ +### SCAPHANDRE with ${CORE_VALUE} CPU * ${CPU_OPS_PER_CORE} OPS + TEMPERATURE_START=$(get_average_temperature) + ${SUDO_CMD}bash -c "scaphandre stdout --timeout=-1 -s 1 -p 0 > /tmp/scaphandre_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}_$i & echo \$!" > /tmp/scaphandre_pid_$i + SCAPHANDRE_PID=$(cat /tmp/scaphandre_pid_$i) + ${SUDO_CMD}bash -c "perf stat -a -o /tmp/perf_and_scaphandre_${CORE_VALUE}_${CPU_OPS_PER_CORE}_$i {% for perf_event in perf_events.iter() %}-e {{ perf_event }} {% endfor %} & echo \$!" > /tmp/perf_pid_$i + PERF_PID=$(cat /tmp/perf_pid_$i) + while ! 
(grep 'consumers' /tmp/scaphandre_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}_${i}); do sleep 0.02s ; done + stress-ng --cpu ${CORE_VALUE} --cpu-ops $(( CPU_OPS_PER_CORE * CORE_VALUE )) -q + sleep 1s + TEMPERATURE_STOP=$(get_average_temperature) + ${SUDO_CMD}kill -2 $SCAPHANDRE_PID + cat /tmp/scaphandre_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}_$i | grep "Host" | awk -v ITER=$i '{printf("%s,%s,%s\n","pkg",$2,ITER)}' >> {{ results_directory }}/scaphandre_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}.csv + ${SUDO_CMD}kill -2 $PERF_PID + sleep 5s + cat /tmp/perf_and_scaphandre_${CORE_VALUE}_${CPU_OPS_PER_CORE}_$i >> {{ results_directory }}/perf_and_scaphandre_${CORE_VALUE}_${CPU_OPS_PER_CORE} + echo "$TEMPERATURE_START, $TEMPERATURE_STOP, $i" >> {{ results_directory }}/perf_and_scaphandre_${CORE_VALUE}_${CPU_OPS_PER_CORE}_temperatures.csv + diff --git a/templates/vjoule_alone.sh b/templates/vjoule_alone.sh new file mode 100644 index 0000000..2194aeb --- /dev/null +++ b/templates/vjoule_alone.sh @@ -0,0 +1,24 @@ +cd /tmp +wget https://github.com/davidson-consulting/vjoule/releases/download/v1.3.0/vjoule-tools_1.3.0.deb +${SUDO_CMD}dpkg -i vjoule-tools_1.3.0.deb +${SUDO_CMD}systemctl start vjoule_service +cd /home/nleblond + +${SUDO_CMD}systemctl status vjoule_service +sleep 30 +${SUDO_CMD}systemctl status vjoule_service + + +{% for core_value in core_values %} + {% for cpu_ops_per_core in cpu_ops_per_core_list %} +touch {{ results_directory }}/vjoule_alone_{{ core_value }}_{{ cpu_ops_per_core }}.csv +echo "domain,energy,iteration" > {{ results_directory }}/vjoule_alone_{{ core_value }}_{{ cpu_ops_per_core }}.csv +for i in {1..{{ nb_iterations }}}; do +### vjoule with {{ core_value }} CPU * {{ cpu_ops_per_core }} OPS + vjoule stress-ng --cpu {{ core_value }} --cpu-ops {{ core_value * cpu_ops_per_core }} -- > /tmp/vjoule_alone_{{ core_value }}_{{ cpu_ops_per_core }}_$i + cat /tmp/vjoule_alone_{{ core_value }}_{{ cpu_ops_per_core }}_$i | grep "RAM" | awk -v ITER=$i '{printf("%s,%s,%s\n","RAM",$2,ITER)}' >> {{ results_directory }}/vjoule_alone_{{ core_value }}_{{ cpu_ops_per_core }}.csv + cat /tmp/vjoule_alone_{{ core_value }}_{{ cpu_ops_per_core }}_$i | grep "CPU" | awk -v ITER=$i '{printf("%s,%s,%s\n","CPU",$2,ITER)}' >> {{ results_directory }}/vjoule_alone_{{ core_value }}_{{ cpu_ops_per_core }}.csv +done + {% endfor %} +{% endfor %} + diff --git a/templates/vjoule_and_perf.sh b/templates/vjoule_and_perf.sh new file mode 100644 index 0000000..d1514a9 --- /dev/null +++ b/templates/vjoule_and_perf.sh @@ -0,0 +1,15 @@ + TEMPERATURE_START=$(get_average_temperature) + ${SUDO_CMD}sed -i "s/freq = [0-9]*/freq = 1/" /etc/vjoule/config.toml + ${SUDO_CMD}systemctl restart vjoule_service.service + sleep 10 + ${SUDO_CMD}bash -c "perf stat -a -o /tmp/perf_and_vjoule_${CORE_VALUE}_${CPU_OPS_PER_CORE}_$i {% for perf_event in perf_events.iter() %}-e {{ perf_event }} {% endfor %} & echo \$!" 
> /tmp/perf_pid_$i + PERF_PID=$(cat /tmp/perf_pid_$i) + vjoule stress-ng --cpu ${CORE_VALUE} --cpu-ops $(( CORE_VALUE * CPU_OPS_PER_CORE )) -- > /tmp/vjoule_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}_$i + TEMPERATURE_STOP=$(get_average_temperature) + cat /tmp/vjoule_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}_$i | grep "RAM" | awk -v ITER=$i '{printf("%s,%s,%s\n","RAM",$2,ITER)}' >> {{ results_directory }}/vjoule_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}.csv + cat /tmp/vjoule_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}_$i | grep "CPU" | awk -v ITER=$i '{printf("%s,%s,%s\n","CPU",$2,ITER)}' >> {{ results_directory }}/vjoule_and_perf_${CORE_VALUE}_${CPU_OPS_PER_CORE}.csv + ${SUDO_CMD}kill -2 $PERF_PID + sleep 5s + cat /tmp/perf_and_vjoule_${CORE_VALUE}_${CPU_OPS_PER_CORE}_$i >> {{ results_directory }}/perf_and_vjoule_${CORE_VALUE}_${CPU_OPS_PER_CORE} + echo "$TEMPERATURE_START, $TEMPERATURE_STOP, $i" >> {{ results_directory }}/perf_and_vjoule_${CORE_VALUE}_${CPU_OPS_PER_CORE}_temperatures.csv + diff --git a/utils/transform.sh b/utils/transform.sh new file mode 100755 index 0000000..48853c4 --- /dev/null +++ b/utils/transform.sh @@ -0,0 +1,11 @@ +echo "Transforming alumet files" +./transform_alumet.sh $1 + +echo "Transforming codecarbon files" +./transform_codecarbon.sh $1 + +echo "Transforming scaphandre files" +./transform_scaphandre.sh $1 + +echo "Transforming vjoule files" +./transform_vjoule.sh $1 diff --git a/utils/transform_alumet.sh b/utils/transform_alumet.sh new file mode 100755 index 0000000..92ac0ae --- /dev/null +++ b/utils/transform_alumet.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +dossier=$1 + +find "$dossier" -type f -regex ".*/alumet_.*_[0-9]+_[0-9]+\.csv" | while read -r fichier; do + nb_core=$(echo $fichier | tr "_" "\n" | tail -2 | paste -sd "," | cut -d"." -f1 | cut -d"," -f1) + nb_ops_per_core=$(echo $fichier | tr "_" "\n" | tail -2 | paste -sd "," | cut -d"." -f1 | cut -d"," -f2) + + if ! grep -q "energy_cores,energy_pkg,energy_ram,nb_core,nb_ops_per_core,iteration" "$fichier"; then + awk -F, -v nb_core="$nb_core" -v nb_ops_per_core="$nb_ops_per_core" ' + NR==1 {print "energy_cores,energy_pkg,energy_ram,nb_core,nb_ops_per_core,iteration"; next} + { + if ($1 == "cores") { + cores[$3] += $2 + } else if ($1 == "dram") { + ram[$3] += $2 + } else if ($1 == "package") { + pkg[$3] += $2 + } + } + END { + for (i in pkg) { + print cores[i] "," pkg[i] "," ram[i] "," nb_core "," nb_ops_per_core "," i + } + } + ' OFS=, "$fichier" > tmp_file && mv tmp_file "$fichier" + fi +done + diff --git a/utils/transform_codecarbon.sh b/utils/transform_codecarbon.sh new file mode 100755 index 0000000..c905f04 --- /dev/null +++ b/utils/transform_codecarbon.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +# Directory containing the CSV files +dossier=$1 + +find "$dossier" -type f -name "codecarbon*.csv" | while read -r fichier; do + nb_core=$(echo $fichier | tr "_" "\n" | tail -2 | paste -sd "," | cut -d"." -f1 | cut -d"," -f1) + nb_ops_per_core=$(echo $fichier | tr "_" "\n" | tail -2 | paste -sd "," | cut -d"." -f1 | cut -d"," -f2) + + if ! 
grep -q "energy_cores,energy_pkg,energy_ram,nb_core,nb_ops_per_core,iteration" "$fichier"; then + awk -F, -v nb_core="$nb_core" -v nb_ops_per_core="$nb_ops_per_core" ' + NR==1 {print "energy_cores,energy_pkg,energy_ram,nb_core,nb_ops_per_core,iteration"; next} + { + if ($1 == "CPU") { + cpu[$3] = $2 + } else if ($1 == "RAM") { + ram[$3] = $2 + } + } + END { + for (i in cpu) { + print cpu[i] ",0.0," ram[i] "," nb_core "," nb_ops_per_core "," i + } + } + ' OFS=, "$fichier" > tmp_file && mv tmp_file "$fichier" + fi +done + + diff --git a/utils/transform_scaphandre.sh b/utils/transform_scaphandre.sh new file mode 100755 index 0000000..785bf4c --- /dev/null +++ b/utils/transform_scaphandre.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +dossier=$1 + +find "$dossier" -type f -regex ".*/scaphandre_.*_[0-9]+_[0-9]+\.csv" | while read -r fichier; do + nb_core=$(echo $fichier | tr "_" "\n" | tail -2 | paste -sd "," | cut -d"." -f1 | cut -d"," -f1) + nb_ops_per_core=$(echo $fichier | tr "_" "\n" | tail -2 | paste -sd "," | cut -d"." -f1 | cut -d"," -f2) + + if ! grep -q "energy_cores,energy_pkg,energy_ram,nb_core,nb_ops_per_core,iteration" "$fichier"; then + awk -F, -v nb_core="$nb_core" -v nb_ops_per_core="$nb_ops_per_core" ' + NR==1 {print "energy_cores,energy_pkg,energy_ram,nb_core,nb_ops_per_core,iteration"; next} + { + if ($1 == "pkg") { + pkg[$3] += $2 + } else if ($1 == "dram") { + dram[$3] += $2 + } else if ($1 == "cores") { + cores[$3] += $2 + } + } + END { + for (i in pkg) { + print cores[i] "," pkg[i] "," ram[i] "," nb_core "," nb_ops_per_core "," i + } + } + ' OFS=, "$fichier" > tmp_file && mv tmp_file "$fichier" + fi +done diff --git a/utils/transform_vjoule.sh b/utils/transform_vjoule.sh new file mode 100755 index 0000000..4a845c1 --- /dev/null +++ b/utils/transform_vjoule.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +# Dossier contenant les fichiers CSV +dossier=$1 + +find "$dossier" -type f -name "vjoule*.csv" | while read -r fichier; do + nb_core=$(echo $fichier | tr "_" "\n" | tail -2 | paste -sd "," | cut -d"." -f1 | cut -d"," -f1) + nb_ops_per_core=$(echo $fichier | tr "_" "\n" | tail -2 | paste -sd "," | cut -d"." -f1 | cut -d"," -f2) + + if ! grep -q "energy_cores,energy_pkg,energy_ram,nb_core,nb_ops_per_core,iteration" "$fichier"; then + awk -F, -v nb_core="$nb_core" -v nb_ops_per_core="$nb_ops_per_core" ' + NR==1 {print "energy_cores,energy_pkg,energy_ram,nb_core,nb_ops_per_core,iteration"; next} + { + if ($1 == "CPU") { + if ($2 < 100) { + cpu[$3] = 1000*$2 + } else { + cpu[$3] = $2 + } + } else if ($1 == "RAM") { + ram[$3] = $2 + } + } + END { + for (i in cpu) { + print cpu[i] ",0.0," ram[i] "," nb_core "," nb_ops_per_core "," i + } + } + ' OFS=, "$fichier" > tmp_file && mv tmp_file "$fichier" + fi +done