Changes to the report tools
Danaisis Vargas Oliva authored and Danaisis Vargas Oliva committed Sep 4, 2024
1 parent b9681ee commit 3e5c2a9
Showing 4 changed files with 373 additions and 371 deletions.
3 changes: 3 additions & 0 deletions tools/Basic_run_report.ipynb
@@ -31,6 +31,9 @@
"source": [
"# Import the modules needed, defining paths and functions\n",
"from basic_functions import *\n",
"from fpdf import FPDF \n",
"from fpdf.enums import XPos, YPos\n",
"from PIL import Image\n",
"\n",
"pcm_columns_list_0 = ['C0 Core C-state residency', 'Socket0 Memory Bandwidth',\n",
" 'Socket0 Instructions Per Cycle', 'Socket0 Instructions Retired Any (Million)',\n",
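The three imports added to Basic_run_report.ipynb bring in fpdf2's FPDF class, its XPos/YPos positioning enums, and Pillow's Image, which the report code uses to lay out tables and embed plots in the PDF. Below is a minimal, self-contained sketch of the fpdf2 table pattern those imports enable, mirroring the dynamic-row table built in create_report_performance in the next file; the row contents and output file name are placeholders, not values from the repository.

    # Sketch of the fpdf2 multi_cell table pattern (placeholder data).
    from fpdf import FPDF
    from fpdf.enums import XPos, YPos

    pdf = FPDF()
    pdf.add_page()
    pdf.set_font('Times', '', 10)

    rows_data = [
        ['Test', 'Readout SRV', 'dunedaq', 'Socket', 'General comments'],   # header row
        ['example stream test', 'srv-placeholder', 'vX.Y.Z', '0', 'placeholder comment'],
    ]
    col_width = [pdf.epw / 3.8, pdf.epw / 8, pdf.epw / 7, pdf.epw / 12, pdf.epw / 4]
    line_height = pdf.font_size * 2

    for row in rows_data:
        for k, datum in enumerate(row):
            # new_x=RIGHT / new_y=TOP keeps the cursor on the same row, so the
            # next column starts beside this one while multi_cell wraps long text.
            pdf.multi_cell(col_width[k], line_height, str(datum), border=1, align='L',
                           new_x=XPos.RIGHT, new_y=YPos.TOP,
                           max_line_height=pdf.font_size)
        pdf.ln(line_height)

    pdf.output('table_sketch.pdf')  # placeholder output path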
253 changes: 11 additions & 242 deletions tools/Performance_report.ipynb
@@ -35,252 +35,21 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 1,
"id": "0ebfdfe7",
"metadata": {},
"outputs": [],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Ready to run and process\n"
]
}
],
"source": [
"# Import the modules needed, defining paths and functions\n",
"from basic_functions import *\n",
"\n",
"pcm_columns_list_0 = ['C0 Core C-state residency', 'Socket0 Memory Bandwidth',\n",
" 'Socket0 Instructions Per Cycle', 'Socket0 Instructions Retired Any (Million)',\n",
" 'Socket0 L2 Cache Misses', 'Socket0 L2 Cache Hits',\n",
" 'Socket0 L3 Cache Misses', 'Socket0 L3 Cache Hits']\n",
"pcm_columns_list_1 = ['C0 Core C-state residency', 'Socket1 Memory Bandwidth',\n",
" 'Socket1 Instructions Per Cycle', 'Socket1 Instructions Retired Any (Million)',\n",
" 'Socket1 L2 Cache Misses', 'Socket1 L2 Cache Hits',\n",
" 'Socket1 L3 Cache Misses', 'Socket1 L3 Cache Hits']\n",
"uprof_columns_list_0 = [' Utilization (%) Socket0', 'Total Mem Bw (GB/s) Socket0',\n",
" 'IPC (Sys + User) Socket0', 'IRA Socket0', #<------------- we don't have this (IRA) data \n",
" 'L2 Miss (pti) Socket0', 'L2 Access (pti) Socket0',\n",
" 'L3 Miss Socket0', 'L3 Miss % Socket0']\n",
"uprof_columns_list_1 = ['Utilization (%) Socket1', 'Total Mem Bw (GB/s) Socket1',\n",
" 'IPC (Sys + User) Socket1', 'IRA Socket1', #<------------- we don't have this (IRA) data \n",
" 'L2 Miss (pti) Socket1', 'L2 Access (pti) Socket1',\n",
" 'L3 Miss Socket1', 'L3 Miss % Socket1']\n",
"label_names = ['CPU Utilization (%)', 'Memory Bandwidth (GB/sec)',\n",
" 'Instructions Per Cycle', 'Instructions Retired Any (Million)',\n",
" 'L2 Cache Misses (Million)', 'L2 Cache [Misses/Accesses] (%)',\n",
" 'L3 Cache Misses (Million)', 'L3 Cache [Misses/Accesses] (%)']\n",
"label_columns = ['Socket0','Socket1']\n",
"\n",
"def plot_vars_comparison(input_dir, output_dir, all_files, pdf_name):\n",
" X_plot, Y_plot_0, Y_plot_1, label_plot_0, label_plot_1 = [], [], [], [], []\n",
" \n",
" for i, file_i in enumerate(all_files): \n",
" info = break_file_name(file_i)\n",
" data_frame = pd.read_csv(f'{input_dir}/{file_i}.csv')\n",
" X_plot.append(data_frame['NewTime'].values.tolist())\n",
" \n",
" Y_tmp_0, Y_tmp_1, label_tmp_0, label_tmp_1 = [], [], [], []\n",
" \n",
" if info[0]=='grafana':\n",
" for k, (columns_pcm_0, columns_pcm_1) in enumerate(zip(pcm_columns_list_0, pcm_columns_list_1)):\n",
" Y_0, label_0 = get_column_val(data_frame, [columns_pcm_0], [label_columns[0]], file_i) \n",
" Y_1, label_1 = get_column_val(data_frame, [columns_pcm_1], [label_columns[1]], file_i) \n",
" Y_tmp_0.append(Y_0)\n",
" label_tmp_0.append(label_0)\n",
" Y_tmp_1.append(Y_1)\n",
" label_tmp_1.append(label_1)\n",
" else:\n",
" for k, (columns_uprof_0, columns_uprof_1) in enumerate(zip(uprof_columns_list_0, uprof_columns_list_1)):\n",
" Y_0, label_0 = get_column_val(data_frame, [columns_uprof_0], [label_columns[0]], file_i)\n",
" Y_1, label_1 = get_column_val(data_frame, [columns_uprof_1], [label_columns[1]], file_i)\n",
" Y_tmp_0.append(Y_0)\n",
" label_tmp_0.append(label_0)\n",
" Y_tmp_1.append(Y_1)\n",
" label_tmp_1.append(label_1)\n",
" \n",
" Y_plot_0.append(Y_tmp_0)\n",
" label_plot_0.append(label_tmp_0)\n",
" Y_plot_1.append(Y_tmp_1)\n",
" label_plot_1.append(label_tmp_1)\n",
" \n",
" # Here we make the plot:\n",
" matplotlib.rcParams['font.family'] = 'DejaVu Serif'\n",
" rows=cols=2\n",
" rows_cols = rows*cols\n",
" fig, axs = plt.subplots(rows, cols, figsize=(18, 8))\n",
" plt.style.use('default')\n",
" axs = axs.flatten()\n",
" #axs[3].axis('off')\n",
" \n",
" for i in range(len(Y_plot_0)): #number of files or tests\n",
" for j in range(len(Y_plot_0[i])): #number of metrix\n",
" if j < rows_cols:\n",
" label0_ij0 = re.sub('_', ' ', label_plot_0[i][j][0])\n",
" axs[j].plot(X_plot[i], Y_plot_0[i][j][0], color=color_list[i], label=label0_ij0, linestyle=linestyle_list[0])\n",
" axs[j].set_ylabel(f'{label_names[j]}')\n",
" axs[j].set_xlabel('Time (min)')\n",
" axs[j].grid(which='major', color='gray', linestyle='dashed')\n",
" axs[j].legend(loc='upper left')\n",
" else:\n",
" pass\n",
" \n",
" plt.tight_layout()\n",
" plt.savefig(f'{output_dir}/Fig0_{pdf_name}_results_socket0.png')\n",
" print(f'{output_dir}/Fig0_{pdf_name}_results_socket0.png')\n",
" plt.close() \n",
" \n",
" fig, axs = plt.subplots(rows, cols, figsize=(18, 8))\n",
" plt.style.use('default')\n",
" axs = axs.flatten() \n",
" \n",
" for i in range(len(Y_plot_0)): \n",
" for j in range(len(Y_plot_0[i])):\n",
" if j < rows_cols:\n",
" pass\n",
" else:\n",
" label0_ij0 = re.sub('_', ' ', label_plot_0[i][j][0])\n",
" axs[j-rows_cols].plot(X_plot[i], Y_plot_0[i][j][0], color=color_list[i], label=label0_ij0, linestyle=linestyle_list[0])\n",
" axs[j-rows_cols].set_ylabel(f'{label_names[j]}')\n",
" axs[j-rows_cols].set_xlabel('Time (min)')\n",
" axs[j-rows_cols].grid(which='major', color='gray', linestyle='dashed')\n",
" axs[j-rows_cols].legend(loc='upper left')\n",
" \n",
" plt.tight_layout()\n",
" plt.savefig(f'{output_dir}/Fig1_{pdf_name}_results_cache_socket0.png')\n",
" print(f'{output_dir}/Fig1_{pdf_name}_results_cache_socket0.png')\n",
" plt.close() \n",
" \n",
" fig, axs = plt.subplots(rows, cols, figsize=(18, 8))\n",
" plt.style.use('default')\n",
" axs = axs.flatten()\n",
" \n",
" for i in range(len(Y_plot_1)): \n",
" for j in range(len(Y_plot_1[i])):\n",
" if j < rows_cols:\n",
" label1_ij0 = re.sub('_', ' ', label_plot_1[i][j][0])\n",
" axs[j].plot(X_plot[i], Y_plot_1[i][j][0], color=color_list[i], label=label1_ij0, linestyle=linestyle_list[0])\n",
" axs[j].set_ylabel(f'{label_names[j]}')\n",
" axs[j].set_xlabel('Time (min)')\n",
" axs[j].grid(which='major', color='gray', linestyle='dashed')\n",
" axs[j].legend(loc='upper left')\n",
" else:\n",
" pass\n",
" \n",
" plt.tight_layout()\n",
" plt.savefig(f'{output_dir}/Fig2_{pdf_name}_results_socket1.png')\n",
" print(f'{output_dir}/Fig2_{pdf_name}_results_socket1.png')\n",
" plt.close() \n",
" \n",
" fig, axs = plt.subplots(rows, cols, figsize=(18, 8))\n",
" plt.style.use('default')\n",
" axs = axs.flatten()\n",
" \n",
" for i in range(len(Y_plot_1)): \n",
" for j in range(len(Y_plot_1[i])):\n",
" if j < rows_cols:\n",
" pass\n",
" else:\n",
" label1_ij0 = re.sub('_', ' ', label_plot_1[i][j][0])\n",
" axs[j-rows_cols].plot(X_plot[i], Y_plot_1[i][j][0], color=color_list[i], label=label1_ij0, linestyle=linestyle_list[0])\n",
" axs[j-rows_cols].set_ylabel(f'{label_names[j]}')\n",
" axs[j-rows_cols].set_xlabel('Time (min)')\n",
" axs[j-rows_cols].grid(which='major', color='gray', linestyle='dashed')\n",
" axs[j-rows_cols].legend(loc='upper left')\n",
" \n",
" plt.tight_layout()\n",
" plt.savefig(f'{output_dir}/Fig3_{pdf_name}_results_cache_socket1.png')\n",
" print(f'{output_dir}/Fig3_{pdf_name}_results_cache_socket1.png')\n",
" plt.close() \n",
"\n",
"def create_report_performance(input_dir, output_dir, all_files, readout_name, daqconf_files, core_utilization_files, parent_folder_dir, print_info=True, pdf_name='performance_report', repin_threads_file=[None], comment=['TBA']): \n",
" directory([input_dir, output_dir])\n",
"\n",
" # Open pdf file\n",
" pdf = FPDF()\n",
" pdf.add_page()\n",
" pdf.ln(1)\n",
" pdf.image(f'{parent_folder_dir}/tools/dune_logo.jpg', w=180)\n",
" pdf.ln(2)\n",
" pdf.set_font('Times', 'B', 16)\n",
" pdf.cell(40,10,'Performance Report')\n",
" pdf.ln(10)\n",
" \n",
" # creating report\n",
" pdf.set_font('Times', '', 10)\n",
" pdf.write(5, 'The tests were run for the WIBEth data format. The Figures 1 and 2 show the results of the tests ran (Table1) using the different metrics. \\n')\n",
" pdf.write(5, ' * L2-hits is the fraction of requests that make it to L2 at all. Similar for L3. \\n')\n",
" pdf.write(5, ' * L2-misses is the fraction of requests that make it to L2 at all and then miss in L2. Similar for L3. \\n')\n",
" pdf.ln(10)\n",
" \n",
" #-------------------------------------------TABLE-----------------------------------------------\n",
" # Data to tabular\n",
" rows_data = []\n",
" headers = ['Test', 'Readout SRV', 'dunedaq', 'Socket', 'General comments']\n",
" rows_data.append(headers)\n",
" \n",
" line_height = pdf.font_size * 2\n",
" col_width = [pdf.epw/3.8, pdf.epw/8, pdf.epw/7, pdf.epw/12, pdf.epw/4] \n",
" lh_list = [] #list with proper line_height for each row\n",
" \n",
" for i, file_i in enumerate(all_files):\n",
" info = break_file_name(file_i)\n",
" test_info = re.sub('_', ' ', info[5])\n",
" line = [test_info, info[2], info[1], info[3], comment[i]]\n",
" rows_data.append(line)\n",
" \n",
" # Determine line heights based on the number of words in each cell\n",
" for row in rows_data:\n",
" max_lines = 1 # Initialize with a minimum of 1 line\n",
" for datum in row:\n",
" lines_needed = len(str(datum).split('\\n')) # Count the number of lines\n",
" max_lines = max(max_lines, lines_needed)\n",
"\n",
" lh_list.append(line_height * max_lines)\n",
" \n",
" # Add table rows with word wrapping and dynamic line heights\n",
" for j, row in enumerate(rows_data):\n",
" line_height_table = lh_list[j] \n",
" for k, datum in enumerate(row):\n",
" pdf.multi_cell(col_width[k], line_height_table, datum, border=1, align='L', new_x=XPos.RIGHT, new_y=YPos.TOP, max_line_height=pdf.font_size)\n",
" \n",
" pdf.ln(line_height_table)\n",
" \n",
" pdf.write(5, 'Table. Summary of the tests ran. \\n') \n",
" pdf.ln(10)\n",
" \n",
" #-------------------------------------------- FIGURES START ------------------------------------------------\n",
" plot_vars_comparison(input_dir, output_dir, all_files, pdf_name)\n",
" \n",
" if info[3] == '0' or info[3] == '01':\n",
" pdf.image(f'{output_dir}/Fig0_{pdf_name}_results_socket0.png', w=180)\n",
" pdf.write(5, 'Figure. Socket0 results of the tests ran using the metrics CPU Utilization (%), Memory Bandwidth (GB/sec), Instructions Per Cycle, Instructions Retired Any (Million).')\n",
" pdf.ln(10)\n",
" pdf.image(f'{output_dir}/Fig1_{pdf_name}_results_cache_socket0.png', w=180)\n",
" pdf.write(5, 'Figure. Socket0 results of the tests ran using the metrics L2 Cache Misses (Million), L2 Cache [Misses/Hits] (%), L3 Cache Misses (Million), and L3 Cache [Misses/Hits] (%).')\n",
" pdf.ln(10)\n",
" \n",
" if info[3] == '1' or info[3] == '01':\n",
" pdf.image(f'{output_dir}/Fig2_{pdf_name}_results_socket1.png', w=180)\n",
" pdf.write(5, 'Figure. Socket1 results of the tests ran using the metrics CPU Utilization (%), Memory Bandwidth (GB/sec), Instructions Per Cycle, Instructions Retired Any (Million).')\n",
" pdf.ln(10)\n",
" pdf.image(f'{output_dir}/Fig3_{pdf_name}_results_cache_socket1.png', w=180)\n",
" pdf.write(5, 'Figure. Socket1 results of the tests ran using the metrics L2 Cache Misses (Million), L2 Cache [Misses/Hits] (%), L3 Cache Misses (Million), and L3 Cache [Misses/Hits] (%).')\n",
" pdf.ln(10)\n",
" #-------------------------------------------- FIGURES END ------------------------------------------------\n",
" \n",
" #---------------------------------------- CONFIGURATIONS START ---------------------------------------------\n",
" if print_info:\n",
" pdf.write(5, 'Configurations: \\n', 'B')\n",
" for i in range(len(all_files)):\n",
" info = break_file_name(all_files[i])\n",
" var_i = readout_name[i]\n",
" file_daqconf_i = daqconf_files[i]\n",
" file_core_i = core_utilization_files[i]\n",
" repin_threads_file_i = repin_threads_file[i]\n",
" \n",
" json_info(file_daqconf=file_daqconf_i, file_core=file_core_i, parent_folder_dir=parent_folder_dir, input_dir=input_dir, var=var_i, pdf=pdf, if_pdf=print_info, repin_threads_file=repin_threads_file_i) \n",
"\n",
" pdf.ln(20)\n",
" pdf.set_font('Times', '', 10)\n",
" pdf.write(5, f'The End, made on {current_time()}')\n",
" pdf.output(f'{output_dir}/{pdf_name}_report.pdf')\n",
" #---------------------------------------- CONFIGURATIONS END ---------------------------------------------\n",
" \n",
" print(f'The report was create and saved to {output_dir}/{pdf_name}.pdf')\n",
"from basic_functions_performance import *\n",
"\n",
"print('Ready to run and process')"
]
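The large cell removed above now lives in the basic_functions_performance module, which the new cell imports in place of basic_functions. The core of the removed plot_vars_comparison is a two-by-two grid of per-socket metric panels; the sketch below reproduces that plotting pattern in a runnable, standalone form, with synthetic data standing in for the PCM/uProf CSV columns and a placeholder output file name.

    # Simplified sketch of the 2x2 per-socket plotting pattern from the removed
    # plot_vars_comparison cell; the data is synthetic, not from the CSV files.
    import matplotlib.pyplot as plt
    import numpy as np

    label_names = ['CPU Utilization (%)', 'Memory Bandwidth (GB/sec)',
                   'Instructions Per Cycle', 'Instructions Retired Any (Million)']

    x = np.linspace(0, 60, 120)                # time axis in minutes
    metrics = [50 + 10 * np.sin(x / 5),        # synthetic stand-ins for the metrics
               120 + 5 * np.cos(x / 7),
               1.2 + 0.1 * np.sin(x / 3),
               900 + 50 * np.cos(x / 4)]

    fig, axs = plt.subplots(2, 2, figsize=(18, 8))
    axs = axs.flatten()
    for j, (y, name) in enumerate(zip(metrics, label_names)):
        axs[j].plot(x, y, label='example test', linestyle='-')
        axs[j].set_ylabel(name)
        axs[j].set_xlabel('Time (min)')
        axs[j].grid(which='major', color='gray', linestyle='dashed')
        axs[j].legend(loc='upper left')

    plt.tight_layout()
    plt.savefig('Fig0_example_results_socket0.png')  # placeholder file name
    plt.close()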
