 import seaborn as sns
 import argparse
 from pathlib import Path
-from multiprocessing import Process, Lock, Queue
+from multiprocessing import Lock
+from concurrent.futures import ThreadPoolExecutor

 # Output directory
-output_dir = "../tasks/lookahead_verifier_output"
+output_dir = "./vtr_flow/tasks/lookahead_verifier_output"
 # The graph types (pie, heatmap, bar, scatter) that will be created
 graph_types: list
 # The components that will be used for graphs (cost, delay, congestion)
@@ -75,9 +76,9 @@
     "test name"
 ]

-# Lock and Queue for multithreading
+# Lock and Pool for multithreading
 lock = Lock()
-q = Queue()
+pool = ThreadPoolExecutor(1)


 # Check if a component is valid, otherwise raise exception
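The core of this change is visible above: work items that used to be wrapped in multiprocessing.Process objects and parked in a Queue are now handed to a single shared ThreadPoolExecutor via submit(). As a quick reference (a minimal sketch, not taken from this script; work, proc, and future are illustrative names), the two patterns compare like this:

from concurrent.futures import ThreadPoolExecutor
from multiprocessing import Process

def work(x):
    print(x * x)

# Old pattern: each task is a Process built from a target and an args tuple,
# and it runs only once it is explicitly started and joined.
# (On spawn-based platforms this needs an `if __name__ == "__main__":` guard.)
proc = Process(target=work, args=(3,))
proc.start()
proc.join()

# New pattern: pass the callable and its arguments straight to the pool; the
# executor runs it on a worker thread and returns a Future for the result.
pool = ThreadPoolExecutor(max_workers=1)
future = pool.submit(work, 3)
future.result()           # blocks until the call finishes, re-raising any exception
pool.shutdown(wait=True)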
@@ -350,10 +351,7 @@ def make_standard_scatter_plots(self, test_name_plot: bool):
                         if first_it and col == "iteration no.":
                             continue

-                        proc = Process(
-                            target=self.make_scatter_plot, args=(comp, plot_type, col, first_it)
-                        )
-                        q.put(proc)
+                        pool.submit(self.make_scatter_plot, comp, plot_type, col, first_it)

     # Create a bar graph displaying average error
     # comp: The component (cost, delay, or congestion)
@@ -427,6 +425,7 @@ def make_bar_graph(self, comp: str, column: str, first_it_only: bool, use_absolu
         avg_error_df.plot.bar(title=title, xlabel=column, ylabel=y_label, legend=False)

         self.write_exclusions_info()
+        print(os.path.join(curr_dir, file_name))
         plt.savefig(os.path.join(curr_dir, file_name), dpi=300, bbox_inches="tight")
         plt.close()

@@ -447,10 +446,7 @@ def make_standard_bar_graphs(self, test_name_plot: bool):
             for col in columns:
                 for use_abs in [True, False]:
                     for first_it in [True, False]:
-                        proc = Process(
-                            target=self.make_bar_graph, args=(comp, col, use_abs, first_it)
-                        )
-                        q.put(proc)
+                        pool.submit(self.make_bar_graph, comp, col, use_abs, first_it)

     # Create a heatmap comparing two quantitative columns
     # comp: The component (cost, delay, or congestion)
@@ -559,23 +555,14 @@ def make_standard_heatmaps(self):
         for comp in components:
             for first_it in [True, False]:
                 for use_abs in [True, False]:
-                    proc = Process(
-                        target=self.make_heatmap,
-                        args=(
-                            comp,
-                            "sink cluster tile width",
-                            "sink cluster tile height",
-                            first_it,
-                            use_abs,
-                        ),
-                    )
-                    q.put(proc)
-
-                    proc = Process(
-                        target=self.make_heatmap,
-                        args=(comp, "delta x", "delta y", first_it, use_abs),
-                    )
-                    q.put(proc)
+                    pool.submit(self.make_heatmap,
+                        comp,
+                        "sink cluster tile width",
+                        "sink cluster tile height",
+                        first_it,
+                        use_abs,
+                    )
+                    pool.submit(self.make_heatmap, comp, "delta x", "delta y", first_it, use_abs)

     # Create a pie chart showing the proportion of cases where error is under percent_error_threshold
     # comp: The component (cost, delay, or congestion)
@@ -671,16 +658,13 @@ def make_standard_pie_charts(self, test_name_plot: bool):
         if test_name_plot:
             columns = self.__standard_bar_columns
         else:
-            columns = self.__standard_bar_columns[0:-1]
+            columns = self.__standard_bar_columns[:-1]

         for comp in components:
             for col in columns:
                 for first_it in [True, False]:
                     for weighted in [True, False]:
-                        proc = Process(
-                            target=self.make_pie_chart, args=(comp, col, first_it, weighted)
-                        )
-                        q.put(proc)
+                        pool.submit(self.make_pie_chart, comp, col, first_it, weighted)

     # Make "standard" graphs of all types.
     # test_name_plot: whether to create plots where data is split by test name. This option
@@ -777,10 +761,7 @@ def make_csv(df_out: pd.DataFrame, file_name: str):

     # Write out the csv
     if csv_data and (not os.path.exists(os.path.join(directory, "data.csv")) or not no_replace):
-        proc = Process(
-            target=make_csv, args=(df, os.path.join(directory, "data.csv"))
-        )
-        q.put(proc)
+        pool.submit(make_csv, df, os.path.join(directory, "data.csv"))

     if should_print:
         print("Created ", os.path.join(directory, "data.csv"), sep="")
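One behavioural detail worth keeping in mind with this pattern (an observation about concurrent.futures, not something this patch adds): submit() returns a Future, and an exception raised inside the submitted callable is stored on that Future rather than printed, so a failing make_csv call stays silent unless the Future is checked. A hedged sketch, with might_fail as an illustrative stand-in:

from concurrent.futures import ThreadPoolExecutor

def might_fail(path):
    raise OSError(f"cannot write {path}")

pool = ThreadPoolExecutor(2)
futures = [pool.submit(might_fail, p) for p in ("a.csv", "b.csv")]
pool.shutdown(wait=True)       # wait for all submitted work to finish

for fut in futures:
    err = fut.exception()      # None if the call succeeded
    if err is not None:
        print("task failed:", err)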
@@ -916,8 +897,8 @@ def main():

     args = parser.parse_args()

-    global q
-    q = Queue(args.j)
+    global pool
+    pool = ThreadPoolExecutor(args.j)

     global graph_types
     global components
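ThreadPoolExecutor(args.j) replaces the Queue bounded to args.j entries with a pool of args.j worker threads, and each submitted task starts as soon as a worker is free. If main() ever needs a point where all queued plots and CSVs are guaranteed to be finished, one option (a sketch only; this patch does not add it) is an explicit shutdown at the end of main():

# Sketch: an explicit barrier at the end of main(). shutdown(wait=True) blocks
# until every task already handed to pool.submit(...) has completed.
pool.shutdown(wait=True)

# Equivalent scoped form, if the pool were created where its lifetime is clear:
# with ThreadPoolExecutor(args.j) as pool:
#     ...   # submit work; the with-block exit waits for it all to finish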
@@ -1098,21 +1079,19 @@ def main():

     # If --collect used, create output files for all csv files provided

-    if args.collect == "":
-        return
-
-    results_folder = args.collect[0]
-    results_folder_path = os.path.join(output_dir, results_folder)
-    if len(args.dir_app) > 0:
-        results_folder_path += f"{args.dir_app[0]}"
-    make_dir(results_folder_path, False)
+    if args.collect != "":
+        results_folder = args.collect[0]
+        results_folder_path = os.path.join(output_dir, results_folder)
+        if len(args.dir_app) > 0:
+            results_folder_path += f"{args.dir_app[0]}"
+        make_dir(results_folder_path, False)

-    df_complete = df_complete.reset_index(drop=True)
+        df_complete = df_complete.reset_index(drop=True)

-    record_df_info(df_complete, results_folder_path)
+        record_df_info(df_complete, results_folder_path)

-    global_plots = Graphs(df_complete, os.path.join(results_folder, "plots"), "All Tests")
-    global_plots.make_standard_plots(True)
+        global_plots = Graphs(df_complete, os.path.join(results_folder, "plots"), "All Tests")
+        global_plots.make_standard_plots(True)


 if __name__ == "__main__":