diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 0665cc6a24f..c0581a929fc 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -128,14 +128,27 @@ jobs:
fail-fast: false
matrix:
include:
- - { name: 'C/C++', script: 'check-format.sh' , with_python: 'no', pkgs: 'clang-format-18' }
- - { name: 'Python', script: 'check-format-py.sh', with_python: 'yes', pkgs: '' }
- - { name: 'Python Lint', script: 'pylint_check.py' , with_python: 'yes', pkgs: '' }
+ - { name: 'C/C++', script: 'check-format.sh' , with_python: 'yes', with_submodules: 'true', all_vtr_pkgs: 'yes', pkgs: 'clang-format-18' }
+ - { name: 'Python', script: 'check-format-py.sh', with_python: 'yes', with_submodules: 'true', all_vtr_pkgs: 'yes', pkgs: '' }
+ - { name: 'Python Lint', script: 'pylint_check.py' , with_python: 'yes', with_submodules: 'false', all_vtr_pkgs: 'no', pkgs: '' }
name: 'F: ${{ matrix.name }}'
steps:
- uses: actions/checkout@v4
- # NOTE: We do not need sub-modules. We do not check sub-modules for formatting.
+ with:
+ submodules: ${{ matrix.with_submodules }}
+      # NOTE: We usually do not need sub-modules. We do not check sub-modules
+      #       for formatting. However, we may need to run `make format`, which
+      #       requires building VTR, and VTR will not build without submodules.
+ # TODO: We should relax this requirement.
+
+      # In order to format the C++ code of VTR, `make format` is used, which
+      # requires all of the VPR dependencies to be installed.
+ # TODO: This should be fixed so this test is not as heavy. We do not need
+ # these dependencies to perform a simple format.
+ - name: Install dependencies
+ if: ${{ matrix.all_vtr_pkgs == 'yes' }}
+ run: ./install_apt_packages.sh
# TODO: This should be on the same version of Python as would be found on
# Ubuntu 24.04 (3.12.3); however that version has some linting errors.
diff --git a/dev/check-format.sh b/dev/check-format.sh
index e8d970926ff..dd193af10b4 100755
--- a/dev/check-format.sh
+++ b/dev/check-format.sh
@@ -8,7 +8,11 @@ if [ $clean -ne 0 ]; then
else
echo "Code Formatting Check"
echo "====================="
- make format"$1" > /dev/null 2>&1
+ make format"$1" > /dev/null
+ if [ $? -ne 0 ]; then
+ echo "make format failed!"
+ exit 1
+ fi
valid_format=$(git diff | wc -l)
diff --git a/libs/librtlnumber/src/include/internal_bits.hpp b/libs/librtlnumber/src/include/internal_bits.hpp
index ffefb5a4d9a..0d5c7388470 100644
--- a/libs/librtlnumber/src/include/internal_bits.hpp
+++ b/libs/librtlnumber/src/include/internal_bits.hpp
@@ -27,14 +27,14 @@ constexpr short integer_t_size = (sizeof(integer_t) * 8);
}
#define unroll_1d(lut) \
- {lut[_0], lut[_1], lut[_x], lut[_z]}
+ { lut[_0], lut[_1], lut[_x], lut[_z] }
#define unroll_2d(lut) \
- {unroll_1d(lut[_0]), unroll_1d(lut[_1]), unroll_1d(lut[_x]), unroll_1d(lut[_z])}
+ { unroll_1d(lut[_0]), unroll_1d(lut[_1]), unroll_1d(lut[_x]), unroll_1d(lut[_z]) }
#define unroll_1d_invert(lut) \
- {l_not[lut[_0]], l_not[lut[_1]], l_not[lut[_x]], l_not[lut[_z]]}
+ { l_not[lut[_0]], l_not[lut[_1]], l_not[lut[_x]], l_not[lut[_z]] }
#define unroll_2d_invert(lut) \
- {unroll_1d_invert(lut[_0]), unroll_1d_invert(lut[_1]), unroll_1d_invert(lut[_x]), unroll_1d_invert(lut[_z])}
+ { unroll_1d_invert(lut[_0]), unroll_1d_invert(lut[_1]), unroll_1d_invert(lut[_x]), unroll_1d_invert(lut[_z]) }
namespace BitSpace {
typedef uint8_t bit_value_t;
diff --git a/odin_ii/regression_test/tools/ODIN_CONFIG.py b/odin_ii/regression_test/tools/ODIN_CONFIG.py
index e264bdfad37..4c856ee7114 100755
--- a/odin_ii/regression_test/tools/ODIN_CONFIG.py
+++ b/odin_ii/regression_test/tools/ODIN_CONFIG.py
@@ -1542,7 +1542,7 @@ def buildChildren(self, child_, nodeName_):
def usage():
- print USAGE_TEXT
+ print(USAGE_TEXT)
sys.exit(1)
diff --git a/odin_ii/regression_test/tools/odin_config_maker.py b/odin_ii/regression_test/tools/odin_config_maker.py
index 3fee5ab7a41..c5a73f51f3a 100644
--- a/odin_ii/regression_test/tools/odin_config_maker.py
+++ b/odin_ii/regression_test/tools/odin_config_maker.py
@@ -28,13 +28,15 @@ def main(argv=None):
# Check our options for input errors
if not options.arch:
- print "\tDid not get an architecture file; use odin_config_maker.py -h for help."
+ print("\tDid not get an architecture file; use odin_config_maker.py -h for help.")
return -1
if options.individual is None and options.directory is None and options.files is None:
- print "\tDid not get any input options; use odin_config_maker.py -h for help."
+ print("\tDid not get any input options; use odin_config_maker.py -h for help.")
return -1
if options.individual is not None and options.directory is not None:
- print "\tThe -i and -d options are mutually exclusive; use odin_config_maker.py -h for help."
+ print(
+ "\tThe -i and -d options are mutually exclusive; use odin_config_maker.py -h for help."
+ )
return -1
# Create our Config Files
@@ -54,7 +56,7 @@ def main(argv=None):
)
for file in file_list:
- print file[file.rfind("/") + 1 : file.rfind(".v")]
+ print(file[file.rfind("/") + 1 : file.rfind(".v")])
base = file[file.rfind("/") + 1 : file.rfind(".v")]
create_odin_projects(options, [file], base, path)
@@ -78,7 +80,7 @@ def main(argv=None):
create_odin_projects(options, file_list, base, path)
else:
- print "Something Failed!"
+ print("Something Failed!")
return -1
diff --git a/odin_ii/regression_test/tools/synth_using_odin.py b/odin_ii/regression_test/tools/synth_using_odin.py
index e7757942c87..20efcfdd454 100644
--- a/odin_ii/regression_test/tools/synth_using_odin.py
+++ b/odin_ii/regression_test/tools/synth_using_odin.py
@@ -9,7 +9,7 @@
if len(sys.argv) is not 2:
- print "usage: " + sys.argv[0] + "
"
+ print("usage: " + sys.argv[0] + " ")
path = abspath(sys.argv[1]) + "/"
slog = open(path + "ODIN_success.lst", "w")
diff --git a/odin_ii/regression_test/tools/synth_using_quartus.py b/odin_ii/regression_test/tools/synth_using_quartus.py
index 72bc6f34b88..33610837825 100644
--- a/odin_ii/regression_test/tools/synth_using_quartus.py
+++ b/odin_ii/regression_test/tools/synth_using_quartus.py
@@ -19,8 +19,8 @@
# print sys.argv
if len(sys.argv) != 2:
- print "Take our Verilog benchmarks and synthesize them using QIS (Quartus)"
- print "Usage: GenQuartusBlifs.py "
+ print("Take our Verilog benchmarks and synthesize them using QIS (Quartus)")
+ print("Usage: GenQuartusBlifs.py ")
sys.exit(0)
projNames = map(odin.trimDotV, filter(odin.isVerilog, os.listdir(sys.argv[1])))
@@ -68,11 +68,11 @@
process = subprocess.Popen(clean, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
(out, err) = process.communicate()
- print out + "\n"
+ print(out + "\n")
process = subprocess.Popen(create, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
(out, err) = process.communicate()
- print out + "\n"
+ print(out + "\n")
process = subprocess.Popen(move, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
(out, err) = process.communicate()
diff --git a/odin_ii/regression_test/tools/synth_using_vl2mv.py b/odin_ii/regression_test/tools/synth_using_vl2mv.py
index 3e5ee44306f..4347912f69b 100644
--- a/odin_ii/regression_test/tools/synth_using_vl2mv.py
+++ b/odin_ii/regression_test/tools/synth_using_vl2mv.py
@@ -10,7 +10,7 @@
if len(sys.argv) is not 2:
- print "usage: " + sys.argv[0] + " "
+ print("usage: " + sys.argv[0] + " ")
path = abspath(sys.argv[1]) + "/"
os.system('mkdir -p "' + path + 'VL2MV_Blifs/"')
diff --git a/odin_ii/usefull_tools/restore_blackboxed_latches_from_blif_file.py b/odin_ii/usefull_tools/restore_blackboxed_latches_from_blif_file.py
index 0a6a00715be..fdc7a539f71 100755
--- a/odin_ii/usefull_tools/restore_blackboxed_latches_from_blif_file.py
+++ b/odin_ii/usefull_tools/restore_blackboxed_latches_from_blif_file.py
@@ -49,20 +49,24 @@ def main():
# print "DEBUG: RESTORED_BLIF_FILE: {}".format(args.restoredBlifFile)
if args.inplace:
- print "Inplace Restortation:"
+ print("Inplace Restortation:")
elif not args.restoredBlifFile:
- print '\n\nERROR: Must Specify Either Inplace Restoration "-i,--inplace" or a file to restore to "-r\--restoredBlifFile"\nExiting...\n'
+ print(
+ '\n\nERROR: Must Specify Either Inplace Restoration "-i,--inplace" or a file to restore to "-r\--restoredBlifFile"\nExiting...\n'
+ )
parser.print_help()
return -1
if not os.path.isfile(args.blifFileToRestore):
- print '\n\nERROR: BLIF File to Restore "{}" doesn not exist\nExiting...\n'.format(
- args.blifFileToRestore
+ print(
+ '\n\nERROR: BLIF File to Restore "{}" doesn not exist\nExiting...\n'.format(
+ args.blifFileToRestore
+ )
)
parser.print_help()
return -1
- print "Restoring original inputs, ouptuts, types, controls and init_vals:"
+ print("Restoring original inputs, ouptuts, types, controls and init_vals:")
if args.inplace:
args.restoredBlifFile = "restored.blif.tmp"
@@ -81,7 +85,7 @@ def main():
line = line.replace(src, target)
outfile.write(line)
- print "Removing BlackBoxed Latch Model:"
+ print("Removing BlackBoxed Latch Model:")
ignore = False
for line in fileinput.input(args.restoredBlifFile, inplace=True):
@@ -89,7 +93,7 @@ def main():
if line.startswith(".model bb_latch"):
ignore = True
else:
- print line,
+ print(line),
if ignore and line.isspace():
ignore = False
@@ -98,9 +102,9 @@ def main():
dest_filename = os.path.join(args.blifFileToRestore)
shutil.move(src_filename, dest_filename)
- print "BLIF File Restored. See: {}".format(args.blifFileToRestore)
+ print("BLIF File Restored. See: {}".format(args.blifFileToRestore))
else:
- print "BLIF File Restored. See: {}".format(args.restoredBlifFile)
+ print("BLIF File Restored. See: {}".format(args.restoredBlifFile))
return
diff --git a/vpr/src/base/read_netlist.cpp b/vpr/src/base/read_netlist.cpp
index b70cdafe547..d5a5b9e8135 100644
--- a/vpr/src/base/read_netlist.cpp
+++ b/vpr/src/base/read_netlist.cpp
@@ -319,11 +319,11 @@ static void sync_clustered_and_atom_netlists(ClusteredNetlist& clb_nlist,
}
static void process_complex_block(pugi::xml_node clb_block,
- const ClusterBlockId index,
- int& num_primitives,
- const pugiutil::loc_data& loc_data,
- const std::unordered_map& logical_block_type_name_to_index,
- ClusteredNetlist& clb_nlist) {
+ const ClusterBlockId index,
+ int& num_primitives,
+ const pugiutil::loc_data& loc_data,
+ const std::unordered_map& logical_block_type_name_to_index,
+ ClusteredNetlist& clb_nlist) {
const auto& logical_block_types = g_vpr_ctx.device().logical_block_types;
//Parse cb attributes
diff --git a/vpr/src/pack/prepack.cpp b/vpr/src/pack/prepack.cpp
index 9743d969fe6..c4445bc4074 100644
--- a/vpr/src/pack/prepack.cpp
+++ b/vpr/src/pack/prepack.cpp
@@ -169,7 +169,6 @@ static std::vector alloc_and_load_pack_patterns(const std::vect
std::vector packing_patterns = alloc_and_init_pattern_list_from_hash(pattern_names);
-
/* load packing patterns by traversing the edges to find edges belonging to pattern */
for (size_t i = 0; i < pattern_names.size(); i++) {
for (const t_logical_block_type& type : logical_block_types) {
diff --git a/vpr/src/route/check_route.cpp b/vpr/src/route/check_route.cpp
index abf6aff6138..4e52b987b89 100644
--- a/vpr/src/route/check_route.cpp
+++ b/vpr/src/route/check_route.cpp
@@ -740,23 +740,23 @@ static bool check_non_configurable_edges(const Netlist<>& net_list,
//forward/reverse edge is used.
std::vector dedupped_difference;
std::ranges::copy_if(difference,
- std::back_inserter(dedupped_difference),
- [&](t_node_edge forward_edge) {
- VTR_ASSERT_MSG(!routing_edges.contains(forward_edge), "Difference should not contain used routing edges");
-
- t_node_edge reverse_edge = {forward_edge.to_node, forward_edge.from_node};
-
- //Check whether the reverse edge was used
- if (rr_edges.contains(reverse_edge) && routing_edges.contains(reverse_edge)) {
- //The reverse edge exists in the set of rr_edges, and was used
- //by the routing.
- //
- //We can therefore safely ignore the fact that this (forward) edge is un-used
- return false; //Drop from difference
- } else {
- return true; //Keep, this edge should have been used
- }
- });
+ std::back_inserter(dedupped_difference),
+ [&](t_node_edge forward_edge) {
+ VTR_ASSERT_MSG(!routing_edges.contains(forward_edge), "Difference should not contain used routing edges");
+
+ t_node_edge reverse_edge = {forward_edge.to_node, forward_edge.from_node};
+
+ //Check whether the reverse edge was used
+ if (rr_edges.contains(reverse_edge) && routing_edges.contains(reverse_edge)) {
+ //The reverse edge exists in the set of rr_edges, and was used
+ //by the routing.
+ //
+ //We can therefore safely ignore the fact that this (forward) edge is un-used
+ return false; //Drop from difference
+ } else {
+ return true; //Keep, this edge should have been used
+ }
+ });
//At this point only valid missing node pairs are in the set
if (!dedupped_difference.empty()) {
diff --git a/vpr/src/route/rr_graph_generation/rr_graph_intra_cluster.cpp b/vpr/src/route/rr_graph_generation/rr_graph_intra_cluster.cpp
index d2982da59b8..a4934318cd6 100644
--- a/vpr/src/route/rr_graph_generation/rr_graph_intra_cluster.cpp
+++ b/vpr/src/route/rr_graph_generation/rr_graph_intra_cluster.cpp
@@ -202,7 +202,7 @@ static std::vector get_cluster_block_pins(t_physical_tile_type_ptr physical
bool found_sub_tile = false;
// Iterate over all the sub-tiles to find the sub-tile instance that the cluster block is mapped to.
- for (const t_sub_tile& sub_tile: physical_tile->sub_tiles) {
+ for (const t_sub_tile& sub_tile : physical_tile->sub_tiles) {
if (sub_tile.capacity.is_in_range(sub_tile_index)) {
// This sub-tile type is the one that the cluster block is mapped to.
found_sub_tile = true;
diff --git a/vpr/src/server/taskresolver.cpp b/vpr/src/server/taskresolver.cpp
index 2911dd417d7..7e31841142a 100644
--- a/vpr/src/server/taskresolver.cpp
+++ b/vpr/src/server/taskresolver.cpp
@@ -8,7 +8,6 @@
#include "telegramoptions.h"
#include
-
#include
namespace server {
diff --git a/vpr/src/util/vpr_utils.cpp b/vpr/src/util/vpr_utils.cpp
index 2fd19c0a719..a2562460941 100644
--- a/vpr/src/util/vpr_utils.cpp
+++ b/vpr/src/util/vpr_utils.cpp
@@ -1372,9 +1372,7 @@ std::tuple parse_direct_pin_name(std::string
std::string source_string{src_string};
// Replace '.' and '[' characters with ' '
- std::ranges::replace_if(source_string,
- [](char c) noexcept { return c == '.' || c == '[' || c == ':' || c == ']'; },
- ' ');
+ std::ranges::replace_if(source_string, [](char c) noexcept { return c == '.' || c == '[' || c == ':' || c == ']'; }, ' ');
std::istringstream source_iss(source_string);
int start_pin_index, end_pin_index;
diff --git a/vtr_flow/scripts/benchtracker/plotter-offline.py b/vtr_flow/scripts/benchtracker/plotter-offline.py
index f73c5d94bc1..fee68d1cfb6 100644
--- a/vtr_flow/scripts/benchtracker/plotter-offline.py
+++ b/vtr_flow/scripts/benchtracker/plotter-offline.py
@@ -120,7 +120,7 @@ def __init__(self, axis_name_supmap, xy_name_map, y_raw_list):
def transpose_overlay_axes(self, overlay_axis, y_type="raw"):
if y_type == "gmean" and self.axis_cur_gmean_order == []:
- print "**** CANNOT FILTER ON GMEAN YET. AXIS NOT SET ****"
+ print("**** CANNOT FILTER ON GMEAN YET. AXIS NOT SET ****")
axis_cost_temp = (y_type == "gmean" and [self.axis_gmean_cost] or [self.axis_raw_cost])[0]
axis_cost_temp = {k: (v + (k in overlay_axis)) for (k, v) in axis_cost_temp.items()}
# NOTE: now axis_raw_cost is converted from dict to list of tuples, so that it is ordered by the cost value.
@@ -226,6 +226,7 @@ def subplot_traverser(
i.e.: y_sublist = y_raw_list[i]
figure_name
"""
+
# TODO: if some x,y series are all -1, then we should not create the figure
def figure_traverser(
self, y_sub_list, namemap, axis_left, xy_namemap, y_i, figure_name, plot_type="plot"
@@ -291,7 +292,7 @@ def plot_generator(self, data_collection, axis_order, overlay_axis, mode, plot_t
plot_type,
)
else:
- print err_msg["choose plot to show"]
+ print(err_msg["choose plot to show"])
plt.show()
@@ -303,37 +304,37 @@ def plot_generator(self, data_collection, axis_order, overlay_axis, mode, plot_t
def db_connector():
tasks = idb.list_tasks()
- print "available tasks: "
+ print("available tasks: ")
for i in range(len(tasks)):
- print "[" + str(i) + "]: ", tasks[i]
+ print("[" + str(i) + "]: ", tasks[i])
task_num = int(raw_input("which task to choose (input the index): "))
available_choice = idb.describe_tasks([tasks[task_num]])
available_name = [k.raw()[0] for k in available_choice]
available_type = [k.raw()[1] for k in available_choice]
- print "==========================================================="
- print "available choices:"
- print "\n".join(i for i in available_choice)
- print "==========================================================="
+ print("===========================================================")
+ print("available choices:")
+ print("\n".join(i for i in available_choice))
+ print("===========================================================")
while 1:
x = raw_input("choose a x axis name: ")
if x in available_name:
break
- print err_msg["choose axis"]
+ print(err_msg["choose axis"])
while 1:
y = raw_input("choose a y axis name: ")
if y in available_name:
break
- print err_msg["choose axis"]
+ print(err_msg["choose axis"])
filt_list = []
filt_name_list = []
cur_choice = None
- print "==========================================================="
+ print("===========================================================")
while 1:
while 1:
cur_choice = raw_input("choose filter name (enter empty string to exit): ")
if (cur_choice in available_name) or (cur_choice == ""):
break
- print err_msg["choose axis"]
+ print(err_msg["choose axis"])
if cur_choice == "":
break
filt_name_list.append(cur_choice)
@@ -341,10 +342,10 @@ def db_connector():
fname = cur_choice
fmethod = filter_method[cur_type]
param_range = idb.describe_param(cur_choice + " " + cur_type, "range", tasks[task_num])
- print "available range: ", param_range
+ print("available range: ", param_range)
frange = None
if len(param_range) == 1:
- print "set range to: ", param_range
+ print("set range to: ", param_range)
frange = param_range
else:
cur_range = raw_input(
@@ -356,15 +357,15 @@ def db_connector():
elif fmethod == "IN" and choice_fields != []:
frange = choice_fields
elif choice_fields == []:
- print "set range to: ", param_range
+ print("set range to: ", param_range)
frange = param_range
else:
- print err_msg["choose method"]
+ print(err_msg["choose method"])
# filt_list.append(idb.Task_filter(fname, fmethod, frange))
filt_list.append(frange[1])
filt_list = [item for sublist in filt_list for item in sublist]
- print "------"
- print filt_list
+ print("------")
+ print(filt_list)
data = idb.retrieve_data(x, y, filt_list, [tasks[task_num]])[1]
return {"data": data, "filt_name_list": filt_name_list, "x": x, "y": y}
@@ -380,15 +381,44 @@ def main():
filt_name_list = ret["filt_name_list"]
data_collection = data_converter(data, ret["x"], ret["y"], filt_name_list)
- print "########################################"
- print "---- Description: ----\n" + ">>\n" + "VPR benchmark experiment should have 2 types of data: \n" + "parameter: settings in for the experiment (e.g.: fc, wire length, switch block ...)\n" + "metrics: measurements from the VPR output (e.g.: min chan width, critical path delay ...)\n" + ">>\n" + "Data passed into this plotter should have already been classified into 3 axes: \n" + "one [x] axis (chosen from parameter)\n" + "multiple [y] axis (chosen from metrics)\n" + "multiple [filter] axis (all the unchosen parameters)\n" + ">>\n" + "For example, if the experiment has: \n" + "[arch, circuit, wire length, switch block, fc, min chan width, critical path delay, area, total wire length]\n" + "and you choose fc as x axis, [min chan width, critical path delay, area, total wire length] as y axes,\n" + "then filter axes are the unchosen parameters, i.e.: arch, circuit, wire length, switch block. "
- print "#########################################"
- print "---- Usage ----\n" + ">>\n" + "1. choose overlay axes among the filter axes (overlay axes will become legend in a single plot)\n" + '2. choose whether to whether to calculate the geo mean over the overlay axis ("merge" function)\n' + " (Notice: you can choose as many overlay axes as you like, but when you choose merge, it will only\n" + " calculate the geo mean over the last overlay axis. So for example, if your overlay axes are [fc, circuit],\n" + " the merge will only get geo mean over all the circuits rather that all the (circuit,fc) combination, and \n" + " fc will still be overlaid in the merged plot.)\n" + '3. the data after geo mean calcultion will be referred to as "gmean", and the data before the geo mean will be \n' + ' referred to as "raw", you can switch the overlay axes for both gmean data and raw data, for as many times \n' + ' as you like. But once you "merge" on a new axis, the old gmean data will be replaced by the new one, and further\n' + " operation will be acted on only the new gmean data."
+ print("########################################")
+ print(
+ "---- Description: ----\n"
+ + ">>\n"
+ + "VPR benchmark experiment should have 2 types of data: \n"
+ + "parameter: settings in for the experiment (e.g.: fc, wire length, switch block ...)\n"
+ + "metrics: measurements from the VPR output (e.g.: min chan width, critical path delay ...)\n"
+ + ">>\n"
+ + "Data passed into this plotter should have already been classified into 3 axes: \n"
+ + "one [x] axis (chosen from parameter)\n"
+ + "multiple [y] axis (chosen from metrics)\n"
+ + "multiple [filter] axis (all the unchosen parameters)\n"
+ + ">>\n"
+ + "For example, if the experiment has: \n"
+ + "[arch, circuit, wire length, switch block, fc, min chan width, critical path delay, area, total wire length]\n"
+ + "and you choose fc as x axis, [min chan width, critical path delay, area, total wire length] as y axes,\n"
+ + "then filter axes are the unchosen parameters, i.e.: arch, circuit, wire length, switch block. "
+ )
+ print("#########################################")
+ print(
+ "---- Usage ----\n"
+ + ">>\n"
+ + "1. choose overlay axes among the filter axes (overlay axes will become legend in a single plot)\n"
+ + '2. choose whether to whether to calculate the geo mean over the overlay axis ("merge" function)\n'
+ + " (Notice: you can choose as many overlay axes as you like, but when you choose merge, it will only\n"
+ + " calculate the geo mean over the last overlay axis. So for example, if your overlay axes are [fc, circuit],\n"
+ + " the merge will only get geo mean over all the circuits rather that all the (circuit,fc) combination, and \n"
+ + " fc will still be overlaid in the merged plot.)\n"
+ + '3. the data after geo mean calcultion will be referred to as "gmean", and the data before the geo mean will be \n'
+ + ' referred to as "raw", you can switch the overlay axes for both gmean data and raw data, for as many times \n'
+ + ' as you like. But once you "merge" on a new axis, the old gmean data will be replaced by the new one, and further\n'
+ + " operation will be acted on only the new gmean data."
+ )
while 1:
- print ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
- print "available axis to overlay: "
- print "for the raw data", data_collection.axis_cur_raw_order
- print "for the gmean data", data_collection.axis_cur_gmean_order
+ print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
+ print("available axis to overlay: ")
+ print("for the raw data", data_collection.axis_cur_raw_order)
+ print("for the gmean data", data_collection.axis_cur_gmean_order)
overlay_type = None
overlay_axis = []
if data_collection.y_gmean_list != []:
@@ -399,7 +429,7 @@ def main():
elif overlay_type == "raw":
break
else:
- print err_msg["choose overlay type"]
+ print(err_msg["choose overlay type"])
else:
overlay_type = "raw"
while 1:
@@ -425,7 +455,7 @@ def main():
):
break
else:
- print err_msg["overlay axis"]
+ print(err_msg["overlay axis"])
data_collection.transpose_overlay_axes(overlay_axis, overlay_type)
overlay_merge = 0
@@ -440,7 +470,7 @@ def main():
overlay_merge = 0
break
else:
- print err_msg["yes or no"]
+ print(err_msg["yes or no"])
ui = UI()
if overlay_type == "raw":
axis_left = [k for k in data_collection.axis_cur_raw_order if k not in overlay_axis]
@@ -459,7 +489,7 @@ def main():
elif show_plot_type == "":
break
else:
- print err_msg["choose plot to show"]
+ print(err_msg["choose plot to show"])
if __name__ == "__main__":
diff --git a/vtr_flow/scripts/download_titan.py b/vtr_flow/scripts/download_titan.py
index 6cea78aa1f8..2fd4b4ea858 100755
--- a/vtr_flow/scripts/download_titan.py
+++ b/vtr_flow/scripts/download_titan.py
@@ -321,6 +321,7 @@ def extract_callback(members, args):
print(tarinfo.name)
yield tarinfo
+
def get_benchmark_subdirs(args):
"""
Decide which benchmark subdirectories to use depending on version
@@ -331,6 +332,7 @@ def get_benchmark_subdirs(args):
else:
return ["titan_new", "titan23", "other_benchmarks"]
+
def compare_versions(version1, version2):
"""
Compares two release versions to see which once is more recent
diff --git a/vtr_flow/scripts/python_libs/vtr/parse_vtr_task.py b/vtr_flow/scripts/python_libs/vtr/parse_vtr_task.py
index e31ef40c7e7..73272de8834 100755
--- a/vtr_flow/scripts/python_libs/vtr/parse_vtr_task.py
+++ b/vtr_flow/scripts/python_libs/vtr/parse_vtr_task.py
@@ -422,7 +422,6 @@ def check_two_files(
for (arch, circuit, script_params), _ in first_results.all_metrics().items():
first_primary_keys.append((arch, circuit, script_params))
-
# Warn about any elements in first result file that are not found in second result file
for arch, circuit, script_params in first_primary_keys:
if second_results.metrics(arch, circuit, script_params) is None:
@@ -443,9 +442,11 @@ def check_two_files(
"/".join(str((Path(config.config_dir).parent)).split("/")[-3:])
)
)
- print("Required case {}/{} missing from {} results: {}".format(
+ print(
+ "Required case {}/{} missing from {} results: {}".format(
arch, circuit, first_name, first_results_filepath
- ))
+ )
+ )
num_qor_failures += 1
continue
diff --git a/vtr_flow/scripts/spice/run_spice.py b/vtr_flow/scripts/spice/run_spice.py
index c5052fd2ca5..181c29394f2 100755
--- a/vtr_flow/scripts/spice/run_spice.py
+++ b/vtr_flow/scripts/spice/run_spice.py
@@ -10,7 +10,7 @@
my_dir = ""
if len(sys.argv) < 9:
- print (
+ print(
"Usage: spice.py "
)
sys.exit()
@@ -37,7 +37,7 @@
elif activity == "z":
na = 1
else:
- print ("Invalid activity type\n")
+ print("Invalid activity type\n")
sys.exit()
base_dir = os.path.join(my_dir)
@@ -99,12 +99,12 @@
stdout, stderr = p.communicate()
if re.search("error", stdout):
- print "Error"
+ print("Error")
else:
m = re.search("^\s*power=\s*(\S*).*$", stdout, re.MULTILINE)
if m:
- print m.group(1)
+ print(m.group(1))
# f = open("~/spice_modeling/" + sys.argv[1] + ".spx")