get_uncompressed_router_tables() is now just get_uncompressed()
Christian-B committed Feb 10, 2022
1 parent 56c61d1 commit 52980bc
Showing 12 changed files with 40 additions and 40 deletions.
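
For callers the rename is purely mechanical; here is a minimal before/after sketch of a call site. The import path for MulticastRoutingTables is an assumption and is not taken from this diff.

    # Before/after sketch of a call site (illustrative only; the
    # MulticastRoutingTables import path is assumed, not shown in this diff).
    from pacman.data.pacman_data_view import PacmanDataView
    from pacman.data.pacman_data_writer import PacmanDataWriter
    from pacman.model.routing_tables import MulticastRoutingTables

    writer = PacmanDataWriter.mock()
    tables = MulticastRoutingTables()

    # Before this commit:
    #     writer.set_uncompressed_router_tables(tables)
    #     tables_again = PacmanDataView.get_uncompressed_router_tables()
    # After this commit:
    writer.set_uncompressed(tables)
    assert PacmanDataView.get_uncompressed() is tables
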
22 changes: 11 additions & 11 deletions pacman/data/pacman_data_view.py
@@ -48,12 +48,12 @@ class _PacmanDataModel(object):
"_machine_graph",
"_machine_partition_n_keys_map",
"_placements",
"_precompressed_router_tables",
"_uncompressed_router_tables",
"_precompressed",
"_routing_infos",
"_runtime_graph",
"_runtime_machine_graph",
"_tags"
"_tags",
"_uncompressed"
]

def __new__(cls):
@@ -78,8 +78,8 @@ def _hard_reset(self):
Clears out all data that should change after a reset and graph change
"""
self._placements = None
self._precompressed_router_tables = None
self._uncompressed_router_tables = None
self._precompressed = None
self._uncompressed = None
self._runtime_graph = None
self._runtime_machine_graph = None
self._routing_infos = None
@@ -379,13 +379,13 @@ def get_machine_partition_n_keys_map(cls):
# RoutingTables

@classmethod
def get_uncompressed_router_tables(cls):
if cls.__pacman_data._uncompressed_router_tables is None:
def get_uncompressed(cls):
if cls.__pacman_data._uncompressed is None:
raise cls._exception("router_tables")
return cls.__pacman_data._uncompressed_router_tables
return cls.__pacman_data._uncompressed

@classmethod
def get_precompressed_router_tables(cls):
if cls.__pacman_data._precompressed_router_tables is None:
def get_precompressed(cls):
if cls.__pacman_data._precompressed is None:
raise cls._exception("precompressed_router_tables")
return cls.__pacman_data._precompressed_router_tables
return cls.__pacman_data._precompressed
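
As the updated unit tests further down exercise, the renamed getters keep their guard and raise until a writer has supplied tables. A hedged sketch of that behaviour; the tests expect DataNotYetAvialable, whose import path is not part of this diff, so a broad except is used here.

    # Sketch: the renamed getters still raise before any tables are set.
    from pacman.data.pacman_data_view import PacmanDataView
    from pacman.data.pacman_data_writer import PacmanDataWriter

    PacmanDataWriter.setup()
    try:
        PacmanDataView.get_precompressed()
    except Exception as err:  # DataNotYetAvialable in the tests below
        print("precompressed tables not yet available:", err)
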
12 changes: 6 additions & 6 deletions pacman/data/pacman_data_writer.py
@@ -180,24 +180,24 @@ def set_machine_partition_n_keys_map(self, machine_partition_n_keys_map):
self.__pacman_data._machine_partition_n_keys_map = \
machine_partition_n_keys_map

def set_uncompressed_router_tables(self, router_tables):
def set_uncompressed(self, router_tables):
"""
Sets the router_tables value
Sets the uncompressed router_tables value
:param MulticastRoutingTables router_tables: new value
"""
if not isinstance(router_tables, MulticastRoutingTables):
raise TypeError(
"router_tables should be a MulticastRoutingTables")
self.__pacman_data._uncompressed_router_tables = router_tables
self.__pacman_data._uncompressed = router_tables

def set_precompressed_router_tables(self, router_tables):
def set_precompressed(self, router_tables):
"""
Sets the router_tables value
Sets the precompressed router_tables value
:param MulticastRoutingTables router_tables: new value
"""
if not isinstance(router_tables, MulticastRoutingTables):
raise TypeError(
"router_tables should be a MulticastRoutingTables")
self.__pacman_data._precompressed_router_tables = router_tables
self.__pacman_data._precompressed = router_tables
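
The setters keep their isinstance check, so passing anything other than a MulticastRoutingTables still fails fast. A short illustrative sketch, mirroring the TypeError cases in the updated test_data.py:

    # Sketch: type guard on the renamed setter.
    from pacman.data.pacman_data_writer import PacmanDataWriter

    writer = PacmanDataWriter.mock()
    try:
        writer.set_uncompressed("Bacon")  # not a MulticastRoutingTables
    except TypeError as err:
        print(err)  # "router_tables should be a MulticastRoutingTables"
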
Another changed file (name not captured):
@@ -50,7 +50,7 @@ def _run(self):
"""
:rtype: MulticastRoutingTables
"""
router_tables = PacmanDataView.get_precompressed_router_tables()
router_tables = PacmanDataView.get_precompressed()
# create progress bar
progress = ProgressBar(
router_tables.routing_tables,
2 changes: 1 addition & 1 deletion pacman/operations/router_compressors/ranged_compressor.py
@@ -40,7 +40,7 @@ def range_compressor(accept_overflow=True):
message = "Precompressing tables using Range Compressor"
else:
message = "Compressing tables using Range Compressor"
router_tables = PacmanDataView.get_uncompressed_router_tables()
router_tables = PacmanDataView.get_uncompressed()
progress = ProgressBar(len(router_tables.routing_tables), message)
compressor = RangeCompressor()
compressed_tables = MulticastRoutingTables()
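
Compressors now read the tables through the shorter getter. A usage sketch of range_compressor under the new names; the MulticastRoutingTables import path is assumed, and real runs also need the Mapping config set up as in the tests further down.

    # Sketch: running the range compressor through the renamed writer API.
    from pacman.data.pacman_data_writer import PacmanDataWriter
    from pacman.model.routing_tables import MulticastRoutingTables
    from pacman.operations.router_compressors.ranged_compressor import (
        range_compressor)

    # An empty table set keeps the sketch self-contained; real callers
    # pass the tables produced by routing.
    PacmanDataWriter.mock().set_uncompressed(MulticastRoutingTables())
    compressed = range_compressor()  # reads PacmanDataView.get_uncompressed()
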
4 changes: 2 additions & 2 deletions pacman_integration_tests/manual_runner.py
@@ -59,7 +59,7 @@
# Hack to stop it throwing a wobbly for too many entries
Machine.ROUTER_ENTRIES = 50000
set_config("Mapping", "router_table_compress_as_far_as_possible", True)
PacmanDataWriter.mock().set_uncompressed_router_tables(original_tables)
PacmanDataWriter.mock().set_uncompressed(original_tables)

if MUNDY:
start = time.time()
@@ -72,7 +72,7 @@
pair_tables = pair_compressor()
pair_time = time.time()
if MUNDY and PRE:
PacmanDataWriter.mock().set_uncompressed_router_tables(pre_tables)
PacmanDataWriter.mock().set_uncompressed(pre_tables)
both_tables = ordered_covering_compressor()
both_time = time.time()
for original in original_tables:
20 changes: 10 additions & 10 deletions unittests/data/test_data.py
@@ -208,23 +208,23 @@ def test_router_tables(self):
table = MulticastRoutingTables()
writer = PacmanDataWriter.setup()
with self.assertRaises(DataNotYetAvialable):
PacmanDataView.get_uncompressed_router_tables()
writer.set_uncompressed_router_tables(table)
self.assertEqual(table, PacmanDataView.get_uncompressed_router_tables())
PacmanDataView.get_uncompressed()
writer.set_uncompressed(table)
self.assertEqual(table, PacmanDataView.get_uncompressed())
with self.assertRaises(DataNotYetAvialable):
PacmanDataView.get_precompressed_router_tables()
PacmanDataView.get_precompressed()
with self.assertRaises(TypeError):
writer.set_uncompressed_router_tables("Bacon")
writer.set_uncompressed("Bacon")

def test_precompressed_router_tables(self):
table = MulticastRoutingTables()
writer = PacmanDataWriter.setup()
with self.assertRaises(DataNotYetAvialable):
PacmanDataView.get_precompressed_router_tables()
writer.set_precompressed_router_tables(table)
PacmanDataView.get_precompressed()
writer.set_precompressed(table)
self.assertEqual(
table, PacmanDataView.get_precompressed_router_tables())
table, PacmanDataView.get_precompressed())
with self.assertRaises(DataNotYetAvialable):
PacmanDataView.get_uncompressed_router_tables()
PacmanDataView.get_uncompressed()
with self.assertRaises(TypeError):
writer.set_precompressed_router_tables()
writer.set_precompressed()
Another changed file (name not captured):
@@ -36,7 +36,7 @@ def test_onordered_pair_big(self):
j_router = os.path.join(path, "many_to_one.json.gz")
original_tables = from_json(j_router)

PacmanDataWriter.mock().set_precompressed_router_tables(original_tables)
PacmanDataWriter.mock().set_precompressed(original_tables)
with self.assertRaises(PacmanElementAllocationException):
pair_compressor(
ordered=False, accept_overflow=False, verify=True)
Another changed file (name not captured):
@@ -55,11 +55,11 @@ def setUp(self):
set_config(
"Mapping", "router_table_compress_as_far_as_possible", True)
writer = PacmanDataWriter.mock()
writer.set_uncompressed_router_tables(original_tables)
writer.set_precompressed_router_tables(original_tables)
writer.set_uncompressed(original_tables)
writer.set_precompressed(original_tables)

def check_compression(self, compressed_tables):
for original in PacmanDataView.get_precompressed_router_tables():
for original in PacmanDataView.get_precompressed():
compressed = compressed_tables.get_routing_table_for_chip(
original.x, original.y)
assert compressed.number_of_entries < original.number_of_entries
@@ -71,7 +71,7 @@ def test_pair_compressor(self):

def test_range_compressor_skipped(self):
compressed_tables = range_compressor()
for original in PacmanDataView.get_uncompressed_router_tables():
for original in PacmanDataView.get_uncompressed():
compressed = compressed_tables.get_routing_table_for_chip(
original.x, original.y)
self.assertEqual(original, compressed)
Another changed file (name not captured):
@@ -37,7 +37,7 @@ def test_oc_big(self):
j_router = os.path.join(path,
"many_to_one.json.gz")
original_tables = from_json(j_router)
PacmanDataWriter.mock().set_precompressed_router_tables(
PacmanDataWriter.mock().set_precompressed(
original_tables)

compressed_tables = ordered_covering_compressor()
Another changed file (name not captured):
@@ -35,7 +35,7 @@ def test_pair_big(self):
path = os.path.dirname(os.path.abspath(class_file))
j_router = os.path.join(path, "many_to_one.json.gz")
original_tables = from_json(j_router)
PacmanDataWriter.mock().set_precompressed_router_tables(
PacmanDataWriter.mock().set_precompressed(
original_tables)

compressed_tables = pair_compressor()
Another changed file (name not captured):
@@ -49,7 +49,7 @@ def test_tables(self):
table_path = os.path.join(path, "table2.csv.gz")
table = from_csv(table_path)
tables.add_routing_table(table)
PacmanDataWriter.mock().set_uncompressed_router_tables(tables)
PacmanDataWriter.mock().set_uncompressed(tables)
compressed = range_compressor()
c_table = compressed.get_routing_table_for_chip(0, 0)
compare_tables(table, c_table)
Another changed file (name not captured):
@@ -36,7 +36,7 @@ def test_onordered_pair_big(self):
path = os.path.dirname(os.path.abspath(class_file))
j_router = os.path.join(path, "many_to_one.json.gz")
original_tables = from_json(j_router)
PacmanDataWriter.mock().set_precompressed_router_tables(
PacmanDataWriter.mock().set_precompressed(
original_tables)

# Hack to stop it throwing a wobly for too many entries
