Merge pull request #1112 from SpiNNakerManchester/with_database
Use fewer transactions
Christian-B committed Sep 25, 2023
2 parents 9a83667 + b02f10e commit 0f3b084
Showing 29 changed files with 1,212 additions and 1,241 deletions.
@@ -149,17 +149,19 @@ def test_database_interface():
db_path = database_interface(1000)
print(db_path)

reader = DatabaseReader(db_path)
assert reader.get_ip_address(0, 0) == writer.get_chip_at(0, 0).ip_address
assert all(db_p == placements.get_placement_of_vertex(m_vertex).location
for db_p, m_vertex in zip(
reader.get_placements(app_vertex_1.label),
app_vertex_1.machine_vertices))
assert reader.get_configuration_parameter_value("runtime") == 1000
assert (
reader.get_live_output_details(
app_vertex_1.label, lpg_vertex.label) ==
(tag.ip_address, tag.port, tag.strip_sdp, tag.board_address, tag.tag,
tag.destination_x, tag.destination_y))
assert reader.get_atom_id_to_key_mapping(app_vertex_1.label)
assert reader.get_key_to_atom_id_mapping(app_vertex_1.label)
with DatabaseReader(db_path) as reader:
assert (reader.get_ip_address(0, 0) ==
writer.get_chip_at(0, 0).ip_address)
assert all(db_p ==
placements.get_placement_of_vertex(m_vertex).location
for db_p, m_vertex in zip(
reader.get_placements(app_vertex_1.label),
app_vertex_1.machine_vertices))
assert reader.get_configuration_parameter_value("runtime") == 1000
assert (
reader.get_live_output_details(
app_vertex_1.label, lpg_vertex.label) ==
(tag.ip_address, tag.port, tag.strip_sdp, tag.board_address,
tag.tag, tag.destination_x, tag.destination_y))
assert reader.get_atom_id_to_key_mapping(app_vertex_1.label)
assert reader.get_key_to_atom_id_mapping(app_vertex_1.label)
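The rewritten test opens the reader as a context manager, so the underlying SQLite connection is released as soon as the assertions finish. A minimal stand-in sketch of the same pattern, using only the standard-library sqlite3 module rather than the real DatabaseReader:

```python
# Stand-in for the "with DatabaseReader(db_path) as reader:" pattern above;
# sqlite3 from the standard library is used here, not the SpiNNaker API.
import sqlite3
from contextlib import closing


def count_tables(db_path):
    # closing() guarantees the connection is released when the block exits.
    with closing(sqlite3.connect(db_path)) as conn:
        (n_tables,) = conn.execute(
            "SELECT COUNT(*) FROM sqlite_master WHERE type = 'table'"
        ).fetchone()
    return n_tables
```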
18 changes: 10 additions & 8 deletions spinn_front_end_common/data/fec_data_view.py
@@ -54,7 +54,7 @@ class _FecDataModel(object):
"_data_in_multicast_routing_tables",
"_database_file_path",
"_database_socket_addresses",
"_ds_database",
"_ds_database_path",
"_executable_targets",
"_executable_types",
"_first_machine_time_step",
@@ -133,7 +133,7 @@ def _hard_reset(self):
self._data_in_multicast_key_to_chip_map = None
self._data_in_multicast_routing_tables = None
self._database_file_path = None
self._ds_database = None
self._ds_database_path = None
self._next_ds_reference = 0
self._executable_targets = None
self._fixed_routes = None
@@ -970,17 +970,19 @@ def get_executable_targets(cls):
return cls.__fec_data._executable_targets

@classmethod
def get_ds_database(cls):
def get_ds_database_path(cls):
"""
Data Spec database.
Gets the path for the Data Spec database.
:rtype: ~spinn_front_end_common.interface.ds.DsSqlliteDatabase
:rtype: str
:raises ~spinn_utilities.exceptions.SpiNNUtilsException:
If the ds_database is currently unavailable
"""
if cls.__fec_data._ds_database is None:
raise cls._exception("_ds_database")
return cls.__fec_data._ds_database
if cls.__fec_data._ds_database_path is None:
if cls._is_mocked():
return os.path.join(cls._temporary_dir_path(), "ds.sqlite3")
raise cls._exception("_ds_database+path")
return cls.__fec_data._ds_database_path

@classmethod
def has_monitors(cls):
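With the view handing back a file path instead of an open database object, each caller opens its own short-lived connection. A hedged sketch of that usage; the import location is inferred from the file paths in this diff and the connection handling is illustrative, not library code:

```python
import os
import sqlite3

from spinn_front_end_common.data import FecDataView  # import path assumed


def open_ds_database():
    # Return a fresh sqlite3 connection to the Data Spec database;
    # the caller closes it, so no long-lived shared connection exists.
    db_path = FecDataView.get_ds_database_path()
    if not os.path.isfile(db_path):
        raise FileNotFoundError(
            f"Data Spec database has not been written yet: {db_path}")
    return sqlite3.connect(db_path)
```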
10 changes: 5 additions & 5 deletions spinn_front_end_common/data/fec_data_writer.py
@@ -29,7 +29,6 @@
from pacman.data.pacman_data_writer import PacmanDataWriter
from pacman.model.routing_tables import MulticastRoutingTables
from spinn_front_end_common.interface.buffer_management import BufferManager
from spinn_front_end_common.interface.ds import DsSqlliteDatabase
from spinn_front_end_common.interface.java_caller import JavaCaller
from spinn_front_end_common.utilities.constants import (
MICRO_TO_MILLISECOND_CONVERSION, MICRO_TO_SECOND_CONVERSION)
@@ -479,16 +478,17 @@ def set_executable_targets(self, executable_targets):
raise TypeError("executable_targets must be a ExecutableTargets")
self.__fec_data._executable_targets = executable_targets

def set_ds_database(self, ds_database):
def set_ds_database_path(self, ds_database_path):
"""
Sets the Data Spec targets database.
:type ds_database:
~spinn_front_end_common.interface.ds.DsSqlliteDatabase
"""
if not isinstance(ds_database, DsSqlliteDatabase):
raise TypeError("ds_database must be a DsSqlliteDatabase")
self.__fec_data._ds_database = ds_database
if not os.path.isfile(ds_database_path):
raise TypeError("ds_database path must be a filee")

self.__fec_data._ds_database_path = ds_database_path

def __gatherer_map_error(self):
return TypeError(
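The writer now requires the supplied path to name an existing file before it is stored. A self-contained sketch of that validation, exercised against a throwaway SQLite file; check_ds_database_path is a hypothetical helper mirroring the check above, not library code:

```python
import os
import sqlite3
import tempfile


def check_ds_database_path(ds_database_path):
    # Same file-existence guard that set_ds_database_path now performs.
    if not os.path.isfile(ds_database_path):
        raise TypeError("ds_database_path must be an existing file")
    return ds_database_path


# Usage: create an empty SQLite file, then pass its path through the check.
with tempfile.TemporaryDirectory() as tmp:
    path = os.path.join(tmp, "ds.sqlite3")
    sqlite3.connect(path).close()  # creates the file on disk
    assert check_ds_database_path(path) == path
```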
@@ -1420,7 +1420,7 @@ def _execute_graph_data_specification_writer(self):
Creates and fills the data spec database
"""
with FecTimer("Graph data specification writer", TimerWork.OTHER):
self._data_writer.set_ds_database(
self._data_writer.set_ds_database_path(
graph_data_specification_writer())

def _do_data_generation(self):
@@ -370,9 +370,8 @@ def __python_get_data_for_placements(self, recording_placements):
len(recording_placements),
"Extracting buffers from the last run")

with BufferDatabase() as db:
for placement in progress.over(recording_placements):
self._retreive_by_placement(db, placement)
for placement in progress.over(recording_placements):
self._retreive_by_placement(placement)

def get_data_by_placement(self, placement, recording_region_id):
"""
@@ -397,11 +396,10 @@ def get_data_by_placement(self, placement, recording_region_id):
return db.get_region_data(
placement.x, placement.y, placement.p, recording_region_id)

def _retreive_by_placement(self, db, placement):
def _retreive_by_placement(self, placement):
"""
Retrieve the data for a vertex; must be locked first.
:param BufferDatabase db: database to store into
:param ~pacman.model.placements.Placement placement:
the placement to get the data from
:param int recording_region_id: desired recording data region
@@ -417,8 +415,10 @@ def _retreive_by_placement(self, db, placement):
size, addr, missing = sizes_and_addresses[region]
data = self._request_data(
placement.x, placement.y, addr, size)
db.store_data_in_region_buffer(
placement.x, placement.y, placement.p, region, missing, data)
with BufferDatabase() as db:
db.store_data_in_region_buffer(
placement.x, placement.y, placement.p, region, missing,
data)

def _get_region_information(self, addr, x, y):
"""
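After this change the buffer manager opens the buffer database only around each write instead of holding one connection across the whole extraction loop. A generic sqlite3 sketch of the open-per-write pattern; the table and column names are made up for illustration and are not the BufferDatabase schema:

```python
import sqlite3
from contextlib import closing


def store_region(db_path, x, y, p, region, missing, data):
    # Each call opens, writes in a single transaction, and closes, so no
    # connection is held across the slow board-to-host download step.
    with closing(sqlite3.connect(db_path)) as conn:
        with conn:  # commits on success, rolls back on error
            conn.execute(
                "CREATE TABLE IF NOT EXISTS region_data ("
                " x INTEGER, y INTEGER, p INTEGER, region INTEGER,"
                " missing INTEGER, data BLOB)")
            conn.execute(
                "INSERT INTO region_data VALUES (?, ?, ?, ?, ?, ?)",
                (x, y, p, region, int(missing), sqlite3.Binary(data)))
```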