Skip to content

Commit

Permalink
Simulation recorders (#96)
Browse files Browse the repository at this point in the history
* SimulationResult & SimulationRecorder added

* import and instantiate a SimulationResult during prepare

* Implemented basic recorders for the NEURON adapter

* NEST simulation spike recorders attempt

* quick-and-dirty spike_detector solution; replace with device wrapper

* recording from multiple mpi processes, and deleting all gdf

* fix num_neurons: PsthRows now take the full matrix and count cells

* added metadata required for plotting

* NEST device protocols (#63)

* added DeviceProtocols, a formalized way of wrapping NEST devices.

* added tests

* fixed get_device_protocol called

* adapted PSTH plotting to new results data structure

* Allow PSTH rows to be ordered

* Callable mod kwarg to manipulate PSTH fig

* SynapseRecorder basics, current and spike recorder still required

* fixed black

* skip empty connectivity sets and raise a warning. closes #70

* A "run_id" attr can be used to separate spikes with identical cell ids

* changed variable name from 'stimulus' to 'device'

* Bad workaround: reinitialise new class of devices at prepare time

* Added recorder mixins for meta and path

* added patternless mixin for devices that don't need patterns

* Fixed exception handling of recorders

* Add all cell type keys to the statistics dict

* Added Neuron exceptions

* Caught missing cell model error

* Added option to select all with `section_count`

* Import all errors from .exceptions

* Terminal relays without connections are also added to map.

* Added cell_id metadata and fixed SpikeRecorder data

* Added synapse recorder

* Spike generator inputs can be recorded and are recorded by default

* Small fixes

* Record multiple vectors. Spikes are recorded by recording `_connections`

* made the raster plot more robust

* Fixed relays without targets on node being treated as cells.

* Added synaptic multiplicity test

* Realigned with newer arborize version that doesn't deduplicate synapses

* v3.4.0b0 - First BSB package for PizDaint

* Fixed comment indent

Co-authored-by: claudia casellato <claudia.casellato@unipv.it>
Co-authored-by: Stefano Casali <scasali84@gmail.com>
Co-authored-by: Luca Drera <luca.drera01@universitadipavia.it>
  • Loading branch information
4 people committed Sep 15, 2020
1 parent 7632b55 commit 7ea8fb7
Show file tree
Hide file tree
Showing 17 changed files with 664 additions and 159 deletions.
2 changes: 1 addition & 1 deletion bsb/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
__version__ = "3.3.0"
__version__ = "3.4.0b0"

from .reporting import set_verbosity, report, warn
4 changes: 2 additions & 2 deletions bsb/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -187,8 +187,8 @@ def _initialise_simulation(self, simulation):
sim_cell.initialise(self)
for sim_connection in simulation.connection_models.values():
sim_connection.initialise(self)
for stimulus in simulation.devices.values():
stimulus.initialise(self)
for device in simulation.devices.values():
device.initialise(self)

def place_cell_types(self):
"""
Expand Down
3 changes: 2 additions & 1 deletion bsb/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,13 +19,14 @@
LayerNotFoundError=_e(),
SimulationNotFoundError=_e(),
AdapterError=_e(
NeuronError=_e(DeviceConnectionError=_e(),),
NestError=_e(
NestKernelError=_e(NestModuleError=_e(),),
NestModelError=_e(),
KernelLockedError=_e(),
SuffixTakenError=_e(),
ReceptorSpecificationError=_e(),
)
),
),
ConnectivityError=_e(),
MorphologyError=_e(
Expand Down
72 changes: 45 additions & 27 deletions bsb/plotting.py
Original file line number Diff line number Diff line change
Expand Up @@ -513,29 +513,34 @@ def hdf5_plot_spike_raster(spike_recorders, input_region=None, show=True):
"""
Create a spike raster plot from an HDF5 group of spike recorders.
"""
cell_ids = [int(k) for k in spike_recorders.keys()]
x = {}
y = {}
colors = {}
ids = {}
for cell_id, dataset in spike_recorders.items():
data = dataset[:, 0]
attrs = dict(dataset.attrs)
label = attrs["label"]
if len(dataset.shape) == 1 or dataset.shape[1] == 1:
times = dataset[()]
set_ids = np.ones(len(times)) * int(
attrs.get("cell_id", attrs.get("cell", cell_id))
)
else:
times = dataset[:, 1]
set_ids = dataset[:, 0]
label = attrs.get("label", "unlabelled")
if not label in x:
x[label] = []
if not label in y:
y[label] = []
if not label in colors:
colors[label] = attrs["color"]
colors[label] = attrs.get("color", "black")
if not label in ids:
ids[label] = 0
cell_id = ids[label]
ids[label] += 1
# Add the spike timings on the X axis.
x[label].extend(data)
x[label].extend(times)
# Set the cell id for the Y axis of each added spike timing.
y[label].extend(cell_id for _ in range(len(data)))
y[label].extend(set_ids)
# Use the parallel arrays x & y to plot a spike raster
fig = go.Figure(
layout=dict(
Expand Down Expand Up @@ -710,60 +715,70 @@ def add_row(self, row):
row.index = len(self.rows)
self.rows.append(row)

def ordered_rows(self):
return sorted(self.rows, key=lambda t: t.order or 0)


class PSTHStack:
    """
    One bar stack of a PSTH row: accumulates spike times and counts the
    unique cells that contributed spikes, per simulation run.
    """

    def __init__(self, name, color):
        self.name = name
        self.color = str(color)
        # Total number of contributing cells, summed over all runs.
        self.cells = 0
        # Per-run arrays of unique cell ids seen so far; run 0 exists upfront.
        self._included_ids = {0: np.empty(0)}
        # Flat list of every spike time added to this stack.
        self.list = []

    # NOTE: a stale pre-refactor `extend(self, arr, num)` duplicate was removed;
    # it was dead code shadowed by this definition.
    def extend(self, arr, run=0):
        """
        Add spike data to the stack.

        :param arr: 2D array; column 0 holds cell ids, column 1 spike times.
        :param run: Run identifier; each cell is counted once per run so that
            identical cell ids from different runs are counted separately.
        """
        self.list.extend(arr[:, 1])
        if run not in self._included_ids:
            self._included_ids[run] = np.empty(0)
        # Count all of the cells across the runs, but count unique cells per run
        self._included_ids[run] = np.unique(
            np.concatenate((self._included_ids[run], arr[:, 0]))
        )
        self.cells = sum(map(len, self._included_ids.values()))


class PSTHRow:
    """
    One subplot row of a PSTH figure, holding one or more named stacks of
    spike data that share a color palette.

    NOTE(review): the scraped diff left deleted pre-refactor `__init__` and
    `extend` signatures interleaved with the new ones, which is invalid
    Python; this is the reconstructed post-commit version.
    """

    def __init__(self, name, color, order=0):
        # Third-party `colour` is imported lazily so plotting-only deps are
        # not required to import this module.
        from colour import Color

        self.name = name
        # Fall back to a random color when none was specified.
        color = Color(color) if color else Color(pick_for=random.random())
        # Palette for consecutive stacks: shades from the base color to black.
        self.palette = list(color.range_to("black", 6))
        self.stacks = {}
        # Largest spike time seen; used to set the x-axis range.
        self.max = -float("inf")
        self.order = order

    def extend(self, arr, stack=None, run=0):
        """
        Add spike data to one of the row's stacks, creating it on first use.

        :param arr: 2D array; column 0 holds cell ids, column 1 spike times.
        :param stack: Stack name; ``None`` uses the row's own name.
        :param run: Run identifier, forwarded to :class:`PSTHStack`.
        """
        if stack not in self.stacks:
            self.stacks[stack] = PSTHStack(
                stack or self.name, self.palette[len(self.stacks)]
            )
        self.stacks[stack].extend(arr, run=run)
        self.max = max(self.max, np.max(arr[:, 1])) if len(arr) > 0 else self.max


@_figure
def hdf5_plot_psth(handle, duration=3, cutoff=0, start=0, fig=None, **kwargs):
def hdf5_plot_psth(handle, duration=3, cutoff=0, start=0, fig=None, mod=None, **kwargs):
psth = PSTH()
row_map = {}
for g in handle.values():
l = g.attrs["label"]
l = g.attrs.get("label", "unlabelled")
if l not in row_map:
color = g.attrs.get("color", None)
row_map[l] = row = PSTHRow(l, color)
order = g.attrs.get("order", 0)
row_map[l] = row = PSTHRow(l, color, order=order)
psth.add_row(row)
else:
row = row_map[l]
adj = g[:, 0] - cutoff
# print(adj)
# Read how many neurons this spikes dataset represents
num_neurons = g.attrs.get("num_neurons", 1)
stack = g.attrs.get("stack", l)
row.extend(adj, num=num_neurons, stack=stack)
run_id = g.attrs.get("run_id", 0)
adjusted = g[()]
adjusted[:, 1] = adjusted[:, 1] - cutoff
row.extend(adjusted, stack=g.attrs.get("stack", None), run=run_id)
subplots_fig = make_subplots(
cols=1,
rows=len(psth.rows),
subplot_titles=[row.name for row in psth.rows],
subplot_titles=[row.name for row in psth.ordered_rows()],
x_title=kwargs.get("x_title", "Time (ms)"),
y_title=kwargs.get("y_title", "Population firing rate (Hz)"),
)
Expand All @@ -772,22 +787,25 @@ def hdf5_plot_psth(handle, duration=3, cutoff=0, start=0, fig=None, **kwargs):
_max = max(_max, row.max)
subplots_fig.update_xaxes(range=[start, _max])
subplots_fig.update_layout(title_text=kwargs.get("title", "PSTH"))
# Allow the original figure to be updated before messing with it.
if mod is not None:
mod(subplots_fig)
# Overwrite the layout and grid of the single plot that is handed to us
# to turn it into a subplots figure. All modifications except for adding traces
# should happen before this point.
fig._grid_ref = subplots_fig._grid_ref
fig._layout = subplots_fig._layout
for i, row in enumerate(psth.rows):
for i, row in enumerate(psth.ordered_rows()):
for name, stack in sorted(row.stacks.items(), key=lambda x: x[0]):
counts, bins = np.histogram(stack.list, bins=np.arange(start, _max, duration))
if name.startswith("##"):
if str(name).startswith("##"):
# Lazy way to order the stacks; Stack names can start with ## and a number
# and it will be sorted by name, but the ## and number are not displayed.
name = name[4:]
trace = go.Bar(
x=bins,
y=counts / stack.cells * 1000 / duration,
name=name,
name=name or row.name,
marker=dict(color=stack.color),
)
fig.add_trace(trace, row=i + 1, col=1)
Expand Down
6 changes: 1 addition & 5 deletions bsb/simulation/__init__.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,5 @@
import abc, random, types
import numpy as np
from ..helpers import ConfigurableClass, SortableByAfter
from ..reporting import report
from ..exceptions import *
from .cell import SimulationCell
from .component import SimulationComponent
from .targetting import TargetsNeurons, TargetsSections
from .adapter import SimulatorAdapter
from .results import SimulationResult, SimulationRecorder
59 changes: 58 additions & 1 deletion bsb/simulation/results.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,60 @@
from ..reporting import warn
import traceback


class SimulationResult:
    """
    Container for the recorders of a single simulation run.

    Adapters register :class:`SimulationRecorder` instances here; after the
    run, the recorded data can be retrieved with :meth:`collect` or
    :meth:`safe_collect`.
    """

    def __init__(self):
        # Fixed: removed a stray `pass` statement left above this line.
        self.recorders = []

    def add(self, recorder):
        # Register a recorder whose data is retrieved after the simulation.
        self.recorders.append(recorder)

    def create_recorder(self, path_func, data_func, meta_func=None):
        """
        Create, register and return a :class:`ClosureRecorder` built from the
        given closures.

        :param path_func: Callable returning the storage path of the data.
        :param data_func: Callable returning the recorded data.
        :param meta_func: Optional callable returning a metadata dict.
        """
        recorder = ClosureRecorder(path_func, data_func, meta_func)
        self.add(recorder)
        return recorder

    def collect(self):
        # Yield a (path, data, meta) triple for every registered recorder.
        for recorder in self.recorders:
            yield recorder.get_path(), recorder.get_data(), recorder.get_meta()

    def safe_collect(self):
        """
        Like :meth:`collect`, but a recorder that raises is skipped with a
        warning instead of aborting the whole collection.
        """
        gen = iter(self.collect())
        while True:
            try:
                yield next(gen)
            except StopIteration:
                break
            except Exception:
                # Best effort: report the failing recorder and keep going.
                traceback.print_exc()
                warn("Recorder errored out!")


class SimulationRecorder:
    """
    Interface for simulation recorders. Subclasses must provide ``get_path``
    and ``get_data``; ``get_meta`` is optional and defaults to no metadata.
    """

    def get_path(self):
        """Return the storage path for the recorded data."""
        err = NotImplementedError("Recorders need to implement the `get_path` function.")
        raise err

    def get_data(self):
        """Return the recorded data."""
        err = NotImplementedError("Recorders need to implement the `get_data` function.")
        raise err

    def get_meta(self):
        """Return the metadata dict associated with the data (empty by default)."""
        return dict()


class ClosureRecorder(SimulationRecorder):
    """
    Recorder whose ``get_path``/``get_data``/``get_meta`` are supplied as
    closures at construction time.
    """

    def __init__(self, path_func, data_func, meta_func=None):
        super().__init__()
        # Shadow the interface methods with instance attributes holding the
        # given closures; `get_meta` keeps its default unless one is given.
        overrides = {"get_path": path_func, "get_data": data_func}
        if meta_func:
            overrides["get_meta"] = meta_func
        for attr, func in overrides.items():
            setattr(self, attr, func)


class PresetPathMixin:
    # Mixin for recorders whose storage path is known up front: the host
    # class is expected to set a `path` attribute, returned here as-is.
    def get_path(self):
        return self.path


class PresetMetaMixin:
    # Mixin for recorders whose metadata is known up front: the host class
    # is expected to set a `meta` attribute (a dict), returned here as-is.
    def get_meta(self):
        return self.meta
2 changes: 2 additions & 0 deletions bsb/simulation/targetting.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,4 +148,6 @@ def _section_target_default(self, cell):
sections = [s for s in cell.sections if self.section_type in s.labels]
else:
sections = cell.soma
if self.section_count == "all":
return sections
return [random.choice(sections) for _ in range(self.section_count)]
47 changes: 0 additions & 47 deletions bsb/simulators/keyboard/__init__.py

This file was deleted.

0 comments on commit 7ea8fb7

Please sign in to comment.