
Commit

Merge 0484dea into f7e5404
jpgill86 committed Dec 9, 2022
2 parents: f7e5404 + 0484dea · commit: 551f7f7
Showing 2 changed files with 3 additions and 3 deletions.
4 changes: 2 additions & 2 deletions neurotic/datasets/data.py
@@ -90,7 +90,7 @@ def load_dataset(metadata, blk=None, lazy=False, signal_group_mode='split-all',

# classify spikes by amplitude if not using lazy loading of signals
if not lazy:
- blk.segments[0].spiketrains += _run_amplitude_discriminators(metadata, blk)
+ blk.segments[0].spiketrains.extend(_run_amplitude_discriminators(metadata, blk))

# read in spikes identified by spike sorting using tridesclous
spikes_dataframe = _read_spikes_file(metadata, blk)
@@ -99,7 +99,7 @@ def load_dataset(metadata, blk=None, lazy=False, signal_group_mode='split-all',
t_start = blk.segments[0].analogsignals[0].t_start # assuming all AnalogSignals start at the same time
t_stop = blk.segments[0].analogsignals[0].t_stop # assuming all AnalogSignals stop at the same time
sampling_period = blk.segments[0].analogsignals[0].sampling_period # assuming all AnalogSignals have the same sampling rate
- blk.segments[0].spiketrains += _create_neo_spike_trains_from_dataframe(spikes_dataframe, metadata, t_start, t_stop, sampling_period)
+ blk.segments[0].spiketrains.extend(_create_neo_spike_trains_from_dataframe(spikes_dataframe, metadata, t_start, t_stop, sampling_period))
else:
logger.warning('Ignoring tridesclous_file because the sampling rate and start time could not be inferred from analog signals')

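Context for this change: with neo >= 0.10, Segment.spiketrains is no longer a plain Python list but a SpikeTrainList, and the old in-place += with an ordinary list of SpikeTrain objects presumably no longer behaves as expected there, while .extend() is supported by both containers. A minimal sketch of the resulting pattern, not taken from the commit (the spike times are made up and the list stands in for the trains returned by _run_amplitude_discriminators()):

    # Minimal sketch, not from the commit: extending a Segment's spiketrains
    # container with new SpikeTrain objects (illustrative spike times).
    import quantities as pq
    from neo.core import Block, Segment, SpikeTrain

    blk = Block()
    blk.segments.append(Segment())

    # stand-ins for the trains returned by the amplitude discriminators
    new_trains = [
        SpikeTrain([0.1, 0.5, 0.9] * pq.s, t_stop=1.0 * pq.s),
        SpikeTrain([0.2, 0.6] * pq.s, t_stop=1.0 * pq.s),
    ]

    # .extend() appends each train in place and works whether spiketrains is a
    # plain list (neo < 0.10) or a SpikeTrainList (neo >= 0.10)
    blk.segments[0].spiketrains.extend(new_trains)
    print(len(blk.segments[0].spiketrains))  # 2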
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,6 +1,6 @@
# av # required but typically not installable via pip, try `conda install -c conda-forge av`
ephyviewer>=1.3.0
- neo>=0.7.2,<0.10.0 # TODO: fix for neo>=0.10
+ neo>=0.7.2
numpy
packaging
pandas
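With the .extend() change in place, the <0.10.0 upper bound and its TODO are dropped, so any neo release from 0.7.2 onward is accepted. If code ever needed to branch on the installed neo version, a runtime check is possible with the already-listed packaging dependency; this is a hypothetical sketch, the commit itself adds no such check:

    # Hypothetical check, not part of the commit: branch on the installed neo
    # version, e.g. to handle the list vs. SpikeTrainList container difference.
    from packaging.version import Version
    import neo

    if Version(neo.__version__) >= Version("0.10.0"):
        print("neo >= 0.10: Segment.spiketrains is a SpikeTrainList")
    else:
        print("neo < 0.10: Segment.spiketrains is a plain list")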
