
Commit

Remove unused fake_data_for_testing
jpgill86 committed Jul 27, 2019
1 parent 59ae5c0 commit 1b2a157
Showing 1 changed file with 1 addition and 8 deletions.
9 changes: 1 addition & 8 deletions neurotic/datasets/data.py
@@ -14,9 +14,8 @@
 import neo
 
 from ..datasets.metadata import _abs_path
-from neo.test.generate_datasets import fake_neo
 
 
-def LoadDataset(metadata, lazy=False, signal_group_mode='split-all', filter_events_from_epochs=False, fake_data_for_testing=False):
+def LoadDataset(metadata, lazy=False, signal_group_mode='split-all', filter_events_from_epochs=False):
     """
     Load a dataset.
@@ -71,12 +70,6 @@ def LoadDataset(metadata, lazy=False, signal_group_mode='split-all', filter_even
     spikes_dataframe = _read_spikes_file(metadata, blk)
     blk.segments[0].spiketrains += _create_neo_spike_trains_from_dataframe(spikes_dataframe, metadata, t_start, t_stop, sampling_period)
 
-    if fake_data_for_testing:
-        # load fake data as a demo
-        blk.segments[0].epochs += [fake_neo('Epoch') for _ in range(5)]
-        blk.segments[0].events += [fake_neo('Event') for _ in range(5)]
-        blk.segments[0].spiketrains += [fake_neo('SpikeTrain') for _ in range(5)]
-
     # alphabetize epoch and event channels by name
     blk.segments[0].epochs.sort(key=lambda ep: ep.name)
     blk.segments[0].events.sort(key=lambda ev: ev.name)
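For reference, the removed branch appended fake Neo objects generated by neo.test.generate_datasets.fake_neo. The same thing can still be done ad hoc in a test or demo script, outside LoadDataset; a minimal sketch follows, where the Block/Segment setup is an assumption for illustration and not part of this commit:

import neo
from neo.test.generate_datasets import fake_neo

# assumed setup: a Block with one Segment, standing in for the Block that
# LoadDataset builds from real data files
blk = neo.Block()
blk.segments.append(neo.Segment())

# attach a handful of fake epochs, events, and spike trains, as the removed
# fake_data_for_testing branch did
blk.segments[0].epochs += [fake_neo('Epoch') for _ in range(5)]
blk.segments[0].events += [fake_neo('Event') for _ in range(5)]
blk.segments[0].spiketrains += [fake_neo('SpikeTrain') for _ in range(5)]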
