Skip to content

Commit

Permalink
Cumulative fix for various Neo 0.7.x related issues (NeuralEnsemble#203)
Browse files Browse the repository at this point in the history
* Change signal's `duplicate_with_new_array` to `duplicate_with_new_data`

For compatibility with Neo 0.7.0

* Fixed error by replacing deprecated pandas function

* Quick fix due to event labels being treated as array annotations

* Quick fix due to event labels being treated as array annotations

* Fixed handling of neo 0.7 array_annotations

* Fixed requirements for neo

* Removed left-over print statement

* Added a hot fix to circumvent Neo problem in generating fake epochs

* Added fixed Neo Version 0.7.x

* Fixed failing test in spike_train_correlation by alternate numpy construct.
  • Loading branch information
mdenker committed Mar 31, 2019
1 parent c6ffc3b commit 7ebd0c8
Show file tree
Hide file tree
Showing 7 changed files with 111 additions and 10 deletions.
11 changes: 11 additions & 0 deletions elephant/neo_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,17 @@ def extract_neo_attrs(obj, parents=True, child_first=True,
"""
attrs = obj.annotations.copy()
if not skip_array:
try:
for a in obj.array_annotations:
# Exclude labels and durations (and maybe other attributes) that are handled as array_annotations
# These would be duplicate
if a not in [_[0] for _ in obj._necessary_attrs + obj._recommended_attrs]:
if "array_annotations" not in attrs:
attrs["array_annotations"] = {}
attrs["array_annotations"][a] = obj.array_annotations[a].copy()
except AttributeError:
pass
for attr in obj._necessary_attrs + obj._recommended_attrs:
if skip_array and len(attr) >= 3 and attr[2]:
continue
Expand Down
2 changes: 1 addition & 1 deletion elephant/pandas_bridge.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ def _sort_inds(obj, axis=0):
return obj

obj = obj.reorder_levels(sorted(obj.axes[axis].names), axis=axis)
return obj.sortlevel(0, axis=axis, sort_remaining=True)
return obj.sort_index(level=0, axis=axis, sort_remaining=True)


def _extract_neo_attrs_safe(obj, parents=True, child_first=True):
Expand Down
6 changes: 3 additions & 3 deletions elephant/signal_processing.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ def zscore(signal, inplace=True):
# Create new signal instance
result = []
for sig in signal:
sig_dimless = sig.duplicate_with_new_array(
sig_dimless = sig.duplicate_with_new_data(
(sig.magnitude - m.magnitude) / s.magnitude) / sig.units
result.append(sig_dimless)
else:
Expand Down Expand Up @@ -376,7 +376,7 @@ def _design_butterworth_filter(Fs, hpfreq=None, lpfreq=None, order=4):
)

if isinstance(signal, neo.AnalogSignal):
return signal.duplicate_with_new_array(np.rollaxis(filtered_data, -1, 0))
return signal.duplicate_with_new_data(np.rollaxis(filtered_data, -1, 0))
elif isinstance(signal, pq.quantity.Quantity):
return filtered_data * signal.units
else:
Expand Down Expand Up @@ -608,6 +608,6 @@ def hilbert(signal, N='nextpow'):
else:
raise ValueError("'{}' is an unknown N.".format(N))

output = signal.duplicate_with_new_array(
output = signal.duplicate_with_new_data(
scipy.signal.hilbert(signal.magnitude, N=n, axis=0)[:n_org])
return output / output.units
92 changes: 91 additions & 1 deletion elephant/test/test_neo_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,8 @@
'index',
'channel_names',
'channel_ids',
'coordinates'
'coordinates',
'array_annotations'
]


Expand Down Expand Up @@ -446,6 +447,26 @@ def test__extract_neo_attrs__spiketrain_parents_empty_array(self):
def test__extract_neo_attrs__epoch_parents_empty_array(self):
obj = fake_neo('Epoch', seed=0)
targ = get_fake_values('Epoch', seed=0)

# TODO: Circumvent bug in neo 0.7.1, where fake objects are not
# properly constructed. Here, the returned fake values do not match
# with the corresponding object in terms of the length of the Epoch
# object. We introduce a manual fix, cutting the fake Neo object to the
# number of items returned by the get_fake_values() function.
num_times = len(targ['times'])
array_annotation_save = obj.array_annotations
obj.array_annotations = {
'durations': array_annotation_save['durations'],
'labels': array_annotation_save['labels']}
obj.durations = obj.durations[:num_times]
obj.labels = obj.labels[:num_times]
for k in array_annotation_save:
obj.array_annotations[k] = array_annotation_save[k][:num_times]

# TODO: Fix once inconsistencies in handling array annotations
# are properly fixed in neo
del targ['array_annotations']

del targ['times']

res000 = nt.extract_neo_attrs(obj, parents=False)
Expand Down Expand Up @@ -616,6 +637,26 @@ def test__extract_neo_attrs__spiketrain_noparents_array(self):
def test__extract_neo_attrs__epoch_noparents_array(self):
obj = self.block.list_children_by_class('Epoch')[0]
targ = get_fake_values('Epoch', seed=obj.annotations['seed'])

# TODO: Circumvent bug in neo 0.7.1, where fake objects are not
# properly constructed. Here, the returned fake values do not match
# with the corresponding object in terms of the length of the Epoch
# object. We introduce a manual fix, cutting the fake Neo object to the
# number of items returned by the get_fake_values() function.
num_times = len(targ['times'])
array_annotation_save = obj.array_annotations
obj.array_annotations = {
'durations': array_annotation_save['durations'],
'labels': array_annotation_save['labels']}
obj.durations = obj.durations[:num_times]
obj.labels = obj.labels[:num_times]
for k in array_annotation_save:
obj.array_annotations[k] = array_annotation_save[k][:num_times]

# TODO: Fix once inconsistencies in handling array annotations
# are properly fixed in neo
del targ['array_annotations']

del targ['times']

res00 = nt.extract_neo_attrs(obj, parents=False, skip_array=False)
Expand All @@ -627,6 +668,15 @@ def test__extract_neo_attrs__epoch_noparents_array(self):
res11 = nt.extract_neo_attrs(obj, parents=False, child_first=True)
res21 = nt.extract_neo_attrs(obj, parents=False, child_first=False)

for k in res00:
print(k,res00[k])
print('-=-')
for k in res01:
print(k,res01[k])
print('-=-')
for k in targ:
print(k,targ[k])

del res00['i']
del res10['i']
del res20['i']
Expand Down Expand Up @@ -866,6 +916,26 @@ def test__extract_neo_attrs__epoch_parents_childfirst_array(self):
targ = get_fake_values('Block', seed=blk.annotations['seed'])
targ.update(get_fake_values('Segment', seed=seg.annotations['seed']))
targ.update(get_fake_values('Epoch', seed=obj.annotations['seed']))

# TODO: Circumvent bug in neo 0.7.1, where fake objects are not
# properly constructed. Here, the returned fake values do not match
# with the corresponding object in terms of the length of the Epoch
# object. We introduce a manual fix, cutting the fake Neo object to the
# number of items returned by the get_fake_values() function.
num_times = len(targ['times'])
array_annotation_save = obj.array_annotations
obj.array_annotations = {
'durations': array_annotation_save['durations'],
'labels': array_annotation_save['labels']}
obj.durations = obj.durations[:num_times]
obj.labels = obj.labels[:num_times]
for k in array_annotation_save:
obj.array_annotations[k] = array_annotation_save[k][:num_times]

# TODO: Fix once inconsistencies in handling array annotations
# are properly fixed in neo
del targ['array_annotations']

del targ['times']

res00 = nt.extract_neo_attrs(obj, parents=True, skip_array=False)
Expand Down Expand Up @@ -959,6 +1029,26 @@ def test__extract_neo_attrs__epoch_parents_parentfirst_array(self):
targ = get_fake_values('Epoch', seed=obj.annotations['seed'])
targ.update(get_fake_values('Segment', seed=seg.annotations['seed']))
targ.update(get_fake_values('Block', seed=blk.annotations['seed']))

# TODO: Circumvent bug in neo 0.7.1, where fake objects are not
# properly constructed. Here, the returned fake values do not match
# with the corresponding object in terms of the length of the Epoch
# object. We introduce a manual fix, cutting the fake Neo object to the
# number of items returned by the get_fake_values() function.
num_times = len(targ['times'])
array_annotation_save = obj.array_annotations
obj.array_annotations = {
'durations': array_annotation_save['durations'],
'labels': array_annotation_save['labels']}
obj.durations = obj.durations[:num_times]
obj.labels = obj.labels[:num_times]
for k in array_annotation_save:
obj.array_annotations[k] = array_annotation_save[k][:num_times]

# TODO: Fix once inconsistencies in handling array annotations
# are properly fixed in neo
del targ['array_annotations']

del targ['times']

res0 = nt.extract_neo_attrs(obj, parents=True, skip_array=False,
Expand Down
4 changes: 2 additions & 2 deletions elephant/test/test_spike_train_correlation.py
Original file line number Diff line number Diff line change
Expand Up @@ -521,8 +521,8 @@ def test_border_correction(self):
border_correction=False, binary=False, kernel=None,
method='memory')

self.assertNotEqual(cch.all(), cch_corrected.all())
self.assertNotEqual(cch_mem.all(), cch_corrected_mem.all())
self.assertEqual(np.any(np.not_equal(cch, cch_corrected)), True)
self.assertEqual(np.any(np.not_equal(cch_mem, cch_corrected_mem)), True)

def test_kernel(self):
'''Test if the smoothing kernel is correctly defined, and whether it is
Expand Down
2 changes: 1 addition & 1 deletion elephant/test/test_sta.py
Original file line number Diff line number Diff line change
Expand Up @@ -284,7 +284,7 @@ def test_wrong_input_type(self):
self.anasig0, [1, 2, 3])
self.assertRaises(ValueError,
sta.spike_field_coherence,
self.anasig0.duplicate_with_new_array([]), self.bst0)
self.anasig0.duplicate_with_new_data([]), self.bst0)

def test_start_stop_times_out_of_range(self):
self.assertRaises(ValueError,
Expand Down
4 changes: 2 additions & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
neo>=0.5.0
neo<=0.7.1,<0.8.0
numpy>=1.8.2
quantities>=0.10.1
scipy>=0.14.0
six>=1.10.0
six>=1.10.0

0 comments on commit 7ebd0c8

Please sign in to comment.