Merge pull request #71 from janscience/master
new event detection functions. fixed threshold estimations.
tillraab committed Oct 30, 2018
2 parents d59e056 + 1626b64 commit 2c1ca56
Showing 18 changed files with 1,255 additions and 598 deletions.
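In short, this merge renames thunderfish.peakdetection to thunderfish.eventdetection, adds threshold_crossings() and peak_freqs(), and fixes the threshold estimators. A minimal migration sketch inferred from the updated tests below — treat the exact signatures as assumptions read off this diff, not documented API:

    import numpy as np
    import thunderfish.eventdetection as ed  # formerly thunderfish.peakdetection

    data = np.random.randn(1000)  # placeholder signal, for illustration only

    # estimate a detection threshold from the data (th_factor=1.0 mirrors the tests):
    threshold = ed.std_threshold(data, th_factor=1.0)

    # detect peaks and troughs as index arrays; the assertions removed below
    # suggest the optional time argument of detect_peaks() is gone:
    peaks, troughs = ed.detect_peaks(data, threshold)
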
121 changes: 75 additions & 46 deletions tests/test_peakdetection.py → tests/test_eventdetection.py
@@ -1,6 +1,6 @@
 from nose.tools import assert_true, assert_equal, assert_almost_equal, assert_raises
 import numpy as np
-import thunderfish.peakdetection as pd
+import thunderfish.eventdetection as ed


def test_detect_peaks():
@@ -36,24 +36,16 @@ def test_detect_peaks():
     threshold = 0.5
     min_thresh = 0.3
 
-    assert_raises(ValueError, pd.detect_peaks, data, 0.0)
+    assert_raises(ValueError, ed.detect_peaks, data, 0.0)
 
-    assert_raises(ValueError, pd.detect_peaks, data, -1.0)
+    assert_raises(ValueError, ed.detect_peaks, data, -1.0)
 
-    assert_raises(IndexError, pd.detect_peaks, data, threshold, time[:len(time)//2])
-
-    peaks, troughs = pd.detect_peaks(data, threshold)
+    peaks, troughs = ed.detect_peaks(data, threshold)
     assert_true(np.all(peaks == peak_indices),
                 "detect_peaks(data, threshold) did not correctly detect peaks")
     assert_true(np.all(troughs == trough_indices),
                 "detect_peaks(data, threshold) did not correctly detect troughs")
-
-    peaks, troughs = pd.detect_peaks(data, threshold, time)
-    assert_true(np.all(peaks == peak_times),
-                "detect_peaks(data, threshold, time) did not correctly detect peaks")
-    assert_true(np.all(troughs == trough_times),
-                "detect_peaks(data, threshold, time) did not correctly detect troughs")

 def test_detect_dynamic_peaks():
     # generate data:
@@ -88,55 +80,92 @@ def test_detect_dynamic_peaks():
     threshold = 0.5
     min_thresh = 0.3
 
-    assert_raises(ValueError, pd.detect_dynamic_peaks, data, 0.0, min_thresh, 0.5, time,
-                  pd.accept_peak_size_threshold)
+    assert_raises(ValueError, ed.detect_dynamic_peaks, data, 0.0, min_thresh, 0.5, time,
+                  ed.accept_peak_size_threshold)
 
-    assert_raises(ValueError, pd.detect_dynamic_peaks, data, -1.0, min_thresh, 0.5, time,
-                  pd.accept_peak_size_threshold)
+    assert_raises(ValueError, ed.detect_dynamic_peaks, data, -1.0, min_thresh, 0.5, time,
+                  ed.accept_peak_size_threshold)
 
-    assert_raises(ValueError, pd.detect_dynamic_peaks, data, threshold, 0.0, 0.5, time,
-                  pd.accept_peak_size_threshold)
+    assert_raises(ValueError, ed.detect_dynamic_peaks, data, threshold, 0.0, 0.5, time,
+                  ed.accept_peak_size_threshold)
 
-    assert_raises(ValueError, pd.detect_dynamic_peaks, data, threshold, -1.0, 0.5, time,
-                  pd.accept_peak_size_threshold)
+    assert_raises(ValueError, ed.detect_dynamic_peaks, data, threshold, -1.0, 0.5, time,
+                  ed.accept_peak_size_threshold)
 
-    assert_raises(ValueError, pd.detect_dynamic_peaks, data, threshold, min_thresh, 0.0, time,
-                  pd.accept_peak_size_threshold)
+    assert_raises(ValueError, ed.detect_dynamic_peaks, data, threshold, min_thresh, 0.0, time,
+                  ed.accept_peak_size_threshold)
 
-    assert_raises(ValueError, pd.detect_dynamic_peaks, data, threshold, min_thresh, -1.0, time,
-                  pd.accept_peak_size_threshold)
+    assert_raises(ValueError, ed.detect_dynamic_peaks, data, threshold, min_thresh, -1.0, time,
+                  ed.accept_peak_size_threshold)
 
-    assert_raises(IndexError, pd.detect_dynamic_peaks, data, threshold, min_thresh, 0.5, time[:len(time)//2],
-                  pd.accept_peak_size_threshold)
+    assert_raises(IndexError, ed.detect_dynamic_peaks, data, threshold, min_thresh, 0.5, time[:len(time)//2],
+                  ed.accept_peak_size_threshold)
 
-    peaks, troughs = pd.detect_dynamic_peaks(data, threshold, min_thresh, 0.5, time,
-                                             pd.accept_peak_size_threshold)
+    peaks, troughs = ed.detect_dynamic_peaks(data, threshold, min_thresh, 0.5, time,
+                                             ed.accept_peak_size_threshold)
     assert_true(np.all(peaks == peak_times),
                 "detect_dynamic_peaks(data, threshold, time, accept_peak_size_threshold) did not correctly detect peaks")
     assert_true(np.all(troughs == trough_times),
                 "detect_dynamic_peaks(data, threshold, time, accept_peak_size_threshold) did not correctly detect troughs")
 
-    peaks, troughs = pd.detect_dynamic_peaks(data, threshold, min_thresh, 0.5, None,
-                                             pd.accept_peak_size_threshold,
+    peaks, troughs = ed.detect_dynamic_peaks(data, threshold, min_thresh, 0.5, None,
+                                             ed.accept_peak_size_threshold,
                                              thresh_ampl_fac=0.9, thresh_weight=0.1)
     assert_true(np.all(peaks == peak_indices),
                 "detect_dynamic_peaks(data, threshold, time, accept_peak_size_threshold) did not correctly detect peaks")
     assert_true(np.all(troughs == trough_indices),
                 "detect_dynamic_peaks(data, threshold, time, accept_peak_size_threshold) did not correctly detect troughs")


+def test_threshold_crossings():
+    # generate data:
+    time = np.arange(0.0, 10.0, 0.01)
+    data = np.zeros(time.shape)
+    pt_indices = np.random.randint(5, len(data) - 10, size=40)
+    pt_indices.sort()
+    while np.any(np.diff(pt_indices).min() < 5):
+        pt_indices = np.random.randint(5, len(data) - 10, size=40)
+        pt_indices.sort()
+    up_indices = pt_indices[0::2]
+    down_indices = pt_indices[1::2]
+    up = True
+    for i in range(len(pt_indices) - 1):
+        if up:
+            data[pt_indices[i]:pt_indices[i + 1]] = 1.0
+        else:
+            data[pt_indices[i]:pt_indices[i + 1]] = 0.0
+        up = not up
+    if up:
+        data[pt_indices[-1]:] = 1.0
+
+    threshold = 0.5
+    up, down = ed.threshold_crossings(data, threshold)
+    assert_true(np.all(up == up_indices-1),
+                "threshold_crossings(data, threshold) did not correctly detect up crossings")
+    assert_true(np.all(down == down_indices-1),
+                "threshold_crossings(data, threshold) did not correctly detect down crossings")
+
+    threshold = 0.1 + 0.8/10.0*time
+    assert_raises(IndexError, ed.threshold_crossings, data, threshold[1:])
+    up, down = ed.threshold_crossings(data, threshold)
+    assert_true(np.all(up == up_indices-1),
+                "threshold_crossings(data, threshold) did not correctly detect up crossings")
+    assert_true(np.all(down == down_indices-1),
+                "threshold_crossings(data, threshold) did not correctly detect down crossings")


 def test_thresholds():
     # generate data:
     data = np.random.randn(10000)
-    std_th = pd.std_threshold(data, th_factor=1.0)
-    prc_th = pd.percentile_threshold(data, th_factor=1.0, percentile=16.0)
+    std_th = ed.std_threshold(data, th_factor=1.0)
+    prc_th = ed.percentile_threshold(data, th_factor=1.0, percentile=16.0)
     assert_almost_equal(std_th, 1.0, 1, 'std_threshold %g estimate failed' % std_th)
     assert_true(np.abs(prc_th-2.0) < 0.1, 'percentile_threshold %g estimate failed' % prc_th)
     time = np.arange(0.0, 10.0, 0.01)
     data = np.sin(2.0*np.pi*21.7*time)
-    mm_th = pd.minmax_threshold(data, th_factor=1.0)
+    mm_th = ed.minmax_threshold(data, th_factor=1.0)
     assert_almost_equal(mm_th, 2.0, 2, 'minmax_threshold %g estimate failed' % mm_th)
-    prc_th = pd.percentile_threshold(data, th_factor=1.0, percentile=0.1)
+    prc_th = ed.percentile_threshold(data, th_factor=1.0, percentile=0.1)
     assert_true(np.abs(prc_th-2.0) < 0.1, 'percentile_threshold %g estimate failed' % prc_th)


@@ -148,28 +177,28 @@ def test_trim():
     trough_indices = pt_indices[1:n:2]
 
     # peak first, same length:
-    p_inx, t_inx = pd.trim(peak_indices, trough_indices)
+    p_inx, t_inx = ed.trim(peak_indices, trough_indices)
     assert_true(len(p_inx) == len(peak_indices) and np.all(p_inx == peak_indices),
                 "trim(peak_indices, trough_indices) failed on peaks")
     assert_true(len(t_inx) == len(trough_indices) and np.all(t_inx == trough_indices),
                 "trim(peak_indices, trough_indices) failed on troughs")
 
     # trough first, same length:
-    p_inx, t_inx = pd.trim(peak_indices[1:], trough_indices[:-1])
+    p_inx, t_inx = ed.trim(peak_indices[1:], trough_indices[:-1])
     assert_true(len(p_inx) == len(peak_indices[1:]) and np.all(p_inx == peak_indices[1:]),
                 "trim(peak_indices[1:], trough_indices[:-1]) failed on peaks")
     assert_true(len(t_inx) == len(trough_indices[:-1]) and np.all(t_inx == trough_indices[:-1]),
                 "trim(peak_indices[1:], trough_indices[:-1]) failed on troughs")
 
     # peak first, more peaks:
-    p_inx, t_inx = pd.trim(peak_indices, trough_indices[:-2])
+    p_inx, t_inx = ed.trim(peak_indices, trough_indices[:-2])
     assert_true(len(p_inx) == len(peak_indices[:-2]) and np.all(p_inx == peak_indices[:-2]),
                 "trim(peak_indices, trough_indices[:-2]) failed on peaks")
     assert_true(len(t_inx) == len(trough_indices[:-2]) and np.all(t_inx == trough_indices[:-2]),
                 "trim(peak_indices, trough_indices[:-2]) failed on troughs")
 
     # trough first, more troughs:
-    p_inx, t_inx = pd.trim(peak_indices[1:-2], trough_indices)
+    p_inx, t_inx = ed.trim(peak_indices[1:-2], trough_indices)
     assert_true(len(p_inx) == len(peak_indices[1:-2]) and np.all(p_inx == peak_indices[1:-2]),
                 "trim(peak_indices[1:-2], trough_indices) failed on peaks")
     assert_true(len(t_inx) == len(trough_indices[:-3]) and np.all(t_inx == trough_indices[:-3]),
@@ -184,28 +213,28 @@ def test_trim_to_peak():
     trough_indices = pt_indices[1:n:2]
 
     # peak first, same length:
-    p_inx, t_inx = pd.trim_to_peak(peak_indices, trough_indices)
+    p_inx, t_inx = ed.trim_to_peak(peak_indices, trough_indices)
     assert_true(len(p_inx) == len(peak_indices) and np.all(p_inx == peak_indices),
                 "trim_to_peak(peak_indices, trough_indices) failed on peaks")
     assert_true(len(t_inx) == len(trough_indices) and np.all(t_inx == trough_indices),
                 "trim_to_peak(peak_indices, trough_indices) failed on troughs")
 
     # trough first, same length:
-    p_inx, t_inx = pd.trim_to_peak(peak_indices[1:], trough_indices[:-1])
+    p_inx, t_inx = ed.trim_to_peak(peak_indices[1:], trough_indices[:-1])
     assert_true(len(p_inx) == len(peak_indices[1:-1]) and np.all(p_inx == peak_indices[1:-1]),
                 "trim_to_peak(peak_indices[1:], trough_indices[:-1]) failed on peaks")
     assert_true(len(t_inx) == len(trough_indices[1:-1]) and np.all(t_inx == trough_indices[1:-1]),
                 "trim_to_peak(peak_indices[1:], trough_indices[:-1]) failed on troughs")
 
     # peak first, more peaks:
-    p_inx, t_inx = pd.trim_to_peak(peak_indices, trough_indices[:-2])
+    p_inx, t_inx = ed.trim_to_peak(peak_indices, trough_indices[:-2])
     assert_true(len(p_inx) == len(peak_indices[:-2]) and np.all(p_inx == peak_indices[:-2]),
                 "trim_to_peak(peak_indices, trough_indices[:-2]) failed on peaks")
     assert_true(len(t_inx) == len(trough_indices[:-2]) and np.all(t_inx == trough_indices[:-2]),
                 "trim_to_peak(peak_indices, trough_indices[:-2]) failed on troughs")
 
     # trough first, more troughs:
-    p_inx, t_inx = pd.trim_to_peak(peak_indices[1:-2], trough_indices)
+    p_inx, t_inx = ed.trim_to_peak(peak_indices[1:-2], trough_indices)
     assert_true(len(p_inx) == len(peak_indices[1:-2]) and np.all(p_inx == peak_indices[1:-2]),
                 "trim_to_peak(peak_indices[1:-2], trough_indices) failed on peaks")
     assert_true(len(t_inx) == len(trough_indices[1:-2]) and np.all(t_inx == trough_indices[1:-2]),
@@ -220,25 +249,25 @@ def test_trim_closest():
     trough_indices = pt_indices[1:n:2]
 
     trough_indices = peak_indices - np.random.randint(1, 5, size=len(peak_indices))
-    p_inx, t_inx = pd.trim_closest(peak_indices, trough_indices)
+    p_inx, t_inx = ed.trim_closest(peak_indices, trough_indices)
     assert_true(len(p_inx) == len(peak_indices) and np.all(p_inx == peak_indices),
                 "trim_closest(peak_indices, peak_indices-5) failed on peaks")
     assert_true(len(t_inx) == len(trough_indices) and np.all(t_inx == trough_indices),
                 "trim_closest(peak_indices, peak_indices-5) failed on troughs")
 
-    p_inx, t_inx = pd.trim_closest(peak_indices[1:], trough_indices[:-1])
+    p_inx, t_inx = ed.trim_closest(peak_indices[1:], trough_indices[:-1])
     assert_true(len(p_inx) == len(peak_indices[1:-1]) and np.all(p_inx == peak_indices[1:-1]),
                 "trim_closest(peak_indices[1:], peak_indices-5) failed on peaks")
     assert_true(len(t_inx) == len(trough_indices[1:-1]) and np.all(t_inx == trough_indices[1:-1]),
                 "trim_closest(peak_indices[1:], peak_indices-5) failed on troughs")
 
     trough_indices = peak_indices + np.random.randint(1, 5, size=len(peak_indices))
-    p_inx, t_inx = pd.trim_closest(peak_indices, trough_indices)
+    p_inx, t_inx = ed.trim_closest(peak_indices, trough_indices)
     assert_true(len(p_inx) == len(peak_indices) and np.all(p_inx == peak_indices),
                 "trim_closest(peak_indices, peak_indices+5) failed on peaks")
     assert_true(len(t_inx) == len(trough_indices) and np.all(t_inx == trough_indices),
                 "trim_closest(peak_indices, peak_indices+5) failed on troughs")
 
-    p_inx, t_inx = pd.trim_closest(np.array([]), np.array([]))
+    p_inx, t_inx = ed.trim_closest(np.array([]), np.array([]))
     assert_equal(len(p_inx), 0, "trim_closest([], []) failed on peaks")
     assert_equal(len(t_inx), 0, "trim_closest([], []) failed on troughs")
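
Taken together, these tests sketch the public surface of the new eventdetection module: threshold estimators (std_threshold, percentile_threshold, minmax_threshold), detectors (detect_peaks, detect_dynamic_peaks, and the new threshold_crossings), and the trim helpers that align peak and trough indices. A usage sketch assembled from the calls above; the signal and all parameter values are illustrative, not from the commit:

    import numpy as np
    import thunderfish.eventdetection as ed

    # illustrative signal: a noisy 5 Hz sine wave
    time = np.arange(0.0, 2.0, 0.001)
    data = np.sin(2.0*np.pi*5.0*time) + 0.1*np.random.randn(len(time))

    # pick a detection threshold from the data; th_factor is a free choice:
    threshold = ed.std_threshold(data, th_factor=2.0)

    # peaks and troughs as arrays of indices into data:
    peaks, troughs = ed.detect_peaks(data, threshold)

    # enforce alternation starting with a peak:
    peaks, troughs = ed.trim_to_peak(peaks, troughs)

    # the new threshold_crossings() returns indices of up- and down-crossings;
    # per the tests, the threshold may be a scalar or a per-sample array:
    up, down = ed.threshold_crossings(data, threshold)
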
23 changes: 18 additions & 5 deletions tests/test_harmonicgroups.py
@@ -18,18 +18,31 @@ def test_harmonic_groups():
                                  amplitudes=[1.0, 0.7, 0.2, 0.1],
                                  phases=[0.0, 0.0, 0.0, 0.0])
     fish3 = ff.generate_wavefish(eodfs[2], samplerate, duration=8.0, noise_std=0.0,
-                                 amplitudes=[10.0, 5.0, 1.0],
-                                 phases=[0.0, 0.0, 0.0])
+                                 amplitudes=[10.0, 5.0, 1.0, 0.1],
+                                 phases=[0.0, 0.0, 0.0, 0.0])
     fish4 = ff.generate_wavefish(eodfs[3], samplerate, duration=8.0, noise_std=0.0,
-                                 amplitudes=[6.0, 3.0, 1.0],
-                                 phases=[0.0, 0.0, 0.0])
+                                 amplitudes=[6.0, 3.0, 1.0, 0.1],
+                                 phases=[0.0, 0.0, 0.0, 0.0])
     data = fish1 + fish2 + fish3 + fish4
 
     # analyse:
     psd_data = ps.psd(data, samplerate, fresolution=df)
     groups = hg.harmonic_groups(psd_data[1], psd_data[0])[0]
     fundamentals = hg.fundamental_freqs(groups)
 
+    fdbs = hg.fundamental_freqs_and_power(groups)
     # check:
     assert_true(np.all(np.abs(eodfs-fundamentals) < df),
                 'harmonic_groups() did not correctly detect all fundamental frequencies')
 
+    fundamentals = hg.fundamental_freqs([groups, [groups[1], groups[3]]])
+    fdbs = hg.fundamental_freqs_and_power([groups, [groups[1], groups[3]]], 3)
+    # check:
+    assert_true(np.all(np.abs(eodfs-fundamentals[0]) < df),
+                'harmonic_groups() did not correctly detect all fundamental frequencies')
+
+    fundamentals = hg.fundamental_freqs([[groups, [groups[1], groups[3]]]])
+    fdbs = hg.fundamental_freqs_and_power([[groups, [groups[1], groups[3]]]], 10, True)
+    # check:
+    assert_true(np.all(np.abs(eodfs-fundamentals[0][0]) < df),
+                'harmonic_groups() did not correctly detect all fundamental frequencies')
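
For context, this test drives the whole harmonic-group pipeline. A sketch of that flow; the module aliases ff, ps, and hg are defined in a collapsed hunk above, so the imports here (fakefish, powerspectrum, harmonicgroups) are assumptions, and all numbers are made up:

    import numpy as np
    import thunderfish.fakefish as ff        # assumed home of generate_wavefish()
    import thunderfish.powerspectrum as ps
    import thunderfish.harmonicgroups as hg

    samplerate = 44100.0                     # Hz
    df = 0.5                                 # frequency resolution in Hz
    eodfs = np.array([310.0, 562.0, 781.0, 905.0])

    # superimpose four simulated wave-type electric fish:
    data = sum(ff.generate_wavefish(f, samplerate, duration=8.0, noise_std=0.0,
                                    amplitudes=[1.0, 0.5, 0.1],
                                    phases=[0.0, 0.0, 0.0])
               for f in eodfs)

    # power spectrum; per the test, psd() returns power first and frequencies
    # second, while harmonic_groups() takes frequencies first:
    power, freqs = ps.psd(data, samplerate, fresolution=df)
    groups = hg.harmonic_groups(freqs, power)[0]

    # fundamental frequencies, and the new variant that also reports power:
    fundamentals = hg.fundamental_freqs(groups)
    fdbs = hg.fundamental_freqs_and_power(groups)
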

36 changes: 29 additions & 7 deletions tests/test_powerspectrum.py
@@ -1,8 +1,9 @@
-from nose.tools import assert_equal
+from nose.tools import assert_equal, assert_true
 import numpy as np
 import thunderfish.powerspectrum as ps
 
 # run this with "nosetests tests/test_powerspectrum.py" in the top-level thunderfish folder.
+
 def test_powerspectrum():
     # generate data
     fundamental = 300.  # Hz
@@ -14,13 +15,34 @@ def test_powerspectrum():
     psd_data = ps.multi_resolution_psd(data, samplerate, fresolution=[0.5, 1])
 
     # test the results
-    assert_equal(round(psd_data[0][1][np.argmax(psd_data[0][0])]), fundamental, 'peak in PSD is not the fundamental '
-                                                                                'frequency given.')
-    assert_equal(round(psd_data[1][1][np.argmax(psd_data[1][0])]), fundamental, 'peak in PSD is not the fundamental '
-                                                                                'frequency given.')
+    assert_equal(round(psd_data[0][1][np.argmax(psd_data[0][0])]), fundamental,
+                 'peak in PSD is not the fundamental frequency given.')
+    assert_equal(round(psd_data[1][1][np.argmax(psd_data[1][0])]), fundamental,
+                 'peak in PSD is not the fundamental frequency given.')
     # run multi_resolution_psd with a single fresolution (float)
     psd_data = ps.multi_resolution_psd(data, samplerate, fresolution=0.5)
 
     # test the result
-    assert_equal(round(psd_data[1][np.argmax(psd_data[0])]), fundamental, 'peak in PSD is not the fundamental '
-                                                                          'frequency given.')
+    assert_equal(round(psd_data[1][np.argmax(psd_data[0])]), fundamental,
+                 'peak in PSD is not the fundamental frequency given.')
 
 
+def test_peak_freqs():
+    # generate data:
+    dt = 0.001
+    time = np.arange(0.0, 10.0, dt)
+    data = np.zeros(len(time))
+    w = len(data)//10
+    freqs = 20.0 + 80.0*np.random.rand(10)
+    onsets = []
+    offsets = []
+    for k in range(10):
+        i0 = k*w
+        i1 = i0 + w
+        data[i0:i1] = np.sin(2.0*np.pi*freqs[k]*time[i0:i1])
+        onsets.append(i0+w//10)
+        offsets.append(i1-w//10)
+    df = 0.5
+    mfreqs = ps.peak_freqs(onsets, offsets, data, 1.0/dt, freq_resolution=df)
+    assert_true(np.all(np.abs(freqs - mfreqs) <= 2.0*df), "peak_freqs() failed")
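
As these tests illustrate, multi_resolution_psd() accepts either a single frequency resolution or a list of resolutions, and the new peak_freqs() estimates the dominant frequency inside each onset/offset segment. A sketch built from the calls above; the tone and segment boundaries are illustrative:

    import numpy as np
    import thunderfish.powerspectrum as ps

    samplerate = 44100.0                       # Hz
    time = np.arange(0.0, 8.0, 1.0/samplerate)
    data = np.sin(2.0*np.pi*300.0*time)        # a 300 Hz test tone

    # single resolution (float): returns one (power, frequency) pair
    power, freqs = ps.multi_resolution_psd(data, samplerate, fresolution=0.5)
    print(freqs[np.argmax(power)])             # ~300 Hz

    # list of resolutions: returns one (power, frequency) pair per entry
    for power, freqs in ps.multi_resolution_psd(data, samplerate,
                                                fresolution=[0.5, 1.0]):
        print(freqs[np.argmax(power)])

    # the new peak_freqs(): dominant frequency per segment, with segment
    # boundaries given as sample indices:
    onsets = [0, len(data)//2]
    offsets = [len(data)//2, len(data)]
    mfreqs = ps.peak_freqs(onsets, offsets, data, samplerate, freq_resolution=0.5)
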

2 changes: 1 addition & 1 deletion thunderfish/__init__.py
@@ -2,7 +2,7 @@

 __all__ = ['dataloader',
            'configfile',
-           'peakdetection',
+           'eventdetection',
            'bestwindow',
            'powerspectrum',
            'harmonicgroups',