Skip to content

Commit

Permalink
update SampEn and AppEn to allow for variable tolerance values (#32)
Browse files Browse the repository at this point in the history
* update SampEn and AppEn to allow for variable tolerance values
The tolerance can now be specified either as a value relative to the standard deviation of x or as an absolute value.

* - Removed tolerance_reference to avoid confusion
- Updated docstring

- Fix inconsistency with r / tolerance

* - specify docstring for SampEn and ApEn
- add test for verification of datatype of r

* formatting with black
  • Loading branch information
JAC28 committed Aug 4, 2023
1 parent 14e7181 commit 14b0734
Showing 1 changed file with 25 additions and 9 deletions.
34 changes: 25 additions & 9 deletions antropy/entropy.py
Original file line number Diff line number Diff line change
Expand Up @@ -370,7 +370,7 @@ def svd_entropy(x, order=3, delay=1, normalize=False):
return svd_e


def _app_samp_entropy(x, order, metric="chebyshev", approximate=True):
def _app_samp_entropy(x, order, r, metric="chebyshev", approximate=True):
"""Utility function for `app_entropy`` and `sample_entropy`."""
_all_metrics = KDTree.valid_metrics
_all_metrics = _all_metrics() if callable(_all_metrics) else _all_metrics
Expand All @@ -380,7 +380,6 @@ def _app_samp_entropy(x, order, metric="chebyshev", approximate=True):
"metric names are: %s" % (metric, _all_metrics)
)
phi = np.zeros(2)
r = 0.2 * np.std(x, ddof=0)

# compute phi(order, r)
_emb_data1 = _embed(x, order, 1)
Expand Down Expand Up @@ -455,7 +454,7 @@ def _numba_sampen(sequence, order, r):
return -log(numerator / denominator)


def app_entropy(x, order=2, metric="chebyshev"):
def app_entropy(x, order=2, tolerance=None, metric="chebyshev"):
"""Approximate Entropy.
Parameters
Expand All @@ -464,12 +463,14 @@ def app_entropy(x, order=2, metric="chebyshev"):
One-dimensional time series of shape (n_times).
order : int
Embedding dimension. Default is 2.
tolerance : float
Tolerance value for acceptance of the template vector. Default is 0.2
times the standard deviation of x.
metric : str
Name of the distance metric function used with
:py:class:`sklearn.neighbors.KDTree`. Default is to use the
`Chebyshev <https://en.wikipedia.org/wiki/Chebyshev_distance>`_
distance.
Returns
-------
ae : float
Expand All @@ -481,7 +482,7 @@ def app_entropy(x, order=2, metric="chebyshev"):
regularity and the unpredictability of fluctuations over time-series data.
Smaller values indicates that the data is more regular and predictable.
The tolerance value (:math:`r`) is set to :math:`0.2 * \\text{std}(x)`.
The default tolerance value (:math:`r`) is set to :math:`0.2 * \\text{std}(x)`.
Code adapted from the `mne-features <https://mne.tools/mne-features/>`_
package by Jean-Baptiste Schiratti and Alexandre Gramfort.
Expand Down Expand Up @@ -543,11 +544,17 @@ def app_entropy(x, order=2, metric="chebyshev"):
>>> print(f"{ant.app_entropy(x):.4f}")
-0.0010
"""
phi = _app_samp_entropy(x, order=order, metric=metric, approximate=True)
# define r
if tolerance is None:
r = 0.2 * np.std(x, ddof=0)
else:
assert isinstance(tolerance, (float, int))
r = tolerance
phi = _app_samp_entropy(x, order=order, r=r, metric=metric, approximate=True)
return np.subtract(phi[0], phi[1])


def sample_entropy(x, order=2, metric="chebyshev"):
def sample_entropy(x, order=2, tolerance=None, metric="chebyshev"):
"""Sample Entropy.
Parameters
Expand All @@ -556,6 +563,9 @@ def sample_entropy(x, order=2, metric="chebyshev"):
One-dimensional time series of shape (n_times).
order : int
Embedding dimension. Default is 2.
tolerance : float
Tolerance value for acceptance of the template vector. Default is 0.2
times the standard deviation of x.
metric : str
Name of the distance metric function used with
:py:class:`sklearn.neighbors.KDTree`. Default is to use the
Expand Down Expand Up @@ -652,11 +662,17 @@ def sample_entropy(x, order=2, metric="chebyshev"):
>>> print(f"{ant.sample_entropy(x):.4f}")
-0.0000
"""
# define r
if tolerance is None:
r = 0.2 * np.std(x, ddof=0)
else:
assert isinstance(tolerance, (float, int))
r = tolerance
x = np.asarray(x, dtype=np.float64)
if metric == "chebyshev" and x.size < 5000:
return _numba_sampen(x, order=order, r=(0.2 * x.std(ddof=0)))
return _numba_sampen(x, order=order, r=r)
else:
phi = _app_samp_entropy(x, order=order, metric=metric, approximate=False)
phi = _app_samp_entropy(x, order=order, r=r, metric=metric, approximate=False)
return -np.log(np.divide(phi[1], phi[0]))


Expand Down

0 comments on commit 14b0734

Please sign in to comment.