Skip to content

Commit

Permalink
add study to TOC
Browse files Browse the repository at this point in the history
  • Loading branch information
DominiqueMakowski committed Jun 5, 2022
1 parent 8c61b7d commit 2d91d36
Show file tree
Hide file tree
Showing 15 changed files with 691,505 additions and 18,829 deletions.
4 changes: 4 additions & 0 deletions docs/functions/complexity.rst
Original file line number Diff line number Diff line change
Expand Up @@ -187,6 +187,10 @@ Other indices
""""""""""""""""""""""""""""""""
.. autofunction:: neurokit2.complexity.fishershannon_information

*mutual_information()*
""""""""""""""""""""""
.. autofunction:: neurokit2.complexity.mutual_information

*complexity_hjorth()*
"""""""""""""""""""""""
.. autofunction:: neurokit2.complexity.complexity_hjorth
Expand Down
2 changes: 2 additions & 0 deletions docs/studies/complexity_eeg.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
.. include:: ../../studies/complexity_eeg/README.md
:parser: myst_parser.sphinx_
1 change: 1 addition & 0 deletions docs/studies/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ Some of the work-in-progress studies.
HRV Structure <https://psyarxiv.com/mwa6x/>
Complexity Review <https://psyarxiv.com/f8k3x/>
Complexity Structure <https://psyarxiv.com/v5tqw/>
EEG Complexity: Parameters Selection <complexity_eeg>
ECG Benchmark <ecg_benchmark>
EEG Analysis with GAMs <erp_gam>
EOG blink template <eog_blinktemplate>
Expand Down
9 changes: 4 additions & 5 deletions neurokit2/complexity/information_mutual.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@ def mutual_information(x, y, method="varoquaux", bins="default", **kwargs):
combination of number of bins.
Parameters
----------
x : Union[list, np.array, pd.Series]
Expand Down Expand Up @@ -74,12 +73,12 @@ def mutual_information(x, y, method="varoquaux", bins="default", **kwargs):
import numpy as np
import pandas as pd
x = np.random.normal(size=200)
x = np.random.normal(size=400)
y = x**2
data = pd.DataFrame()
for level in np.linspace(0.01, 4, 200):
noise = np.random.normal(scale=level, size=200)
for level in np.linspace(0.01, 3, 200):
noise = np.random.normal(scale=level, size=400)
rez = pd.DataFrame({"Noise": [level],
"MI1": [nk.mutual_information(x, y + noise, method="varoquaux", sigma=1)],
"MI2": [nk.mutual_information(x, y + noise, method="varoquaux", sigma=0)],
Expand Down Expand Up @@ -129,7 +128,7 @@ def mutual_information(x, y, method="varoquaux", bins="default", **kwargs):
continue
p_x = pd.cut(x, i, labels=False)
p_y = pd.cut(y, j, labels=False)
new_mi = _mutual_information_sklearn(p_x, p_y) / np.log2(np.min([i,j]))
new_mi = _mutual_information_sklearn(p_x, p_y) / np.log2(np.min([i, j]))
if new_mi > mi:
mi = new_mi
else:
Expand Down
4 changes: 2 additions & 2 deletions neurokit2/complexity/optim_complexity_delay.py
Original file line number Diff line number Diff line change
Expand Up @@ -432,11 +432,11 @@ def _embedding_delay_metric(
embedded = complexity_embedding(signal, delay=current_tau, dimension=2)
if metric == "Mutual Information":
values[i] = mutual_information(
embedded[:, 0], embedded[:, 1], method="shannon"
embedded[:, 0], embedded[:, 1], method="varoquaux"
)
elif metric == "Mutual Information 2":
values[i] = mutual_information(
embedded[:, 0], embedded[:, 1], method="varoquaux"
embedded[:, 0], embedded[:, 1], method="knn"
)
elif metric == "Displacement":
dimension = 2
Expand Down
139 changes: 138 additions & 1 deletion studies/complexity_eeg/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@ Complexity Analysis
================

- [Introduction](#introduction)
- [Methods](#methods)
- [Results](#results)
- [Optimization of Delay](#optimization-of-delay)
- [References](#references)

*This study can be referenced by* [*citing the package and the
Expand All @@ -14,6 +17,140 @@ us!**

## Introduction

blabla
The aim of this study is to empirically assess the optimal parameters for computing the complexity of EEG signals across several open datasets.

## Methods

``` r
library(tidyverse)
library(easystats)
library(patchwork)
```

``` r
# Overview of the datasets: sampling rate, low-pass filter cutoff, and the
# number of distinct participants and channels available in each.
read.csv("data_delay.csv") |>
  group_by(Dataset) |>
  summarise(
    SamplingRate   = mean(SamplingRate),
    Lowpass        = mean(Lowpass),
    n_Participants = n_distinct(Participant),
    n_Channels     = n_distinct(Channel)
  )
## # A tibble: 4 × 5
## Dataset SamplingRate Lowpass n_Participants n_Channels
## <chr> <dbl> <dbl> <int> <int>
## 1 Lemon 250 50 2 61
## 2 SRM 1024 50 4 64
## 3 Texas 256 50 4 64
## 4 Wang (2022) 500 50 2 61
```

## Results

### Optimization of Delay

``` r
# Load the delay-optimization results and rescale delays from samples to
# milliseconds so datasets with different sampling rates are comparable.
data_delay <- read.csv("data_delay.csv") |>
  mutate(
    Metric = str_remove_all(Metric, fixed(" (FFT)")),
    Metric = fct_relevel(Metric, "Mutual Information", "Autocorrelation", "Displacement"),
    # Convert from samples to milliseconds.
    Value = Value / SamplingRate * 1000,
    Optimal = Optimal / SamplingRate * 1000
  ) |>
  # Derive a scalp "Area" from the first letter of the channel name, then
  # harmonize legacy 10-20 labels (I* -> occipital "O", A* -> frontal "F").
  # NOTE(review): the original first step `Area = str_remove_all(Channel,
  # "[:digit:]|z")` was dead code — its result was immediately overwritten by
  # `substring(Channel, 1, 1)`, which reads Channel, not Area — so it has been
  # removed with no change in behavior.
  mutate(
    Area = substring(Channel, 1, 1),
    Area = case_when(
      Area == "I" ~ "O",
      Area == "A" ~ "F",
      TRUE ~ Area
    )
  )


# summarize(group_by(data_delay, Dataset), Value = max(Value, na.rm=TRUE))
```

``` r
# data_delay |>
# mutate(group = paste0(Dataset, "_", Metric)) |>
# estimate_density(method="kernel", select="Optimal", at = "group") |>
# separate("group", into = c("Dataset", "Metric")) |>
# ggplot(aes(x = x, y = y)) +
# geom_line(aes(color = Dataset)) +
# facet_wrap(~Metric, scales = "free_y")
```

#### Per Channel

``` r
# Plot the delay-optimization score per EEG channel for one dataset: each
# channel is drawn as a faint line, with the per-area average overlaid in bold.
#
# Args:
#   data_delay: data frame of delay-optimization results.
#   dataset: name of the dataset to plot (matched against the Dataset column).
# Returns: a ggplot object.
delay_perchannel <- function(data_delay, dataset="Lemon") {
  d <- filter(data_delay, Dataset == dataset)

  # Average the scores at the channel level and at the coarser area level.
  channel_avg <- d |>
    group_by(Condition, Metric, Area, Channel, Value) |>
    summarise_all(mean, na.rm=TRUE)
  area_avg <- d |>
    group_by(Condition, Metric, Area, Value) |>
    summarise_all(mean, na.rm=TRUE)

  channel_avg |>
    ggplot(aes(x = Value, y = Score, color = Area)) +
    geom_line(aes(group=Channel), alpha = 0.20) +
    geom_line(data=area_avg, aes(group=Area), size=1) +
    facet_wrap(~Condition*Metric, scales = "free_y") +
    see::scale_color_flat_d(palette = "rainbow") +
    scale_y_continuous(expand = c(0, 0)) +
    labs(title = paste0("Dataset: ", dataset), x = NULL, y = NULL) +
    guides(colour = guide_legend(override.aes = list(alpha = 1))) +
    see::theme_modern() +
    theme(plot.title = element_text(face = "plain", hjust = 0))
}

# Assemble one per-channel panel per dataset and stack them vertically
# (relative panel heights chosen manually).
p1 <- delay_perchannel(data_delay, dataset="Lemon")
p2 <- delay_perchannel(data_delay, dataset="Texas")
p3 <- delay_perchannel(data_delay, dataset="SRM")
p4 <- delay_perchannel(data_delay, dataset="Wang (2022)")

(p1 / p2 / p3 / p4) +
  plot_layout(heights = c(2, 1, 1, 2)) +
  plot_annotation(
    title = "Optimization of Delay",
    theme = theme(plot.title = element_text(hjust = 0.5, face = "bold"))
  )
```

![](../../studies/complexity_eeg/figures/unnamed-chunk-7-1.png)<!-- -->

#### Per Subject

``` r
# Plot the delay-optimization score per participant for one dataset: each
# participant-by-area trace is drawn faintly, with the per-area average
# overlaid in bold.
#
# Args:
#   data_delay: data frame of delay-optimization results.
#   dataset: name of the dataset to plot (matched against the Dataset column).
# Returns: a ggplot object.
delay_persubject <- function(data_delay, dataset="Lemon") {
  data <- filter(data_delay, Dataset == dataset)

  # Average at the participant level and at the coarser area level.
  # na.rm=TRUE added for consistency with delay_perchannel(): without it a
  # single NA score turns an entire group mean into NA and silently drops
  # that trace from the plot.
  by_subject <- data |>
    group_by(Condition, Metric, Area, Participant, Value) |>
    summarise_all(mean, na.rm=TRUE)
  by_area <- data |>
    group_by(Condition, Metric, Area, Value) |>
    summarise_all(mean, na.rm=TRUE)

  by_subject |>
    mutate(group = paste0(Participant, Area)) |>
    ggplot(aes(x = Value, y = Score, color = Area)) +
    geom_line(aes(group=group), alpha = 0.20) +
    geom_line(data=by_area, aes(group=Area), size=1) +
    facet_wrap(~Condition*Metric, scales = "free_y") +
    see::scale_color_flat_d(palette = "rainbow") +
    scale_y_continuous(expand = c(0, 0)) +
    labs(title = paste0("Dataset: ", dataset), x = NULL, y = NULL) +
    guides(colour = guide_legend(override.aes = list(alpha = 1))) +
    see::theme_modern() +
    # NOTE(review): title styling differs from delay_perchannel() (bold/centered
    # here vs plain/left there) — confirm whether the asymmetry is intentional.
    theme(plot.title = element_text(face = "bold", hjust = 0.5))
}

# Assemble one per-subject panel per dataset and stack them vertically
# (relative panel heights chosen manually).
p1 <- delay_persubject(data_delay, dataset="Lemon")
p2 <- delay_persubject(data_delay, dataset="Texas")
p3 <- delay_persubject(data_delay, dataset="SRM")
p4 <- delay_persubject(data_delay, dataset="Wang (2022)")

(p1 / p2 / p3 / p4) +
  plot_layout(heights = c(2, 1, 1, 2)) +
  plot_annotation(
    title = "Optimization of Delay",
    theme = theme(plot.title = element_text(hjust = 0.5, face = "bold"))
  )
```

![](../../studies/complexity_eeg/figures/unnamed-chunk-8-1.png)<!-- -->

## References

0 comments on commit 2d91d36

Please sign in to comment.