Add the function to compute the PID
EtienneCmb committed Jun 3, 2023
1 parent df026e6 commit ac9798d
Showing 10 changed files with 330 additions and 21 deletions.
24 changes: 14 additions & 10 deletions docs/source/refs.bib
@@ -153,7 +153,6 @@ @article{wilcox_guide_2017
month = jun,
year = {2017},
keywords = {stat},
file = {Wilcox et Rousselet - 2017 - A Guide to Robust Statistical Methods in Neuroscie.pdf:/home/etienne/Zotero/storage/EK3F3Q2B/Wilcox et Rousselet - 2017 - A Guide to Robust Statistical Methods in Neuroscie.pdf:application/pdf}
}


@@ -166,7 +165,6 @@ @article{friston_detecting_1996
year = {1996},
keywords = {stat},
pages = {223--235},
file = {Friston et al. - 1996 - Detecting Activations in PET and fMRI Levels of I.pdf:/home/etienne/Zotero/storage/JD7FEZ95/Friston et al. - 1996 - Detecting Activations in PET and fMRI Levels of I.pdf:application/pdf}
}


@@ -186,7 +184,6 @@ @article{nichols_nonparametric_2002
year = {2002},
keywords = {stat},
pages = {1--25},
file = {Nichols et Holmes - 2002 - Nonparametric permutation tests for functional neu.pdf:/home/etienne/Zotero/storage/5FGDE599/Nichols et Holmes - 2002 - Nonparametric permutation tests for functional neu.pdf:application/pdf}
}


@@ -205,7 +202,6 @@ @article{cao_multiple_2014
month = aug,
year = {2014},
pages = {543--544},
file = {Cao et Zhang - 2014 - Multiple Comparison Procedures.pdf:/home/etienne/Zotero/storage/5WIZ9LQX/Cao et Zhang - 2014 - Multiple Comparison Procedures.pdf:application/pdf;Snapshot:/home/etienne/Zotero/storage/A37SQ9PG/1892228.html:text/html}
}


@@ -223,7 +219,6 @@ @article{timme_tutorial_2018
year = {2018},
pmid = {30211307},
pmcid = {PMC6131830},
file = {Timme et Lapish - 2018 - A Tutorial for Information Theory in Neuroscience.pdf:/home/etienne/Zotero/storage/6V8FQKUI/Timme et Lapish - 2018 - A Tutorial for Information Theory in Neuroscience.pdf:application/pdf}
}


@@ -242,7 +237,6 @@ @article{combrisson_exceeding_2015
year = {2015},
keywords = {stat},
pages = {126--136},
file = {Combrisson et Jerbi - 2015 - Exceeding chance level by chance The caveat of th.pdf:/home/etienne/Zotero/storage/ARX2855B/Combrisson et Jerbi - 2015 - Exceeding chance level by chance The caveat of th.pdf:application/pdf}
}

@article{deco2021revisiting,
@@ -271,7 +265,6 @@ @article{combrisson_group-level_2022
year = {2022},
keywords = {Cluster-based, Group-level statistics, Information-based measures, Neurophysiology, Non-parametric, python, Reproducibility},
pages = {119347},
file = {Combrisson et al. - 2022 - Group-level inference of information-based measure.pdf:/home/etienne/Zotero/storage/ZRQPHPVA/Combrisson et al. - 2022 - Group-level inference of information-based measure.pdf:application/pdf;ScienceDirect Snapshot:/home/etienne/Zotero/storage/CVAP7X27/S1053811922004669.html:text/html},
}


@@ -280,14 +273,25 @@ @article{mcgill1954
volume = {4},
issn = {2168-2704},
doi = {10.1109/TIT.1954.1057469},
abstract = {A multivariate analysis based on transmitted information is presented. It is shown that sample transmitted information provides a simple method for measuring and testing association in multidimensional contingency tables. Relations with analysis of variance are pointed out, and statistical tests are described.},
number = {4},
journal = {Transactions of the IRE Professional Group on Information Theory},
author = {McGill, W.},
month = sep,
year = {1954},
note = {Conference Name: Transactions of the IRE Professional Group on Information Theory},
keywords = {Humans, Psychology, Analysis of variance, Communication systems, Contracts, Information analysis, Laboratories, Multidimensional systems, Organisms, Testing},
pages = {93--111},
file = {IEEE Xplore Abstract Record:/home/etienne/Zotero/storage/46JKBNL2/1057469.html:text/html},
}


@article{williamsBeers2010,
title = {Nonnegative {Decomposition} of {Multivariate} {Information}},
url = {http://arxiv.org/abs/1004.2515},
urldate = {2019-05-02},
journal = {arXiv:1004.2515 [math-ph, physics:physics, q-bio]},
author = {Williams, Paul L. and Beer, Randall D.},
month = apr,
year = {2010},
note = {arXiv: 1004.2515},
keywords = {Quantitative Biology - Neurons and Cognition, infoth, Computer Science - Information Theory, Mathematical Physics, Physics - Biological Physics, Physics - Data Analysis, Statistics and Probability, Quantitative Biology - Quantitative Methods},
annote = {Comment: 14 pages, 9 figures},
}
2 changes: 1 addition & 1 deletion examples/conn/plot_ii.py
@@ -76,7 +76,7 @@

# compute the II
ii = conn_ii(
x, y, roi='roi', times='times', mi_type='cc'
x, 'trials', roi='roi', times='times', mi_type='cc'
)
print(ii)

83 changes: 83 additions & 0 deletions examples/conn/plot_pid.py
@@ -0,0 +1,83 @@
"""
PID: Decomposing the information carried by pairs of brain regions
==================================================================
This example illustrates how to decompose the information carried by pairs of
brain regions about a behavioral variable `y` (e.g. stimulus, outcome, learning
curve, etc.). Here, we use the Partial Information Decomposition (PID) that
leads to four non-negative and exclusive atoms of information:
- The unique information carried by the first brain region about `y`
- The unique information carried by the second brain region about `y`
- The redundant information carried by both regions about `y`
- The synergistic or complementary information carried by both regions about `y`
"""
import numpy as np
import xarray as xr

from frites.simulations import StimSpecAR
from frites.conn import conn_pid

from frites import set_mpl_style

import matplotlib.pyplot as plt
set_mpl_style()


###############################################################################
# Data simulation
# ---------------
#
# Let's simulate some data. Here, we use an auto-regressive model simulating a
# gamma increase. The gamma increase is modulated according to two conditions.

ar_type = 'hga'
n_stim = 2
n_epochs = 300

ss = StimSpecAR()
ar = ss.fit(ar_type=ar_type, n_epochs=n_epochs, n_stim=n_stim)

print(ar)

plt.figure(figsize=(7, 8))
ss.plot(cmap='bwr')
plt.tight_layout()
plt.show()


###############################################################################
# Compute Partial Information Decomposition
# -----------------------------------------
#
# Now we can use the simulated data to estimate the PID. Here, we decompose,
# at each time point, the information carried by the pair of brain regions
# about the two conditions.

# compute the PID
infotot, unique, redundancy, synergy = conn_pid(
ar, 'trials', roi='roi', times='times', mi_type='cd', verbose=False
)

# plot the results
infotot.plot(color='C3', label=r"$Info_{Tot}$", linestyle='--')
redundancy.plot(color='C0', label=r"$Redundancy_{XY}$")
synergy.plot(color='C1', label=r"$Synergy_{XY}$")
unique.sel(roi='x').squeeze().plot(color='C4', label=r"$Unique_{X}$")
unique.sel(roi='y').squeeze().plot(color='C5', label=r"$Unique_{Y}$")
plt.legend()
plt.ylabel("Information [Bits]")
plt.axvline(0., color='C3', lw=2)
plt.title("Decomposition of the information carried by a pair of brain regions"
"\nabout a task-related variable", fontweight='bold')
plt.show()

"""
From the plot above, we can see:
1. The total information carried by the pair of regions (Info_{Tot}), which the
four PID atoms sum to
2. At the beginning, a large portion of the information is carried by the first
brain region (Unique_{X})
3. Then we can see a superimposition of redundancy (Redundancy_{XY}) and
synergy (Synergy_{XY}) carried by both regions
4. Finally, later in time, most of the information is carried by the second
brain region Y (Unique_{Y})
"""
1 change: 1 addition & 0 deletions frites/conn/__init__.py
@@ -27,3 +27,4 @@
from .conn_spec import conn_spec # noqa
from .conn_te import conn_te
from .conn_ii import conn_ii # noqa
from .conn_pid import conn_pid # noqa
2 changes: 1 addition & 1 deletion frites/conn/conn_dfc.py
@@ -127,7 +127,7 @@ def para_dfc(i_s, i_t):
# add the windows used in the attributes
cfg = dict(
win_sample=np.r_[tuple(win_sample)], win_times=np.r_[tuple(win_times)],
agg_ch=agg_ch, type='dfc', estimator=estimator.name)
agg_ch=agg_ch, type='dfc', estimator=estimator.name, unit='Bits')
dfc.attrs = check_attrs({**cfg, **attrs})

return dfc
12 changes: 6 additions & 6 deletions frites/conn/conn_ii.py
@@ -58,6 +58,10 @@ def conn_ii(data, y, roi=None, times=None, mi_type='cc', gcrn=True, dt=1,
gcrn : bool | True
Specify if the Gaussian Copula Rank Normalization should be applied.
Default is True.
dt : int | 1
Number of successive time points to consider when computing MI.
Increasing this number increases the smoothness of the results but also
increases the computing time.
kw_links : dict | {}
Additional arguments for selecting links to compute are passed to the
function :func:`frites.conn.conn_links`
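
To make the new dt parameter concrete, here is a hedged usage sketch rather than part of this diff. It assumes `data` is a hypothetical xarray.DataArray of shape (n_trials, n_roi, n_times) carrying a trial-level 'trials' coordinate with the behavioral variable, mirroring the call used in examples/conn/plot_ii.py:

# illustrative sketch, `data` is a hypothetical (n_trials, n_roi, n_times)
# DataArray with a 'trials' coordinate; dt=3 pools three consecutive time
# points per MI estimate, smoothing the result at the cost of temporal
# resolution
from frites.conn import conn_ii

ii_smooth = conn_ii(
    data, 'trials', roi='roi', times='times', mi_type='cc', dt=3
)
print(ii_smooth)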
@@ -76,6 +80,7 @@
conn_links, conn_pid
"""
set_log_level(verbose)

# _________________________________ INPUTS ________________________________
# inputs conversion
kw_links.update({'directed': False, 'net': False})
@@ -103,11 +108,6 @@ if mi_type == 'cc':
if mi_type == 'cc':
y = copnorm_nd(y, axis=0)

# reshape y variable
# if (mi_type == 'cc') and (y.ndim in (1, 2)):
# y = np.atleast_2d(y)[np.newaxis, ...]
# y = np.tile(y, (x.shape[0], 1, 1))

# transpose the data to be (n_roi, n_times, 1, n_trials)
x = np.transpose(x, (1, 2, 0))

@@ -145,11 +145,11 @@
attrs['mi_type'] = mi_type
attrs['gcrn'] = gcrn
attrs['dt'] = dt
attrs['unit'] = 'Bits'
interinfo = xr.DataArray(
interinfo, dims=('roi', 'times'), coords=(roi_p, times), name='II',
attrs=check_attrs(attrs)
)
interinfo.attrs['unit'] = 'bits'

return interinfo

