Merge pull request #417 from hanke/fixtypo
BF: Typo + PEP8
yarikoptic committed Jan 22, 2016
2 parents 452d0fd + d3885f6 commit 5caff7c
Showing 1 changed file with 20 additions and 19 deletions.
39 changes: 20 additions & 19 deletions mvpa2/measures/rsa.py
@@ -22,6 +22,7 @@
from scipy.spatial.distance import pdist, squareform
from scipy.stats import rankdata, pearsonr

+
class PDist(Measure):
"""Compute dissimiliarity matrix for samples in a dataset
@@ -30,7 +31,7 @@ class PDist(Measure):
n is the number of samples.
"""

-    is_trained = True # Indicate that this measure is always trained.
+    is_trained = True  # Indicate that this measure is always trained.

pairwise_metric = Parameter('correlation', constraints='str', doc="""\
Distance metric to use for calculating pairwise vector distances for
@@ -60,15 +61,15 @@ def __init__(self, **kwargs):

Measure.__init__(self, **kwargs)

-    def _call(self,ds):
+    def _call(self, ds):

data = ds.samples
# center data if specified
if self.params.center_data:
-            data = data - np.mean(data,0)
+            data = data - np.mean(data, 0)

# get dsm
-        dsm = pdist(data,metric=self.params.pairwise_metric)
+        dsm = pdist(data, metric=self.params.pairwise_metric)

# if square return value make dsm square
if self.params.square:
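
For orientation, a minimal standalone sketch (not part of this commit) of the distance computation PDist._call performs above, using SciPy directly; the random 10x5 array and the 'correlation' metric are illustrative assumptions:

import numpy as np
from scipy.spatial.distance import pdist, squareform

data = np.random.rand(10, 5)             # 10 samples x 5 features (placeholder data)
data = data - np.mean(data, 0)           # centering, as with center_data=True
dsm = pdist(data, metric='correlation')  # condensed upper-triangle distance vector
dsm_square = squareform(dsm)             # n x n matrix, as returned when square=True
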
@@ -120,7 +121,6 @@ class PDistConsistency(Measure):
If True return the square distance matrix, if False, returns the
flattened upper triangle.""")

-
def __init__(self, **kwargs):
"""
Returns
@@ -149,24 +149,25 @@ def _call(self, dataset):
chunks = []
for chunk in dataset.sa[chunks_attr].unique:
data = np.atleast_2d(
-                dataset.samples[dataset.sa[chunks_attr].value == chunk,:])
+                dataset.samples[dataset.sa[chunks_attr].value == chunk, :])
if self.params.center_data:
-                data = data - np.mean(data,0)
+                data = data - np.mean(data, 0)
dsm = pdist(data, self.params.pairwise_metric)
dsms.append(dsm)
chunks.append(chunk)
dsms = np.vstack(dsms)

-        if self.params.consistency_metric=='spearman':
+        if self.params.consistency_metric == 'spearman':
dsms = np.apply_along_axis(rankdata, 1, dsms)
corrmat = np.corrcoef(dsms)
if self.params.square:
ds = Dataset(corrmat, sa={self.params.chunks_attr: chunks})
else:
-            ds = Dataset(squareform(corrmat,checks=False),
+            ds = Dataset(squareform(corrmat, checks=False),
sa=dict(pairs=list(combinations(chunks, 2))))
return ds
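
As a rough illustration (not from the commit) of the consistency computation in _call above, two hypothetical chunks of random data stand in for the dataset's chunks; rank-transforming each DSM row turns the subsequent Pearson correlation into a Spearman correlation, as with consistency_metric='spearman':

import numpy as np
from scipy.spatial.distance import pdist
from scipy.stats import rankdata

chunk_a = np.random.rand(8, 20)                # 8 samples x 20 features, chunk 1 (placeholder)
chunk_b = np.random.rand(8, 20)                # same shape, chunk 2 (placeholder)
dsms = np.vstack([pdist(chunk_a, 'correlation'),
                  pdist(chunk_b, 'correlation')])
dsms = np.apply_along_axis(rankdata, 1, dsms)  # rank each chunk's DSM
corrmat = np.corrcoef(dsms)                    # between-chunk similarity-structure consistency
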

+
class PDistTargetSimilarity(Measure):
"""Calculate the correlations of PDist measures with a target
@@ -184,9 +185,9 @@ class PDistTargetSimilarity(Measure):
all possible metrics.""")

comparison_metric = Parameter('pearson',
-            constraints=EnsureChoice('pearson',
-                                     'spearman'),
-            doc="""\
+                                  constraints=EnsureChoice('pearson',
+                                                           'spearman'),
+                                  doc="""\
Similarity measure to be used for comparing dataset DSM with the
target DSM.""")

@@ -210,23 +211,23 @@ def __init__(self, target_dsm, **kwargs):
-------
Dataset
If ``corrcoef_only`` is True, contains one feature: the correlation
-          coefficient (rho); or otherwise two-fetaures: rho plus p.
+          coefficient (rho); or otherwise two-features: rho plus p.
"""
# init base classes first
Measure.__init__(self, **kwargs)
self.target_dsm = target_dsm
if self.params.comparison_metric == 'spearman':
self.target_dsm = rankdata(target_dsm)

-    def _call(self,dataset):
+    def _call(self, dataset):
data = dataset.samples
if self.params.center_data:
-            data = data - np.mean(data,0)
-        dsm = pdist(data,self.params.pairwise_metric)
-        if self.params.comparison_metric=='spearman':
+            data = data - np.mean(data, 0)
+        dsm = pdist(data, self.params.pairwise_metric)
+        if self.params.comparison_metric == 'spearman':
dsm = rankdata(dsm)
-        rho, p = pearsonr(dsm,self.target_dsm)
+        rho, p = pearsonr(dsm, self.target_dsm)
if self.params.corrcoef_only:
return Dataset([rho], fa={'metrics': ['rho']})
else:
-            return Dataset([[rho,p]], fa={'metrics': ['rho', 'p']})
+            return Dataset([[rho, p]], fa={'metrics': ['rho', 'p']})
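
A minimal sketch of the target-DSM comparison in _call above, with placeholder data and target_dsm (illustrative assumptions, not from the commit); rank-transforming both vectors before pearsonr yields the Spearman variant selected by comparison_metric='spearman':

import numpy as np
from scipy.spatial.distance import pdist
from scipy.stats import rankdata, pearsonr

data = np.random.rand(6, 10)      # 6 samples x 10 features (placeholder)
target_dsm = np.random.rand(15)   # 6*(6-1)/2 = 15 pairwise target distances (placeholder)
dsm = pdist(data, 'correlation')
rho, p = pearsonr(rankdata(dsm), rankdata(target_dsm))  # Spearman via ranks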
