diff --git a/nilearn/decomposition/base.py b/nilearn/decomposition/base.py
index 30b59f59c6..aaaf2a46b3 100644
--- a/nilearn/decomposition/base.py
+++ b/nilearn/decomposition/base.py
@@ -359,8 +359,7 @@ def fit(self, imgs, y=None, confounds=None):
             Data on which the mask is calculated. If this is a list,
             the affine is considered the same for all.
 
-        confounds : list of CSV file paths or numpy.ndarrys
-            or pandas DataFrames, optional,
+        confounds : list of CSV file paths or numpy.ndarrays or pandas DataFrames, optional,
             This parameter is passed to nilearn.signal.clean. Please see the
             related documentation for details. Should match with the list
             of imgs given.
@@ -429,8 +428,7 @@ def transform(self, imgs, confounds=None):
             See http://nilearn.github.io/manipulating_images/input_output.html
             Data to be projected
 
-        confounds: CSV file path or numpy.ndarray or pandas DataFrame,
-            optional,
+        confounds: CSV file path or numpy.ndarray or pandas DataFrame, optional,
             This parameter is passed to nilearn.signal.clean. Please see the
             related documentation for details
 
@@ -507,8 +505,7 @@ def score(self, imgs, confounds=None, per_component=False):
             See http://nilearn.github.io/manipulating_images/input_output.html
             Data to be scored
 
-        confounds: CSV file path or numpy.ndarray or pandas DataFrame,
-            optional,
+        confounds: CSV file path or numpy.ndarray or pandas DataFrame, optional,
             This parameter is passed to nilearn.signal.clean. Please see the
             related documentation for details
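
For reference, a minimal usage sketch of the confound formats the updated docstrings describe. It is not part of the diff: the file names are hypothetical placeholders, and CanICA is used here only as one of the nilearn.decomposition estimators that inherits fit/transform/score from this base class.

# Illustrative sketch only; paths below are placeholders.
import numpy as np
import pandas as pd
from nilearn.decomposition import CanICA

func_imgs = ["sub-01_task-rest_bold.nii.gz",   # hypothetical 4D functional images
             "sub-02_task-rest_bold.nii.gz"]

# One confound entry per image; each may be a CSV file path, a numpy.ndarray,
# or a pandas.DataFrame, and is forwarded to nilearn.signal.clean.
confounds = [
    "sub-01_confounds.csv",
    pd.DataFrame(np.random.randn(150, 3),
                 columns=["trans_x", "trans_y", "trans_z"]),
]

canica = CanICA(n_components=20, random_state=0)
canica.fit(func_imgs, confounds=confounds)
loadings = canica.transform(func_imgs, confounds=confounds)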