Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[MAINT] run isort and flake8 on the whole nilearn code base in CI #3651

Merged
merged 22 commits into from
Jun 14, 2023
Merged
Show file tree
Hide file tree
Changes from 14 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
6 changes: 4 additions & 2 deletions .flake8
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,14 @@
exclude =
.git,
__pycache__,
env,
venv,
auto_examples,
build,
dist,
env,
nilearn/externals/tempita
nilearn/externals/
nilearn_cache
venv,
--select = D,E,F,W,C90
docstring-convention = numpy
max-line-length = 79
Expand Down
19 changes: 1 addition & 18 deletions .github/workflows/flake8.yml
Original file line number Diff line number Diff line change
Expand Up @@ -36,21 +36,4 @@ jobs:

- name: "Run Flake8 on whole file"
shell: bash {0}
run: |
flake8 --verbose \
examples/0[0234567]* \
maint_tools \
nilearn/_utils \
nilearn/connectome \
nilearn/datasets \
nilearn/de* \
nilearn/glm \
nilearn/image \
nilearn/interfaces \
nilearn/input_data \
nilearn/maskers \
nilearn/mass_univariate \
nilearn/regions \
nilearn/reporting \
nilearn/surface \
nilearn/*.py
run: flake8 --verbose .
20 changes: 3 additions & 17 deletions .github/workflows/isort.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,22 +29,8 @@ jobs:
python -m pip install --upgrade pip isort

- name: "Run isort"
run: |
isort --diff --check --settings-path pyproject.toml \
run: isort --diff --check --settings-path pyproject.toml \
examples/0[0234567]* \
maint_tools \
nilearn/_utils \
nilearn/connectome \
nilearn/datasets \
nilearn/de* \
nilearn/glm \
nilearn/image \
nilearn/interfaces \
nilearn/input_data \
nilearn/maskers \
nilearn/mass_univariate \
nilearn/plotting \
nilearn/regions \
nilearn/rerporting \
nilearn/surface \
nilearn/*.py
nilearn \
doc
1 change: 1 addition & 0 deletions doc/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import sys

import sphinx

from nilearn._utils import _compare_version

# ----------------------------------------------------------------------------
Expand Down
1 change: 1 addition & 0 deletions examples/00_tutorials/plot_single_subject_single_run.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,7 @@
# Formally, we have taken the first design matrix, because the model is
# implicitly meant for multiple runs.
import matplotlib.pyplot as plt

from nilearn.plotting import plot_design_matrix

plot_design_matrix(design_matrix)
Expand Down
3 changes: 2 additions & 1 deletion examples/02_decoding/plot_haxby_different_estimators.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,9 +77,10 @@
# classifiers
import time

from nilearn.decoding import Decoder
from sklearn.model_selection import LeaveOneGroupOut

from nilearn.decoding import Decoder

cv = LeaveOneGroupOut()
classifiers_data = {}

Expand Down
3 changes: 2 additions & 1 deletion examples/02_decoding/plot_haxby_frem.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,9 +67,10 @@
# ------------------------------------

import numpy as np
from nilearn import plotting
from sklearn.metrics import confusion_matrix

from nilearn import plotting

# Calculate the confusion matrix
matrix = confusion_matrix(
y_test,
Expand Down
4 changes: 3 additions & 1 deletion examples/02_decoding/plot_haxby_full_analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,11 +65,12 @@
# The classifier used here is a support vector classifier (svc). We use
# class:`nilearn.decoding.Decoder` and specify the classifier.
import numpy as np
from nilearn.decoding import Decoder

# Make a data splitting object for cross validation
from sklearn.model_selection import LeaveOneGroupOut

from nilearn.decoding import Decoder

cv = LeaveOneGroupOut()

##############################################################
Expand Down Expand Up @@ -126,6 +127,7 @@
# We make a simple bar plot to summarize the results
# --------------------------------------------------
import matplotlib.pyplot as plt

from nilearn.plotting import show

plt.figure()
Expand Down
4 changes: 3 additions & 1 deletion examples/02_decoding/plot_haxby_glm_decoding.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
# By default 2nd subject will be fetched
import numpy as np
import pandas as pd

from nilearn import datasets

haxby_dataset = datasets.fetch_haxby()
Expand Down Expand Up @@ -161,9 +162,10 @@
# corresponding conditions labels and session labels
# (for the cross validation).

from nilearn.decoding import Decoder
from sklearn.model_selection import LeaveOneGroupOut

from nilearn.decoding import Decoder

decoder = Decoder(
estimator="svc",
mask=haxby_dataset.mask,
Expand Down
1 change: 1 addition & 0 deletions examples/02_decoding/plot_haxby_grid_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -196,6 +196,7 @@
# Plot the prediction scores using matplotlib
# -------------------------------------------
from matplotlib import pyplot as plt

from nilearn.plotting import show

plt.figure(figsize=(6, 4))
Expand Down
4 changes: 3 additions & 1 deletion examples/02_decoding/plot_haxby_multiclass.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@

import numpy as np
import pandas as pd

from nilearn import datasets

# By default 2nd subject from haxby datasets will be fetched.
Expand Down Expand Up @@ -112,13 +113,14 @@
plt.xticks([1, 2], ["One vs All", "One vs One"])
plt.title("Prediction: accuracy score")

from sklearn.metrics import confusion_matrix

##############################################################################
# Plot a confusion matrix
# -----------------------
# We fit on the first 10 sessions and plot a confusion matrix on the
# last 2 sessions
from nilearn.plotting import plot_matrix, show
from sklearn.metrics import confusion_matrix

svc_ovo.fit(X[session < 10], y[session < 10])
y_pred_ovo = svc_ovo.predict(X[session >= 10])
Expand Down
1 change: 1 addition & 0 deletions examples/02_decoding/plot_haxby_searchlight.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
# Load Haxby dataset
# ------------------
import pandas as pd

from nilearn import datasets
from nilearn.image import get_data, load_img, new_img_like

Expand Down
13 changes: 8 additions & 5 deletions examples/02_decoding/plot_haxby_searchlight_surface.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
# Load Haxby dataset
# ------------------
import pandas as pd

from nilearn import datasets

# We fetch 2nd subject from haxby datasets (which is default)
Expand All @@ -33,11 +34,12 @@
fmri_img = index_img(fmri_filename, condition_mask)
y, session = y[condition_mask], session[condition_mask]

from sklearn import neighbors

#########################################################################
# Surface bold response
# ---------------------
from nilearn import datasets, surface
from sklearn import neighbors

# Fetch a coarse surface of the left hemisphere only for speed
fsaverage = datasets.fetch_surf_fsaverage(mesh="fsaverage5")
Expand All @@ -59,15 +61,16 @@
nn = neighbors.NearestNeighbors(radius=radius)
adjacency = nn.fit(coords).radius_neighbors_graph(coords).tolil()

#########################################################################
# Searchlight computation
# -----------------------
from nilearn.decoding.searchlight import search_light
from sklearn.linear_model import RidgeClassifier
from sklearn.model_selection import KFold
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

#########################################################################
# Searchlight computation
# -----------------------
from nilearn.decoding.searchlight import search_light

# Simple linear estimator preceded by a normalization step
estimator = make_pipeline(StandardScaler(), RidgeClassifier(alpha=10.0))

Expand Down
1 change: 1 addition & 0 deletions examples/02_decoding/plot_haxby_stimuli.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
"""

import matplotlib.pyplot as plt

from nilearn import datasets
from nilearn.plotting import show

Expand Down
1 change: 1 addition & 0 deletions examples/02_decoding/plot_miyawaki_encoding.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@
# data, clean and mask it.

import numpy as np

from nilearn.maskers import MultiNiftiMasker

masker = MultiNiftiMasker(
Expand Down
2 changes: 2 additions & 0 deletions examples/02_decoding/plot_miyawaki_reconstruction.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@
# Then we prepare and mask the data
# ---------------------------------
import numpy as np

from nilearn.maskers import MultiNiftiMasker

sys.stderr.write("Preprocessing data...")
Expand Down Expand Up @@ -295,6 +296,7 @@ def split_multi_scale(y, y_shape):
# ground truth

from matplotlib import pyplot as plt

from nilearn.plotting import show

for i in range(6):
Expand Down
1 change: 1 addition & 0 deletions examples/02_decoding/plot_oasis_vbm.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@

import matplotlib.pyplot as plt
import numpy as np

from nilearn import datasets
from nilearn.image import get_data
from nilearn.maskers import NiftiMasker
Expand Down
1 change: 1 addition & 0 deletions examples/02_decoding/plot_oasis_vbm_space_net.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
# Load the Oasis VBM dataset
# --------------------------
import numpy as np

from nilearn import datasets

n_subjects = 200 # increase this number if you have more RAM on your box
Expand Down
7 changes: 4 additions & 3 deletions examples/02_decoding/plot_simulated_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,10 +36,7 @@

import matplotlib.pyplot as plt
import nibabel
import nilearn.masking
import numpy as np
from nilearn import decoding
from nilearn.plotting import show
from scipy import linalg
from scipy.ndimage import gaussian_filter
from sklearn import linear_model, svm
Expand All @@ -49,6 +46,10 @@
from sklearn.preprocessing import StandardScaler
from sklearn.utils import check_random_state

import nilearn.masking
from nilearn import decoding
from nilearn.plotting import show


##############################################################################
# A function to generate data
Expand Down
1 change: 1 addition & 0 deletions examples/03_connectivity/plot_data_driven_parcellations.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import patches, ticker

from nilearn import datasets, plotting
from nilearn.image import get_data, index_img, mean_img
from nilearn.regions import Parcellations
Expand Down
1 change: 1 addition & 0 deletions examples/03_connectivity/plot_multi_subject_connectome.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@

"""
import numpy as np

from nilearn import plotting

n_subjects = 4 # subjects to consider for group-sparse covariance (max: 40)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,7 @@

# Display the correlation matrix
import numpy as np

from nilearn import plotting

# Mask out the major diagonal
Expand Down
1 change: 1 addition & 0 deletions examples/03_connectivity/plot_signal_extraction.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@

# Plot the correlation matrix
import numpy as np

from nilearn import plotting

# Make a large figure
Expand Down
1 change: 1 addition & 0 deletions examples/04_glm_first_level/plot_adhd_dmn.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

"""
import numpy as np

from nilearn import datasets, plotting
from nilearn.glm.first_level import (
FirstLevelModel,
Expand Down
3 changes: 2 additions & 1 deletion examples/04_glm_first_level/plot_bids_features.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,9 +154,10 @@
)

import matplotlib.pyplot as plt
from nilearn import plotting
from scipy.stats import norm

from nilearn import plotting

plotting.plot_glass_brain(
z_map,
colorbar=True,
Expand Down
2 changes: 2 additions & 0 deletions examples/04_glm_first_level/plot_fir_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
#########################################################################
# At first, we grab the localizer data.
import pandas as pd

from nilearn.datasets import func

data = func.fetch_localizer_first_level()
Expand Down Expand Up @@ -131,6 +132,7 @@
#
# We can now proceed by estimating the contrasts and displaying them.
import matplotlib.pyplot as plt

from nilearn.plotting import plot_stat_map

fig = plt.figure(figsize=(11, 3))
Expand Down
1 change: 1 addition & 0 deletions examples/04_glm_first_level/plot_hrf.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,7 @@ def mion_time_derivative(tr, oversampling=16.0):
# ----------------------------------

import matplotlib.pyplot as plt

from nilearn.glm.first_level import compute_regressor

oversampling = 16
Expand Down
1 change: 1 addition & 0 deletions examples/04_glm_first_level/plot_predictions_residuals.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
# Import modules
# --------------
import pandas as pd

from nilearn import image, masking
from nilearn.datasets import fetch_spm_auditory

Expand Down
1 change: 1 addition & 0 deletions examples/04_glm_first_level/plot_spm_multimodal_faces.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,7 @@
# Make the design matrices.
import numpy as np
import pandas as pd

from nilearn.glm.first_level import make_first_level_design_matrix

design_matrices = []
Expand Down