[MAINT] run isort and flake8 on the whole nilearn code base in CI #3651

Merged: 22 commits, Jun 14, 2023
8 changes: 5 additions & 3 deletions .flake8
@@ -2,12 +2,14 @@
exclude =
.git,
__pycache__,
env,
venv,
auto_examples,
build,
dist,
env,
nilearn/externals/tempita
nilearn/externals/
nilearn_cache
venv,
--select = D,E,F,W,C90
docstring-convention = numpy
max-line-length = 79
@@ -36,7 +38,7 @@ per-file-ignores =
examples/*/*: D103, D205, D301, D400
# - docstrings rules that should not be applied to doc
doc/*: D100, D103, F401
ignore = D105, D107, E402, W503, W504, W605
ignore = D105, D107, E402, W503, W504, W605, BLK100
# for compatibility with black
# https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html#flake8
extend-ignore = E203
22 changes: 2 additions & 20 deletions .github/workflows/flake8.yml
@@ -31,26 +31,8 @@ jobs:

- name: "Install Flake8"
shell: bash {0}
run: |
python -m pip install --upgrade pip flake8 flake8-docstrings
run: python -m pip install --upgrade pip flake8 flake8-docstrings

- name: "Run Flake8 on whole file"
shell: bash {0}
run: |
flake8 --verbose \
examples/0[0234567]* \
maint_tools \
nilearn/_utils \
nilearn/connectome \
nilearn/datasets \
nilearn/de* \
nilearn/glm \
nilearn/image \
nilearn/interfaces \
nilearn/input_data \
nilearn/maskers \
nilearn/mass_univariate \
nilearn/regions \
nilearn/reporting \
nilearn/surface \
nilearn/*.py
run: flake8 --verbose .
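
The simplified job lints the entire repository instead of a hand-maintained directory list. Contributors can reproduce the same check locally with the commands the workflow runs, executed from the repository root so the .flake8 configuration above is picked up; this is a minimal sketch, not an official contributing recipe:

    python -m pip install --upgrade pip flake8 flake8-docstrings
    # lint everything; the exclude and ignore lists come from .flake8
    flake8 --verbose .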
23 changes: 2 additions & 21 deletions .github/workflows/isort.yml
@@ -25,26 +25,7 @@ jobs:

- name: "Install isort"
shell: bash {0}
run: |
python -m pip install --upgrade pip isort
run: python -m pip install --upgrade pip isort

- name: "Run isort"
run: |
isort --diff --check --settings-path pyproject.toml \
examples/0[0234567]* \
maint_tools \
nilearn/_utils \
nilearn/connectome \
nilearn/datasets \
nilearn/de* \
nilearn/glm \
nilearn/image \
nilearn/interfaces \
nilearn/input_data \
nilearn/maskers \
nilearn/mass_univariate \
nilearn/plotting \
nilearn/regions \
nilearn/rerporting \
nilearn/surface \
nilearn/*.py
run: isort --diff --check --settings-path pyproject.toml .
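
The isort job is simplified in the same way. By default, isort groups imports into standard-library, third-party, and first-party sections separated by blank lines, which is why the example-file hunks below insert a blank line before the nilearn imports and move them after the scikit-learn and scipy ones. A minimal local check, mirroring the workflow and reading its settings from pyproject.toml as the command indicates:

    python -m pip install --upgrade pip isort
    # report ordering problems without touching the files
    isort --diff --check --settings-path pyproject.toml .
    # drop --diff --check to rewrite the imports in place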
1 change: 1 addition & 0 deletions doc/conf.py
@@ -15,6 +15,7 @@
import sys

import sphinx

from nilearn._utils import _compare_version

# ----------------------------------------------------------------------------
1 change: 1 addition & 0 deletions examples/00_tutorials/plot_single_subject_single_run.py
@@ -123,6 +123,7 @@
# Formally, we have taken the first design matrix, because the model is
# implicitly meant for multiple runs.
import matplotlib.pyplot as plt

from nilearn.plotting import plot_design_matrix

plot_design_matrix(design_matrix)
1 change: 1 addition & 0 deletions examples/01_plotting/plot_3d_map_to_surface_projection.py
@@ -34,6 +34,7 @@
# 1 if the curvature is positive, -1 if the curvature is negative.

import numpy as np

from nilearn import surface

curv_right = surface.load_surf_data(fsaverage.curv_right)
2 changes: 2 additions & 0 deletions examples/01_plotting/plot_carpet.py
@@ -37,6 +37,7 @@
# Visualizing global patterns over time
# -------------------------------------
import matplotlib.pyplot as plt

from nilearn.plotting import plot_carpet

display = plot_carpet(adhd_dataset.func[0], mask_img, t_r=t_r)
@@ -49,6 +50,7 @@
# Create a gray matter/white matter/cerebrospinal fluid mask from
# ICBM152 tissue probability maps.
import numpy as np

from nilearn import image

atlas = datasets.fetch_icbm152_2009()
1 change: 1 addition & 0 deletions examples/01_plotting/plot_colormaps.py
@@ -10,6 +10,7 @@

import matplotlib.pyplot as plt
import numpy as np

from nilearn.plotting import show
from nilearn.plotting.cm import _cmap_d as nilearn_cmaps

1 change: 1 addition & 0 deletions examples/01_plotting/plot_haxby_masks.py
@@ -7,6 +7,7 @@


import matplotlib.pyplot as plt

from nilearn import datasets

haxby_dataset = datasets.fetch_haxby()
3 changes: 1 addition & 2 deletions examples/01_plotting/plot_prob_atlas.py
@@ -22,8 +22,7 @@
See :ref:`plotting` for more information to know how to tune the parameters.
"""
# Load 4D probabilistic atlases
from nilearn import plotting
from nilearn import datasets
from nilearn import datasets, plotting

# Harvard Oxford Atlas
harvard_oxford = datasets.fetch_atlas_harvard_oxford("cort-prob-2mm")
1 change: 1 addition & 0 deletions examples/01_plotting/plot_surf_atlas.py
@@ -93,6 +93,7 @@
# the mean location to obtain the coordinates.

import numpy as np

from nilearn import surface

atlas = destrieux_atlas
1 change: 1 addition & 0 deletions examples/01_plotting/plot_surface_projection_strategies.py
@@ -25,6 +25,7 @@
import matplotlib
import numpy as np
from matplotlib import pyplot as plt

from nilearn.plotting import show
from nilearn.surface import surface

3 changes: 2 additions & 1 deletion examples/02_decoding/plot_haxby_different_estimators.py
@@ -77,9 +77,10 @@
# classifiers
import time

from nilearn.decoding import Decoder
from sklearn.model_selection import LeaveOneGroupOut

from nilearn.decoding import Decoder

cv = LeaveOneGroupOut()
classifiers_data = {}

3 changes: 2 additions & 1 deletion examples/02_decoding/plot_haxby_frem.py
@@ -67,9 +67,10 @@
# ------------------------------------

import numpy as np
from nilearn import plotting
from sklearn.metrics import confusion_matrix

from nilearn import plotting

# Calculate the confusion matrix
matrix = confusion_matrix(
y_test,
4 changes: 3 additions & 1 deletion examples/02_decoding/plot_haxby_full_analysis.py
@@ -65,11 +65,12 @@
# The classifier used here is a support vector classifier (svc). We use
# class:`nilearn.decoding.Decoder` and specify the classifier.
import numpy as np
from nilearn.decoding import Decoder

# Make a data splitting object for cross validation
from sklearn.model_selection import LeaveOneGroupOut

from nilearn.decoding import Decoder

cv = LeaveOneGroupOut()

##############################################################
@@ -126,6 +127,7 @@
# We make a simple bar plot to summarize the results
# --------------------------------------------------
import matplotlib.pyplot as plt

from nilearn.plotting import show

plt.figure()
4 changes: 3 additions & 1 deletion examples/02_decoding/plot_haxby_glm_decoding.py
@@ -22,6 +22,7 @@
# By default 2nd subject will be fetched
import numpy as np
import pandas as pd

from nilearn import datasets

haxby_dataset = datasets.fetch_haxby()
@@ -161,9 +162,10 @@
# corresponding conditions labels and session labels
# (for the cross validation).

from nilearn.decoding import Decoder
from sklearn.model_selection import LeaveOneGroupOut

from nilearn.decoding import Decoder

decoder = Decoder(
estimator="svc",
mask=haxby_dataset.mask,
1 change: 1 addition & 0 deletions examples/02_decoding/plot_haxby_grid_search.py
@@ -196,6 +196,7 @@
# Plot the prediction scores using matplotlib
# -------------------------------------------
from matplotlib import pyplot as plt

from nilearn.plotting import show

plt.figure(figsize=(6, 4))
4 changes: 3 additions & 1 deletion examples/02_decoding/plot_haxby_multiclass.py
@@ -15,6 +15,7 @@

import numpy as np
import pandas as pd

from nilearn import datasets

# By default 2nd subject from haxby datasets will be fetched.
@@ -117,9 +118,10 @@
# -----------------------
# We fit on the first 10 sessions and plot a confusion matrix on the
# last 2 sessions
from nilearn.plotting import plot_matrix, show
from sklearn.metrics import confusion_matrix

from nilearn.plotting import plot_matrix, show

svc_ovo.fit(X[session < 10], y[session < 10])
y_pred_ovo = svc_ovo.predict(X[session >= 10])

1 change: 1 addition & 0 deletions examples/02_decoding/plot_haxby_searchlight.py
@@ -15,6 +15,7 @@
# Load Haxby dataset
# ------------------
import pandas as pd

from nilearn import datasets
from nilearn.image import get_data, load_img, new_img_like

7 changes: 5 additions & 2 deletions examples/02_decoding/plot_haxby_searchlight_surface.py
@@ -13,6 +13,7 @@
# Load Haxby dataset
# ------------------
import pandas as pd

from nilearn import datasets

# We fetch 2nd subject from haxby datasets (which is default)
@@ -36,9 +37,10 @@
#########################################################################
# Surface bold response
# ---------------------
from nilearn import datasets, surface
from sklearn import neighbors

from nilearn import datasets, surface

# Fetch a coarse surface of the left hemisphere only for speed
fsaverage = datasets.fetch_surf_fsaverage(mesh="fsaverage5")
hemi = "left"
@@ -62,12 +64,13 @@
#########################################################################
# Searchlight computation
# -----------------------
from nilearn.decoding.searchlight import search_light
from sklearn.linear_model import RidgeClassifier
from sklearn.model_selection import KFold
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

from nilearn.decoding.searchlight import search_light

# Simple linear estimator preceded by a normalization step
estimator = make_pipeline(StandardScaler(), RidgeClassifier(alpha=10.0))

1 change: 1 addition & 0 deletions examples/02_decoding/plot_haxby_stimuli.py
@@ -8,6 +8,7 @@
"""

import matplotlib.pyplot as plt

from nilearn import datasets
from nilearn.plotting import show

1 change: 1 addition & 0 deletions examples/02_decoding/plot_miyawaki_encoding.py
@@ -56,6 +56,7 @@
# data, clean and mask it.

import numpy as np

from nilearn.maskers import MultiNiftiMasker

masker = MultiNiftiMasker(
2 changes: 2 additions & 0 deletions examples/02_decoding/plot_miyawaki_reconstruction.py
@@ -53,6 +53,7 @@
# Then we prepare and mask the data
# ---------------------------------
import numpy as np

from nilearn.maskers import MultiNiftiMasker

sys.stderr.write("Preprocessing data...")
@@ -295,6 +296,7 @@ def split_multi_scale(y, y_shape):
# ground truth

from matplotlib import pyplot as plt

from nilearn.plotting import show

for i in range(6):
1 change: 1 addition & 0 deletions examples/02_decoding/plot_oasis_vbm.py
@@ -50,6 +50,7 @@

import matplotlib.pyplot as plt
import numpy as np

from nilearn import datasets
from nilearn.image import get_data
from nilearn.maskers import NiftiMasker
1 change: 1 addition & 0 deletions examples/02_decoding/plot_oasis_vbm_space_net.py
@@ -14,6 +14,7 @@
# Load the Oasis VBM dataset
# --------------------------
import numpy as np

from nilearn import datasets

n_subjects = 200 # increase this number if you have more RAM on your box
7 changes: 4 additions & 3 deletions examples/02_decoding/plot_simulated_data.py
@@ -36,10 +36,7 @@

import matplotlib.pyplot as plt
import nibabel
import nilearn.masking
import numpy as np
from nilearn import decoding
from nilearn.plotting import show
from scipy import linalg
from scipy.ndimage import gaussian_filter
from sklearn import linear_model, svm
@@ -49,6 +46,10 @@
from sklearn.preprocessing import StandardScaler
from sklearn.utils import check_random_state

import nilearn.masking
from nilearn import decoding
from nilearn.plotting import show


##############################################################################
# A function to generate data
1 change: 1 addition & 0 deletions examples/03_connectivity/plot_data_driven_parcellations.py
@@ -48,6 +48,7 @@
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import patches, ticker

from nilearn import datasets, plotting
from nilearn.image import get_data, index_img, mean_img
from nilearn.regions import Parcellations
1 change: 1 addition & 0 deletions examples/03_connectivity/plot_multi_subject_connectome.py
@@ -9,6 +9,7 @@

"""
import numpy as np

from nilearn import plotting

n_subjects = 4 # subjects to consider for group-sparse covariance (max: 40)
@@ -78,6 +78,7 @@

# Display the correlation matrix
import numpy as np

from nilearn import plotting

# Mask out the major diagonal