Add isort to pre-commit hooks, package resorting (#4647)
charlesbluca committed Mar 31, 2021
1 parent bb067a1 commit 20a55e9
Showing 193 changed files with 1,187 additions and 1,057 deletions.
2 changes: 1 addition & 1 deletion .github/PULL_REQUEST_TEMPLATE.md
@@ -1,3 +1,3 @@
- [ ] Closes #xxxx
- [ ] Tests added / passed
- [ ] Passes `black distributed` / `flake8 distributed`
- [ ] Passes `black distributed` / `flake8 distributed` / `isort distributed`
5 changes: 5 additions & 0 deletions .pre-commit-config.yaml
@@ -1,4 +1,9 @@
repos:
  - repo: https://github.com/pycqa/isort
    rev: 5.8.0
    hooks:
      - id: isort
        language_version: python3
  - repo: https://github.com/psf/black
    rev: 20.8b1
    hooks:
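
The new hook runs isort over the package, and the PR checklist above now asks for `isort distributed` alongside black and flake8. A minimal sketch of the reordering it enforces, assuming isort 5.x (the rev pinned above) and its Python API:

```python
# Minimal sketch, assuming isort >= 5.0 (the hook above pins rev 5.8.0).
# isort.code() returns the source with its imports regrouped and alphabetized,
# which is the transformation the new pre-commit hook applies in this commit.
import isort

unsorted = """\
from collections import deque
import logging

import dask
from tornado import gen
"""

print(isort.code(unsorted))
# With default settings this should print the standard-library group first
# (straight imports before from-imports), then the third-party group, each
# alphabetized.
```

This is the same transformation applied to the files below, e.g. `distributed/batched.py`, where `import logging` now precedes `from collections import deque`.
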
1 change: 0 additions & 1 deletion conftest.py
@@ -1,7 +1,6 @@
# https://pytest.org/latest/example/simple.html#control-skipping-of-tests-according-to-command-line-option
import pytest


# Uncomment to enable more logging and checks
# (https://docs.python.org/3/library/asyncio-dev.html)
# Note this makes things slower and might consume much memory.
30 changes: 15 additions & 15 deletions distributed/__init__.py
@@ -1,42 +1,42 @@
from . import config
from . import config # isort:skip; load distributed configuration first
import dask
from dask.config import config

from ._version import get_versions
from .actor import Actor, ActorFuture
from .core import connect, rpc, Status
from .deploy import LocalCluster, Adaptive, SpecCluster, SSHCluster
from .diagnostics.progressbar import progress
from .diagnostics.plugin import WorkerPlugin, SchedulerPlugin, PipInstall
from .client import (
    Client,
    Executor,
    CompatibleExecutor,
    wait,
    Executor,
    Future,
    as_completed,
    default_client,
    fire_and_forget,
    Future,
    futures_of,
    get_task_metadata,
    get_task_stream,
    performance_report,
    get_task_metadata,
    wait,
)
from .core import Status, connect, rpc
from .deploy import Adaptive, LocalCluster, SpecCluster, SSHCluster
from .diagnostics.plugin import PipInstall, SchedulerPlugin, WorkerPlugin
from .diagnostics.progressbar import progress
from .event import Event
from .lock import Lock
from .multi_lock import MultiLock
from .nanny import Nanny
from .pubsub import Pub, Sub
from .queues import Queue
from .scheduler import Scheduler
from .security import Security
from .semaphore import Semaphore
from .event import Event
from .scheduler import Scheduler
from .threadpoolexecutor import rejoin
from .utils import sync, TimeoutError, CancelledError
from .utils import CancelledError, TimeoutError, sync
from .variable import Variable
from .worker import Worker, get_worker, get_client, secede, Reschedule
from .worker import Reschedule, Worker, get_client, get_worker, secede
from .worker_client import local_client, worker_client

from ._version import get_versions

versions = get_versions()
__version__ = versions["version"]
__git_revision__ = versions["full-revisionid"]
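
The one intentional exception to the sorting is the configuration import at the top of `distributed/__init__.py`, which carries an `isort:skip` action comment so that the distributed configuration is loaded before anything else. A minimal sketch of how that directive behaves, again assuming isort 5.x (isort only rewrites text, so the package-relative imports in the string are fine here):

```python
# Minimal sketch of the "isort:skip" action comment used in distributed/__init__.py:
# isort leaves any import line carrying this comment where it is and sorts the rest.
import isort

src = """\
from . import config  # isort:skip; load distributed configuration first
import dask
from dask.config import config
from .worker import Worker
from .client import Client
"""

print(isort.code(src))
# The config import stays on the first line; the dask imports follow, and the
# package-relative imports come last, alphabetized (Client before Worker),
# mirroring the reordered distributed/__init__.py above.
```
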
5 changes: 3 additions & 2 deletions distributed/_concurrent_futures_thread.py
@@ -8,16 +8,17 @@
__author__ = "Brian Quinlan (brian@sweetapp.com)"

import atexit
from concurrent.futures import _base
import itertools
from concurrent.futures import _base

try:
import queue
except ImportError:
import Queue as queue

import os
import threading
import weakref
import os

# Workers are created as daemon threads. This is done to allow the interpreter
# to exit when there are still idle threads in a ThreadPoolExecutor's thread
12 changes: 5 additions & 7 deletions distributed/_ipython_utils.py
@@ -12,19 +12,17 @@
except ImportError:
# Python 2
import Queue as queue
from subprocess import Popen

import sys
from threading import Thread
from subprocess import Popen
from threading import Event, Thread
from uuid import uuid4

from tornado.gen import TimeoutError
from tornado.ioloop import IOLoop
from threading import Event

from IPython import get_ipython
from jupyter_client import BlockingKernelClient, write_connection_file
from jupyter_core.paths import jupyter_runtime_dir

from tornado.gen import TimeoutError
from tornado.ioloop import IOLoop

OUTPUT_TIMEOUT = 10

2 changes: 1 addition & 1 deletion distributed/actor.py
@@ -5,7 +5,7 @@

from .client import Future, default_client
from .protocol import to_serialize
from .utils import iscoroutinefunction, thread_state, sync
from .utils import iscoroutinefunction, sync, thread_state
from .utils_comm import WrappedKey
from .worker import get_worker

6 changes: 3 additions & 3 deletions distributed/batched.py
@@ -1,14 +1,14 @@
from collections import deque
import logging
from collections import deque

import dask
from tornado import gen, locks
from tornado.ioloop import IOLoop

import dask

from .core import CommClosedError
from .utils import parse_timedelta


logger = logging.getLogger(__name__)


3 changes: 1 addition & 2 deletions distributed/cfexecutor.py
@@ -2,11 +2,10 @@
import weakref

from tlz import merge

from tornado import gen

from .metrics import time
from .utils import sync, TimeoutError, parse_timedelta
from .utils import TimeoutError, parse_timedelta, sync


@gen.coroutine
5 changes: 2 additions & 3 deletions distributed/cli/dask_scheduler.py
@@ -1,18 +1,17 @@
import atexit
import logging
import gc
import logging
import os
import re
import sys
import warnings

import click

from tornado.ioloop import IOLoop

from distributed import Scheduler
from distributed.preloading import validate_preload_argv
from distributed.cli.utils import check_python_3, install_signal_handlers
from distributed.preloading import validate_preload_argv
from distributed.proctitle import (
    enable_proctitle_on_children,
    enable_proctitle_on_current,
3 changes: 2 additions & 1 deletion distributed/cli/dask_spec.py
@@ -1,7 +1,8 @@
import asyncio
import click
import json
import sys

import click
import yaml

from distributed.deploy.spec import run_spec
2 changes: 1 addition & 1 deletion distributed/cli/dask_ssh.py
@@ -1,7 +1,7 @@
from distributed.deploy.old_ssh import SSHCluster
import click

from distributed.cli.utils import check_python_3
from distributed.deploy.old_ssh import SSHCluster


@click.command(
11 changes: 6 additions & 5 deletions distributed/cli/dask_worker.py
@@ -1,16 +1,20 @@
import asyncio
import atexit
from contextlib import suppress
import logging
import gc
import logging
import os
import signal
import sys
import warnings
from contextlib import suppress

import click
from tlz import valmap
from tornado.ioloop import IOLoop, TimeoutError

import dask
from dask.system import CPU_COUNT

from distributed import Nanny
from distributed.cli.utils import check_python_3, install_signal_handlers
from distributed.comm import get_address_host_port
@@ -22,9 +26,6 @@
)
from distributed.utils import import_term

from tlz import valmap
from tornado.ioloop import IOLoop, TimeoutError

logger = logging.getLogger("distributed.dask_worker")


14 changes: 7 additions & 7 deletions distributed/cli/tests/test_dask_scheduler.py
@@ -3,26 +3,26 @@
pytest.importorskip("requests")

import os
import requests
import socket
import shutil
import socket
import sys
import tempfile
from time import sleep

import requests
from click.testing import CliRunner

import distributed
from distributed import Scheduler, Client
import distributed.cli.dask_scheduler
from distributed import Client, Scheduler
from distributed.metrics import time
from distributed.utils import get_ip, get_ip_interface, tmpfile
from distributed.utils_test import loop # noqa: F401
from distributed.utils_test import (
    popen,
    assert_can_connect_from_everywhere_4_6,
    assert_can_connect_locally_4,
    popen,
)
from distributed.utils_test import loop # noqa: F401
from distributed.metrics import time
import distributed.cli.dask_scheduler


def test_defaults(loop):
5 changes: 3 additions & 2 deletions distributed/cli/tests/test_dask_spec.py
@@ -1,10 +1,11 @@
import pytest
import sys

import pytest
import yaml

from distributed import Client
from distributed.utils_test import popen
from distributed.utils_test import cleanup # noqa: F401
from distributed.utils_test import popen


@pytest.mark.asyncio
1 change: 1 addition & 0 deletions distributed/cli/tests/test_dask_ssh.py
@@ -1,4 +1,5 @@
from click.testing import CliRunner

from distributed.cli.dask_ssh import main


19 changes: 13 additions & 6 deletions distributed/cli/tests/test_dask_worker.py
@@ -1,22 +1,29 @@
import asyncio

import pytest
from click.testing import CliRunner

pytest.importorskip("requests")

import requests
import sys
import os
from time import sleep
import sys
from multiprocessing import cpu_count
from time import sleep

import requests

import distributed.cli.dask_worker
from distributed import Client, Scheduler
from distributed.deploy.utils import nprocesses_nthreads
from distributed.metrics import time
from distributed.utils import sync, tmpfile, parse_ports
from distributed.utils_test import popen, terminate_process, wait_for_port
from distributed.utils_test import loop, cleanup # noqa: F401
from distributed.utils import parse_ports, sync, tmpfile
from distributed.utils_test import ( # noqa: F401
    cleanup,
    loop,
    popen,
    terminate_process,
    wait_for_port,
)


def test_nanny_worker_ports(loop):
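
A recurring side effect of the re-sorting, visible above and in several other test modules: imports from the same module are merged into one parenthesized block, so the `# noqa: F401` that previously sat on the fixture-only import now rides on the opening line of the merged import. A sketch of the resulting pattern, using the names shown in the diff above:

```python
# cleanup and loop are pytest fixtures imported only so pytest can discover them;
# the noqa: F401 on the opening line keeps flake8 from flagging them as unused.
from distributed.utils_test import (  # noqa: F401
    cleanup,
    loop,
    popen,
    terminate_process,
    wait_for_port,
)
```
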
9 changes: 4 additions & 5 deletions distributed/cli/tests/test_tls_cli.py
@@ -1,16 +1,15 @@
from time import sleep

from distributed import Client
from distributed.metrics import time
from distributed.utils_test import loop # noqa: F401
from distributed.utils_test import (
    popen,
    get_cert,
    new_config_file,
    tls_security,
    popen,
    tls_only_config,
    tls_security,
)
from distributed.utils_test import loop # noqa: F401
from distributed.metrics import time


ca_file = get_cert("tls-ca-cert.pem")
cert = get_cert("tls-cert.pem")
1 change: 0 additions & 1 deletion distributed/cli/utils.py
@@ -1,6 +1,5 @@
from tornado.ioloop import IOLoop


py3_err_msg = """
Warning: Your terminal does not set locales.
