Only really import packages when needed (#410)
* Only really import packages when needed

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
basnijholt and pre-commit-ci[bot] committed Apr 10, 2023
1 parent 815fd31 commit 295516b
Showing 10 changed files with 63 additions and 48 deletions.
2 changes: 1 addition & 1 deletion adaptive/learner/triangulation.py
@@ -16,9 +16,9 @@
ones,
square,
subtract,
zeros,
)
from numpy import sum as np_sum
from numpy import zeros
from numpy.linalg import det as ndet
from numpy.linalg import matrix_rank, norm, slogdet, solve

57 changes: 32 additions & 25 deletions adaptive/runner.py
@@ -14,6 +14,7 @@
import warnings
from contextlib import suppress
from datetime import datetime, timedelta
from importlib.util import find_spec
from typing import TYPE_CHECKING, Any, Callable, Union

import loky
@@ -49,35 +50,32 @@
except ImportError:
from typing_extensions import Literal

try:
import ipyparallel
from ipyparallel.client.asyncresult import AsyncResult

with_ipyparallel = True
ExecutorTypes: TypeAlias = Union[
ExecutorTypes, ipyparallel.Client, ipyparallel.client.view.ViewExecutor
]
FutureTypes: TypeAlias = Union[FutureTypes, AsyncResult]
except ModuleNotFoundError:
with_ipyparallel = False
with_ipyparallel = find_spec("ipyparallel") is not None
with_distributed = find_spec("distributed") is not None
with_mpi4py = find_spec("mpi4py") is not None

try:
import distributed
if TYPE_CHECKING:
if with_distributed:
import distributed

with_distributed = True
ExecutorTypes: TypeAlias = Union[
ExecutorTypes, distributed.Client, distributed.cfexecutor.ClientExecutor
]
except ModuleNotFoundError:
with_distributed = False
ExecutorTypes: TypeAlias = Union[
ExecutorTypes, distributed.Client, distributed.cfexecutor.ClientExecutor
]

try:
import mpi4py.futures
if with_mpi4py:
import mpi4py.futures

with_mpi4py = True
ExecutorTypes: TypeAlias = Union[ExecutorTypes, mpi4py.futures.MPIPoolExecutor]
except ModuleNotFoundError:
with_mpi4py = False
ExecutorTypes: TypeAlias = Union[ExecutorTypes, mpi4py.futures.MPIPoolExecutor]

if with_ipyparallel:
import ipyparallel
from ipyparallel.client.asyncresult import AsyncResult

ExecutorTypes: TypeAlias = Union[
ExecutorTypes, ipyparallel.Client, ipyparallel.client.view.ViewExecutor
]
FutureTypes: TypeAlias = Union[FutureTypes, AsyncResult]

with suppress(ModuleNotFoundError):
import uvloop
@@ -934,9 +932,12 @@ def replay_log(


def _ensure_executor(executor: ExecutorTypes | None) -> concurrent.Executor:
if with_ipyparallel:
import ipyparallel
if with_distributed:
import distributed
if executor is None:
executor = _default_executor()

if isinstance(executor, concurrent.Executor):
return executor
elif with_ipyparallel and isinstance(executor, ipyparallel.Client):
@@ -955,6 +956,12 @@ def _get_ncores(
ex: (ExecutorTypes),
) -> int:
"""Return the maximum number of cores that an executor can use."""
if with_ipyparallel:
import ipyparallel
if with_distributed:
import distributed
if with_mpi4py:
import mpi4py.futures
if with_ipyparallel and isinstance(ex, ipyparallel.client.view.ViewExecutor):
return len(ex.view)
elif isinstance(
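The core pattern of this commit is visible in the runner.py hunks above: probe for optional dependencies with `importlib.util.find_spec` (which locates a package without importing it), keep imports that only serve type annotations behind `if TYPE_CHECKING:`, and do the real import inside the functions that actually use the package. A minimal, self-contained sketch of that pattern follows; the helper name and the `distributed`-specific checks are illustrative, not adaptive's actual API:

```python
from __future__ import annotations

import concurrent.futures
from importlib.util import find_spec
from typing import TYPE_CHECKING, Union

# Cheap presence check: find_spec finds the package without importing it.
with_distributed = find_spec("distributed") is not None

ExecutorTypes = concurrent.futures.Executor  # base alias, always available

if TYPE_CHECKING and with_distributed:
    # Only type checkers evaluate this block, so it adds no runtime cost.
    import distributed

    ExecutorTypes = Union[ExecutorTypes, distributed.Client]


def ensure_executor(executor):
    """Hypothetical helper mirroring _ensure_executor: defer the import."""
    if isinstance(executor, concurrent.futures.Executor):
        return executor
    if with_distributed:
        import distributed  # imported only here, and only if installed

        if isinstance(executor, distributed.Client):
            # Wrap the Client in a concurrent.futures-compatible executor.
            return executor.get_executor()
    raise TypeError(f"Cannot use {executor!r} as an executor.")
```

The upshot is that importing adaptive no longer pays the startup cost of ipyparallel, distributed, or mpi4py unless an executor from one of those packages is actually used.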
2 changes: 1 addition & 1 deletion docs/logo.py
@@ -3,8 +3,8 @@

import holoviews
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.tri as mtri
import numpy as np
from PIL import Image, ImageDraw

sys.path.insert(0, os.path.abspath("..")) # to get adaptive on the path
6 changes: 4 additions & 2 deletions docs/source/algorithms_and_examples.md
@@ -46,11 +46,13 @@ Click on the *Play* {fa}`play` button or move the sliders.
:tags: [hide-cell]
import itertools
import adaptive
from adaptive.learner.learner1D import uniform_loss, default_loss
import holoviews as hv
import numpy as np
import adaptive
from adaptive.learner.learner1D import default_loss, uniform_loss
adaptive.notebook_extension()
hv.output(holomap="scrubber")
```
14 changes: 8 additions & 6 deletions docs/source/tutorial/tutorial.AverageLearner1D.md
@@ -1,14 +1,15 @@
---
kernelspec:
name: python3
display_name: python3
jupytext:
text_representation:
extension: .md
format_name: myst
format_version: '0.13'
jupytext_version: 1.13.8
format_version: 0.13
jupytext_version: 1.14.5
kernelspec:
display_name: python3
name: python3
---

# Tutorial {class}`~adaptive.AverageLearner1D`

```{note}
@@ -23,9 +24,10 @@ import adaptive
adaptive.notebook_extension()
from functools import partial
import holoviews as hv
import numpy as np
from functools import partial
```

## General use
5 changes: 3 additions & 2 deletions docs/source/tutorial/tutorial.BalancingLearner.md
@@ -24,10 +24,11 @@ import adaptive
adaptive.notebook_extension()
import random
from functools import partial
import holoviews as hv
import numpy as np
from functools import partial
import random
```

The balancing learner is a “meta-learner” that takes a list of learners.
9 changes: 5 additions & 4 deletions docs/source/tutorial/tutorial.Learner1D.md
@@ -25,9 +25,10 @@ import adaptive
adaptive.notebook_extension()
import numpy as np
from functools import partial
import random
from functools import partial
import numpy as np
```

## scalar output: `f:ℝ → ℝ`
@@ -41,8 +42,8 @@ offset = random.uniform(-0.5, 0.5)
def f(x, offset=offset, wait=True):
from time import sleep
from random import random
from time import sleep
a = 0.01
if wait:
@@ -155,8 +156,8 @@ To do this, you need to tell the learner to look at the curvature by specifying
```{code-cell} ipython3
from adaptive.learner.learner1D import (
curvature_loss_function,
uniform_loss,
default_loss,
uniform_loss,
)
curvature_loss = curvature_loss_function()
10 changes: 6 additions & 4 deletions docs/source/tutorial/tutorial.Learner2D.md
@@ -20,11 +20,12 @@ Download the notebook in order to see the real behaviour. [^download]
```{code-cell} ipython3
:tags: [hide-cell]
import adaptive
from functools import partial
import holoviews as hv
import numpy as np
from functools import partial
import adaptive
adaptive.notebook_extension()
```
@@ -33,9 +34,10 @@ Besides 1D functions, we can also learn 2D functions: $f: ℝ^2 → ℝ$.

```{code-cell} ipython3
def ring(xy, wait=True):
import numpy as np
from time import sleep
from random import random
from time import sleep
import numpy as np
if wait:
sleep(random() / 10)
4 changes: 2 additions & 2 deletions docs/source/tutorial/tutorial.custom_loss.md
@@ -25,8 +25,8 @@ import adaptive
adaptive.notebook_extension()
# Import modules that are used in multiple cells
import numpy as np
import holoviews as hv
import numpy as np
```

{class}`~adaptive.Learner1D` and {class}`~adaptive.Learner2D` both work on the principle of subdividing their domain into subdomains, and assigning a property to each subdomain, which we call the *loss*.
@@ -137,7 +137,7 @@ def resolution_loss_function(min_distance=0, max_distance=1):
because the total area is normalized to 1."""
def resolution_loss(ip):
from adaptive.learner.learner2D import default_loss, areas
from adaptive.learner.learner2D import areas, default_loss
loss = default_loss(ip)
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -98,7 +98,7 @@ python_version = "3.7"
[tool.ruff]
line-length = 150
target-version = "py37"
select = ["B", "C", "E", "F", "W", "T", "B9"]
select = ["B", "C", "E", "F", "W", "T", "B9", "I"]
ignore = [
"T20", # flake8-print
"ANN101", # Missing type annotation for {name} in method
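The new "I" entry in the Ruff `select` list enables the isort rules, which is what produced the purely mechanical import reorderings that pre-commit.ci applied across the docs and source files above. A small illustration of what those rules enforce, using the imports from docs/logo.py:

```python
# Before (flagged by Ruff's isort rules: third-party imports not sorted):
#   import numpy as np
#   import matplotlib.tri as mtri
# After (alphabetical within the third-party group, as in docs/logo.py above):
import matplotlib.tri as mtri
import numpy as np
```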
