Skip to content

Commit

Permalink
Remove numpy dependencies in src/lightning/pytorch (#19841)
Browse files Browse the repository at this point in the history
  • Loading branch information
Peiffap committed Jun 4, 2024
1 parent bac82b8 commit 785f15d
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 8 deletions.
6 changes: 3 additions & 3 deletions src/lightning/pytorch/loggers/logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,11 @@

import functools
import operator
import statistics
from abc import ABC
from collections import defaultdict
from typing import Any, Callable, Dict, Mapping, Optional, Sequence

import numpy as np
from typing_extensions import override

from lightning.fabric.loggers import Logger as FabricLogger
Expand Down Expand Up @@ -100,7 +100,7 @@ def method(*args: Any, **kwargs: Any) -> None:
def merge_dicts( # pragma: no cover
dicts: Sequence[Mapping],
agg_key_funcs: Optional[Mapping] = None,
default_func: Callable[[Sequence[float]], float] = np.mean,
default_func: Callable[[Sequence[float]], float] = statistics.mean,
) -> Dict:
"""Merge a sequence of dictionaries into one dictionary by aggregating identical keys with a given function.
Expand All @@ -126,7 +126,7 @@ def merge_dicts( # pragma: no cover
>>> d2 = {'a': 1.1, 'b': 2.2, 'v': 1, 'd': {'d1': 2, 'd2': 3}}
>>> d3 = {'a': 1.1, 'v': 2.3, 'd': {'d3': 3, 'd4': {'d5': 1}}}
>>> dflt_func = min
>>> agg_funcs = {'a': np.mean, 'v': max, 'd': {'d1': sum}}
>>> agg_funcs = {'a': statistics.mean, 'v': max, 'd': {'d1': sum}}
>>> pprint.pprint(merge_dicts([d1, d2, d3], agg_funcs, dflt_func))
{'a': 1.3,
'b': 2.0,
Expand Down
2 changes: 1 addition & 1 deletion src/lightning/pytorch/tuner/lr_finder.py
Original file line number Diff line number Diff line change
Expand Up @@ -190,7 +190,7 @@ def suggestion(self, skip_begin: int = 10, skip_end: int = 1) -> Optional[float]
losses = losses[torch.isfinite(losses)]

if len(losses) < 2:
# computing np.gradient requires at least 2 points
# computing torch.gradient requires at least 2 points
log.error(
"Failed to compute suggestion for learning rate because there are not enough points. Increase the loop"
" iteration limits or the size of your dataset/dataloader."
Expand Down
8 changes: 4 additions & 4 deletions src/lightning/pytorch/utilities/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
# limitations under the License.
"""General utilities."""

import numpy
import torch

from lightning.fabric.utilities import (
LightningEnum,
Expand Down Expand Up @@ -55,6 +55,6 @@
"suggested_max_num_workers",
]

FLOAT16_EPSILON = numpy.finfo(numpy.float16).eps
FLOAT32_EPSILON = numpy.finfo(numpy.float32).eps
FLOAT64_EPSILON = numpy.finfo(numpy.float64).eps
FLOAT16_EPSILON = torch.finfo(torch.float16).eps
FLOAT32_EPSILON = torch.finfo(torch.float32).eps
FLOAT64_EPSILON = torch.finfo(torch.float64).eps

0 comments on commit 785f15d

Please sign in to comment.