typing: ignore some cases (#21)
Borda committed Jul 3, 2023
Commit f5f43d7 (parent: 8b15a8a)
Showing 1 changed file with 8 additions and 6 deletions.
src/lightning_hivemind/strategy.py (8 additions, 6 deletions):

@@ -34,10 +34,10 @@
     from lightning.pytorch.utilities.model_helpers import is_overridden
     from lightning.pytorch.utilities.rank_zero import rank_zero_warn
 elif module_available("pytorch_lightning") and module_available("lightning_fabric"):
-    from lightning_fabric.strategies.strategy import TBroadcast
-    from lightning_fabric.utilities.types import LRScheduler, ReduceLROnPlateau
-    from pytorch_lightning import Trainer
-    from pytorch_lightning.strategies import Strategy
+    from lightning_fabric.strategies.strategy import TBroadcast  # type: ignore[no-redef]
+    from lightning_fabric.utilities.types import LRScheduler, ReduceLROnPlateau  # type: ignore[no-redef]
+    from pytorch_lightning import Trainer  # type: ignore[assignment]
+    from pytorch_lightning.strategies import Strategy  # type: ignore[assignment]
     from pytorch_lightning.utilities.data import extract_batch_size
     from pytorch_lightning.utilities.exceptions import MisconfigurationException
     from pytorch_lightning.utilities.model_helpers import is_overridden
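Note on the hunk above: the `elif` branch rebinds names that the first import branch already bound, and mypy flags each second binding, so the ignores are applied per line. A minimal sketch of the pattern, with illustrative imports rather than code taken from this repository:

    from lightning_utilities.core.imports import module_available

    if module_available("lightning"):
        from lightning.pytorch import Trainer
    elif module_available("pytorch_lightning"):
        # mypy flags this second binding of `Trainer`: `no-redef` when it treats
        # the line as a plain redefinition, or `assignment` ("incompatible import")
        # when the two branches resolve to different types.
        from pytorch_lightning import Trainer  # type: ignore[assignment]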

@@ -267,7 +267,7 @@ def _disable_zero_grad(self) -> None:
                 " as this would delete the gradients before they are averaged."
             )
         assert lightning_module is not None
-        lightning_module.optimizer_zero_grad = None
+        lightning_module.optimizer_zero_grad = None  # type: ignore[method-assign,assignment]
 
     def _wrap_schedulers(self, opt: "hivemind.Optimizer") -> None:
         # wrap schedulers so that they only update when the hivemind optimizer updates

@@ -315,7 +315,9 @@ def broadcast(self, obj: TBroadcast, src: int = 0) -> TBroadcast:
     def teardown(self) -> None:
         if self._optimizer_zero_grad_original is not None and self.lightning_module is not None:
             # re-enable `optimizer_zero_grad`
-            self.lightning_module.optimizer_zero_grad = self._optimizer_zero_grad_original
+            self.lightning_module.optimizer_zero_grad = (  # type: ignore[method-assign]
+                self._optimizer_zero_grad_original
+            )
 
         if self._opt:
             self._opt.shutdown()
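
Note on the last two hunks: both trip mypy's rule against assigning through a method name. Disabling the hook by assigning `None` triggers both `method-assign` (cannot assign to a method) and `assignment` (`None` does not match the method's type), while restoring the saved callable in `teardown` only needs `method-assign`. A standalone sketch with hypothetical names, not the real strategy classes:

    from typing import Callable, Optional

    class Hooks:
        def optimizer_zero_grad(self) -> None:
            """Stand-in for the real LightningModule hook."""

    hooks = Hooks()
    saved: Optional[Callable[[], None]] = hooks.optimizer_zero_grad

    # Disable the hook: mypy reports `method-assign` and `assignment` here.
    hooks.optimizer_zero_grad = None  # type: ignore[method-assign, assignment]

    # Restore it later: the callable matches the method's type, so only
    # `method-assign` remains to be silenced.
    if saved is not None:
        hooks.optimizer_zero_grad = saved  # type: ignore[method-assign]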
