Skip to content

Commit

Permalink
[minor] Fix an exception raised on the error path (#984)
Browse files Browse the repository at this point in the history
Co-authored-by: Min Xu <min.xu.public@gmail.com>
  • Loading branch information
min-xu-ai and flying-x committed May 12, 2022
1 parent ae8a0bc commit b33e705
Showing 1 changed file with 5 additions and 1 deletion.
6 changes: 5 additions & 1 deletion fairscale/optim/adascale.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,6 +147,10 @@ def __init__(
num_gradients_to_accumulate: int = 1,
debias_ewma: bool = True,
):
# Init the hook_handles list first; otherwise, a partially initialized object may fail in ``__del__``.
self._hook_handles: List[Any] = []

# Init other fields.
self._optimizer = optimizer
self._local_grad_sqr: Optional[torch.Tensor] = None
self._world_size: int = (
Expand Down Expand Up @@ -183,7 +187,7 @@ def __init__(
self._scale = 1.0 # Assign to inform mypy about the typing of this variable.
self.set_scale(self._world_size * self._num_grads_to_accum if scale is None else scale)

self._hook_handles: List[Any] = []
# Safer to register hooks after all init actions are done.
self._hook()

def _hook(self) -> None:
Expand Down

0 comments on commit b33e705

Please sign in to comment.