small fixes to layerwise gradient scaler (#910)
Anupam Bhatnagar committed Jan 14, 2022
1 parent 39e7821 commit 10d21b3
Showing 2 changed files with 3 additions and 1 deletion.
2 changes: 1 addition & 1 deletion fairscale/optim/layerwise_gradient_scaler.py
@@ -200,7 +200,7 @@ def unscale(self) -> None:
         layers_with_finite_values = self._get_layers_with_finite_values()
         for item in layers_with_finite_values:
             for param_name, param in item.layer.named_parameters():
-                if hasattr(param, "grad"):
+                if hasattr(param, "grad") and param.grad is not None:
                     logging.debug("%s scaling down %s by %s" % (item.layer_name, param_name, 1.0 / item.scaling_factor))
                     param.grad.mul_(1.0 / item.scaling_factor)

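Why the extra None check matters: in PyTorch, every nn.Parameter always exposes a .grad attribute, which stays None until a backward pass populates it, so hasattr(param, "grad") alone filters nothing and the in-place mul_ would raise on a None gradient. A minimal standalone sketch (not fairscale code):

    import torch
    import torch.nn as nn

    layer = nn.Linear(4, 4)
    param = next(layer.parameters())

    # hasattr() is always True for Parameters; .grad is None before backward.
    assert hasattr(param, "grad") and param.grad is None

    # Calling param.grad.mul_(0.5) here would raise:
    # AttributeError: 'NoneType' object has no attribute 'mul_'

    layer(torch.randn(2, 4)).sum().backward()
    if hasattr(param, "grad") and param.grad is not None:
        param.grad.mul_(0.5)  # safe: the gradient now exists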
2 changes: 2 additions & 0 deletions tests/optim/test_layerwise_gradient_scaler.py
@@ -1,4 +1,5 @@
 import logging
+import os
 from typing import Any, List, Tuple, Union
 
 import numpy as np
@@ -200,6 +201,7 @@ def test_vision_model() -> None:
     # Remove randomness from various sources while testing.
     torch.use_deterministic_algorithms(True)  # type: ignore
     # set environment variable in CircleCI for test to pass: CUBLAS_WORKSPACE_CONFIG = :4096:8
+    os.environ["CUBLAS_WORKSPACE_CONFIG"] = ":4096:8"
 
     m1 = SimpleConvNet()
     m2 = SimpleConvNet()
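Background on the added environment variable (context, not part of the diff): when torch.use_deterministic_algorithms(True) is active, PyTorch refuses to run certain cuBLAS-backed CUDA operations unless CUBLAS_WORKSPACE_CONFIG is set to ":4096:8" or ":16:8", so the test now exports it itself rather than relying on CI configuration. A minimal standalone sketch:

    import os
    import torch

    # Must be set before the first cuBLAS call; ":16:8" is the other accepted value.
    os.environ["CUBLAS_WORKSPACE_CONFIG"] = ":4096:8"
    torch.use_deterministic_algorithms(True)

    if torch.cuda.is_available():
        x = torch.randn(4, 4, device="cuda")
        # Without the env var above, this matmul raises a RuntimeError
        # under deterministic mode on CUDA >= 10.2.
        y = x @ x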
