Apply isort and black reformatting
Signed-off-by: akoumpa <akoumpa@users.noreply.github.com>
akoumpa committed May 15, 2024
1 parent b1df810 · commit dcd692e
Showing 2 changed files with 3 additions and 1 deletion.
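The two hunks below are formatter output rather than hand edits: per the commit title, isort normalizes import ordering and black normalizes layout (for example, wrapping long call lines, as in the second hunk). For readers who want to reproduce this kind of reformatting, here is a minimal sketch using the public isort.code and black.format_str APIs; the sample source, the 88-character line length, and the string-normalization setting are illustrative assumptions, not the repository's actual configuration.

```python
# Illustrative sketch only: reproducing "isort + black" reformatting programmatically.
# The sample source, line length, and quote handling are assumptions, not NeMo's config.
import black
import isort

src = (
    "from nemo.core.optim.mcore_optim import McoreDistributedOptimizer\n"
    "from nemo.core.optim.distributed_adam import MegatronDistributedFusedAdam\n"
    "\n"
    "result = wrapper.sharded_state_dict(model_sharded_state_dict, is_loading=False, sharding_type='dp_zero_gather_scatter')\n"
)

# isort orders the import block; black then rewraps any line that exceeds
# the configured line length (88 here, chosen only as an example).
sorted_src = isort.code(src)
formatted = black.format_str(
    sorted_src, mode=black.Mode(line_length=88, string_normalization=False)
)
print(formatted)
```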
nemo/collections/nlp/parts/nlp_overrides.py (1 change: 1 addition, 0 deletions)
@@ -79,6 +79,7 @@

    from nemo.core.optim.distributed_adam import MegatronDistributedFusedAdam
    from nemo.core.optim.mcore_optim import McoreDistributedOptimizer

    HAVE_APEX = True

except (ImportError, ModuleNotFoundError):
nemo/core/optim/mcore_optim.py (3 changes: 2 additions, 1 deletion)
@@ -59,7 +59,8 @@ def load_state_dict(self, state_dict):

    def sharded_state_dict(self, model_sharded_state_dict, optimizer_state_dict=None):
        return self.mcore_optimizer.sharded_state_dict(
-            model_sharded_state_dict, is_loading=False, sharding_type='dp_zero_gather_scatter')
+            model_sharded_state_dict, is_loading=False, sharding_type='dp_zero_gather_scatter'
+        )

    def step(self, closure):
        """Clip gradients (if needed) and step the base optimizer.
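The hunk above shows McoreDistributedOptimizer acting as a thin wrapper that delegates sharded_state_dict to the underlying Megatron-Core optimizer. Below is a minimal, self-contained sketch of that delegation pattern; the stub base optimizer and the body of step() are hypothetical placeholders for illustration, not NeMo's actual implementation.

```python
# Minimal sketch of the delegation pattern shown in mcore_optim.py above.
# _StubMcoreOptimizer and the step() body are hypothetical stand-ins, for illustration only.
class _StubMcoreOptimizer:
    """Stand-in for the real Megatron-Core distributed optimizer."""

    def sharded_state_dict(self, model_sharded_state_dict, is_loading=False,
                           sharding_type='dp_zero_gather_scatter'):
        # Return a toy dict so the sketch runs without Megatron-Core installed.
        return {'is_loading': is_loading, 'sharding_type': sharding_type}

    def step(self):
        return None


class OptimizerWrapperSketch:
    def __init__(self, mcore_optimizer):
        self.mcore_optimizer = mcore_optimizer

    def sharded_state_dict(self, model_sharded_state_dict, optimizer_state_dict=None):
        # Same call shape as in the diff, wrapped across lines by black.
        return self.mcore_optimizer.sharded_state_dict(
            model_sharded_state_dict, is_loading=False, sharding_type='dp_zero_gather_scatter'
        )

    def step(self, closure):
        # Hypothetical body: evaluate the closure if one is given, then step the base optimizer.
        loss = closure() if closure is not None else None
        self.mcore_optimizer.step()
        return loss


wrapper = OptimizerWrapperSketch(_StubMcoreOptimizer())
print(wrapper.sharded_state_dict(model_sharded_state_dict={}))
```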
