Apply isort and black reformatting
Signed-off-by: huvunvidia <huvunvidia@users.noreply.github.com>
huvunvidia committed May 10, 2024
1 parent 4032726 · commit d8dc8c9
Showing 2 changed files with 9 additions and 3 deletions.
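
The diff below is the mechanical output of the two formatters named in the commit title. As a rough sketch (not part of this commit), the same kind of rewrite can be reproduced with the Python APIs of isort and black; the toy source string and the names inside it are illustrative only, assuming both packages are installed:

# Illustrative only: reproduce the kind of reformatting this commit applies.
# Assumes the isort and black packages are installed; the snippet is not from the repository.
import black
import isort

source = (
    "model_cfg = MegatronRetrievalModel.restore_from(\n"
    "    model_path, trainer=trainer, return_config=True, save_restore_connector=save_restore_connector,\n"
    ")\n"
    "init_scale = 2 ** 32\n"
)

# isort orders imports first; black then applies its style rules.
formatted = black.format_str(isort.code(source), mode=black.Mode())
print(formatted)
# Black's "magic trailing comma" keeps the call exploded onto one argument per
# line, and spaces around ** are dropped for simple operands (2 ** 32 -> 2**32),
# which matches the shape of the changes in this diff.
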
examples/nlp/language_modeling/megatron_retro_eval_legacy.py: 10 changes (8 additions & 2 deletions)
@@ -77,7 +77,10 @@ def main(cfg) -> None:
 save_restore_connector.model_extracted_dir = model_path

 model_cfg = MegatronRetrievalModel.restore_from(
-    model_path, trainer=trainer, return_config=True, save_restore_connector=save_restore_connector,
+    model_path,
+    trainer=trainer,
+    return_config=True,
+    save_restore_connector=save_restore_connector,
 )

 with open_dict(model_cfg):
@@ -97,7 +100,10 @@ def main(cfg) -> None:
 cfg.pipeline_model_parallel_split_rank = model_cfg.get('pipeline_model_parallel_split_rank', 0)

 model = MegatronRetrievalModel.restore_from(
-    model_path, trainer=trainer, save_restore_connector=save_restore_connector, override_config_path=model_cfg,
+    model_path,
+    trainer=trainer,
+    save_restore_connector=save_restore_connector,
+    override_config_path=model_cfg,
 )

 length_params: LengthParam = {
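
For context, the two restore_from calls touched in this file follow a common NeMo pattern: read the config stored in the .nemo checkpoint, adjust it, then restore the model with the adjusted config. Below is a minimal sketch of that pattern, not the script's actual code; the import paths and the precision override are assumptions, not taken from this commit:

from omegaconf import open_dict

from nemo.collections.nlp.models.language_modeling.megatron_retrieval_model import MegatronRetrievalModel
from nemo.collections.nlp.parts.nlp_overrides import NLPSaveRestoreConnector


def restore_retro_model(model_path, trainer):
    """Restore a RETRO .nemo checkpoint, editing its saved config first (sketch)."""
    connector = NLPSaveRestoreConnector()

    # 1) Read only the config stored inside the .nemo archive.
    model_cfg = MegatronRetrievalModel.restore_from(
        model_path,
        trainer=trainer,
        return_config=True,
        save_restore_connector=connector,
    )

    # 2) Adjust it; open_dict lifts OmegaConf's struct flag so keys can be
    #    added or overridden. The precision override here is illustrative only.
    with open_dict(model_cfg):
        model_cfg.precision = trainer.precision

    # 3) Restore the full model, passing the edited config back in.
    return MegatronRetrievalModel.restore_from(
        model_path,
        trainer=trainer,
        save_restore_connector=connector,
        override_config_path=model_cfg,
    )
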
Second changed file: 2 changes (1 addition & 1 deletion)
@@ -59,7 +59,7 @@ def main(cfg) -> None:
 scaler = None
 if cfg.trainer.precision in [16, '16', '16-mixed']:
     scaler = GradScaler(
-        init_scale=cfg.model.get('native_amp_init_scale', 2 ** 32),
+        init_scale=cfg.model.get('native_amp_init_scale', 2**32),
         growth_interval=cfg.model.get('native_amp_growth_interval', 1000),
         hysteresis=cfg.model.get('hysteresis', 2),
     )
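
The only edit in this hunk is black hugging the power operator when both operands are simple literals. A quick illustrative check (not from the commit) that the value itself is unchanged:

# 2 ** 32 and 2**32 are the same expression; only the spacing differs.
assert 2 ** 32 == 2**32 == 4294967296
print("identical:", 2**32)
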
