From 31a23f520bcbcd4fbf4da7c2e4425d7300e8352f Mon Sep 17 00:00:00 2001
From: Alexandros Koumparoulis <153118171+akoumpa@users.noreply.github.com>
Date: Mon, 20 May 2024 12:42:16 -0700
Subject: [PATCH] fix import (#9240)

* fix import

Signed-off-by: Alexandros Koumparoulis

* Apply isort and black reformatting

Signed-off-by: akoumpa

---------

Signed-off-by: Alexandros Koumparoulis
Signed-off-by: akoumpa
Co-authored-by: akoumpa
---
 .../nlp/language_modeling/megatron_lm_ckpt_to_nemo.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/examples/nlp/language_modeling/megatron_lm_ckpt_to_nemo.py b/examples/nlp/language_modeling/megatron_lm_ckpt_to_nemo.py
index 03d6fd94e4e28..72252a03d5be6 100644
--- a/examples/nlp/language_modeling/megatron_lm_ckpt_to_nemo.py
+++ b/examples/nlp/language_modeling/megatron_lm_ckpt_to_nemo.py
@@ -291,9 +291,9 @@ def load_from_checkpoint(
         **kwargs,
     ):
         """
-    Loads Megatron_LM checkpoints, convert it, with some maintenance of restoration.
-    For documentation, please refer to LightningModule.load_from_checkpoin() documentation.
-    """
+        Loads Megatron_LM checkpoints, convert it, with some maintenance of restoration.
+        For documentation, please refer to LightningModule.load_from_checkpoin() documentation.
+        """
         checkpoint = None
         try:
             cls._set_model_restore_state(is_being_restored=True)
@@ -470,7 +470,7 @@ def convert(local_rank, rank, world_size, args):
         )
         if mcore_output and not args.mcore_input:
             # convert from legacy Megatron-LM to MCore NeMo. Initialize an mcore translation dict
-            from scripts.nlp_language_modeling.convert_nemo_gpt_to_mcore import build_key_mapping
+            from scripts.checkpoint_converters.convert_gpt_nemo_to_mcore import build_key_mapping
 
             mcore_translate = {}
             for k, v in build_key_mapping(model_cfg).items():
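
The second hunk only repoints the build_key_mapping import at the module's
relocated home under scripts/checkpoint_converters. A minimal sketch of how a
downstream script could tolerate both layouts, assuming it may run against
NeMo trees from before or after this move (the try/except fallback is
illustrative only and is not what the patch itself does):

    # Hypothetical compatibility shim: prefer the relocated converter module,
    # fall back to the pre-move path on NeMo trees that predate this patch.
    try:
        from scripts.checkpoint_converters.convert_gpt_nemo_to_mcore import build_key_mapping
    except ImportError:
        from scripts.nlp_language_modeling.convert_nemo_gpt_to_mcore import build_key_mapping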