From 2c2d7efcda0b39f7ce32e444c8dcd3100f8ceff5 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Thu, 15 Feb 2024 17:26:48 -0700
Subject: [PATCH] remove assertion (#8302) (#8321)

Signed-off-by: dimapihtar
Co-authored-by: Dmytro Pykhtar <37850217+dimapihtar@users.noreply.github.com>
---
 .../nlp/models/language_modeling/megatron_gpt_model.py | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py b/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
index 3bdc1182dda31..0a9c65be42ab7 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
@@ -1303,11 +1303,6 @@ def setup(self, stage=None):
         self.init_global_step = self.trainer.global_step
 
         if self.rampup_batch_size:
-            optimizer = self.cfg.optim.get('name', None)
-            assert (
-                optimizer == 'fused_adam'
-            ), f'{optimizer} optimizer is not supported yet with rampup batch size. Please, use fused_adam optimizer instead.'
-
             num_microbatch_calculator = apex.transformer.pipeline_parallel.utils._GLOBAL_NUM_MICROBATCHES_CALCULATOR
             num_microbatch_calculator.update(self.init_consumed_samples, consistency_check=False)
             self.prev_consumed_samples = self.init_consumed_samples
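
For context, a minimal standalone sketch of the guard this patch deletes. Only the config lookup, the assertion, and its error message come from the removed lines; the helper name, the rampup schedule, and the alternative optimizer name below are illustrative assumptions, not part of the patch.

```python
# Sketch of the pre-patch check, reproduced outside the model class.
# Before this patch, setup() raised an AssertionError for any optimizer
# other than 'fused_adam' whenever rampup_batch_size was enabled; after
# the patch, no such check is made before updating the Apex microbatch
# calculator.

def check_rampup_optimizer(optim_name, rampup_batch_size):
    """Reproduces the removed pre-patch assertion (hypothetical helper)."""
    if rampup_batch_size:
        assert (
            optim_name == 'fused_adam'
        ), f'{optim_name} optimizer is not supported yet with rampup batch size. Please, use fused_adam optimizer instead.'


if __name__ == '__main__':
    rampup = [16, 8, 1000]  # illustrative rampup schedule, not from the patch

    # Passed both before and after the patch.
    check_rampup_optimizer('fused_adam', rampup)

    # Rejected before the patch; with the assertion removed, setup() simply
    # proceeds to the microbatch calculator update for any optimizer name.
    try:
        check_rampup_optimizer('distributed_fused_adam', rampup)
    except AssertionError as err:
        print(f'pre-patch behavior: {err}')
```

After the patch, the rampup branch of setup() goes straight to updating the Apex microbatch calculator with the initial consumed-samples count, regardless of which optimizer is configured.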