Change accelerator to 'auto' in nlp_checkpoint_port.py #7747

Merged · 5 commits · Oct 19, 2023
6 changes: 3 additions & 3 deletions nemo/core/config/pytorch_lightning.py
@@ -54,7 +54,7 @@ class TrainerConfig:
     limit_test_batches: Any = 1.0
     val_check_interval: Any = 1.0
     log_every_n_steps: int = 50
-    accelerator: Optional[str] = None
+    accelerator: Optional[str] = 'auto'
     sync_batchnorm: bool = False
     precision: Any = 32
     num_sanity_val_steps: int = 2
@@ -68,8 +68,8 @@ class TrainerConfig:
     gradient_clip_algorithm: str = 'norm'
     max_time: Optional[Any] = None  # can be one of Union[str, timedelta, Dict[str, int], None]
     reload_dataloaders_every_n_epochs: int = 0
-    devices: Any = None
-    strategy: Any = None
+    devices: Any = 'auto'
+    strategy: Any = 'auto'
     enable_checkpointing: bool = False
     enable_model_summary: bool = True
     inference_mode: bool = True
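For reference, these new defaults line up with PyTorch Lightning 2.x, where accelerator, devices, and strategy all accept the string 'auto' and the concrete hardware and strategy are resolved when the Trainer is constructed. A minimal sketch of what the defaults amount to (not part of this PR, assuming Lightning 2.x is installed):

import pytorch_lightning as pl

# With 'auto', Lightning selects the best available accelerator (e.g. CUDA,
# MPS, or CPU), all visible devices, and a compatible strategy at runtime.
trainer = pl.Trainer(accelerator='auto', devices='auto', strategy='auto')

print(trainer.accelerator)   # resolved accelerator instance, e.g. CUDAAccelerator
print(trainer.num_devices)   # number of devices Lightning decided to use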
4 changes: 3 additions & 1 deletion scripts/export.py
@@ -30,6 +30,7 @@
 import sys

 import torch
+from omegaconf import OmegaConf
 from pytorch_lightning import Trainer

 import nemo
@@ -103,7 +104,8 @@ def nemo_export(argv):
         logger=False,
         enable_checkpointing=False,
     )
-    trainer = Trainer(cfg_trainer)
+    cfg_trainer = OmegaConf.to_container(OmegaConf.create(cfg_trainer))
+    trainer = Trainer(**cfg_trainer)

     logging.info("Restoring NeMo model from '{}'".format(nemo_in))
     try:
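The conversion above reflects that pytorch_lightning.Trainer expects keyword arguments rather than a single config object, so passing cfg_trainer positionally no longer works; the structured config is first turned into a plain dict via OmegaConf and then unpacked. A minimal sketch of the pattern, assuming the TrainerConfig dataclass from nemo.core.config.pytorch_lightning:

from omegaconf import OmegaConf
from pytorch_lightning import Trainer

from nemo.core.config.pytorch_lightning import TrainerConfig

# Build the structured config, overriding only what this script needs.
cfg_trainer = TrainerConfig(devices=1, num_nodes=1, logger=False, enable_checkpointing=False)

# OmegaConf.create() accepts a dataclass instance; to_container() turns the
# resulting DictConfig into a plain dict that can be unpacked into Trainer kwargs.
trainer_kwargs = OmegaConf.to_container(OmegaConf.create(cfg_trainer))
trainer = Trainer(**trainer_kwargs)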
5 changes: 3 additions & 2 deletions scripts/nemo_legacy_import/nlp_checkpoint_port.py
@@ -82,13 +82,14 @@ def nemo_convert(argv):
     # Create a PL trainer object which is required for restoring Megatron models
     cfg_trainer = TrainerConfig(
         devices=1,
-        accelerator="ddp",
+        accelerator='auto',
         num_nodes=1,
         # Need to set the following two to False as ExpManager will take care of them differently.
         logger=False,
         enable_checkpointing=False,
     )
-    trainer = pl.Trainer(cfg_trainer)
+    cfg_trainer = OmegaConf.to_container(OmegaConf.create(cfg_trainer))
+    trainer = pl.Trainer(**cfg_trainer)

     logging.info("Restoring NeMo model from '{}'".format(nemo_in))
     try:
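The accelerator value itself also had to change: in recent PyTorch Lightning releases, 'ddp' is a strategy rather than an accelerator, so accelerator="ddp" is rejected when the Trainer is constructed. A short sketch of the distinction, again assuming Lightning 2.x:

import pytorch_lightning as pl

# Old style (no longer accepted): 'ddp' is not a valid accelerator value and
# raises a configuration error in recent Lightning versions.
# trainer = pl.Trainer(accelerator="ddp")

# New style: let Lightning pick the hardware; request DDP explicitly through
# `strategy` only when multi-device training is actually wanted.
trainer = pl.Trainer(accelerator='auto', devices=1, strategy='auto')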