diff --git a/train.py b/train.py
index e93917bbc..eb45b4247 100644
--- a/train.py
+++ b/train.py
@@ -71,7 +71,8 @@ def prepare_directories_and_logger(output_directory, log_directory, rank):
 
 
 def load_model(hparams):
-    model = Tacotron2(hparams).cuda()
+    # Build the model once, then move it to GPU only when one is available.
+    model = Tacotron2(hparams).to('cuda' if torch.cuda.is_available() else 'cpu')
     if hparams.fp16_run:
         model.decoder.attention_layer.score_mask_value = finfo('float16').min
 
diff --git a/utils.py b/utils.py
index c843d95d6..39c12d96e 100644
--- a/utils.py
+++ b/utils.py
@@ -5,7 +5,9 @@
 
 def get_mask_from_lengths(lengths):
     max_len = torch.max(lengths).item()
-    ids = torch.arange(0, max_len, out=torch.cuda.LongTensor(max_len))
+    # Allocate `ids` on the same device as `lengths` so the comparison below
+    # is valid on both CPU and GPU; avoids the deprecated `out=` pre-allocation.
+    ids = torch.arange(0, max_len, device=lengths.device, dtype=torch.int64)
     mask = (ids < lengths.unsqueeze(1)).byte()
     return mask