From a8c8c7d42c710027fb1ecadb345d8193cd66232f Mon Sep 17 00:00:00 2001
From: Sergii Dymchenko
Date: Thu, 1 Aug 2024 16:57:53 -0700
Subject: [PATCH] Use weights_only for loading

---
 sample.py | 2 +-
 train.py  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/sample.py b/sample.py
index d25d6e0861..ab664ec239 100644
--- a/sample.py
+++ b/sample.py
@@ -35,7 +35,7 @@
 if init_from == 'resume':
     # init from a model saved in a specific directory
     ckpt_path = os.path.join(out_dir, 'ckpt.pt')
-    checkpoint = torch.load(ckpt_path, map_location=device)
+    checkpoint = torch.load(ckpt_path, map_location=device, weights_only=True)
     gptconf = GPTConfig(**checkpoint['model_args'])
     model = GPT(gptconf)
     state_dict = checkpoint['model']
diff --git a/train.py b/train.py
index 951bda9914..3a58d97f4f 100644
--- a/train.py
+++ b/train.py
@@ -159,7 +159,7 @@ def get_batch(split):
     print(f"Resuming training from {out_dir}")
     # resume training from a checkpoint.
     ckpt_path = os.path.join(out_dir, 'ckpt.pt')
-    checkpoint = torch.load(ckpt_path, map_location=device)
+    checkpoint = torch.load(ckpt_path, map_location=device, weights_only=True)
     checkpoint_model_args = checkpoint['model_args']
     # force these config attributes to be equal otherwise we can't even resume training
     # the rest of the attributes (e.g. dropout) can stay as desired from command line
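
Note on the change: weights_only=True makes torch.load use a restricted unpickler that only reconstructs tensors and plain Python containers, instead of the general pickle machinery, which can execute arbitrary code embedded in a malicious file. The ckpt.pt written by train.py is a dict of state_dict tensors and primitive config values, so it loads cleanly under the restriction. Below is a minimal sketch of the behavior; the nn.Linear toy model and file name are illustrative stand-ins, not part of the patch, and the flag requires PyTorch >= 1.13, where it was introduced.

import torch
import torch.nn as nn

# Toy stand-in for the real checkpoint layout: tensors plus primitive values.
model = nn.Linear(4, 2)
checkpoint = {
    'model': model.state_dict(),                    # tensors: allowed
    'model_args': {'n_layer': 12, 'dropout': 0.0},  # primitives: allowed
    'iter_num': 1000,
}
torch.save(checkpoint, 'ckpt.pt')

# The restricted unpickler accepts this checkpoint as-is; a file that pickled
# arbitrary objects would raise an UnpicklingError here instead of silently
# running attacker-controlled code at load time.
checkpoint = torch.load('ckpt.pt', map_location='cpu', weights_only=True)
model.load_state_dict(checkpoint['model'])

If a checkpoint does contain classes outside that safe subset, PyTorch 2.4+ provides torch.serialization.add_safe_globals to allowlist them explicitly, which is preferable to falling back to weights_only=False for untrusted files.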