From bdf478f0347e4f4f7499ec83c5cbab38060817ee Mon Sep 17 00:00:00 2001
From: Mark <75219117+krammnic@users.noreply.github.com>
Date: Tue, 14 Jan 2025 23:54:47 +0300
Subject: [PATCH] Update torchtune/models/phi4/_model_builders.py

Co-authored-by: Felipe Mello
---
 torchtune/models/phi4/_model_builders.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/torchtune/models/phi4/_model_builders.py b/torchtune/models/phi4/_model_builders.py
index 2e1c0c3b9a..81fcb4e46b 100644
--- a/torchtune/models/phi4/_model_builders.py
+++ b/torchtune/models/phi4/_model_builders.py
@@ -75,9 +75,7 @@ def lora_phi4_mini(
     """
     Builder for creating a Phi4 (14b) model with LoRA enabled.

-    The Phi4 defaults are the same as in :func:`~torchtune.models.phi4.phi4_mini`,
-    while LoRA default params are based on
-    https://github.com/tloen/alpaca-lora/blob/8bb8579e403dc78e37fe81ffbb253c413007323f/finetune.py#L41-L43.
+    The Phi4 defaults are the same as in :func:`~torchtune.models.phi4.phi4_mini`.

     Args:
         lora_attn_modules (List[LORA_ATTN_MODULES]): list of which linear layers
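
For context, here is a minimal usage sketch of the builder whose docstring this patch edits. The import path, the choice of "q_proj"/"v_proj" as LORA_ATTN_MODULES values, and keyword-only invocation are assumptions drawn from the diff itself, not verified against the torchtune API:

```python
# Sketch only: assumes lora_phi4_mini is exported from torchtune.models.phi4
# and that "q_proj"/"v_proj" are valid entries for the lora_attn_modules
# parameter shown in the docstring above.
from torchtune.models.phi4 import lora_phi4_mini

# Build the Phi4 (14b) model with LoRA adapters attached to the query and
# value projections; the base weights stay frozen and only the low-rank
# adapter matrices are trained.
model = lora_phi4_mini(lora_attn_modules=["q_proj", "v_proj"])
```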