From d8a7dd8b4442e58bc02421cfdb820e780cb4420e Mon Sep 17 00:00:00 2001
From: Matt Buchovecky
Date: Mon, 3 Mar 2025 11:09:10 -0800
Subject: [PATCH] fixes https://github.com/flairNLP/flair/issues/3623: save
 PEFT config to transformer embeddings and export as param so that it loads
 correctly.

---
 flair/embeddings/transformer.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/flair/embeddings/transformer.py b/flair/embeddings/transformer.py
index fdb16eea28..919297773e 100644
--- a/flair/embeddings/transformer.py
+++ b/flair/embeddings/transformer.py
@@ -1128,6 +1128,7 @@ def is_supported_t5_model(config: PretrainedConfig) -> bool:
             if "Please use the model as it is" not in str(e):
                 raise e
 
+        self.peft_config = peft_config
         if peft_config is not None:
             # add adapters for finetuning
             try:
@@ -1376,6 +1377,7 @@ def to_params(self):
             "subtoken_pooling": self.subtoken_pooling,
             "cls_pooling": self.cls_pooling,
             "config_state_dict": config_dict,
+            "peft_config": self.peft_config,
         }
 
         return model_state
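
A minimal sketch of the round trip this patch is meant to fix, assuming flair's
TransformerWordEmbeddings forwards a peft_config keyword to the TransformerEmbeddings
constructor touched in the first hunk; the model name and LoRA settings below are
illustrative only, not taken from the patch:

    from peft import LoraConfig
    from flair.embeddings import TransformerWordEmbeddings

    # Build embeddings with a LoRA adapter (hypothetical settings).
    embeddings = TransformerWordEmbeddings(
        "bert-base-uncased",
        fine_tune=True,
        peft_config=LoraConfig(r=8, lora_alpha=16),
    )

    # With this patch, the PEFT config is kept on the instance and written
    # into the exported parameters by to_params().
    params = embeddings.to_params()
    assert "peft_config" in params

    # Anything that re-instantiates the embeddings from these params
    # (e.g. loading a saved flair model) can then re-apply the adapter,
    # instead of silently dropping the PEFT setup as in issue 3623.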