Commit

handle missing keys and unexpected keys better.
sayakpaul committed Oct 9, 2024
1 parent a15b9b3 commit 58da14b
Showing 1 changed file with 5 additions and 2 deletions.
src/diffusers/loaders/lora_pipeline.py
@@ -1893,6 +1893,7 @@ def load_lora_into_transformer(
         state_dict = {
             k.replace(f"{cls.transformer_name}.", ""): v for k, v in state_dict.items() if k in transformer_keys
         }
+        print(f'{any("transformer_blocks." in k for k in state_dict)=}')

         if len(state_dict.keys()) > 0:
             # check with first key if is not in peft format
@@ -1944,17 +1945,19 @@ def load_lora_into_transformer(
                 # Check only for unexpected keys.
                 unexpected_keys = getattr(incompatible_keys, "unexpected_keys", None)
                 if unexpected_keys:
+                    lora_unexpected_keys = [k for k in unexpected_keys if "lora" in k and adapter_name in k]
                     logger.warning(
                         f"Loading adapter weights from state_dict led to unexpected keys not found in the model: "
-                        f" {unexpected_keys}. "
+                        f" {lora_unexpected_keys}. "
                     )

                 # Filter missing keys specific to the current adapter.
                 missing_keys = getattr(incompatible_keys, "missing_keys", None)
                 if missing_keys:
+                    lora_missing_keys = [k for k in missing_keys if "lora" in k and adapter_name in k]
                     logger.warning(
                         f"Loading adapter weights from state_dict led to missing keys in the model: "
-                        f" {missing_keys}. "
+                        f" {lora_missing_keys}. "
                     )

             # Offload back.
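
The substance of the change is the filtering step: the incompatible_keys report that PEFT returns after loading adapter weights (e.g. from set_peft_model_state_dict) can list every base-model key as missing, so warning on the raw lists buries the keys a user actually cares about. Narrowing to keys that contain "lora" and the current adapter name keeps the warnings actionable. Below is a minimal standalone sketch of that logic; the key names and adapter name are made-up examples, and it gates each warning on the filtered list, a slight tightening over the diff, which still gates on the unfiltered one.

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)

def warn_on_incompatible_keys(unexpected_keys, missing_keys, adapter_name):
    # Keep only keys that belong to the LoRA adapter being loaded; the rest
    # (e.g. base-model weights) are expected to be absent from a LoRA state dict.
    lora_unexpected_keys = [k for k in unexpected_keys if "lora" in k and adapter_name in k]
    if lora_unexpected_keys:
        logger.warning(
            "Loading adapter weights from state_dict led to unexpected keys not found in the model: "
            f"{lora_unexpected_keys}."
        )

    lora_missing_keys = [k for k in missing_keys if "lora" in k and adapter_name in k]
    if lora_missing_keys:
        logger.warning(
            f"Loading adapter weights from state_dict led to missing keys in the model: {lora_missing_keys}."
        )

# Hypothetical key names in PEFT's flat "module.lora_X.adapter.weight" style:
warn_on_incompatible_keys(
    unexpected_keys=["transformer_blocks.0.attn.to_q.lora_A.default_0.weight", "proj_out.base_layer.weight"],
    missing_keys=["transformer_blocks.1.attn.to_k.lora_B.default_0.weight"],
    adapter_name="default_0",
)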
