Skip to content

Commit 08d0ca4

Browse files
Fix for model_config.head_dim = None
This doesn't happen with a from-source install of transformers ...
1 parent a28321d commit 08d0ca4

File tree

1 file changed

+1
-1
lines changed

1 file changed

+1
-1
lines changed

src/peft/utils/other.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1107,7 +1107,7 @@ def _prepare_prompt_learning_config(peft_config, model_config):
11071107
# For grouped-query attention, see #1901.
11081108
if (peft_config.peft_type == "PREFIX_TUNING") and ("num_key_value_heads" in model_config):
11091109
num_key_value_heads = model_config["num_key_value_heads"]
1110-
if "head_dim" in model_config:
1110+
if getattr(model_config, "head_dim", None) is not None:
11111111
head_dim = model_config["head_dim"]
11121112
else:
11131113
head_dim = peft_config.token_dim // peft_config.num_attention_heads

0 commit comments

Comments
 (0)