
Commit ce4558b

fix (#2051)
1 parent b1ec22d commit ce4558b

File tree

1 file changed: +1 -1 lines changed

unsloth/models/llama.py

Lines changed: 1 addition & 1 deletion
@@ -1548,7 +1548,7 @@ def unsloth_fast_generate(
     if "input_ids" in kwargs and kwargs["input_ids"] is not None and "max_new_tokens" in kwargs:
         if kwargs["input_ids"].shape[-1] + kwargs["max_new_tokens"] > self.config.max_position_embeddings:
             raise ValueError(
-                f'Unsloth: input length {kwargs["input_ids"].shape[-1]} + max_new_tokens {kwargs["max_new_tokens"]} exceeds the maximum sequence length of {model.config.max_position_embeddings}!\n'\
+                f'Unsloth: input length {kwargs["input_ids"].shape[-1]} + max_new_tokens {kwargs["max_new_tokens"]} exceeds the maximum sequence length of {self.config.max_position_embeddings}!\n'\
                 'You will need to do long context extension by increasing the `max_seq_length` in `FastLanguageModel.from_pretrained`.'
             )
     pass
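
The one-line change swaps model.config.max_position_embeddings for self.config.max_position_embeddings inside the error message. unsloth_fast_generate is bound to the model as a method, so self is in scope there; the bare name model presumably was not, which would turn this guard into a NameError instead of the intended ValueError. Below is a minimal, self-contained sketch of the same guard pattern; TinyConfig, TinyModel, and fast_generate are illustrative stand-ins, not the Unsloth API.

# A minimal sketch (not the Unsloth source) of the guard this commit fixes.
# TinyConfig, TinyModel, and fast_generate are hypothetical names.
from types import SimpleNamespace


class TinyConfig:
    max_position_embeddings = 2048  # hypothetical context limit


class TinyModel:
    config = TinyConfig()

    def fast_generate(self, **kwargs):
        input_ids = kwargs.get("input_ids")
        max_new_tokens = kwargs.get("max_new_tokens")
        if input_ids is not None and max_new_tokens is not None:
            if input_ids.shape[-1] + max_new_tokens > self.config.max_position_embeddings:
                # Reading the limit from self.config (not an undefined `model`)
                # keeps this a clean ValueError rather than a NameError raised
                # while formatting the message.
                raise ValueError(
                    f"input length {input_ids.shape[-1]} + max_new_tokens "
                    f"{max_new_tokens} exceeds the maximum sequence length of "
                    f"{self.config.max_position_embeddings}"
                )
        # ... real generation would continue here


# Stand-in for a tokenized batch; only .shape[-1] matters for the check.
prompt = SimpleNamespace(shape=(1, 2000))
try:
    TinyModel().fast_generate(input_ids=prompt, max_new_tokens=100)
except ValueError as err:
    print(err)

Running the sketch prints the ValueError message once the prompt length plus max_new_tokens crosses the configured limit (2000 + 100 > 2048 here).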
