1 parent 68e1ee0 commit 5394ad7
vllm/entrypoints/openai/serving_chat.py
@@ -1111,7 +1111,8 @@ def _create_chat_logprobs(
             return_as_token_id is not None else self.return_tokens_as_token_ids
         for i, token_id in enumerate(token_ids):
             step_top_logprobs = top_logprobs[i]
-            if step_top_logprobs is None:
+            if step_top_logprobs is None or step_top_logprobs.get(
+                    token_id) is None:
                 token = tokenizer.decode(token_id)
                 if should_return_as_token_id:
                     token = f"token_id:{token_id}"