1 parent 6682b97 commit 3a28c36
verl/workers/rollout/schemas.py
@@ -181,8 +181,8 @@ def initialize_request(cls, values):
             # Only log the warning to avoid truncating in the middle of generation prompt. Consider raising an
             # error for this case in the future.
             # Ensure batch_data_id exists with default value if not provided
-            if 'batch_data_id' not in values:
-                values['batch_data_id'] = cls.model_fields['batch_data_id'].default
+            if "batch_data_id" not in values:
+                values["batch_data_id"] = cls.model_fields["batch_data_id"].default
             logger.warning(
                 f"Prompt {values['batch_data_id']} has length {values['input_ids'].shape[-1]} "
                 f"which is greater than max_prompt_len {max_prompt_len} after applied chat template with tools."