Skip to content

Commit f38c4ad

Browse files
authored
better logging and help (#9203)
1 parent e0e255b commit f38c4ad

2 files changed

Lines changed: 3 additions & 1 deletion

File tree

examples/seq2seq/finetune_trainer.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -98,6 +98,7 @@ class DataTrainingArguments:
 98   98          metadata={
 99   99              "help": "The maximum total sequence length for validation target text after tokenization. Sequences longer "
100  100              "than this will be truncated, sequences shorter will be padded."
     101+             " This argument is also used to override the ``max_length`` param of ``model.generate``, which is used during ``evaluate`` and ``predict``"
101  102          },
102  103      )
103  104      test_max_target_length: Optional[int] = field(

examples/seq2seq/utils.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -434,7 +434,8 @@ def use_task_specific_params(model, task):
434  434
435  435      if task_specific_params is not None:
436  436          pars = task_specific_params.get(task, {})
437     -        logger.info(f"using task specific params for {task}: {pars}")
     437+        logger.info(f"setting model.config to task specific params for {task}:\n {pars}")
     438+        logger.info("note: command line args may override some of these")
438  439          model.config.update(pars)
439  440
440  441
0 commit comments

Comments (0)