1 file changed: +4 −3 lines changed

@@ -360,7 +360,7 @@ The [`~Dataset.map`] also works with the rank of the process if you set `with_rank=True`:
 360 360   >>> dataset = load_dataset("fka/awesome-chatgpt-prompts", split="train")
 361 361   >>>
 362 362   >>> # Get an example model and its tokenizer
 363     - >>> model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen1.5-0.5B-Chat")
     363 + >>> model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen1.5-0.5B-Chat").eval()
 364 364   >>> tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen1.5-0.5B-Chat")
 365 365   >>>
 366 366   >>> def gpu_computation(batch, rank):
@@ -378,8 +378,9 @@ The [`~Dataset.map`] also works with the rank of the process if you set `with_rank=True`:
 378 378   ...         tokenize=False,
 379 379   ...         add_generation_prompt=True
 380 380   ...     ) for chat in chats]
 381     - ...     model_inputs = tokenizer(texts, return_tensors="pt").to(device)
 382     - ...     outputs = model.generate(**model_inputs, max_new_tokens=512)
     381 + ...     model_inputs = tokenizer(texts, padding=True, return_tensors="pt").to(device)
     382 + ...     with torch.no_grad():
     383 + ...         outputs = model.generate(**model_inputs, max_new_tokens=512)
 383 384   ...     batch["output"] = tokenizer.batch_decode(outputs, skip_special_tokens=True)
 384 385   ...     return batch
 385 386   >>>
You can’t perform that action at this time.
0 commit comments