We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent bc91dc6 · commit eaca5b2 — Copy full SHA for eaca5b2
1 file changed
paddlenlp/trainer/trainer.py
@@ -1013,6 +1013,7 @@ def _inner_training_loop(
1013
self.timers and self.timers("optimizer-step").start()
1014
1015
if self.args.gradient_accumulation_steps > 1 and self._enable_delay_scale_loss():
1016
+ paddle.device.synchronize()
1017
for p in model._layers.parameters():
1018
with paddle.no_grad():
1019
if hasattr(p, "main_grad") and p.main_grad is not None:
0 commit comments