2 files changed, +4 −4 lines changed

@@ -211,9 +211,9 @@ def broadcast_tensor_dict(
             if tensor.numel() == 0:
                 continue
             if self.device_communicator and tensor.is_cuda:
-                self.device_communicator.broadcast(tensor, src)
+                tensor.copy_(self.device_communicator.broadcast(tensor, src))
             else:
-                self.tcp_store_group.broadcast(tensor, src)
+                tensor.copy_(self.tcp_store_group.broadcast(tensor, src))
 
         return tensor_dict
 
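Why the copy matters: this hunk implies that both `broadcast` helpers return the received tensor rather than filling their argument in place, so dropping the return value leaves `tensor_dict` holding stale data on non-source ranks. The sketch below reproduces the bug and the fix; `FakeCommunicator` is a hypothetical stand-in for illustration, not the project's actual communicator API.

import torch

class FakeCommunicator:
    """Hypothetical stand-in whose broadcast() returns a fresh tensor
    instead of mutating its argument in place."""

    def __init__(self, src_value: float):
        self._src_value = src_value

    def broadcast(self, tensor: torch.Tensor, src: int) -> torch.Tensor:
        # Simulate receiving rank `src`'s data into a newly allocated tensor.
        return torch.full_like(tensor, self._src_value)

comm = FakeCommunicator(src_value=42.0)
tensor_dict = {"x": torch.zeros(4)}

# Buggy pattern (before the patch): the returned tensor is discarded,
# so tensor_dict["x"] still holds zeros.
comm.broadcast(tensor_dict["x"], src=0)
assert tensor_dict["x"].sum().item() == 0.0

# Fixed pattern (after the patch): copy the result back into the original
# storage, so every existing reference to tensor_dict["x"] sees the data.
tensor_dict["x"].copy_(comm.broadcast(tensor_dict["x"], src=0))
assert tensor_dict["x"].sum().item() == 42.0 * 4

Note that `copy_` writes into the tensor's existing storage, which is what keeps the entries of the returned `tensor_dict` consistent with any aliases the caller already holds.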
@@ -15,7 +15,7 @@
 from collections import deque
 from collections.abc import Sequence
 from datetime import timedelta
-from typing import Any, Optional
+from typing import Any, Optional, Union
 
 import torch
 from torch.distributed import ProcessGroup, Store, TCPStore
@@ -536,7 +536,7 @@ def stateless_init_torch_distributed_process_group(
     backend: str,
     group_name: Optional[str] = None,
     return_store: bool = False,
-) -> ProcessGroup | tuple[ProcessGroup, Store]:
+) -> Union[ProcessGroup, tuple[ProcessGroup, Store]]:
     """
     A replacement for `torch.distributed.init_process_group` that does not
     pollute the global state. The created ProcessGroup object can be used for
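A note on the typing change above: PEP 604 `X | Y` unions in annotations are evaluated when the `def` statement executes, so on Python 3.9 the original signature raises `TypeError: unsupported operand type(s) for |: 'type' and 'type'` at import time, while `typing.Union` spells the same type on older interpreters as well. A minimal sketch, with placeholder classes standing in for the torch types:

from typing import Union

class ProcessGroup: ...  # placeholder for torch.distributed.ProcessGroup
class Store: ...         # placeholder for torch.distributed.Store

# Fails at definition time on Python 3.9, since `type | type` needs 3.10+:
#     def init_pg(...) -> ProcessGroup | tuple[ProcessGroup, Store]: ...

def init_pg(return_store: bool = False) -> Union[ProcessGroup, tuple[ProcessGroup, Store]]:
    # `tuple[...]` itself is fine on 3.9 thanks to PEP 585.
    ...

Adding `from __future__ import annotations` would also defer evaluation and make the `|` syntax safe, but switching to `Union` is the more local fix.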