torch.distributed

API名称

是否支持

限制与说明

torch.distributed._backend

  

torch.distributed.group

  

torch.distributed.GroupMember

  

torch.distributed.destroy_process_group

  

torch.distributed.all_reduce_coalesced

  

torch.distributed.all_gather_coalesced

  

torch.distributed.Reducer

  

torch.distributed._DEFAULT_FIRST_BUCKET_BYTES

  

torch.distributed.Logger

  

torch.distributed.all_gather_into_tensor

  

torch.distributed.is_available

  

torch.distributed.init_process_group

  

torch.distributed.is_initialized

  

torch.distributed.is_mpi_available

  

torch.distributed.is_nccl_available

  

torch.distributed.is_torchelastic_launched

  

torch.distributed.Backend

  

torch.distributed.Backend.register_backend

  

torch.distributed.get_backend

  

torch.distributed.get_rank

  

torch.distributed.get_world_size

  

torch.distributed.Store

  

torch.distributed.TCPStore

  

torch.distributed.HashStore

  

torch.distributed.FileStore

  

torch.distributed.PrefixStore

  

torch.distributed.Store.set

  

torch.distributed.Store.get

  

torch.distributed.Store.add

  

torch.distributed.Store.compare_set

  

torch.distributed.Store.wait

  

torch.distributed.Store.num_keys

  

torch.distributed.Store.delete_key

  

torch.distributed.Store.set_timeout

  

torch.distributed.new_group

  

torch.distributed.send

  

torch.distributed.recv

  

torch.distributed.isend

  

torch.distributed.irecv

  

torch.distributed.broadcast

  

torch.distributed.broadcast_object_list

  

torch.distributed.all_reduce

  

torch.distributed.reduce

  

torch.distributed.all_gather

  

torch.distributed.all_gather_object

  

torch.distributed.gather

  

torch.distributed.gather_object

  

torch.distributed.scatter

  

torch.distributed.scatter_object_list

  

torch.distributed.reduce_scatter

  

torch.distributed.all_to_all

  

torch.distributed.barrier

  

torch.distributed.monitored_barrier

  

torch.distributed.ReduceOp

  

torch.distributed.reduce_op

  

torch.distributed.broadcast_multigpu

  

torch.distributed.all_reduce_multigpu

  

torch.distributed.reduce_multigpu

  

torch.distributed.all_gather_multigpu

  

torch.distributed.reduce_scatter_multigpu

  

torch.distributed.launch

  

torch.multiprocessing.spawn

  

is_completed

  

wait

  

get_future