@@ -370,7 +370,7 @@ class ParallelContext(metaclass=SingletonMeta):
             port (str): the master port for distributed training
         """
         # initialize the default process group
-        init_method = f'tcp://{host}:{port}'
+        init_method = f'tcp://[{host}]:{port}'
         dist.init_process_group(rank=rank, world_size=world_size, backend=backend, init_method=init_method)
 
         # None will give the default global process group for pytorch dist operations
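
For context, a minimal standalone sketch (not part of the diff; the `host`/`port` values below are assumed examples) of why the brackets matter: an IPv6 literal contains colons of its own, so the unbracketed `tcp://{host}:{port}` string becomes ambiguous, while the bracketed form follows standard URL syntax for IPv6 hosts.

```python
from urllib.parse import urlsplit

# Assumed example values; any IPv6 literal has the same colon-ambiguity problem.
host = "::1"        # IPv6 loopback
port = 29500

old = f'tcp://{host}:{port}'     # tcp://::1:29500
new = f'tcp://[{host}]:{port}'   # tcp://[::1]:29500

# Without brackets, the address's own colons collide with the host:port separator,
# so a generic URL parser cannot tell where the host ends and the port begins.
try:
    urlsplit(old).port
except ValueError as e:
    print(f'{old!r}: {e}')

# With brackets, the IPv6 literal is delimited and the port parses cleanly.
print(f'{new!r}: host={urlsplit(new).hostname} port={urlsplit(new).port}')
```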