mirror of https://github.com/hpcaitech/ColossalAI
[hotfix] Fixing the bug related to ipv6 support
Co-authored-by: ByteDance <tongping.liu@bytedance.com>
pull/2206/head^2
parent
ac85a18043
commit
8e22c38b89
|
@ -370,7 +370,7 @@ class ParallelContext(metaclass=SingletonMeta):
|
||||||
port (str): the master port for distributed training
|
port (str): the master port for distributed training
|
||||||
"""
|
"""
|
||||||
# initialize the default process group
|
# initialize the default process group
|
||||||
init_method = f'tcp://{host}:{port}'
|
init_method = f'tcp://[{host}]:{port}'
|
||||||
dist.init_process_group(rank=rank, world_size=world_size, backend=backend, init_method=init_method)
|
dist.init_process_group(rank=rank, world_size=world_size, backend=backend, init_method=init_method)
|
||||||
|
|
||||||
# None will give the default global process group for pytorch dist operations
|
# None will give the default global process group for pytorch dist operations
|
||||||
|
|
Loading…
Reference in New Issue