Browse Source

[doc] update docstring in ProcessGroup (#1468)

pull/1469/head
Jiarui Fang 2 years ago committed by GitHub
parent
commit
1b491ad7de
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 6
      colossalai/tensor/process_group.py

6
colossalai/tensor/process_group.py

@ -31,7 +31,7 @@ PYTORCHPGDICT_ = PyTorchProcessGroupDict()
class ProcessGroup:
"""ProcessGroup
Process Group contains group partition for Tensor Parallel and Data Parallel.
Process Group indicates how processes are organized in groups for parallel execution using Tensor Parallelism and Data Parallelism.
NOTE, the ProcessGroup must be used after `torch.distributed.initialize()`
@ -40,8 +40,8 @@ class ProcessGroup:
rank: the global rank of the current process.
ranks: List[int], a list of rank ids belonging to this process group.
backend: str, the backend of the process group.
tp_degree: Optional[int], tensor parallelism degree, default None means 1
dp_degree: Optional[int], data parallelism degree, default None means len(ranks)
tp_degree: Optional[int], tensor parallelism degree. How many processes are inside a tp process group. Default None means 1.
dp_degree: Optional[int], data parallelism degree. How many processes are inside a dp process group. Default None means len(ranks).
"""
def __init__(self,

Loading…
Cancel
Save