add default setting for expert parallel size

pull/375/head
Wenwen Qu 2023-08-24 18:52:12 +08:00
parent 0e2eb90d22
commit 86bcda5ca9
2 changed files with 4 additions and 1 deletion

View File

@@ -143,7 +143,7 @@ class ParallelContext(metaclass=SingletonMeta):
self.pipeline_parallel_size = 1
self.tensor_parallel_size = 1
self.zero1_parallel_size = -1
self.expert_parallel_size = -1
self.expert_parallel_size = 1
self.num_processes_on_current_node = -1
self.virtual_pipeline_parallel_size = None
self.virtual_pipeline_parallel_rank = None

View File

@@ -60,6 +60,9 @@ def args_sanity_check():
if "tensor" not in gpc.config.parallel:
gpc.config.parallel._add_item("tensor", 1)
if "expert" not in gpc.config.parallel:
gpc.config.parallel._add_item("expert", 1)
# processing the data config in gpc
data = gpc.config.data