mirror of https://github.com/hpcaitech/ColossalAI
You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
7 lines
327 B
7 lines
327 B
2 years ago
|
from colossalai.zero.shard_utils import TensorShardStrategy

# ColossalAI ZeRO configuration (consumed by colossalai.launch via config file).
# - shard_strategy: shards each tensor across the data-parallel group.
# - tensor_placement_policy="auto": lets the runtime move shards between
#   GPU and CPU based on available memory.
# - reuse_fp16_shard=True: reuses the fp16 parameter shard as the gradient
#   buffer to reduce memory footprint.
# - gpu_margin_mem_ratio=0.8: fraction of free GPU memory the optimizer may
#   use for holding states.
# - initial_scale=16384: starting loss scale for mixed-precision training.
zero = dict(model_config=dict(shard_strategy=TensorShardStrategy(),
                              tensor_placement_policy="auto",
                              reuse_fp16_shard=True),
            optimizer_config=dict(gpu_margin_mem_ratio=0.8, initial_scale=16384))