mirror of https://github.com/hpcaitech/ColossalAI
Browse Source
* [zero] update legacy import * [zero] update examples * [example] fix opt tutorial * [example] fix opt tutorial * [example] fix opt tutorial * [example] fix opt tutorial * [example] fix import
pull/3445/head
ver217
2 years ago
committed by
GitHub
8 changed files with 50 additions and 6 deletions
@ -1,4 +1,9 @@
from colossalai.nn.optimizer import FusedAdam

# colossalai moved TensorShardStrategy out of `zero.shard_utils` into
# `zero.legacy` in releases after 0.2.8; try the old location first so both
# old and new library versions work without pinning a specific release.
try:
    from colossalai.zero.shard_utils import TensorShardStrategy
except ImportError:
    # colossalai > 0.2.8
    from colossalai.zero.legacy import TensorShardStrategy

# Gradient clipping threshold (max global grad norm) used by the trainer.
clip_grad_norm = 1.0
@ -0,0 +1,21 @@
#!/bin/bash
# Launch the OPT causal-language-modeling example (run_clm.py) with torchrun.
# Installs requirements, then runs a 1-epoch training job across GPUNUM GPUs.

set -xue

pip install -r requirements.txt

BS=8                           # per-device train batch size
MEMCAP=0                       # GPU memory cap in GB; 0 = no cap
GPUNUM=2                       # number of GPUs / processes to launch
MODEL="facebook/opt-125m"      # HF model id (fixed typo: was MODLE)

torchrun \
  --nproc_per_node ${GPUNUM} \
  --master_port 19198 \
  run_clm.py \
  -s \
  --output_dir $PWD \
  --mem_cap ${MEMCAP} \
  --model_name_or_path ${MODEL} \
  --per_device_train_batch_size ${BS} \
  --num_train_epochs 1
Loading…
Reference in new issue