#!/bin/bash
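# Print each command as it runs and abort on the first error.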
set -xe
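# Absolute path of the directory containing this script.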
BASE=$(realpath "$(dirname "$0")")
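# Ray namespace for the actors/jobs this example starts (assumed to be read by mmmt_prompt.py).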
export RAY_NAMESPACE=admin
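# Prompt dataset (CSV), passed to mmmt_prompt.py below via --prompt_path.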
export DATA=/data/scratch/chatgpt/prompts.csv
# install requirements
pip install -r ${BASE}/requirements.txt
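# Launch mmmt_prompt.py with 2 experience makers and 2 trainers:
# OPT actor initialized from facebook/opt-350m, OPT critic from facebook/opt-125m,
# with the trainers running under the colossalai_gemini strategy.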
python ${BASE}/mmmt_prompt.py \
    --prompt_path $DATA \
    --num_makers 2 \
    --num_trainers 2 \
    --trainer_strategy colossalai_gemini \
    --model opt \
    --critic_model opt \
    --pretrain facebook/opt-350m \
    --critic_pretrain facebook/opt-125m \
    --experience_batch_size 4 \
    --train_batch_size 2