mirror of https://github.com/hpcaitech/ColossalAI
from .albert import *
from .bert import *
from .blip2 import *
from .bloom import *
from .chatglm2 import *
from .command import *
from .deepseek import *
from .falcon import *
from .gpt import *
from .gptj import *
from .llama import *
from .mistral import *
from .mixtral import *
from .opt import *
from .qwen2 import *
from .sam import *
from .t5 import *
from .vit import *
from .whisper import *
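These wildcard imports re-export the public symbols of each model-specific submodule at the package level, so downstream code can reach every supported model's implementation through the package itself rather than importing each submodule individually. A minimal sketch of the underlying Python convention is shown below: when a submodule defines __all__, a wildcard import re-exports only the names listed there; otherwise it re-exports all names that do not start with an underscore. The module and class names in the sketch are hypothetical placeholders, not actual ColossalAI symbols.

# Hypothetical submodule (e.g. bert.py) following the convention this __init__.py relies on.
# Only names listed in __all__ are picked up by the package-level "from .bert import *".
__all__ = ["BertExample"]


class BertExample:
    """Hypothetical public symbol re-exported by the wildcard import."""


class _PrivateHelper:
    """Not listed in __all__, so the package-level wildcard import skips it."""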