mirror of https://github.com/hpcaitech/ColossalAI
Topics: ai, big-model, data-parallelism, deep-learning, distributed-computing, foundation-models, heterogeneous-training, hpc, inference, large-scale, model-parallelism, pipeline-parallelism
12 lines
571 B
from .initialize import launch, launch_from_openmpi, launch_from_slurm, launch_from_torch

try:
    # .version will be created by setup.py
    from .version import __version__
except ModuleNotFoundError:
    # this will only happen if the user did not run `pip install`
    # and directly set PYTHONPATH to use Colossal-AI which is a bad practice
    __version__ = "0.0.0"
    print("please install Colossal-AI from https://www.colossalai.org/download or from source")

__all__ = ["launch", "launch_from_openmpi", "launch_from_slurm", "launch_from_torch", "__version__"]
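
The exported launch helpers initialize Colossal-AI's distributed runtime before any training code runs. Below is a minimal sketch of calling launch_from_torch from a script started with torchrun; it assumes the older signature that takes a config dict (newer releases may not require it), and the script name train.py is purely illustrative.

# Minimal sketch: initializing Colossal-AI's distributed runtime under torchrun.
# Assumes the legacy launch_from_torch(config=...) signature; newer releases
# may accept no config argument -- check the installed version.
import colossalai
import torch.distributed as dist

def main():
    # torchrun sets RANK, WORLD_SIZE, MASTER_ADDR and MASTER_PORT in the
    # environment; launch_from_torch reads them to set up the process group.
    colossalai.launch_from_torch(config={})
    print(f"rank {dist.get_rank()}/{dist.get_world_size()} ready, "
          f"Colossal-AI version {colossalai.__version__}")

if __name__ == "__main__":
    main()

Run it with, for example, torchrun --nproc_per_node=2 train.py. If the package was not installed via pip, the fallback branch above sets __version__ to "0.0.0" and prints the install hint.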