mirror of https://github.com/hpcaitech/ColossalAI
ai, big-model, data-parallelism, deep-learning, distributed-computing, foundation-models, heterogeneous-training, hpc, inference, large-scale, model-parallelism, pipeline-parallelism
You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
14 lines
396 B
14 lines
396 B
import click

# Project-local helper that performs the actual installation sanity check.
from .check_installation import check_installation

# Public API of this module: only the `check` CLI command is exported.
__all__ = ["check"]
|
@click.command(help="Check if Colossal-AI is correct based on the given option")
@click.option("-i", "--installation", is_flag=True, help="Check if Colossal-AI is built correctly")
def check(installation):
    """CLI entry point that dispatches to the requested sanity check.

    Args:
        installation (bool): Set by the ``-i`` / ``--installation`` flag;
            when True, verify the Colossal-AI installation via
            ``check_installation()``.
    """
    if installation:
        check_installation()
        return

    # No recognized flag was supplied — inform the user rather than exiting silently.
    click.echo("No option is given")
|
|
|