mirror of https://github.com/hpcaitech/ColossalAI
You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
26 lines
373 B
26 lines
373 B
import click
|
|
|
|
from .benchmark import benchmark
|
|
from .check import check
|
|
from .launcher import run
|
|
|
|
|
|
class Arguments:
    """Lightweight attribute container.

    Wraps a plain ``dict`` so its keys become instance attributes, letting
    downstream code use ``args.some_key`` instead of ``arg_dict['some_key']``.
    """

    def __init__(self, arg_dict):
        """Expose every key of ``arg_dict`` as an attribute of this instance.

        Args:
            arg_dict (dict): Mapping of attribute names to values. Keys are
                assumed to be valid Python identifiers — TODO confirm callers
                never pass keys like ``'my-flag'`` that would be unreachable
                as attributes.
        """
        # dict.update is the idiomatic bulk form of the original per-key
        # assignment loop; behavior is identical for any mapping input.
        self.__dict__.update(arg_dict)
|
|
|
|
|
|
@click.group()
def cli():
    # Root click command group; subcommands (run/check/benchmark) are
    # attached to it via cli.add_command(...) further down in this module.
    # NOTE(review): intentionally no docstring — click would surface a
    # docstring as the group's --help text, which would change CLI output.
    pass
|
|
|
|
|
|
# Register every subcommand with the root group in one pass; order is
# preserved (run, check, benchmark) so --help listing matches the original.
for _command in (run, check, benchmark):
    cli.add_command(_command)

if __name__ == '__main__':
    # Allow running this module directly as the CLI entry point.
    cli()
|