import os
import sys
from datetime import datetime
from typing import List

from setuptools import find_packages, setup

from op_builder.utils import (
    check_cuda_availability,
    check_pytorch_version,
    check_system_pytorch_cuda_match,
    get_cuda_bare_metal_version,
    get_pytorch_version,
    set_cuda_arch_list,
)

try:
    from torch.utils.cpp_extension import CUDA_HOME, BuildExtension

    TORCH_AVAILABLE = True
except ImportError:
    TORCH_AVAILABLE = False
    CUDA_HOME = None

# Some constants for installation checks
MIN_PYTORCH_VERSION_MAJOR = 1
MIN_PYTORCH_VERSION_MINOR = 10
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
BUILD_CUDA_EXT = int(os.environ.get("CUDA_EXT", "0")) == 1
IS_NIGHTLY = int(os.environ.get("NIGHTLY", "0")) == 1
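# NOTE (illustrative, assumed invocation): both flags are read from the build
# environment, so something like `CUDA_EXT=1 pip install .` would enable the
# CUDA kernel build below, while `NIGHTLY=1` switches to the dated nightly package.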

# a list to store the extension modules built by the op builders
ext_modules = []

# we do not support Windows currently
if sys.platform == "win32":
    raise RuntimeError("Windows is not supported yet. Please try again within the Windows Subsystem for Linux (WSL).")


# check for CUDA extension dependencies
def environment_check_for_cuda_extension_build():
    if not TORCH_AVAILABLE:
        raise ModuleNotFoundError(
            "[extension] PyTorch is not found while CUDA_EXT=1. You need to install PyTorch first in order to build CUDA extensions"
        )

    if not CUDA_HOME:
        raise RuntimeError(
            "[extension] CUDA_HOME is not found while CUDA_EXT=1. You need to export the CUDA_HOME environment variable or install the CUDA Toolkit first in order to build CUDA extensions"
        )

    check_system_pytorch_cuda_match(CUDA_HOME)
    check_pytorch_version(MIN_PYTORCH_VERSION_MAJOR, MIN_PYTORCH_VERSION_MINOR)
    check_cuda_availability()


def fetch_requirements(path) -> List[str]:
    """
    This function reads the requirements file.

    Args:
        path (str): the path to the requirements file.

    Returns:
        The lines in the requirements file.
    """
    with open(path, "r") as fd:
        return [r.strip() for r in fd.readlines()]


def fetch_readme() -> str:
    """
    This function reads the README.md file in the current directory.

    Returns:
        The lines in the README file.
    """
    with open("README.md", encoding="utf-8") as f:
        return f.read()


def get_version() -> str:
    """
    This function reads version.txt and generates the colossalai/version.py file.

    Returns:
        The library version stored in version.txt.
    """
    setup_file_path = os.path.abspath(__file__)
    project_path = os.path.dirname(setup_file_path)
    version_txt_path = os.path.join(project_path, "version.txt")
    version_py_path = os.path.join(project_path, "colossalai/version.py")

    with open(version_txt_path) as f:
        version = f.read().strip()

    # write version into version.py
    with open(version_py_path, "w") as f:
        f.write(f"__version__ = '{version}'\n")

        # look for the pytorch and cuda versions
        if BUILD_CUDA_EXT:
            torch_major, torch_minor, _ = get_pytorch_version()
            torch_version = f"{torch_major}.{torch_minor}"
            cuda_version = ".".join(get_cuda_bare_metal_version(CUDA_HOME))
        else:
            torch_version = None
            cuda_version = None

        # write the torch and cuda versions into the python file
        if torch_version:
            f.write(f'torch = "{torch_version}"\n')
        else:
            f.write("torch = None\n")

        if cuda_version:
            f.write(f'cuda = "{cuda_version}"\n')
        else:
            f.write("cuda = None\n")

    return version
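
# For illustration only (assumed example values): after a build with CUDA_EXT=1,
# the generated colossalai/version.py would contain lines roughly like
#   __version__ = '0.3.0'
#   torch = "2.0"
#   cuda = "11.8"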


if BUILD_CUDA_EXT:
    environment_check_for_cuda_extension_build()
    set_cuda_arch_list(CUDA_HOME)

    from op_builder import ALL_OPS

    op_names = []

    # load all builders
    for name, builder_cls in ALL_OPS.items():
        op_names.append(name)
        ext_modules.append(builder_cls().builder())

    # show log
    op_name_list = ", ".join(op_names)
    print(f"[extension] loaded builders for {op_name_list}")
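
# When ext_modules is non-empty, the setup() call below registers torch's
# BuildExtension as the build_ext command, so the collected CUDA kernels are
# compiled during the wheel/install build step.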

# always put the non-nightly branch as the if branch;
# otherwise GitHub will treat colossalai-nightly as the project name
# and mess up the dependency graph insights
if not IS_NIGHTLY:
    version = get_version()
    package_name = "colossalai"
else:
    # use the date as the nightly version
    version = datetime.today().strftime("%Y.%m.%d")
    package_name = "colossalai-nightly"

setup(
    name=package_name,
    version=version,
    packages=find_packages(
        exclude=(
            "op_builder",
            "benchmark",
            "docker",
            "tests",
            "docs",
            "examples",
            "scripts",
            "requirements",
            "*.egg-info",
        )
    ),
    description="An integrated large-scale model training system with efficient parallelization techniques",
    long_description=fetch_readme(),
    long_description_content_type="text/markdown",
    license="Apache Software License 2.0",
    url="https://www.colossalai.org",
    project_urls={
        "Forum": "https://github.com/hpcaitech/ColossalAI/discussions",
        "Bug Tracker": "https://github.com/hpcaitech/ColossalAI/issues",
        "Examples": "https://github.com/hpcaitech/ColossalAI-Examples",
        "Documentation": "http://colossalai.readthedocs.io",
        "Github": "https://github.com/hpcaitech/ColossalAI",
    },
    ext_modules=ext_modules,
    cmdclass={"build_ext": BuildExtension} if ext_modules else {},
    install_requires=fetch_requirements("requirements/requirements.txt"),
    entry_points="""
        [console_scripts]
        colossalai=colossalai.cli:cli
    """,
    python_requires=">=3.6",
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: Apache Software License",
        "Environment :: GPU :: NVIDIA CUDA",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
        "Topic :: System :: Distributed Computing",
    ],
    package_data={
        "colossalai": [
            "_C/*.pyi",
            "kernel/cuda_native/csrc/*",
            "kernel/cuda_native/csrc/kernel/*",
            "kernel/cuda_native/csrc/kernels/include/*",
        ]
    },
)