mirror of https://github.com/hpcaitech/ColossalAI
[setup] supported conda-installed torch (#2048)
* [setup] supported conda-installed torch
* polish code
parent e37f3db40c
commit 81e0da7fa8

.gitignore
@@ -141,3 +141,6 @@ docs/.build
 
 # pytorch checkpoint
 *.pt
+
+# ignore version.py generated by setup.py
+colossalai/version.py

colossalai/__init__.py
@@ -7,4 +7,11 @@ from .initialize import (
     launch_from_torch,
 )
 
-__version__ = '0.1.11rc4'
+try:
+    # .version will be created by setup.py
+    from .version import __version__
+except ModuleNotFoundError:
+    # this will only happen if the user did not run `pip install`
+    # and directly set PYTHONPATH to use Colossal-AI which is a bad practice
+    __version__ = '0.0.0'
+    print('please install Colossal-AI from https://www.colossalai.org/download or from source')
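
For reference, a minimal sketch of how this fallback behaves at import time; the version string shown is only an illustration, the real value comes from the colossalai/version.py generated by setup.py:

# After a normal `pip install .`, setup.py writes colossalai/version.py and the
# package reports the full build tag; on a bare checkout used via PYTHONPATH the
# import falls back to '0.0.0' and prints the install hint.
import colossalai

print(colossalai.__version__)    # e.g. '0.1.11rc4+torch1.12cu11.3' (illustrative)
                                 # '0.0.0' if version.py was never generated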

requirements/requirements.txt
@@ -1,4 +1,3 @@
-torch>=1.8
 numpy
 tqdm
 psutil
@@ -7,4 +6,4 @@ pre-commit
 rich
 click
 fabric
-contexttimer
+contexttimer

setup.py
@@ -4,6 +4,19 @@ import subprocess
 
 from setuptools import Extension, find_packages, setup
 
+try:
+    import torch
+    from torch.utils.cpp_extension import CUDA_HOME, BuildExtension, CUDAExtension
+    print("\n\ntorch.__version__ = {}\n\n".format(torch.__version__))
+    TORCH_MAJOR = int(torch.__version__.split('.')[0])
+    TORCH_MINOR = int(torch.__version__.split('.')[1])
+
+    if TORCH_MAJOR < 1 or (TORCH_MAJOR == 1 and TORCH_MINOR < 10):
+        raise RuntimeError("Colossal-AI requires Pytorch 1.10 or newer.\n"
+                           "The latest stable release can be obtained from https://pytorch.org/")
+except ImportError:
+    raise ModuleNotFoundError('torch is not found. You need to install PyTorch before installing Colossal-AI.')
+
 # ninja build does not work unless include_dirs are abs path
 this_dir = os.path.dirname(os.path.abspath(__file__))
 build_cuda_ext = True
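
As a side note, a small standalone sketch of the version gate added above; torch_meets_minimum is a hypothetical helper for illustration, not code from this repo:

# Hypothetical helper mirroring the check above: parse the first two components
# of torch.__version__ and require at least PyTorch 1.10.
def torch_meets_minimum(version_str, min_major=1, min_minor=10):
    major, minor = (int(x) for x in version_str.split('.')[:2])
    return (major, minor) >= (min_major, min_minor)

assert torch_meets_minimum('1.13.0')        # conda- or pip-installed torch >= 1.10 passes
assert not torch_meets_minimum('1.9.1')     # older builds trigger the RuntimeError above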
@@ -93,29 +106,24 @@ def fetch_readme():
 
 
 def get_version():
-    with open('version.txt') as f:
+    setup_file_path = os.path.abspath(__file__)
+    project_path = os.path.dirname(setup_file_path)
+    version_txt_path = os.path.join(project_path, 'version.txt')
+    version_py_path = os.path.join(project_path, 'colossalai/version.py')
+
+    with open(version_txt_path) as f:
         version = f.read().strip()
-        if build_cuda_ext:
-            torch_version = '.'.join(torch.__version__.split('.')[:2])
-            cuda_version = '.'.join(get_cuda_bare_metal_version(CUDA_HOME)[1:])
-            version += f'+torch{torch_version}cu{cuda_version}'
-        return version
-
-if build_cuda_ext:
-    try:
-        import torch
-        from torch.utils.cpp_extension import CUDA_HOME, BuildExtension, CUDAExtension
-        print("\n\ntorch.__version__ = {}\n\n".format(torch.__version__))
-        TORCH_MAJOR = int(torch.__version__.split('.')[0])
-        TORCH_MINOR = int(torch.__version__.split('.')[1])
-
-        if TORCH_MAJOR < 1 or (TORCH_MAJOR == 1 and TORCH_MINOR < 8):
-            raise RuntimeError("Colossal-AI requires Pytorch 1.8 or newer.\n"
-                               "The latest stable release can be obtained from https://pytorch.org/")
-    except ImportError:
-        print('torch is not found. CUDA extension will not be installed')
-        build_cuda_ext = False
+    if build_cuda_ext:
+        torch_version = '.'.join(torch.__version__.split('.')[:2])
+        cuda_version = '.'.join(get_cuda_bare_metal_version(CUDA_HOME)[1:])
+        version += f'+torch{torch_version}cu{cuda_version}'
+
+    # write version into version.py
+    with open(version_py_path, 'w') as f:
+        f.write(f"__version__ = '{version}'\n")
+
+    return version
 
 if build_cuda_ext:
     build_cuda_ext = check_cuda_availability(CUDA_HOME) and check_cuda_torch_binary_vs_bare_metal(CUDA_HOME)
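
A brief sketch of the local version tag this get_version() composes, assuming a CUDA build; compose_version and the torch/CUDA numbers below are illustrative placeholders, not code from the repo:

# Hypothetical illustration of the string written into colossalai/version.py:
# the base version from version.txt plus '+torch<major.minor>cu<cuda>' when the
# CUDA extensions are built, or the plain base version otherwise.
def compose_version(base, torch_version=None, cuda_version=None):
    if torch_version and cuda_version:
        torch_tag = '.'.join(torch_version.split('.')[:2])
        return f'{base}+torch{torch_tag}cu{cuda_version}'
    return base

print(compose_version('0.1.11rc4', '1.12.1', '11.3'))    # 0.1.11rc4+torch1.12cu11.3
print(compose_version('0.1.11rc4'))                       # 0.1.11rc4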