ColossalAI/colossalai/kernel/jit/option.py

import torch
JIT_OPTIONS_SET = False


def set_jit_fusion_options():
    """Set PyTorch JIT layer fusion options."""
    # LSG: the latest PyTorch and CUDA versions may not support
    # the following JIT settings
    global JIT_OPTIONS_SET
    if not JIT_OPTIONS_SET:
        # flags required to enable jit fusion kernels
        TORCH_MAJOR = int(torch.__version__.split('.')[0])
        TORCH_MINOR = int(torch.__version__.split('.')[1])
        if (TORCH_MAJOR > 1) or (TORCH_MAJOR == 1 and TORCH_MINOR >= 10):
            # nvfuser (PyTorch >= 1.10): keep the profiling executor on and
            # route fusion to nvfuser instead of the TensorExpr/legacy fusers
            torch._C._jit_set_profiling_executor(True)
            torch._C._jit_set_profiling_mode(True)
            torch._C._jit_override_can_fuse_on_cpu(False)
            torch._C._jit_override_can_fuse_on_gpu(False)
            torch._C._jit_set_texpr_fuser_enabled(False)
            torch._C._jit_set_nvfuser_enabled(True)
            torch._C._debug_set_autodiff_subgraph_inlining(False)
        else:
            # legacy PyTorch fuser (PyTorch < 1.10)
            torch._C._jit_set_profiling_mode(False)
            torch._C._jit_set_profiling_executor(False)
            torch._C._jit_override_can_fuse_on_cpu(True)
            torch._C._jit_override_can_fuse_on_gpu(True)
        JIT_OPTIONS_SET = True
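

# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative only, not part of the original file):
# set_jit_fusion_options() is meant to be called once, before TorchScript
# compiles and runs any graphs, so the selected fuser is already in place.
# The bias_gelu helper below is a hypothetical element-wise chain of the kind
# the JIT fuser can collapse into a single kernel; it assumes a CUDA device.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    set_jit_fusion_options()

    @torch.jit.script
    def bias_gelu(bias: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
        # tanh-approximated GeLU applied after a bias add
        y = x + bias
        return 0.5 * y * (1.0 + torch.tanh(0.7978845608 * (y + 0.044715 * y * y * y)))

    x = torch.randn(8, 1024, device='cuda', dtype=torch.float16)
    bias = torch.randn(1024, device='cuda', dtype=torch.float16)

    # the profiling executor specializes and fuses the graph only after a few
    # warm-up calls, so run the scripted function more than once
    for _ in range(3):
        out = bias_gelu(bias, x)
    print(out.shape)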