# mirror of https://github.com/hpcaitech/ColossalAI
import torch

from colossalai.utils import get_current_device

def col_cuda_memory_capacity():
    """Return the total memory capacity, in bytes, of the current CUDA device.

    The current device is resolved via ``get_current_device()`` and its
    properties queried through ``torch.cuda.get_device_properties``.
    """
    device_props = torch.cuda.get_device_properties(get_current_device())
    return device_props.total_memory