ColossalAI/colossalai/utils/commons/memory.py

import torch

from colossalai.utils import get_current_device


def col_cuda_memory_capacity():
    """
    Get the total CUDA memory capacity, in bytes, of the current device.
    """
    return torch.cuda.get_device_properties(get_current_device()).total_memory
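For illustration, a minimal usage sketch of this helper, assuming a CUDA-capable machine; the import path follows the file location above, and the GiB conversion and CUDA-availability guard are illustrative assumptions, not part of this file:

import torch

from colossalai.utils.commons.memory import col_cuda_memory_capacity

if torch.cuda.is_available():
    # Total memory of the current device, as reported by the CUDA driver, in bytes.
    capacity_bytes = col_cuda_memory_capacity()
    print(f"CUDA memory capacity: {capacity_bytes / 1024 ** 3:.2f} GiB")
else:
    print("No CUDA device available.")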