import torch


def _format_number(val, prec=5):
    """Format a float or floating-point tensor to ``prec`` significant digits.

    Non-float values are returned unchanged.
    """
    if isinstance(val, float):
        return f'{val:.{prec}g}'
    elif torch.is_tensor(val) and torch.is_floating_point(val):
        # Scalar tensor: extract the Python float, then format it.
        return f'{val.item():.{prec}g}'
    return val
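
# Illustrative usage (a sketch, not part of the original file), showing how
# the helper rounds floats and scalar float tensors to significant digits:
#
#   _format_number(3.14159265)                 -> '3.1416'
#   _format_number(3.14159265, prec=3)         -> '3.14'
#   _format_number(torch.tensor(2.718281828))  -> '2.7183'
#   _format_number(42)                         -> 42  (non-floats pass through)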