mirror of https://github.com/hpcaitech/ColossalAI
[NFC] polish colossalai/amp/torch_amp/torch_amp.py code style (#2290)
parent d1e5bafcd4
commit b965585d05
colossalai/amp/torch_amp/torch_amp.py
@@ -1,17 +1,17 @@
 #!/usr/bin/env python
 # -*- encoding: utf-8 -*-
 
-import torch.nn as nn
 import torch.cuda.amp as torch_amp
+import torch.nn as nn
 from torch import Tensor
 from torch.nn.modules.loss import _Loss
 from torch.optim import Optimizer
-from ._grad_scaler import GradScaler
 
 from colossalai.nn.optimizer import ColossalaiOptimizer
 from colossalai.utils import clip_grad_norm_fp32
 
 
+from ._grad_scaler import GradScaler
 
 
 class TorchAMPOptimizer(ColossalaiOptimizer):
     """A wrapper class which integrate Pytorch AMP with an optimizer
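The change is a pure import reorder (isort-style grouping: third-party torch imports first, then colossalai packages, then the local relative import), so behavior is unchanged. For context on the class the hunk touches, below is a minimal sketch of the pattern its docstring describes: tying torch.cuda.amp.GradScaler to a wrapped optimizer. The class and method names here are illustrative assumptions, not ColossalAI's actual implementation; only the torch.cuda.amp calls are real API.

# Illustrative sketch, not ColossalAI's TorchAMPOptimizer.
import torch.nn as nn
from torch import Tensor
from torch.cuda.amp import GradScaler
from torch.optim import Optimizer


class AMPOptimizerSketch:
    """Pairs a GradScaler with a wrapped optimizer (names are hypothetical)."""

    def __init__(self, optim: Optimizer, **grad_scaler_kwargs):
        self.optim = optim
        self.scaler = GradScaler(**grad_scaler_kwargs)

    def backward(self, loss: Tensor):
        # Scale the loss before backward so small fp16 gradients do not underflow.
        self.scaler.scale(loss).backward()

    def clip_grad_norm(self, model: nn.Module, max_norm: float):
        if max_norm > 0.0:
            # Gradients must be unscaled before comparing them against max_norm.
            self.scaler.unscale_(self.optim)
            nn.utils.clip_grad_norm_(model.parameters(), max_norm)

    def step(self):
        # scaler.step() skips the update when inf/nan gradients were found;
        # update() then adjusts the scale factor for the next iteration.
        self.scaler.step(self.optim)
        self.scaler.update()

A typical training step with such a wrapper would call backward(loss), then clip_grad_norm(model, max_norm), then step(), mirroring the standard torch.cuda.amp recipe.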