mirror of https://github.com/hpcaitech/ColossalAI
[NFC] polish colossalai/fx/profiler/experimental/profiler_module/embedding.py code style (#3256)
Co-authored-by: Minghao Huang <huangminghao@luchentech.com>
Branch: pull/3313/head
parent 0fbadce79c
commit 204ca2f09a
colossalai/fx/profiler/experimental/profiler_module/embedding.py

@@ -1,5 +1,7 @@
 from typing import Tuple
+
 import torch
+
 from ..registry import meta_profiler_module
 
 
@@ -8,4 +10,4 @@ def torch_nn_embedding(self: torch.nn.Embedding, input: torch.Tensor) -> Tuple[i
     # nn.Embedding is a dictionary lookup, so technically it has 0 FLOPs. (https://discuss.pytorch.org/t/correct-way-to-calculate-flops-in-model/67198/6)
     flops = 0
     macs = 0
-    return flops, macs
+    return flops, macs
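For context, the two hunks above cover essentially the entire profiler entry: it registers a FLOPs/MACs estimator for torch.nn.Embedding that always reports zero, because an embedding forward pass is a table lookup with no arithmetic. Below is a minimal, self-contained sketch of that registration pattern. The _MetaProfilerRegistry class, its register/get methods, and the demo at the bottom are illustrative assumptions, not ColossalAI's actual meta_profiler_module; only the torch_nn_embedding body mirrors the diff.

from typing import Callable, Dict, Tuple, Type

import torch


class _MetaProfilerRegistry:
    """Hypothetical stand-in for the meta_profiler_module registry:
    maps an nn.Module type to a function returning (flops, macs)."""

    def __init__(self) -> None:
        self._entries: Dict[Type[torch.nn.Module], Callable[..., Tuple[int, int]]] = {}

    def register(self, module_type: Type[torch.nn.Module]):
        # Decorator factory: @registry.register(SomeModule) stores the function.
        def wrapper(fn: Callable[..., Tuple[int, int]]):
            self._entries[module_type] = fn
            return fn
        return wrapper

    def get(self, module: torch.nn.Module) -> Callable[..., Tuple[int, int]]:
        return self._entries[type(module)]


meta_profiler_module = _MetaProfilerRegistry()


@meta_profiler_module.register(torch.nn.Embedding)
def torch_nn_embedding(self: torch.nn.Embedding, input: torch.Tensor) -> Tuple[int, int]:
    # nn.Embedding is a dictionary lookup, so technically it has 0 FLOPs.
    flops = 0
    macs = 0
    return flops, macs


if __name__ == "__main__":
    emb = torch.nn.Embedding(num_embeddings=1000, embedding_dim=64)
    ids = torch.randint(0, 1000, (8, 16))
    flops, macs = meta_profiler_module.get(emb)(emb, ids)
    print(flops, macs)  # 0 0

Keying the estimator by module type lets a tracer look up per-module costs without running real kernels, which is the point of a "meta" profiler.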