mirror of https://github.com/hpcaitech/ColossalAI
[profile] added example for ProfilerContext (#349)
parent 532ae79cb0
commit c57e089824
@@ -35,15 +35,25 @@ class ProfilerContext(object):
     """
     Profiler context manager
     Usage:
-    from colossalai.utils.profiler import CommProf, ProfilerContext
-    from torch.utils.tensorboard import SummaryWriter
-    cc_prof = CommProf()
-    with ProfilerContext([cc_prof]) as prof:
-        train()
-    writer = SummaryWriter('tb/path')
-    prof.to_tensorboard(writer)
-    prof.to_file('./prof_logs/')
-    prof.show()
+
+    ```python
+    world_size = 4
+    inputs = torch.randn(10, 10, dtype=torch.float32, device=get_current_device())
+    outputs = torch.empty(world_size, 10, 10, dtype=torch.float32, device=get_current_device())
+    outputs_list = list(torch.chunk(outputs, chunks=world_size, dim=0))
+
+    cc_prof = CommProfiler()
+
+    with ProfilerContext([cc_prof]) as prof:
+        op = dist.all_reduce(inputs, async_op=True)
+        dist.all_gather(outputs_list, inputs)
+        op.wait()
+        dist.reduce_scatter(inputs, outputs_list)
+        dist.broadcast(inputs, 0)
+        dist.reduce(inputs, 0)
+
+    prof.show()
+    ```
     """

     def __init__(self, profilers: List[BaseProfiler] = None, enable: bool = True):
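For context, here is a hedged sketch of how the new docstring example could be run end to end as a standalone script. Everything beyond the example body is an assumption not shown in this diff: the import paths (get_current_device from colossalai.utils; CommProfiler and ProfilerContext from colossalai.utils.profiler, where the old docstring imported CommProf), the NCCL backend, and launching with something like `torchrun --nproc_per_node=4 comm_prof_demo.py`.

```python
# Hedged sketch: running the new docstring example as a standalone script.
# Assumed (not shown in the diff): import paths, NCCL backend, and a torchrun
# launch that supplies RANK/WORLD_SIZE/LOCAL_RANK/MASTER_ADDR/MASTER_PORT.
import os

import torch
import torch.distributed as dist

from colossalai.utils import get_current_device                      # assumed import path
from colossalai.utils.profiler import CommProfiler, ProfilerContext  # assumed import path

# Bind this process to its local GPU before creating CUDA tensors.
torch.cuda.set_device(int(os.environ['LOCAL_RANK']))
dist.init_process_group(backend='nccl')
world_size = dist.get_world_size()

inputs = torch.randn(10, 10, dtype=torch.float32, device=get_current_device())
outputs = torch.empty(world_size, 10, 10, dtype=torch.float32, device=get_current_device())
outputs_list = list(torch.chunk(outputs, chunks=world_size, dim=0))

cc_prof = CommProfiler()

# Profile a mix of collectives, including an asynchronous all_reduce,
# exactly as in the new docstring example.
with ProfilerContext([cc_prof]) as prof:
    op = dist.all_reduce(inputs, async_op=True)
    dist.all_gather(outputs_list, inputs)
    op.wait()
    dist.reduce_scatter(inputs, outputs_list)
    dist.broadcast(inputs, 0)
    dist.reduce(inputs, 0)

prof.show()
```

Note that the new example only prints results via prof.show(), whereas the removed lines of the old docstring also demonstrated exporting results with prof.to_tensorboard(writer) and prof.to_file('./prof_logs/').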