From c57e089824edc3335fc3aed25b8a9a355bf1de76 Mon Sep 17 00:00:00 2001
From: HELSON <72907851+1SAA@users.noreply.github.com>
Date: Wed, 9 Mar 2022 17:35:28 +0800
Subject: [PATCH] [profile] added example for ProfilerContext (#349)

---
 colossalai/utils/profiler/prof_utils.py | 28 +++++++++++++++++++---------
 1 file changed, 19 insertions(+), 9 deletions(-)

diff --git a/colossalai/utils/profiler/prof_utils.py b/colossalai/utils/profiler/prof_utils.py
index 5d9b23178..01a08d483 100644
--- a/colossalai/utils/profiler/prof_utils.py
+++ b/colossalai/utils/profiler/prof_utils.py
@@ -35,15 +35,25 @@ class ProfilerContext(object):
     """
     Profiler context manager
     Usage:
-    from colossalai.utils.profiler import CommProf, ProfilerContext
-    from torch.utils.tensorboard import SummaryWriter
-    cc_prof = CommProf()
-    with ProfilerContext([cc_prof]) as prof:
-        train()
-    writer = SummaryWriter('tb/path')
-    prof.to_tensorboard(writer)
-    prof.to_file('./prof_logs/')
-    prof.show()
+
+    ```python
+    world_size = 4
+    inputs = torch.randn(10, 10, dtype=torch.float32, device=get_current_device())
+    outputs = torch.empty(world_size, 10, 10, dtype=torch.float32, device=get_current_device())
+    outputs_list = list(torch.chunk(outputs, chunks=world_size, dim=0))
+
+    cc_prof = CommProfiler()
+
+    with ProfilerContext([cc_prof]) as prof:
+        op = dist.all_reduce(inputs, async_op=True)
+        dist.all_gather(outputs_list, inputs)
+        op.wait()
+        dist.reduce_scatter(inputs, outputs_list)
+        dist.broadcast(inputs, 0)
+        dist.reduce(inputs, 0)
+
+    prof.show()
+    ```
     """

     def __init__(self, profilers: List[BaseProfiler] = None, enable: bool = True):
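
For reference, below is a minimal sketch of how the new docstring example could be run end to end with `torch.multiprocessing.spawn`. The import paths for `CommProfiler`, `ProfilerContext` and `get_current_device` are assumptions (the patch shows the calls but not the imports), and the NCCL backend used here needs one GPU per spawned process.

```python
# Sketch only: wraps the docstring example from this patch in a runnable
# multi-process setup. Import paths for CommProfiler, ProfilerContext and
# get_current_device are assumptions based on the removed docstring and the
# package layout; NCCL requires one GPU per process.
import os

import torch
import torch.distributed as dist
import torch.multiprocessing as mp

from colossalai.utils import get_current_device                      # assumed import path
from colossalai.utils.profiler import CommProfiler, ProfilerContext  # assumed import path


def run(rank: int, world_size: int):
    os.environ['MASTER_ADDR'] = 'localhost'
    os.environ['MASTER_PORT'] = '29500'
    torch.cuda.set_device(rank)
    dist.init_process_group('nccl', rank=rank, world_size=world_size)

    # Same tensors and collectives as the new docstring example.
    inputs = torch.randn(10, 10, dtype=torch.float32, device=get_current_device())
    outputs = torch.empty(world_size, 10, 10, dtype=torch.float32, device=get_current_device())
    outputs_list = list(torch.chunk(outputs, chunks=world_size, dim=0))

    cc_prof = CommProfiler()

    with ProfilerContext([cc_prof]) as prof:
        op = dist.all_reduce(inputs, async_op=True)
        dist.all_gather(outputs_list, inputs)
        op.wait()
        dist.reduce_scatter(inputs, outputs_list)
        dist.broadcast(inputs, 0)
        dist.reduce(inputs, 0)

    # Print the collected communication stats on one rank only.
    if rank == 0:
        prof.show()

    dist.destroy_process_group()


if __name__ == '__main__':
    world_size = 4
    mp.spawn(run, args=(world_size,), nprocs=world_size)
```

The tensors and collectives mirror the docstring example verbatim; only the process-group setup and the rank-0 guard around `prof.show()` are added so the snippet can be launched as-is.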