import time

import torch


class DummyProfiler:
    """No-op stand-in for a torch profiler, so the training loop can call
    profiler.step() unconditionally whether or not profiling is enabled."""

    def __init__(self):
        self.step_number = 0

    def step(self):
        self.step_number += 1


# Randomly generated data
def get_data(batch_size, seq_len, vocab_size):
    """Return a random (input_ids, attention_mask) batch on the current CUDA
    device, so throughput can be measured without loading a real dataset."""
    input_ids = torch.randint(0, vocab_size, (batch_size, seq_len), device=torch.cuda.current_device())
    attention_mask = torch.ones_like(input_ids)
    return input_ids, attention_mask


def get_tflops(model_numel, batch_size, seq_len, step_time):
    """Estimate achieved TFLOPS, assuming ~8 FLOPs per parameter per token
    (fwd + bwd with activation recomputation); 1e-12 avoids divide-by-zero."""
    return model_numel * batch_size * seq_len * 8 / 1e12 / (step_time + 1e-12)


def get_time_stamp():
    """Return the local time formatted as day-hour:minute, e.g. "07-13:45"."""
    cur_time = time.strftime("%d-%H:%M", time.localtime())
    return cur_time
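

if __name__ == "__main__":
    # Hedged usage sketch (not part of the original file): one way these
    # helpers might be combined into a tiny throughput benchmark. The
    # stand-in model and the constants below are illustrative assumptions,
    # and a CUDA device is required because get_data allocates on
    # torch.cuda.current_device().
    import torch.nn as nn

    BATCH_SIZE, SEQ_LEN, VOCAB_SIZE, NUM_STEPS = 8, 1024, 50257, 5

    model = nn.Embedding(VOCAB_SIZE, 512).cuda()  # stand-in for a real GPT
    numel = sum(p.numel() for p in model.parameters())
    profiler = DummyProfiler()  # drop-in slot for a real torch profiler

    for _ in range(NUM_STEPS):
        input_ids, _attention_mask = get_data(BATCH_SIZE, SEQ_LEN, VOCAB_SIZE)
        start = time.time()
        model(input_ids)  # a real run would also do backward + optimizer step
        torch.cuda.synchronize()  # finish GPU work before stopping the timer
        step_time = time.time() - start
        profiler.step()
        print(f"[{get_time_stamp()}] {get_tflops(numel, BATCH_SIZE, SEQ_LEN, step_time):.2f} TFLOPS")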