mirror of https://github.com/hpcaitech/ColossalAI
Topics: ai, big-model, data-parallelism, deep-learning, distributed-computing, foundation-models, heterogeneous-training, hpc, inference, large-scale, model-parallelism, pipeline-parallelism
import torch


def get_data(batch_size, seq_len, vocab_size):
    """Generate a batch of randomly generated token ids with a full attention mask."""
    input_ids = torch.randint(0, vocab_size, (batch_size, seq_len), device=torch.cuda.current_device())
    attention_mask = torch.ones_like(input_ids)
    return input_ids, attention_mask


def get_tflops(model_numel, batch_size, seq_len, step_time):
    """Estimate achieved TFLOPS for one training step.

    The factor 8 approximates 8 FLOPs per parameter per token
    (forward, backward, and activation recomputation); the 1e-12
    term guards against division by zero for very small step times.
    """
    return model_numel * batch_size * seq_len * 8 / 1e12 / (step_time + 1e-12)
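A minimal usage sketch, not part of the original file: it assumes a CUDA-capable environment (which get_data already requires) and uses a hypothetical toy model; the model, sizes, and single timed forward pass are illustrative stand-ins for a real training step.

import time

import torch
import torch.nn as nn

vocab_size, batch_size, seq_len = 50257, 4, 128

# Hypothetical stand-in model; a real run would use the example's actual model.
model = nn.Embedding(vocab_size, 128).cuda()
model_numel = sum(p.numel() for p in model.parameters())

input_ids, attention_mask = get_data(batch_size, seq_len, vocab_size)

torch.cuda.synchronize()
start = time.time()
logits = model(input_ids)  # stand-in for a full forward/backward step
torch.cuda.synchronize()
step_time = time.time() - start

print(f"~{get_tflops(model_numel, batch_size, seq_len, step_time):.4f} TFLOPS")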