mirror of https://github.com/hpcaitech/ColossalAI
Topics: ai, big-model, data-parallelism, deep-learning, distributed-computing, foundation-models, heterogeneous-training, hpc, inference, large-scale, model-parallelism, pipeline-parallelism
14 lines · 443 B
import torch.nn as nn

from colossalai.booster.accelerator import Accelerator
from colossalai.testing import clear_cache_before_run, parameterize


@clear_cache_before_run()
@parameterize('device', ['cpu', 'cuda'])
def test_accelerator(device):
    accelerator = Accelerator(device)
    model = nn.Linear(8, 8)
    model = accelerator.configure_model(model)
    assert next(model.parameters()).device.type == device
    del model, accelerator
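The decorators come from ColossalAI's own testing utilities: clear_cache_before_run() frees cached memory before the test body executes, and parameterize('device', ['cpu', 'cuda']) calls test_accelerator once per listed device, so the assertion verifies that configure_model() actually places the model's parameters on the requested device. Below is a minimal sketch of the same API used outside the test harness. Only Accelerator(device) and configure_model() appear in the source file; the input tensor and the forward/backward step are illustrative assumptions, not part of the tested API.

import torch
import torch.nn as nn

from colossalai.booster.accelerator import Accelerator

# A minimal sketch, assuming only the two calls exercised by the test above:
# Accelerator(device) and configure_model(). The model, input, and training
# step are illustrative.
device = 'cuda' if torch.cuda.is_available() else 'cpu'
accelerator = Accelerator(device)

model = nn.Linear(8, 8)
model = accelerator.configure_model(model)  # moves parameters to the target device

# One forward/backward pass on the configured model (hypothetical usage).
x = torch.randn(4, 8, device=next(model.parameters()).device)
loss = model(x).sum()
loss.backward()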