mirror of https://github.com/hpcaitech/ColossalAI
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

# Training data configuration: a CIFAR-10 dataset with its preprocessing
# pipeline and the dataloader settings used to iterate over it.
train_data = dict(
    dataset=dict(
        type='CIFAR10Dataset',
        root='/path/to/data',
        download=True,
        transform_pipeline=[
            dict(type='RandomResizedCrop', size=224),
            dict(type='RandomHorizontalFlip'),
            dict(type='ToTensor'),
            dict(type='Normalize', mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5))
        ]
    ),
    dataloader=dict(
        batch_size=64,
        pin_memory=True,
        num_workers=4,
        # DataParallelSampler shards samples across data-parallel ranks
        sampler=dict(
            type='DataParallelSampler',
            shuffle=True,
        )
    )
)
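
# A minimal sketch of what this config describes, written with plain
# torchvision/PyTorch for reference; it assumes torchvision is installed.
# The string type names above ('CIFAR10Dataset', 'DataParallelSampler') are
# resolved by ColossalAI when the config is loaded; here the distributed
# sampler is approximated by shuffle=True in a single process. This block
# only runs when the file is executed directly, not when it is imported as
# a config.
if __name__ == '__main__':
    import torchvision.transforms as T
    from torch.utils.data import DataLoader
    from torchvision.datasets import CIFAR10

    # Same preprocessing steps as transform_pipeline above
    transform = T.Compose([
        T.RandomResizedCrop(size=224),
        T.RandomHorizontalFlip(),
        T.ToTensor(),
        T.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)),
    ])
    dataset = CIFAR10(root='/path/to/data', train=True, download=True,
                      transform=transform)
    # Same dataloader settings as the dataloader dict above
    loader = DataLoader(dataset, batch_size=64, shuffle=True,
                        num_workers=4, pin_memory=True)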