mirror of https://github.com/hpcaitech/ColossalAI
[tutorial] updated auto parallel demo with latest data path (#1917)
parent d53415bc10
commit acd9abc5ca

@@ -2,7 +2,8 @@
 ## Prepare Dataset
 
-We use CIFAR10 dataset in this example. The dataset will be downloaded to `./data` by default.
+We use the CIFAR10 dataset in this example. You should invoke `download_cifar10.py` in the tutorial root directory, or directly run `auto_parallel_with_resnet.py`.
+The dataset will be downloaded to `colossalai/examples/tutorials/data` by default.
 If you wish to use a customized directory for the dataset, you can set the environment variable `DATA` via the following command.
 
 ```bash
 export DATA=/path/to/data
@@ -13,7 +14,7 @@ export DATA=/path/to/data
 ## Run on 2*2 device mesh
 
 ```bash
-colossalai run --nproc_per_node 4 auto_parallel_demo.py
+colossalai run --nproc_per_node 4 auto_parallel_with_resnet.py
 ```
 
 ## Auto Checkpoint Benchmarking
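
Taken together, the README changes above describe the following workflow. This is a consolidated sketch of those steps; the `python` invocation of the download script and the working-directory layout are assumptions based on the paths mentioned above, not part of the commit.

```bash
# Sketch of the workflow described in the updated README (paths assumed).
# Option 1: fetch CIFAR10 into the default location
python download_cifar10.py          # run from the tutorial root directory

# Option 2: point the example at a customized dataset directory instead
export DATA=/path/to/data

# Launch the auto-parallel ResNet demo on a 2*2 device mesh (4 processes)
colossalai run --nproc_per_node 4 auto_parallel_with_resnet.py
```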

@@ -24,7 +24,7 @@ from colossalai.logging import get_dist_logger
 from colossalai.nn.lr_scheduler import CosineAnnealingLR
 from colossalai.utils import get_dataloader
 
-DATA_ROOT = Path(os.environ.get('DATA', './data'))
+DATA_ROOT = Path(os.environ.get('DATA', '../data')).absolute()
 BATCH_SIZE = 1024
 NUM_EPOCHS = 10
 