mirror of https://github.com/hpcaitech/ColossalAI
You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
22 lines
440 B
22 lines
440 B
"""
|
|
torchrun --standalone --nproc_per_node=1 debug.py
|
|
"""
|
|
|
|
from diffusers import AutoencoderKL
|
|
|
|
import colossalai
|
|
from colossalai.zero import ColoInitContext
|
|
|
|
path = "/data/scratch/diffuser/stable-diffusion-v1-4"
|
|
|
|
colossalai.launch_from_torch()
|
|
with ColoInitContext(device="cpu"):
|
|
vae = AutoencoderKL.from_pretrained(
|
|
path,
|
|
subfolder="vae",
|
|
revision=None,
|
|
)
|
|
|
|
for n, p in vae.named_parameters():
|
|
print(n)
|