You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
ColossalAI/examples/images/dreambooth/debug.py

22 lines
449 B

"""
torchrun --standalone --nproc_per_node=1 debug.py
"""
from diffusers import AutoencoderKL
import colossalai
from colossalai.zero import ColoInitContext
path = "/data/scratch/diffuser/stable-diffusion-v1-4"
colossalai.launch_from_torch(config={})
with ColoInitContext(device="cpu"):
vae = AutoencoderKL.from_pretrained(
path,
subfolder="vae",
revision=None,
)
for n, p in vae.named_parameters():
print(n)