update lightning version (#1954)

Fazzie-Maqianli 2022-11-15 16:57:48 +08:00 committed by GitHub
parent 52c6ad26e0
commit 6bdd0a90ca
13 changed files with 29 additions and 35 deletions

@@ -44,13 +44,6 @@ pip install -e .
pip install colossalai==0.1.10+torch1.11cu11.3 -f https://release.colossalai.org
```
-### Install [Lightning](https://github.com/Lightning-AI/lightning)
-We use the Sep. 2022 version with commit id as `b04a7aa`.
-```
-git clone https://github.com/Lightning-AI/lightning && cd lightning && git reset --hard b04a7aa
-pip install -r requirements.txt && pip install .
-```
-> The specified version is due to the interface incompatibility caused by the latest update of [Lightning](https://github.com/Lightning-AI/lightning), which will be fixed in the near future.
## Dataset
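
The seven lines removed above drop the pinned-commit Lightning install: the rest of this commit moves every import and config path from the standalone `pytorch_lightning` package to the unified `lightning` package, pinned to `lightning==1.8.1` in the dependency files below. A minimal sketch of the one-to-one namespace mapping, assuming `lightning==1.8.1` is installed:

```
# Old (standalone pytorch-lightning package):
#   import pytorch_lightning as pl
#   from pytorch_lightning import seed_everything

# New (unified lightning package, as used throughout this commit):
import lightning.pytorch as pl
from lightning.pytorch import seed_everything

seed_everything(42)  # seeds Python, NumPy and torch RNGs in one call

class LitModel(pl.LightningModule):
    """LightningModule itself is unchanged; only its import path moved."""
```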

@@ -94,7 +94,7 @@ lightning:
precision: 16
auto_select_gpus: False
strategy:
-target: pytorch_lightning.strategies.ColossalAIStrategy
+target: lightning.pytorch.strategies.ColossalAIStrategy
params:
use_chunk: False
enable_distributed_storage: True,
@@ -108,7 +108,7 @@ lightning:
logger_config:
wandb:
-target: pytorch_lightning.loggers.WandbLogger
+target: lightning.pytorch.loggers.WandbLogger
params:
name: nowname
save_dir: "/tmp/diff_log/"
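
The `target`/`params` pairs in these configs are not labels but dotted import paths that the training code resolves and instantiates at runtime (via `instantiate_from_config` from `ldm.util`, imported in the sampling scripts further down this diff), which is why every YAML file has to be rewritten when the package namespace changes. A rough sketch of the mechanism, not the repository's exact implementation:

```
import importlib

def instantiate_from_config(config):
    # Split "pkg.module.Class" into module and attribute, import, construct.
    module_name, cls_name = config["target"].rsplit(".", 1)
    cls = getattr(importlib.import_module(module_name), cls_name)
    return cls(**config.get("params", {}))

# The old "pytorch_lightning.strategies..." string stops resolving once only
# the unified package is installed; the new path imports cleanly.
strategy = instantiate_from_config({
    "target": "lightning.pytorch.strategies.DDPStrategy",
    "params": {"find_unused_parameters": False},
})
```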

@@ -101,7 +101,7 @@ lightning:
precision: 16
auto_select_gpus: False
strategy:
-target: pytorch_lightning.strategies.ColossalAIStrategy
+target: lightning.pytorch.strategies.ColossalAIStrategy
params:
use_chunk: False
enable_distributed_storage: True,
@@ -115,7 +115,7 @@ lightning:
logger_config:
wandb:
-target: pytorch_lightning.loggers.WandbLogger
+target: lightning.pytorch.loggers.WandbLogger
params:
name: nowname
save_dir: "/tmp/diff_log/"

@@ -94,7 +94,7 @@ lightning:
precision: 16
auto_select_gpus: False
strategy:
-target: pytorch_lightning.strategies.DDPStrategy
+target: lightning.pytorch.strategies.DDPStrategy
params:
find_unused_parameters: False
log_every_n_steps: 2
@@ -105,7 +105,7 @@ lightning:
logger_config:
wandb:
-target: pytorch_lightning.loggers.WandbLogger
+target: lightning.pytorch.loggers.WandbLogger
params:
name: nowname
save_dir: "/tmp/diff_log/"

@@ -95,7 +95,7 @@ lightning:
precision: 16
auto_select_gpus: False
strategy:
-target: pytorch_lightning.strategies.ColossalAIStrategy
+target: lightning.pytorch.strategies.ColossalAIStrategy
params:
use_chunk: False
enable_distributed_storage: True,
@@ -113,7 +113,7 @@ lightning:
logger_config:
wandb:
-target: pytorch_lightning.loggers.WandbLogger
+target: lightning.pytorch.loggers.WandbLogger
params:
name: nowname
save_dir: "/tmp/diff_log/"

@@ -18,7 +18,7 @@ dependencies:
  - invisible-watermark
  - imageio==2.9.0
  - imageio-ffmpeg==0.4.2
-  - pytorch-lightning==1.8.0
+  - lightning==1.8.1
  - omegaconf==2.1.1
  - test-tube>=0.7.5
  - streamlit>=0.73.1
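
`pytorch-lightning==1.8.0` is swapped for the unified `lightning==1.8.1` package here (and added to requirements.txt below). If an older `pytorch-lightning` install lingers in the environment both namespaces can coexist, so a quick sanity check after rebuilding the environment is worthwhile; a minimal, assumed example:

```
import lightning
print(lightning.__version__)  # expect "1.8.1", matching the pins in this commit

import lightning.pytorch as pl
print(pl.Trainer)             # the new namespace must resolve
```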

@@ -1,5 +1,5 @@
import torch
-import pytorch_lightning as pl
+import lightning.pytorch as pl
import torch.nn.functional as F
from contextlib import contextmanager

@@ -1,6 +1,6 @@
import os
import torch
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from omegaconf import OmegaConf
from torch.nn import functional as F
from torch.optim import AdamW

@@ -1,7 +1,7 @@
import torch
import torch.nn as nn
import numpy as np
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from torch.optim.lr_scheduler import LambdaLR
from einops import rearrange, repeat
from contextlib import contextmanager
@@ -9,8 +9,8 @@ from functools import partial
from tqdm import tqdm
from torchvision.utils import make_grid
-from pytorch_lightning.utilities.rank_zero import rank_zero_only
-from pytorch_lightning.utilities import rank_zero_info
+from lightning.pytorch.utilities.rank_zero import rank_zero_only
+from lightning.pytorch.utilities import rank_zero_info
from ldm.util import log_txt_as_img, exists, default, ismap, isimage, mean_flat, count_params, instantiate_from_config
from ldm.modules.ema import LitEma
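
Only the import paths of the rank-zero helpers change here; their behavior is the reason they appear in this training module at all. A minimal usage illustration, assuming a multi-process (DDP) Lightning run:

```
from lightning.pytorch.utilities import rank_zero_info
from lightning.pytorch.utilities.rank_zero import rank_zero_only

# Logged once from the rank-0 process instead of once per GPU worker.
rank_zero_info("Restoring EMA weights for sampling")

@rank_zero_only
def log_images(images):
    # Becomes a no-op on ranks > 0, so only one process writes to disk.
    ...
```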

@@ -3,23 +3,23 @@ import numpy as np
import time
import torch
import torchvision
-import pytorch_lightning as pl
+import lightning.pytorch as pl
from packaging import version
from omegaconf import OmegaConf
from torch.utils.data import random_split, DataLoader, Dataset, Subset
from functools import partial
from PIL import Image
-# from pytorch_lightning.strategies.colossalai import ColossalAIStrategy
+# from lightning.pytorch.strategies.colossalai import ColossalAIStrategy
# from colossalai.nn.lr_scheduler import CosineAnnealingWarmupLR
from colossalai.nn.optimizer import HybridAdam
from prefetch_generator import BackgroundGenerator
-from pytorch_lightning import seed_everything
-from pytorch_lightning.trainer import Trainer
-from pytorch_lightning.callbacks import ModelCheckpoint, Callback, LearningRateMonitor
-from pytorch_lightning.utilities.rank_zero import rank_zero_only
-from pytorch_lightning.utilities import rank_zero_info
+from lightning.pytorch import seed_everything
+from lightning.pytorch.trainer import Trainer
+from lightning.pytorch.callbacks import ModelCheckpoint, Callback, LearningRateMonitor
+from lightning.pytorch.utilities.rank_zero import rank_zero_only
+from lightning.pytorch.utilities import rank_zero_info
from diffusers.models.unet_2d import UNet2DModel
from clip.model import Bottleneck
@@ -610,7 +610,7 @@ if __name__ == "__main__":
# default logger configs
default_logger_cfgs = {
"wandb": {
-"target": "pytorch_lightning.loggers.WandbLogger",
+"target": "lightning.pytorch.loggers.WandbLogger",
"params": {
"name": nowname,
"save_dir": logdir,
@@ -619,7 +619,7 @@ if __name__ == "__main__":
}
},
"tensorboard":{
-"target": "pytorch_lightning.loggers.TensorBoardLogger",
+"target": "lightning.pytorch.loggers.TensorBoardLogger",
"params":{
"save_dir": logdir,
"name": "diff_tb",
@@ -642,7 +642,7 @@ if __name__ == "__main__":
print("Using strategy: {}".format(strategy_cfg["target"]))
else:
strategy_cfg = {
-"target": "pytorch_lightning.strategies.DDPStrategy",
+"target": "lightning.pytorch.strategies.DDPStrategy",
"params": {
"find_unused_parameters": False
}
@@ -654,7 +654,7 @@ if __name__ == "__main__":
# modelcheckpoint - use TrainResult/EvalResult(checkpoint_on=metric) to
# specify which metric is used to determine best models
default_modelckpt_cfg = {
-"target": "pytorch_lightning.callbacks.ModelCheckpoint",
+"target": "lightning.pytorch.callbacks.ModelCheckpoint",
"params": {
"dirpath": ckptdir,
"filename": "{epoch:06}",
@@ -722,7 +722,7 @@ if __name__ == "__main__":
'Caution: Saving checkpoints every n train steps without deleting. This might require some free space.')
default_metrics_over_trainsteps_ckpt_dict = {
'metrics_over_trainsteps_checkpoint':
-{"target": 'pytorch_lightning.callbacks.ModelCheckpoint',
+{"target": 'lightning.pytorch.callbacks.ModelCheckpoint',
'params': {
"dirpath": os.path.join(ckptdir, 'trainstep_checkpoints'),
"filename": "{epoch:06}-{step:09}",

@@ -7,6 +7,7 @@ imageio==2.9.0
imageio-ffmpeg==0.4.2
omegaconf==2.1.1
multiprocess
+lightning==1.8.1
test-tube>=0.7.5
streamlit>=0.73.1
einops==0.3.0

@@ -13,7 +13,7 @@ from torchvision.utils import make_grid
from torch import autocast
from contextlib import nullcontext
import time
-from pytorch_lightning import seed_everything
+from lightning.pytorch import seed_everything
from ldm.util import instantiate_from_config
from ldm.models.diffusion.ddim import DDIMSampler

@@ -10,7 +10,7 @@ from itertools import islice
from einops import rearrange
from torchvision.utils import make_grid
import time
-from pytorch_lightning import seed_everything
+from lightning.pytorch import seed_everything
from torch import autocast
from contextlib import contextmanager, nullcontext
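
In both sampling scripts only the `seed_everything` import moves. The neighboring `autocast`/`nullcontext` imports belong to the usual precision-scope pattern, sketched here under assumed values rather than copied from the scripts:

```
import torch
from torch import autocast
from contextlib import nullcontext
from lightning.pytorch import seed_everything

seed_everything(42)  # reproducible sampling across runs

# Mixed precision on CUDA, otherwise a do-nothing context manager.
precision_scope = autocast("cuda") if torch.cuda.is_available() else nullcontext()
with precision_scope:
    pass  # run the diffusion sampling loop here
```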