ColossalAI/colossalai/interface/pretrained.py

from typing import Optional

from torch.nn import Module

__all__ = [
    "get_pretrained_path",
    "set_pretrained_path",
]


def get_pretrained_path(model: Module) -> Optional[str]:
    """Return the pretrained checkpoint path recorded on ``model``, or ``None`` if unset."""
    return getattr(model, "_pretrained", None)


def set_pretrained_path(model: Module, path: str) -> None:
    """Record ``path`` on ``model`` as the private ``_pretrained`` attribute."""
    setattr(model, "_pretrained", path)
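
A minimal usage sketch (not part of the file), assuming the module is importable as colossalai.interface.pretrained; the model and checkpoint path below are hypothetical placeholders:

# Hypothetical example: attach and read back a checkpoint path on a module.
import torch.nn as nn

from colossalai.interface.pretrained import get_pretrained_path, set_pretrained_path

model = nn.Linear(4, 2)
assert get_pretrained_path(model) is None  # no path recorded yet
set_pretrained_path(model, "checkpoints/linear.pt")  # placeholder path
assert get_pretrained_path(model) == "checkpoints/linear.pt"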