mirror of https://github.com/hpcaitech/ColossalAI
Topics: ai, big-model, data-parallelism, deep-learning, distributed-computing, foundation-models, heterogeneous-training, hpc, inference, large-scale, model-parallelism, pipeline-parallelism
17 lines · 333 B · 1 year ago
from typing import Optional

from torch.nn import Module

__all__ = [
    "get_pretrained_path",
    "set_pretrained_path",
]


def get_pretrained_path(model: Module) -> Optional[str]:
    return getattr(model, "_pretrained", None)


def set_pretrained_path(model: Module, path: str) -> None:
    setattr(model, "_pretrained", path)
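
These helpers stash a checkpoint path directly on the module under the private `_pretrained` attribute, so downstream loading code can recover it without separate bookkeeping. A minimal usage sketch, assuming a plain torch.nn module; the Linear layer and the path string below are illustrative, not taken from the file:

import torch.nn as nn

model = nn.Linear(16, 4)

# No path has been recorded yet, so the getter falls back to None.
assert get_pretrained_path(model) is None

# Tag the model with the location of its pretrained weights
# (illustrative path); this simply sets model._pretrained.
set_pretrained_path(model, "/ckpts/linear.pt")

# Later code, e.g. a checkpoint loader, can recover the same path.
assert get_pretrained_path(model) == "/ckpts/linear.pt"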