mirror of https://github.com/hpcaitech/ColossalAI
import torch
import torch.nn as nn

from .model_utils import find_layers
from .quant import make_quant


def load_quant(model: nn.Module, checkpoint: str, wbits: int, groupsize: int):
    model = model.eval()

    # Collect the quantizable linear layers, but skip the LM head so it stays in full precision.
    layers = find_layers(model)
    for name in ["lm_head"]:
        if name in layers:
            del layers[name]

    # Replace the collected layers with their quantized counterparts.
    make_quant(model, layers, wbits, groupsize)

    # Load the quantized weights from either a safetensors or a regular PyTorch checkpoint.
    if checkpoint.endswith(".safetensors"):
        from safetensors.torch import load_file as safe_load

        model.load_state_dict(safe_load(checkpoint))
    else:
        model.load_state_dict(torch.load(checkpoint))

    return model
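

# A minimal usage sketch (illustrative only, not part of the original file).
# It assumes a Hugging Face LLaMA model and a 4-bit, group-size-128 GPTQ
# checkpoint; the model name and checkpoint path below are placeholders.
#
#     from transformers import LlamaForCausalLM
#
#     model = LlamaForCausalLM.from_pretrained("huggyllama/llama-7b", torch_dtype=torch.float16)
#     model = load_quant(model, "llama-7b-4bit-128g.safetensors", wbits=4, groupsize=128)
#     model = model.cuda().eval()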