From efec8ab87ea86d3c694f796eb5f26bb40df9c3cc Mon Sep 17 00:00:00 2001
From: x54-729 <17307130121@fudan.edu.cn>
Date: Fri, 7 Jul 2023 12:27:13 +0800
Subject: [PATCH] Use tempfile for convert2hf.py

---
 tools/{ => transformers}/convert2hf.py | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)
 rename tools/{ => transformers}/convert2hf.py (97%)

diff --git a/tools/convert2hf.py b/tools/transformers/convert2hf.py
similarity index 97%
rename from tools/convert2hf.py
rename to tools/transformers/convert2hf.py
index 49ee1f6..ac5ae8e 100644
--- a/tools/convert2hf.py
+++ b/tools/transformers/convert2hf.py
@@ -4,6 +4,7 @@ import os
 import random
 import re
 import shutil
+import tempfile
 
 import torch
 from modeling_internlm import InternLMConfig, InternLMForCausalLM
@@ -15,10 +16,8 @@ NUM_SHARDS = {
 
 
 def convert2hf(model_config, states_tp_pps):
-    folder = f"/dev/shm/wait_to_upload_weight_tmp_{random.random()}/"
-    os.makedirs(folder, exist_ok=True)
 
-    try:
+    with tempfile.TemporaryDirectory() as folder:
         states = merge_pp(states_tp_pps)[0]
 
         if "embedding.word_embeddings.weight" in states:
@@ -91,9 +90,6 @@ def convert2hf(model_config, states_tp_pps):
         model = InternLMForCausalLM.from_pretrained(folder, torch_dtype=torch.float16, low_cpu_mem_usage=True)
         del model.config._name_or_path
 
-    finally:
-        shutil.rmtree(folder)
-
     return config, model
 
 
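Note on the pattern this patch adopts: `tempfile.TemporaryDirectory` creates a scratch directory on entering the `with` block and removes it on exit, whether the block completes normally or raises, so the hand-rolled `/dev/shm/wait_to_upload_weight_tmp_*` path and the `try`/`finally` `shutil.rmtree` cleanup are no longer needed. A minimal sketch of the behavior (illustrative only, not the `convert2hf` body; the file name below is made up):

```python
import tempfile
from pathlib import Path

# The directory exists only inside the `with` block and is deleted
# automatically on exit, even if an exception is raised, so no explicit
# shutil.rmtree() is required.
with tempfile.TemporaryDirectory() as folder:
    scratch = Path(folder) / "weights.bin"      # hypothetical scratch file
    scratch.write_bytes(b"\x00" * 16)           # stand-in for torch.save(...)
    print(scratch.exists())                     # True while inside the block

print(Path(folder).exists())                    # False: cleaned up automatically
```

One behavioral difference to be aware of: by default the directory is created under the system temp location (`$TMPDIR`, typically `/tmp`) rather than the RAM-backed `/dev/shm` the old code used. If that location mattered for speed, `tempfile.TemporaryDirectory(dir="/dev/shm")` restores it while keeping the automatic cleanup.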