likewendy committed on
Commit 5eee87f · 1 Parent(s): da8ad43
Files changed (1)
  1. app.py +26 -0
app.py CHANGED
@@ -6,6 +6,32 @@ import os
 
 os.system('huggingface-cli download matteogeniaccio/phi-4 --local-dir ./phi-4 --include "phi-4/*"')
 
+from safetensors.torch import load_file, save_file
+
+@spaces.GPU
+def merge_safetensors(input_dir, output_file):
+    # Gather all shard files
+    files = sorted([f for f in os.listdir(input_dir) if f.startswith('model-') and f.endswith('.safetensors')])
+
+    # Merge all tensors into a single state dict
+    merged_state_dict = {}
+    for file in files:
+        file_path = os.path.join(input_dir, file)
+        print(f"Loading {file}...")
+        state_dict = load_file(file_path)
+        merged_state_dict.update(state_dict)
+
+    # Save the merged file
+    print(f"Saving merged model to {output_file}...")
+    save_file(merged_state_dict, output_file)
+    print("Done!")
+
+# Usage example
+input_dir = "./phi-4/phi-4"  # directory containing the shard files
+output_file = "./phi-4/phi-4/model.safetensors"  # path for the merged file
+
+merge_safetensors(input_dir, output_file)
+
 # Load the phi-4 model and tokenizer
 torch.random.manual_seed(0)
 
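The added merge_safetensors helper simply unions each shard's tensors into one state dict and rewrites them as a single file. As an illustrative follow-up (not part of this commit, and assuming the same ./phi-4/phi-4 layout), a quick sanity check can confirm that the merged file exposes exactly the tensor keys found in the shards, reading only metadata via safetensors' safe_open:

# Sanity check (illustrative, not in the commit): compare tensor keys in the
# shards against the merged file without loading full tensors into memory.
import os
from safetensors import safe_open

input_dir = "./phi-4/phi-4"
merged_file = "./phi-4/phi-4/model.safetensors"

# Collect keys from every shard; the merged model.safetensors is excluded
# because it does not match the 'model-' prefix.
shard_keys = set()
for name in sorted(os.listdir(input_dir)):
    if name.startswith("model-") and name.endswith(".safetensors"):
        with safe_open(os.path.join(input_dir, name), framework="pt") as f:
            shard_keys.update(f.keys())

# Collect keys from the merged file the same way.
with safe_open(merged_file, framework="pt") as f:
    merged_keys = set(f.keys())

assert shard_keys == merged_keys, "merged file does not match the shards"
print(f"OK: {len(merged_keys)} tensors present in the merged file")

Note that update() silently overwrites duplicate keys, so an equal key set is a useful minimal check that no shard was skipped; it does not compare tensor contents.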