model: add merge code
merge.py
ADDED
@@ -0,0 +1,123 @@
import os
import argparse
from safetensors import safe_open
from safetensors.torch import save_file
import json
from tqdm import tqdm

def get_tensor_locations(input_dir):
    tensor_locations = {}
    for i in tqdm(range(1, 52), desc="Scanning input files"):  # 51 splits
        file_path = os.path.join(input_dir, f"model-{i:05d}-of-00051.safetensors")
        with safe_open(file_path, framework="pt", device="cpu") as f:
            for key in f.keys():
                tensor_locations[key] = i
    return tensor_locations

def create_merge_plan(tensor_locations, layer_config):
    merge_plan = []
    new_layer_idx = 0
    new_file_idx = 1

    # Special handling for specific weights
    special_weights = {
        "model.embed_tokens.weight": 1,
        "lm_head.weight": 48,
        "model.norm.weight": 48
    }

    for slice_config in layer_config:
        start, end = slice_config['layer_range']
        for i in range(start, end):
            layer_tensors = []
            for key in tensor_locations.keys():
                if key.startswith(f"model.layers.{i}."):
                    new_key = key.replace(f"model.layers.{i}", f"model.layers.{new_layer_idx}")
                    layer_tensors.append({
                        'old_key': key,
                        'new_key': new_key,
                        'original_file_index': tensor_locations[key],
                        'new_file_index': new_file_idx
                    })
            if layer_tensors:
                merge_plan.extend(layer_tensors)
                new_file_idx += 1
                new_layer_idx += 1

    # Add special weights to their original locations
    for key, file_index in special_weights.items():
        merge_plan.append({
            'old_key': key,
            'new_key': key,
            'original_file_index': file_index,
            'new_file_index': file_index
        })

    # Add any remaining non-layer tensors to the first file
    for key, file_index in tensor_locations.items():
        if not key.startswith("model.layers.") and key not in special_weights:
            merge_plan.append({
                'old_key': key,
                'new_key': key,
                'original_file_index': file_index,
                'new_file_index': 1
            })

    return merge_plan

def merge_layers(input_dir, output_dir, merge_plan):
    output_tensors = {}
    current_new_file_index = 1
    max_file_index = max(item['new_file_index'] for item in merge_plan)

    with tqdm(total=len(merge_plan), desc="Merging layers") as pbar:
        for file_index in range(1, max_file_index + 1):
            for item in merge_plan:
                if item['new_file_index'] == file_index:
                    input_file = os.path.join(input_dir, f"model-{item['original_file_index']:05d}-of-00051.safetensors")
                    with safe_open(input_file, framework="pt", device="cpu") as f:
                        tensor = f.get_tensor(item['old_key'])
                        output_tensors[item['new_key']] = tensor
                    pbar.update(1)

            if output_tensors:
                output_file = os.path.join(output_dir, f"model-{file_index:05d}-of-{max_file_index:05d}.safetensors")
                save_file(output_tensors, output_file)
                output_tensors = {}

    print(f"Merged model saved to {output_dir}")

def main():
    parser = argparse.ArgumentParser(description="Merge and split Mistral model")
    parser.add_argument("input_dir", help="Directory containing input safetensors files")
    parser.add_argument("output_dir", help="Directory for output safetensors files")
    parser.add_argument("--dry-run", action="store_true", help="Perform a dry run and output merge plan")
    args = parser.parse_args()

    layer_config = [
        {'layer_range': [0, 20]},
        {'layer_range': [10, 30]},
        {'layer_range': [20, 40]},
        {'layer_range': [30, 50]},
        {'layer_range': [40, 60]},
        {'layer_range': [50, 70]},
        {'layer_range': [60, 80]},
        {'layer_range': [70, 87]}
    ]

    tensor_locations = get_tensor_locations(args.input_dir)
    merge_plan = create_merge_plan(tensor_locations, layer_config)

    if args.dry_run:
        print("Merge plan:")
        print(json.dumps(merge_plan, indent=2))
        with open("merge_plan_large.json", "w") as f:
            json.dump(merge_plan, f, indent=2)
        print("Merge plan saved to merge_plan_large.json")
    else:
        os.makedirs(args.output_dir, exist_ok=True)
        merge_layers(args.input_dir, args.output_dir, merge_plan)
        print(f"Merged model saved to {args.output_dir}")

if __name__ == "__main__":
    main()
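In practice the script would be run twice: first with `python merge.py <input_dir> <output_dir> --dry-run` to inspect the generated merge plan, then without the flag to write the renumbered shards. Below is a minimal sanity-check sketch (not part of the commit) that reads back the merged output and confirms the duplicated layers were renumbered into a contiguous range; the `./merged` directory name is a placeholder assumption, and everything else uses only the `safetensors` calls already shown above.

# Sanity-check sketch: verify renumbered layers in the merged output.
# Assumes the shards produced by merge.py were written to ./merged (placeholder).
import os
import re
from safetensors import safe_open

merged_dir = "./merged"  # hypothetical output directory; adjust to your output_dir
layer_indices = set()

for name in sorted(os.listdir(merged_dir)):
    if not name.endswith(".safetensors"):
        continue
    path = os.path.join(merged_dir, name)
    with safe_open(path, framework="pt", device="cpu") as f:
        for key in f.keys():
            m = re.match(r"model\.layers\.(\d+)\.", key)
            if m:
                layer_indices.add(int(m.group(1)))

# With the overlapping layer_config above, the new layer indices should run
# contiguously from 0 to (number of copied slices - 1).
if layer_indices:
    print(f"layers found: {min(layer_indices)}..{max(layer_indices)} "
          f"({len(layer_indices)} total)")
else:
    print("no layer tensors found in", merged_dir)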