slices:
- sources:
  - layer_range: [0, 22]
    model: aipib/Dopey-karasu-MoE3
    parameters:
      density: [1, 0.7, 0.1]
      weight: 1.0
  - layer_range: [0, 22]
    model: AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE
    parameters:
      density: 0.53
      weight:
        - filter: mlp
          value: 0.5
        - value: 0
merge_method: dare_ties
base_model: aipib/Dopey-karasu-MoE3
parameters:
  normalize: true
  int8_mask: true
dtype: bfloat16
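
A minimal sketch of applying this recipe with mergekit's Python API (MergeConfiguration, MergeOptions, run_merge); the file name config.yml and the output path are placeholders, and the exact loader call may differ between mergekit versions (the mergekit-yaml CLI is the usual alternative):

# Sketch: load the YAML above and run the dare_ties merge with mergekit.
# Assumes mergekit is installed (pip install mergekit); paths are placeholders.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the merge recipe (the YAML shown above, saved as config.yml).
with open("config.yml", "r", encoding="utf-8") as f:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(f))

# Execute the merge and write the bfloat16 result to ./merged-model.
run_merge(
    merge_config,
    out_path="./merged-model",
    options=MergeOptions(
        cuda=False,           # set True to run the merge on GPU
        copy_tokenizer=True,  # copy the base model's tokenizer into the output
    ),
)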