# mergekit SLERP merge: meta-llama/Llama-3.1-8B-Instruct x grimjim/llama-3-Nephilim-v3-8B
---
base_model: meta-llama/Llama-3.1-8B-Instruct
dtype: bfloat16
merge_method: slerp

slices:
  - sources:
      - model: meta-llama/Llama-3.1-8B-Instruct
        layer_range: [0, 32]
      - model: grimjim/llama-3-Nephilim-v3-8B
        layer_range: [0, 32]

parameters:
  t:
    # Keep the token embeddings and output head entirely from the base model.
    - filter: embed_tokens
      value: 0.0
    - filter: lm_head
      value: 0.0
    # Per-layer interpolation weight toward the Nephilim model:
    # ramp 0.0 -> 0.1 across the first six layers, hold at 0.1 through the
    # middle of the stack, then ramp back down to 0.0 over the last six.
    # NOTE(review): the scraped source also contained a flat default
    # `value: 0.1`; this 32-entry gradient (plateau 0.1) appears to be its
    # refinement, so it is kept as the sole unfiltered default — confirm intent.
    - value: [0.0, 0.02, 0.04, 0.06, 0.08, 0.1, 0.1, 0.1,
              0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1,
              0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1,
              0.1, 0.1, 0.1, 0.08, 0.06, 0.04, 0.02, 0.0]