---
# mergekit configuration: linear weighted merge of four Qwen2.5-7B-class
# instruct models into a single bfloat16 checkpoint.
# NOTE(review): weights sum to 2.4 and normalize is false, so the merge is a
# scaled sum rather than a weighted average — confirm this is intentional.
models:
  - model: FuseAI/FuseChat-Qwen-2.5-7B-Instruct
    parameters:
      weight: 0.5
  - model: prithivMLmods/QwQ-LCoT-7B-Instruct
    parameters:
      # Highest-weighted contributor.
      weight: 1.0
  - model: fblgit/cybertron-v4-qw7B-UNAMGS
    parameters:
      weight: 0.3
  # Same base model as above but with a persona LoRA applied on the fly
  # (the '+' syntax tells mergekit to merge the LoRA before blending).
  - model: fblgit/cybertron-v4-qw7B-UNAMGS+bunnycore/Qwen-2.1-7b-Persona-lora_model
    parameters:
      weight: 0.6
merge_method: linear
# Do not rescale weights to sum to 1 (see NOTE above).
normalize: false
# presumably carried over from a TIES-style config; linear merges may ignore
# this mask option — verify against the mergekit version in use.
int8_mask: true
dtype: bfloat16