models:
  - model: NousResearch/Meta-Llama-3-8B-Instruct
  - model: NousResearch/Hermes-2-Theta-Llama-3-8B # 6/10
    parameters:
      density: 0.4
      weight: 0.15
  - model: openchat/openchat-3.6-8b-20240522 # 5/10
    parameters:
      density: 0.3
      weight: 0.11
  - model: DevQuasar/llama3_8b_chat_brainstorm # 6/10
    parameters:
      density: 0.4
      weight: 0.15
  - model: AwanLLM/Awanllm-Llama-3-8B-Cumulus-v1.0 # 6/10
    parameters:
      density: 0.4
      weight: 0.15
  - model: chujiezheng/Llama-3-Instruct-8B-SimPO-ExPO # 6/10
    parameters:
      density: 0.4
      weight: 0.15
  - model: saishf/Neural-SOVLish-Devil-8B-L3 # 6/10
    parameters:
      density: 0.4
      weight: 0.15
  - model: nbeerbower/llama-3-gutenberg-8B # 6/10
    parameters:
      density: 0.4
      weight: 0.15
merge_method: breadcrumbs_ties
base_model: NousResearch/Meta-Llama-3-8B-Instruct
parameters:
  normalize: false
  rescale: true
  gamma: 0.01
dtype: float16
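
A config like this is normally applied with mergekit, either via the `mergekit-yaml` CLI (`mergekit-yaml config.yml ./output-dir`) or through its Python API. Below is a minimal sketch of the Python route, assuming the YAML above is saved as `config.yml`; the entry points (`MergeConfiguration`, `MergeOptions`, `run_merge`) follow mergekit's documented library usage, and the output path is illustrative.

```python
# Minimal sketch: run the breadcrumbs_ties merge above with mergekit's Python API.
# Assumptions: config saved as "config.yml"; output directory name is hypothetical.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML merge recipe into mergekit's config object.
with open("config.yml", "r", encoding="utf-8") as f:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(f))

# Execute the merge and write the resulting model to disk.
run_merge(
    merge_config,
    out_path="./merged-llama-3-8b",   # hypothetical output directory
    options=MergeOptions(
        cuda=False,           # set True to run the merge on GPU
        copy_tokenizer=True,  # copy the base model's tokenizer into the output
    ),
)
```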