ultra_llm_merged / mergekit_config.yml
Yerim's picture
Upload folder using huggingface_hub
e83ff81 verified
raw
history blame
No virus
493 Bytes
---
# mergekit TIES-merge configuration.
# Merges WizardMath and CodeLlama-Instruct deltas onto the Mistral-7B base.
base_model: mistralai/Mistral-7B-v0.1
dtype: float16
merge_method: ties
parameters:
  int8_mask: 1.0   # quantize task-vector sign mask to int8 to save memory
  normalize: 1.0   # renormalize merged weights after sign-consensus step
slices:
  - sources:
      # Base model: supplies the reference weights (no density/weight needed).
      - layer_range: [0, 32]
        model: mistralai/Mistral-7B-v0.1
      # Math specialist: only its MLP deltas are blended in (weight 0.5);
      # all other tensors get weight 0.0 via the catch-all filter entry.
      - layer_range: [0, 32]
        model: WizardLM/WizardMath-7B-V1.1
        parameters:
          density: 0.5
          weight:
            - filter: mlp
              value: 0.5
            - value: 0.0
      # Code specialist: uniform weight 0.5 across all tensors.
      - layer_range: [0, 32]
        model: codellama/CodeLlama-7b-Instruct-hf
        parameters:
          density: 0.5
          weight: 0.5