---
# mergekit configuration: task-arithmetic merge of five fine-tunes
# onto the Mistral-7B-v0.1 base across all 32 transformer layers.
# Each `weight` scales that model's task vector (delta from the base)
# before summation; weights need not sum to 1 for task_arithmetic.
base_model: mistralai/Mistral-7B-v0.1
dtype: float16
merge_method: task_arithmetic
slices:
  - sources:
      # Base model carries no weight entry: it is the reference point
      # the task vectors are computed against.
      - layer_range: [0, 32]
        model: mistralai/Mistral-7B-v0.1
      - layer_range: [0, 32]
        model: NeverSleep/Noromaid-7B-0.4-DPO
        parameters:
          weight: 0.37
      - layer_range: [0, 32]
        model: cgato/Thespis-CurtainCall-7b-v0.2.2
        parameters:
          weight: 0.32
      - layer_range: [0, 32]
        model: NurtureAI/neural-chat-7b-v3-1-16k
        parameters:
          weight: 0.15
      - layer_range: [0, 32]
        model: cgato/Thespis-7b-v0.5-SFTTest-2Epoch
        parameters:
          weight: 0.38
      - layer_range: [0, 32]
        model: tavtav/eros-7b-test
        parameters:
          weight: 0.18